content stringlengths 5 1.05M |
|---|
import os
MOTHERBRAIN_PATH = os.path.dirname(__file__)
|
import hashlib;
class sha256generator(object):
    """Generate a SHA-256 digest to validate mozu events or form posts.

    The value is derived in two rounds:
      1. hash1  = sha256(sharedSecret + sharedSecret) hex digest
      2. result = sha256(hash1 + date + body) hex digest
    """

    @staticmethod
    def getHash(sharedSecret, date, body):
        """Return the hex digest for the given shared secret, date and body.

        Declared @staticmethod so instance calls no longer bind the instance
        into the sharedSecret parameter; class-level calls are unchanged.
        """
        # Round 1: hash the doubled shared secret.
        first = hashlib.sha256((sharedSecret + sharedSecret).encode())
        hash1 = first.hexdigest()
        # Round 2: hash the round-1 digest concatenated with date and body.
        second = hashlib.sha256((hash1 + date + body).encode())
        # Bug fix: the original returned m.dexdigest(), a nonexistent method
        # that always raised AttributeError; hexdigest() is the real API.
        return second.hexdigest()
|
import sys
from shared import timeit
import numpy as np
import mdtraj as md
from mdtraj.geometry.dihedral import indices_phi, indices_psi
def ramachandran(traj):
    """Compute backbone phi and psi dihedral angles for *traj*.

    The results are discarded — this function exists purely as a
    benchmark workload for timeit below.
    """
    for dihedral in (md.compute_phi, md.compute_psi):
        dihedral(traj)
# CLI usage: <script> <pdbfile> <repeats>
# Loads the structure once, then times the ramachandran workload.
pdbfile = sys.argv[1]
repeats = int(sys.argv[2])
traj = md.load(pdbfile)
print(timeit(ramachandran, traj, repeats=repeats))
|
import FWCore.ParameterSet.Config as cms
# FEVT (full event) tier: vertex-reconstruction collections to keep.
RecoVertexFEVT = cms.PSet(
    outputCommands = cms.untracked.vstring('keep *_offlinePrimaryVertices__*',
        'keep *_offlinePrimaryVerticesWithBS_*_*',
        'keep *_offlinePrimaryVerticesFromCosmicTracks_*_*',
        'keep *_nuclearInteractionMaker_*_*',
        'keep *_generalV0Candidates_*_*',
        'keep *_inclusiveSecondaryVertices_*_*')
)
#RECO content
# RECO tier: currently identical to the FEVT list above.
RecoVertexRECO = cms.PSet(
    outputCommands = cms.untracked.vstring('keep *_offlinePrimaryVertices__*',
        'keep *_offlinePrimaryVerticesWithBS_*_*',
        'keep *_offlinePrimaryVerticesFromCosmicTracks_*_*',
        'keep *_nuclearInteractionMaker_*_*',
        'keep *_generalV0Candidates_*_*',
        'keep *_inclusiveSecondaryVertices_*_*')
)
#AOD content
# AOD tier: currently identical to the RECO list above.
RecoVertexAOD = cms.PSet(
    outputCommands = cms.untracked.vstring('keep *_offlinePrimaryVertices__*',
        'keep *_offlinePrimaryVerticesWithBS_*_*',
        'keep *_offlinePrimaryVerticesFromCosmicTracks_*_*',
        'keep *_nuclearInteractionMaker_*_*',
        'keep *_generalV0Candidates_*_*',
        'keep *_inclusiveSecondaryVertices_*_*')
)
from Configuration.Eras.Modifier_phase2_timing_cff import phase2_timing
from Configuration.Eras.Modifier_phase2_timing_layer_tile_cff import phase2_timing_layer_tile
from Configuration.Eras.Modifier_phase2_timing_layer_bar_cff import phase2_timing_layer_bar
# Extra collections kept when the phase-2 timing era modifier is active.
_phase2_tktiming_RecoVertexEventContent = [ 'keep *_offlinePrimaryVertices4D__*',
    'keep *_offlinePrimaryVertices4DWithBS__*',
    'keep *_trackTimeValueMapProducer_*_*' ]
# Additional collections for the timing-layer (tile/bar) era modifiers.
_phase2_tktiming_layer_RecoVertexEventContent = [ 'keep *_offlinePrimaryVertices4DnoPID__*',
    'keep *_offlinePrimaryVertices4DnoPIDWithBS__*',
    'keep *_tofPID_*_*',
    'keep *_offlinePrimaryVertices4Dfastsim__*',
    'keep *_offlinePrimaryVertices4DfastsimWithBS__*']
def _phase2_tktiming_AddNewContent(mod):
    # Under the phase2_timing era, extend the PSet's outputCommands with the
    # 4D-vertex timing collections.
    extended = mod.outputCommands + _phase2_tktiming_RecoVertexEventContent
    phase2_timing.toModify(mod, outputCommands=extended)
# Apply the timing-era additions to all three output tiers.
_phase2_tktiming_AddNewContent(RecoVertexFEVT)
_phase2_tktiming_AddNewContent(RecoVertexRECO)
_phase2_tktiming_AddNewContent(RecoVertexAOD)
def _phase2_tktiming_layer_AddNewContent(mod):
    # Under either timing-layer era (tile or bar), extend outputCommands with
    # the no-PID / fastsim vertex collections.
    extended = mod.outputCommands + _phase2_tktiming_layer_RecoVertexEventContent
    (phase2_timing_layer_tile | phase2_timing_layer_bar).toModify(mod, outputCommands=extended)
# Apply the timing-layer-era additions to all three output tiers.
_phase2_tktiming_layer_AddNewContent(RecoVertexFEVT)
_phase2_tktiming_layer_AddNewContent(RecoVertexRECO)
_phase2_tktiming_layer_AddNewContent(RecoVertexAOD)
|
"""Provide a class for ODT invisibly tagged chapters and scenes export.
Copyright (c) 2021 Peter Triesberger
For further information see https://github.com/peter88213/PyWriter
Published under the MIT License (https://opensource.org/licenses/mit-license.php)
"""
from pywriter.odt.odt_file import OdtFile
class OdtManuscript(OdtFile):
    """ODT manuscript file representation.

    Export a manuscript with invisibly tagged chapters and scenes.
    Each template below is a content.xml fragment; $-placeholders are
    substituted by the OdtFile export machinery.
    """
    DESCRIPTION = 'Editable manuscript'
    SUFFIX = '_manuscript'

    # Title page: project title and author name.
    fileHeader = OdtFile.CONTENT_XML_HEADER + '''<text:p text:style-name="Title">$Title</text:p>
<text:p text:style-name="Subtitle">$AuthorName</text:p>
'''
    # Part heading; hyperlinks back to the parts overview document.
    partTemplate = '''<text:section text:style-name="Sect1" text:name="ChID:$ID">
<text:h text:style-name="Heading_20_1" text:outline-level="1"><text:a xlink:href="../${ProjectName}_parts.odt#ChID:$ID%7Cregion">$Title</text:a></text:h>
'''
    # Chapter heading; hyperlinks back to the chapters overview document.
    chapterTemplate = '''<text:section text:style-name="Sect1" text:name="ChID:$ID">
<text:h text:style-name="Heading_20_2" text:outline-level="2"><text:a xlink:href="../${ProjectName}_chapters.odt#ChID:$ID%7Cregion">$Title</text:a></text:h>
'''
    # Scene body with the title and summary link hidden in an annotation.
    sceneTemplate = '''<text:section text:style-name="Sect1" text:name="ScID:$ID">
<text:p text:style-name="Text_20_body"><office:annotation>
<dc:creator>scene title</dc:creator>
<text:p>~ ${Title} ~</text:p>
<text:p/>
<text:p><text:a xlink:href="../${ProjectName}_scenes.odt#ScID:$ID%7Cregion">→Summary</text:a></text:p>
</office:annotation>$SceneContent</text:p>
</text:section>
'''
    # Variant used for scenes appended to the previous one (first-line indent).
    appendedSceneTemplate = '''<text:section text:style-name="Sect1" text:name="ScID:$ID">
<text:p text:style-name="First_20_line_20_indent"><office:annotation>
<dc:creator>scene title</dc:creator>
<text:p>~ ${Title} ~</text:p>
<text:p/>
<text:p><text:a xlink:href="../${ProjectName}_scenes.odt#ScID:$ID%7Cregion">→Summary</text:a></text:p>
</office:annotation>$SceneContent</text:p>
</text:section>
'''
    sceneDivider = '<text:p text:style-name="Heading_20_4">* * *</text:p>\n'
    #sceneDivider = '<text:p text:style-name="Heading_20_5"></text:p>\n'
    chapterEndTemplate = '''</text:section>
'''
    fileFooter = OdtFile.CONTENT_XML_FOOTER

    def get_chapterMapping(self, chId, chapterNumber):
        """Return a mapping dictionary for a chapter section.

        Blanks the chapter title when the chapter has the
        suppressChapterTitle flag set.
        """
        chapterMapping = OdtFile.get_chapterMapping(self, chId, chapterNumber)
        if self.chapters[chId].suppressChapterTitle:
            chapterMapping['Title'] = ''
        return chapterMapping
|
import psycopg2
import databasePasswordDecryption
import json
# Connection settings for the shared Postgres instance (AWS RDS).
DATABASE_NAME = "postgres"
DATABASE_USER = "swapstreamAdmin"
DATABASE_HOST = "swapstreamdb.ck8zwecvtffz.us-east-2.rds.amazonaws.com"
DATABASE_PORT = "5432"
# Password is decrypted once at import time by the helper module.
DATABASE_PASSWORD = databasePasswordDecryption.getDBPassword()
def get_test_data():
    """Fetch every row of userdata.users, keyed by the first column.

    Returns {first_column: row} on success, or {"Error": message} on
    database failure. The connection is always closed.
    """
    conn = None
    try:
        conn = psycopg2.connect(
            host=DATABASE_HOST,
            database=DATABASE_NAME,
            user=DATABASE_USER,
            password=DATABASE_PASSWORD)
        cur = conn.cursor()
        cur.execute('SELECT * FROM "userdata"."users"')
        rows = cur.fetchall()
        cur.close()
        return {row[0]: row for row in rows}
    except (Exception, psycopg2.DatabaseError) as error:
        print(error)
        return {"Error": str(error)}
    finally:
        if conn is not None:
            conn.close()
            print('Database connection closed.')
def add_user(user_id, user_name, service, pfp):
    """Insert one row into userdata.users.

    Args mirror the table columns (user_id, username, services, pfp).
    Security fix: uses a parameterized query instead of f-string
    interpolation, which was SQL-injectable. The connection is now
    closed even if execute/commit raises.
    """
    conn = psycopg2.connect(
        host=DATABASE_HOST,
        database=DATABASE_NAME,
        user=DATABASE_USER,
        password=DATABASE_PASSWORD)
    try:
        cur = conn.cursor()
        sql = "INSERT INTO userdata.users(user_id, username, services, pfp) VALUES(%s, %s, %s, %s);"
        cur.execute(sql, (user_id, user_name, service, pfp))
        conn.commit()
        cur.close()
    finally:
        conn.close()
def formatUsers(results):
    """Return {'name': <username>} taken from the first result row."""
    first_row = results[0]
    return {'name': first_row[0]}
def get_user(user_id):
    """Look up a user's name by id and return formatUsers(...) of the rows.

    Security fix: parameterized query (the f-string version was
    SQL-injectable). Bug fix: the cursor and connection were never
    closed; they are now released in a finally block.
    """
    conn = psycopg2.connect(
        host=DATABASE_HOST,
        database=DATABASE_NAME,
        user=DATABASE_USER,
        password=DATABASE_PASSWORD)
    try:
        cur = conn.cursor()
        sql = "SELECT username FROM userdata.users WHERE user_id = %s"
        cur.execute(sql, (user_id,))
        results = cur.fetchall()
        cur.close()
    finally:
        conn.close()
    return formatUsers(results)
def add_playlist(plist_id, user_id, title, service, owner):
    """Insert one row into the playlists table.

    NOTE(review): unlike the other helpers this connects to a local dev
    database (127.0.0.1, user postgres) — confirm whether it should use
    the module-level DATABASE_* settings instead.

    Security/correctness fix: parameterized query. The old f-string also
    left the string values unquoted, which was both injectable and
    syntactically invalid SQL for text columns.
    """
    conn = psycopg2.connect(
        "dbname=postgres user=postgres host=127.0.0.1 port=5432"
    )
    try:
        cur = conn.cursor()
        sql = "INSERT INTO playlists(plist_id, user_id, title, service, owner) VALUES(%s, %s, %s, %s, %s);"
        cur.execute(sql, (plist_id, user_id, title, service, owner))
        conn.commit()
        cur.close()
    finally:
        conn.close()
def add_song(title, artist, service, url, index, plist_id):
    """Insert one row into the songs table.

    Security/correctness fix: parameterized query (values were previously
    interpolated unquoted into an f-string — injectable and invalid for
    text columns).

    NOTE(review): "index" is a reserved word in SQL; the column name is
    kept as in the original statement but may need quoting depending on
    the schema — confirm against the database.
    """
    conn = psycopg2.connect(
        host=DATABASE_HOST,
        database=DATABASE_NAME,
        user=DATABASE_USER,
        password=DATABASE_PASSWORD
    )
    try:
        cur = conn.cursor()
        sql = "INSERT INTO songs(title, artist, service, url, index, plist_id) VALUES(%s, %s, %s, %s, %s, %s);"
        cur.execute(sql, (title, artist, service, url, index, plist_id))
        conn.commit()
        cur.close()
    finally:
        conn.close()
def make_dict(results: list) -> dict:
    """Placeholder formatter: always returns an empty dict.

    `results` is currently unused — the body was never implemented.
    """
    return {}
def get_playlist_by_user(user_id):
    """Fetch the songs column of every playlist owned by `user_id`.

    Returns the raw fetchall() rows.
    Security fix: parameterized query (the f-string version was
    SQL-injectable). Bug fix: the connection was never closed.
    """
    conn = psycopg2.connect(
        host=DATABASE_HOST,
        database=DATABASE_NAME,
        user=DATABASE_USER,
        password=DATABASE_PASSWORD
    )
    try:
        cur = conn.cursor()
        sql = ("SELECT songs FROM userdata.playlists, userdata.users "
               "WHERE playlists.user_id = %s AND users.user_id=playlists.user_id")
        cur.execute(sql, (user_id,))
        results = cur.fetchall()
        cur.close()
    finally:
        conn.close()
    # TODO: insert subroutine to sort it into proper dictionary
    return results
def format_playlists(results):
    """Shape playlist rows into {'playlists': [[name, info, *songs], ...]}.

    Each input row is expected to be
    (username, user_id, services, playlist_name, songs_json, image, pfp),
    matching the SELECT in the get_playlist_* helpers.

    Bug fix: the original created `piece`, `info` and `entry` once and
    mutated/re-appended the same objects on every iteration, so every
    playlist aliased identical dicts and all of them ended up showing the
    values of the last row. Fresh objects are now built per row.
    """
    lists = list()
    for item in results:
        piece = {
            'name': item[0],
            'id': item[1],
            'service': item[2],
        }
        info = {
            'info': [piece],
            'image': item[5],
            'profile_image': item[6],
        }
        song_list = [item[3], info]
        song_list += item[4]['songs']
        lists.append(song_list)
    return {'playlists': lists}
def get_playlist_by_id(plist_id, user_id):
    """Fetch one playlist (with owner info) and format it.

    Security fix: parameterized query (f-string interpolation was
    SQL-injectable). plist_id is passed as str() to match the original's
    quoted-string comparison. Bug fix: the connection is now closed.
    """
    conn = psycopg2.connect(
        host=DATABASE_HOST,
        database=DATABASE_NAME,
        user=DATABASE_USER,
        password=DATABASE_PASSWORD
    )
    try:
        cur = conn.cursor()
        sql = ("SELECT DISTINCT username, users.user_id, services, playlists.name, songs, image, pfp "
               "FROM userdata.playlists, userdata.users "
               "WHERE plist_id = %s AND userdata.users.user_id = userdata.playlists.user_id "
               "AND userdata.users.user_id = %s")
        print(sql)
        cur.execute(sql, (str(plist_id), user_id))
        results = cur.fetchall()
        cur.close()
    finally:
        conn.close()
    return format_playlists(results)
def get_playlist_by_display_name(username):
    """Search playlists by (case-insensitive regex) username or playlist name.

    Security fix: parameterized query — the old f-string version let a
    caller inject arbitrary SQL through the search string. Bug fix: the
    connection is now closed.
    """
    conn = psycopg2.connect(
        host=DATABASE_HOST,
        database=DATABASE_NAME,
        user=DATABASE_USER,
        password=DATABASE_PASSWORD
    )
    try:
        cur = conn.cursor()
        sql = ("SELECT DISTINCT username, users.user_id, services, playlists.name, songs, image, pfp "
               "FROM userdata.playlists, userdata.users "
               "WHERE (username ~* %s or name ~* %s) AND users.user_id=playlists.user_id")
        print(sql)
        cur.execute(sql, (username, username))
        results = cur.fetchall()
        cur.close()
    finally:
        conn.close()
    return format_playlists(results)
def get_all_playlists():
    """Fetch every playlist joined with its owner and format the rows.

    The query takes no parameters, so the stray f-string prefix was
    removed. Bug fix: the connection is now closed.
    """
    conn = psycopg2.connect(
        host=DATABASE_HOST,
        database=DATABASE_NAME,
        user=DATABASE_USER,
        password=DATABASE_PASSWORD
    )
    try:
        cur = conn.cursor()
        sql = ("SELECT username, users.user_id, services, playlists.name, songs, image, pfp "
               "FROM userdata.playlists, userdata.users WHERE users.user_id = playlists.user_id")
        print(sql)
        cur.execute(sql)
        results = cur.fetchall()
        cur.close()
    finally:
        conn.close()
    return format_playlists(results)
def format_plist_id(results):
    """Collect the first column of each row under the 'plist_id' key."""
    ids = []
    for row in results:
        ids.append(row[0])
    return {'plist_id': ids}
def get_spotify_plist_id(user_id: int):
    """Fetch distinct Spotify playlist ids for `user_id`.

    Security fix: parameterized query (the f-string version was
    SQL-injectable). Bug fix: the connection is now closed.
    """
    conn = psycopg2.connect(
        host=DATABASE_HOST,
        database=DATABASE_NAME,
        user=DATABASE_USER,
        password=DATABASE_PASSWORD
    )
    try:
        cur = conn.cursor()
        sql = ("SELECT DISTINCT plist_id FROM userdata.playlists, userdata.users "
               "WHERE users.services = 'Spotify' AND users.user_id = %s")
        print(sql)
        cur.execute(sql, (user_id,))
        results = cur.fetchall()
        cur.close()
    finally:
        conn.close()
    return format_plist_id(results)
def store_playlist(plist_id: int, user_id: int, songs: dict, name: str, image: str):
    """Persist one playlist row with its songs serialized as JSON.

    songs is expected to be {'songs': [[title, url], ...]}.

    Security fix: parameterized query. The original doubled single quotes
    in titles to survive f-string SQL; with parameters the raw titles are
    stored unmodified (the doubling would now corrupt the data).
    plist_id is passed as str() to match the original's quoted insert.
    """
    conn = psycopg2.connect(
        host=DATABASE_HOST,
        database=DATABASE_NAME,
        user=DATABASE_USER,
        password=DATABASE_PASSWORD
    )
    try:
        cur = conn.cursor()
        insert_songs = [[each[0], each[1]] for each in songs['songs']]
        insertion = json.dumps({'songs': insert_songs})
        sql = ("INSERT INTO userdata.playlists(plist_id, user_id, songs, name, image) "
               "VALUES(%s, %s, %s, %s, %s);")
        cur.execute(sql, (str(plist_id), user_id, insertion, name, image))
        conn.commit()
        cur.close()
    finally:
        conn.close()
def get_userdata():
    """Fetch one row from userdata.users on the dev instance.

    Returns {"User": row} on success or {"Error": message} on failure.

    SECURITY NOTE(review): credentials are hard-coded here; they should be
    loaded via databasePasswordDecryption like the module-level settings.

    Bug fix: `conn` is initialized to None — previously, if connect()
    raised, the finally block hit an unbound `conn` and raised NameError.
    Also removed the misleading 'Database connection closed.' print that
    ran *before* the connection was actually closed.
    """
    conn = None
    try:
        conn = psycopg2.connect(
            host="swapstream-dev.ck8zwecvtffz.us-east-2.rds.amazonaws.com",
            database="postgres",
            user="swapstreamAdmin",
            password="XcLusU7pvLI2PO6ywDYS")
        cur = conn.cursor()
        cur.execute('SELECT * FROM "userdata"."users"')
        user = cur.fetchone()
        cur.close()
        return {"User": str(user)}
    except (Exception, psycopg2.DatabaseError) as error:
        print(error)
        return {"Error": str(error)}
    finally:
        if conn is not None:
            conn.close()
            print('Database connection closed.')
|
from compas.geometry import Point
from compas.geometry import Polyline, Bezier
from compas.geometry import NurbsCurve
from compas.artists import Artist
from compas.colors import Color
# Reference Bezier curve through four control points.
points = [Point(0, 0, 0), Point(1, 2, 0), Point(2, -2, 0), Point(3, 0, 0)]
bezier = Bezier(points)
# Shared control points for the NURBS variants below (shifted +4 in x).
points = [Point(4, 0, 0), Point(5, 2, 0), Point(6, -2, 0), Point(7, 0, 0)]
# Curve 1: clamped cubic, uniform weights — equivalent to the Bezier.
curve1 = NurbsCurve.from_parameters(
    points=points,
    weights=[1.0, 1.0, 1.0, 1.0],
    knots=[0.0, 1.0],
    multiplicities=[4, 4],
    degree=3
)
# Curve 2: same knots, heavier interior weights pull the curve toward
# the middle control points.
curve2 = NurbsCurve.from_parameters(
    points=points,
    weights=[1.0, 2.0, 2.0, 1.0],
    knots=[0.0, 1.0],
    multiplicities=[4, 4],
    degree=3
)
# Curves 3-5: progressively less clamped knot vectors (same points/weights).
curve3 = NurbsCurve.from_parameters(
    points=points,
    weights=[1.0, 1.0, 1.0, 1.0],
    knots=[0.0, 1/3, 2/3, 1.0],
    multiplicities=[3, 1, 1, 3],
    degree=3
)
curve4 = NurbsCurve.from_parameters(
    points=points,
    weights=[1.0, 1.0, 1.0, 1.0],
    knots=[0.0, 1/5, 2/5, 3/5, 4/5, 1.0],
    multiplicities=[2, 1, 1, 1, 1, 2],
    degree=3
)
curve5 = NurbsCurve.from_parameters(
    points=points,
    weights=[1.0, 1.0, 1.0, 1.0],
    knots=[0.0, 1/7, 2/7, 3/7, 4/7, 5/7, 6/7, 1.0],
    multiplicities=[1, 1, 1, 1, 1, 1, 1, 1],
    degree=3
)
# curve6 = NurbsCurve.from_parameters(
#     points=points,
#     weights=[1.0, 1.0, 1.0, 1.0],
#     knots=[0.0, 0.5, 1.0],
#     multiplicities=[3, 1, 3],
#     degree=2
# )
# ==============================================================================
# Visualisation
# ==============================================================================
Artist.clear()
# Control polygon and sampled locus of the reference Bezier.
Artist(Polyline(bezier.points)).draw()
Artist(Polyline(bezier.locus())).draw()
Artist(Polyline(curve1.points)).draw(show_points=True)
# Draw each NURBS variant in progressively lighter red.
color = Color.red()
Artist(curve1).draw(color=color)
Artist(curve2).draw(color=color.lightened(factor=20))
Artist(curve3).draw(color=color.lightened(factor=40))
Artist(curve4).draw(color=color.lightened(factor=60))
Artist(curve5).draw(color=color.lightened(factor=80))
# Artist(curve6).draw(color=color.lightened(factor=50))
Artist.redraw()
|
from setuptools import setup
from os import path
from io import open
this_directory = path.abspath(path.dirname(__file__))
def readme():
    """Return the full text of the README.md next to this setup script."""
    readme_path = path.join(this_directory, 'README.md')
    with open(readme_path, encoding='utf-8') as fh:
        return fh.read()
# Install requirements are read from requirements.txt so pip and setup.py
# stay in sync.
with open(path.join(this_directory, 'requirements.txt'),
          encoding='utf-8') as f:
    requirements = f.read().splitlines()
setup(
    name = 'apollo_ad',
    packages = ['apollo_ad'],
    version = '0.0.7',
    license='MIT',
    description = 'Auto Differentiation Tools',
    # PyPI long description comes straight from the README.
    long_description=readme(),
    long_description_content_type='text/markdown',
    author = 'Connor Capitolo, Haoxin Li, Kexin Huang, Chen Zhang',
    author_email = 'cosamhkx@gmail.com',
    url = 'https://github.com/West-Coast-Quaranteam/cs107-FinalProject',
    keywords = ['Auto-diff'],
    install_requires=requirements,
    classifiers=[
        'Development Status :: 3 - Alpha',
        'Intended Audience :: Developers',
        'Topic :: Software Development :: Build Tools',
        'License :: OSI Approved :: MIT License',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: 3.6',
        'Programming Language :: Python :: 3.7',
        'Programming Language :: Python :: 3.8',
    ],
)
from itertools import combinations
def part1(numbers):
    """Product of the pair of entries that sums to 2020 (AoC 2020, day 1).

    Raises IndexError when no such pair exists.
    """
    matching = [duo for duo in combinations(numbers, 2) if sum(duo) == 2020]
    first = matching[0]
    return first[0] * first[1]
def part2(numbers):
    """Product of the triple of entries that sums to 2020 (AoC 2020, day 1).

    Raises IndexError when no such triple exists.
    """
    target = 2020
    matching = [trio for trio in combinations(numbers, 3) if sum(trio) == target]
    a, b, c = matching[0]
    return a * b * c
if __name__ == '__main__':
    # Input is one integer per line; solve both puzzle parts.
    with open('./day1/input.txt', 'r') as f:
        numbers = [int(n) for n in f.read().split('\n')]
    print('Part1 Solution: ', part1(numbers))
    print('Part2 Solution: ', part2(numbers))
|
"""
Django settings for ryu project.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.6/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
# Project root (two levels up from this settings module).
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
PROJECT_ROOT = os.path.abspath(
    os.path.join(os.path.dirname(__file__), ".."),
)
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.6/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): this key is committed to source control — rotate it and load
# from the environment before deploying.
SECRET_KEY = 'y%h4lu#z$y_!i6!136t7-vz=qyjrv5q!ikjxjp%x5s@zx53v^r'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    # Development / third-party apps: debug toolbar, South migrations,
    # Bootstrap 3 widgets, bower asset management, NVD3 charts.
    'debug_toolbar',
    'south',
    'bootstrap3',
    'djangobower',
    'django_nvd3',
    # Project app.
    'kurama',
)
MIDDLEWARE_CLASSES = (
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'ryu.urls'
WSGI_APPLICATION = 'ryu.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.6/ref/settings/#databases
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
    }
}
# Templates dir search path
TEMPLATE_DIRS = [os.path.join(BASE_DIR, 'templates')]
# Internationalization
# https://docs.djangoproject.com/en/1.6/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = False
USE_TZ = True
# Explicit date formats (USE_L10N is off, so these are applied globally).
DATE_FORMAT = 'Y-m-d'
DATETIME_FORMAT = 'Y-m-d H i'
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.6/howto/static-files/
STATIC_URL = '/static/'
# Additional locations of static files
STATICFILES_DIRS = (
    # Put strings here, like "/home/html/static" or "C:/www/django/static".
    # Always use forward slashes, even on Windows.
    # Don't forget to use absolute paths, not relative paths.
)
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
    'django.contrib.staticfiles.finders.FileSystemFinder',
    'django.contrib.staticfiles.finders.AppDirectoriesFinder',
    # 'django.contrib.staticfiles.finders.DefaultStorageFinder',
    'djangobower.finders.BowerFinder',
)
# Django-bower
# ------------
BOWER_COMPONENTS_ROOT = os.path.join(PROJECT_ROOT, 'components')
BOWER_PATH = '/usr/local/bin/bower'
BOWER_INSTALLED_APPS = (
    'jquery',
    'bootstrap',
    'd3#3.3.6',
    'nvd3#1.1.12-beta',
    'moment',
    'eonasdan-bootstrap-datetimepicker#latest',
)
#IMPORT LOCAL SETTINGS
#=====================
# Optional per-machine overrides; silently skipped when absent.
try:
    from local_settings import *
except ImportError:
    pass
|
import base64
from hashlib import pbkdf2_hmac, sha256
import hmac
import json
from secrets import token_bytes
from pathlib import Path
from src.models import KeyList, Master
# hash and check
def check_master(password, keylist):
    """Verify `password` against the stored master record.

    Re-derives the hash with the stored salt and iteration count, then
    compares with hmac.compare_digest — a constant-time comparison that,
    unlike `==`, does not leak timing information about how many leading
    characters match.
    """
    hashed = hash_master(
        password, keylist.master.salt, keylist.master.iter_count)
    return hmac.compare_digest(hashed, keylist.master.hashed)
def hash_master(password, salt, iter_count):
    """Derive the master hash via PBKDF2-HMAC-SHA256.

    `salt` arrives base64-encoded; the derived key is returned
    base64-encoded as a UTF-8 string.
    """
    raw_salt = base64.b64decode(salt)
    derived = pbkdf2_hmac('sha256', password.encode(), raw_salt, iter_count)
    return base64.b64encode(derived).decode('utf-8')
def generate_password(master, key):
    """Derive a site password: HMAC-SHA256(master, key.label), base64-encoded.

    If key.gen_mode is 'alphanum' (case-insensitive) the non-alphanumeric
    characters are stripped; if key.max_length is set the result is
    truncated to that many characters.
    """
    digest = hmac.new(
        master.encode(), msg=key.label.encode(), digestmod=sha256).digest()
    result = base64.b64encode(digest).decode('utf-8')
    if key.gen_mode.lower() == 'alphanum':
        result = ''.join(c for c in result if c.isalnum())
    if key.max_length is not None:
        result = result[:key.max_length]
    return result
def generate_salt():
    """Return 32 cryptographically random bytes, base64-encoded as str."""
    raw = token_bytes(32)
    return base64.b64encode(raw).decode('utf-8')
# I/O
def save_master(path, hashed, salt, iter_count):
    """Write a brand-new keylist containing only the master record.

    Implies a clean slate: any existing keylist at `path` is replaced.
    """
    new_master = Master(hashed, salt, iter_count)
    save_keylist(path, KeyList(new_master))
def save_keylist(path, keylist):
    """Serialize `keylist` to `path` and record the path as last-used."""
    with open(path, 'w') as out:
        out.write(str(keylist))
    save_keylist_path(path)
def save_keylist_path(path):
    """Record `path` as the most recently used keylist path.

    The path is moved (or appended) to the end of the 'paths' list in the
    metadata file, so get_last_used_path() will return it.
    """
    # Consistency fix: reuse get_keylists_meta_path() instead of rebuilding
    # the same path inline.
    file = get_keylists_meta_path()
    existing = []
    if file.exists():
        with open(str(file)) as f:
            existing = json.loads(f.read()).get('paths', [])
    # De-duplicate, keeping `path` at the end (= most recent).
    if path in existing:
        existing.remove(path)
    existing.append(path)
    with open(str(file), 'w') as f:
        f.write(json.dumps({'paths': existing}))
def get_meta_directory():
    """Directory under the user's home dir holding passgen metadata."""
    return Path.home().joinpath('.passgen')


def get_keylists_meta_path():
    """Path of the JSON file tracking known keylist paths."""
    return get_meta_directory().joinpath('keylists.meta.json')


def get_default_path():
    """Default keylist location, as a string."""
    return str(get_meta_directory().joinpath('default.keys.json'))
def get_last_used_path():
    """Return the most recently used keylist path, or the default one."""
    meta = get_keylists_meta_path()
    if meta.exists():
        with open(str(meta)) as fh:
            paths = json.loads(fh.read()).get('paths', [])
        # The list is ordered oldest-first; the last entry is most recent.
        if len(paths) > 0:
            return paths[-1]
    return get_default_path()
def load_keylist(path):
    """Read and deserialize the keylist JSON stored at `path`."""
    with open(path) as fh:
        return KeyList.from_json(fh.read())
|
import os
def CSharpSolutionFile( path, **kwargs ):
    """Return the absolute path of 'testy2.sln' one directory above this file.

    `path` and the extra keyword arguments are accepted (the caller passes
    several) but unused; the solution location is fixed relative to this
    configuration file.
    """
    parent = os.path.dirname( __file__ )
    grandparent = os.path.dirname( parent )
    return os.path.join( grandparent, 'testy2.sln' )
|
"""
retrieve the satelite weather forcast map for a location
convert that from png to jpg
display this on the M5 display in a window
uses :
- weather underground API
- free API Key, needs registration
- filestack
- initial free plan (500 conversions)
"""
import urequests, uos as os, gc
def urlencode(string=""):
    """Poor man's urlencode: percent-escapes only ':' (%3A) and '/' (%2F).

    All other reserved characters (#, ?, &, @, %, +, space) are
    deliberately left untouched — only the protocol and path separators
    need escaping for the embedded-URL use case below.
    """
    table = str.maketrans({':': '%3A', '/': '%2F'})
    return string.translate(table)
def write(text):
    "Log text to REPL and to Display"
    # Echo to the serial console, then append the text (in red) to the TFT
    # at the last cursor position. `tft` is a global provided by the M5
    # display firmware environment.
    print(text)
    tft.text(tft.LASTX ,tft.LASTY,'{}\r\n'.format(text),tft.RED)
#=========================================
city='Pijnacker'
# `header` and `mainwindow` are globals provided by the M5 UI environment.
header('Satellite {}'.format(city))
mainwindow()
tft.text(0,0,"Retrieving map from\n\r")
tft.text(tft.LASTX ,tft.LASTY,"Weather Underground\n\r")
# NOTE(review): API key committed in source — move to a config file.
wunderground_APIKey='0c1c0d57eee9edf0'
# Satellite PNG for the city, sized to the display window.
satURL = 'http://api.wunderground.com/api/{}/satellite/q/KS/{}.png?width={}&height={}&basemap=1'.format(
    wunderground_APIKey,city,318,238)
PNGurl = urlencode(satURL)
Filestack_APIKey='AheOzuybQgupZOWlRIZ3Gz'
# Filestack transform: convert PNG -> JPG and resize to the display width.
transform='output=format:jpg/resize=width:{}'.format(318)
#fixme : urequests does not like the url in HTTPS, so we are leaking the API Key
jpgurl = "http://process.filestackapi.com/{}/{}/{}".format( Filestack_APIKey, transform, PNGurl)
filename = '/flash/satview.jpg'
try:
    #ask for a PNG transformed to JPG
    response = urequests.get(jpgurl)
    #todo: get errors out if there is no network connection
    gc.collect()
    if response.status_code == 200:
        #write that to a file
        with open(filename,'wb') as file:
            _= file.write(response.content)
            file.close()
    else:
        write(b'HTTP Response {} {}'.format(response.status_code, response.reason) )
except OSError as exc:
    if exc.args[0] == 28:
        # errno 28 = ENOSPC: flash filesystem full.
        write('IO Error') #OSError: 28
    else:
        write('OS Error {}'.format(exc.args[0]) )
except MemoryError:
    write('Ran out of memory')
except:
    write('Could not retrieve the map')
finally:
    # NOTE(review): if urequests.get() itself raised, `response` is unbound
    # here and this close() raises NameError — confirm and guard if needed.
    response.close()
# Show the downloaded JPG full-window.
tft.image(0, 0, filename )
try:
    #remove the file to get some space back
    os.remove(filename)
except:
    pass
"""
- Possible alternative :
https://cloudconvert.com/pricing - Free
https://api.cloudconvert.com/convert?apikey=oFI8RR73C8W7krLDgCOK0JG7tgS7u9kwiziDYhe20YfY0u1ZfIEjXNwh4bPGiqO6&inputformat=png&outputformat=jpg&input=download&file=http%3A%2F%2Fapi.wunderground.com%2Fapi%2F0c1c0d57eee9edf0%2Fsatellite%2Fq%2FKS%2Fpijnacker.png%3Fwidth%3D318%26height%3D238%26basemap%3D1&wait=true&download=true
#Testing
jpgurl = 'http://api.cloudconvert.com/convert?apikey=oFI8RR73C8W7krLDgCOK0JG7tgS7u9kwiziDYhe20YfY0u1ZfIEjXNwh4bPGiqO6&inputformat=png&outputformat=jpg&input=download&file=http%3A%2F%2Fapi.wunderground.com%2Fapi%2F0c1c0d57eee9edf0%2Fsatellite%2Fq%2FKS%2Fpijnacker.png%3Fwidth%3D318%26height%3D238%26basemap%3D1&wait=true&download=true'
>> urequests.py - NotImplementedError: Redirects not yet supported
"""
|
from ..utils import Object
class SendPassportAuthorizationForm(Object):
    """
    Sends a Telegram Passport authorization form, effectively sharing data with the service. This method must be called after getPassportAuthorizationFormAvailableElements if some previously available elements need to be used

    Attributes:
        ID (:obj:`str`): ``SendPassportAuthorizationForm``

    Args:
        autorization_form_id (:obj:`int`):
            Authorization form identifier (the "autorization" spelling is
            part of the wire format and must not be corrected)
        types (List of :class:`telegram.api.types.PassportElementType`):
            Types of Telegram Passport elements chosen by user to complete the authorization form

    Returns:
        Ok

    Raises:
        :class:`telegram.Error`
    """
    ID = "sendPassportAuthorizationForm"

    def __init__(self, autorization_form_id, types, extra=None, **kwargs):
        self.extra = extra
        self.autorization_form_id = autorization_form_id  # int
        self.types = types  # list of PassportElementType

    @staticmethod
    def read(q: dict, *args) -> "SendPassportAuthorizationForm":
        # Deserialize from a raw dict; nested elements are parsed by Object.read.
        form_id = q.get('autorization_form_id')
        element_types = [Object.read(t) for t in q.get('types', [])]
        return SendPassportAuthorizationForm(form_id, element_types)
|
import discord
import re
import uuid
from threading import Thread
import asyncio
import socket
import os
import config
import downloader
# Directory where requested downloads are staged before sending.
downloadDir = "tmp/"
# Matches role/user mentions (<@…>, <@&…>) and @everyone/@here with any
# surrounding whitespace. Raw string fixes the invalid escape sequences
# ("\s", "\d") that emit SyntaxWarning on Python 3.12+.
mentionPattern = r"\s*((<@&?\d{18}>)|@(everyone|here))\s*"
client = discord.Client()
def formatMsg(content):
    """Strip mention tokens from a message and trim surrounding whitespace."""
    cleaned = re.sub(mentionPattern, "", content)
    return cleaned.strip()
@client.event
async def on_ready():
    # Fired once the gateway session is established.
    print(client.user.name + " has logged in")

@client.event
async def on_message(message):
    # In servers, only react when the bot itself is mentioned; in private
    # messages, react to everything. The download runs as a background task.
    if (message.server is not None):
        # in server
        if (client.user.id in message.raw_mentions):
            a = asyncio.get_event_loop()
            a.create_task(downloader.downloadMessage(formatMsg(message.content), downloadDir, client, message.channel))
    else:
        # in pm
        # ??? error is thrown but message is still sent to user
        a = asyncio.get_event_loop()
        a.create_task(downloader.downloadMessage(formatMsg(message.content), downloadDir, client, message.author))
        pass
# Blocks until the bot disconnects; any code below only runs afterwards.
client.run(config.botSecret)
try:
    # Bug fix: os.getenv returns a *string* when PORT is set in the
    # environment, and socket.bind needs an int — the old code silently
    # failed here on Heroku because of the bare except below.
    port = int(os.getenv("PORT", 5000))  # heroku bullshit
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    s.bind(("", port))
except Exception:
    # Best-effort bind to satisfy Heroku's port check; failure is non-fatal.
    pass
# This is an auto-generated Django model module.
# You'll have to do the following manually to clean this up:
# * Rearrange models' order
# * Make sure each model has one field with primary_key=True
# * Make sure each ForeignKey and OneToOneField has `on_delete` set to the desired behavior
# * Remove `managed = False` lines if you wish to allow Django to create, modify, and delete the table
# Feel free to rename the models, but don't rename db_table values or field names.
from django.db import models
class Events(models.Model):
    """One row per transient event; coordinates are in decimal degrees."""
    id = models.PositiveIntegerField(primary_key=True)
    ra = models.FloatField()
    decl = models.FloatField()
    # Coordinates as originally reported, before any astrometric correction.
    ra_original = models.FloatField()
    decl_original = models.FloatField()
    date_inserted = models.DateTimeField()
    date_updated = models.DateTimeField(blank=True, null=True)
    year = models.PositiveSmallIntegerField()
    # Per-year base-26 designation suffix; (year, base26suffix) is unique.
    base26suffix = models.CharField(max_length=20)
    htm16id = models.BigIntegerField(db_column='htm16ID')  # Field name made lowercase.
    class Meta:
        managed = False  # table is created and maintained outside Django
        db_table = 'events'
        unique_together = (('year', 'base26suffix'),)
class Akas(models.Model):
    """Alternative designations ("also known as") linking events to surveys."""
    id = models.AutoField(primary_key=True)
    # NOTE(review): Django normally derives the column by appending _id to the
    # field name; db_column='event_id' keeps the legacy column name intact.
    event_id = models.ForeignKey(Events, to_field='id', db_column='event_id', on_delete = models.CASCADE)
    object_id = models.BigIntegerField()
    aka = models.CharField(max_length=30, blank=True, null=True)
    ra = models.FloatField()
    decl = models.FloatField()
    survey_database = models.CharField(max_length=50)
    user_id = models.CharField(max_length=50)
    source_ip = models.CharField(max_length=20, blank=True, null=True)
    original_flag_date = models.DateField()
    date_inserted = models.DateTimeField()
    htm16id = models.BigIntegerField(db_column='htm16ID')  # Field name made lowercase.
    class Meta:
        managed = False  # table is created and maintained outside Django
        db_table = 'akas'
        unique_together = (('object_id', 'survey_database'),)
class AbstractYear(models.Model):
    """Shared schema for the per-year detection tables (Y2013–Y2030)."""
    id = models.AutoField(primary_key=True)
    object_id = models.BigIntegerField()
    ra = models.FloatField()
    decl = models.FloatField()
    survey_database = models.CharField(max_length=50)
    user_id = models.CharField(max_length=50)
    source_ip = models.CharField(max_length=20, blank=True, null=True)
    date_inserted = models.DateTimeField()
    htm16id = models.BigIntegerField(db_column='htm16ID')  # Field name made lowercase.
    class Meta:
        abstract = True
# Concrete per-year tables (y2013 … y2030), all sharing the AbstractYear
# schema; each maps onto an externally managed table of the same name.
class Y2013(AbstractYear):
    class Meta:
        managed = False
        db_table = 'y2013'
        unique_together = (('object_id', 'survey_database'),)
class Y2014(AbstractYear):
    class Meta:
        managed = False
        db_table = 'y2014'
        unique_together = (('object_id', 'survey_database'),)
class Y2015(AbstractYear):
    class Meta:
        managed = False
        db_table = 'y2015'
        unique_together = (('object_id', 'survey_database'),)
class Y2016(AbstractYear):
    class Meta:
        managed = False
        db_table = 'y2016'
        unique_together = (('object_id', 'survey_database'),)
class Y2017(AbstractYear):
    class Meta:
        managed = False
        db_table = 'y2017'
        unique_together = (('object_id', 'survey_database'),)
class Y2018(AbstractYear):
    class Meta:
        managed = False
        db_table = 'y2018'
        unique_together = (('object_id', 'survey_database'),)
class Y2019(AbstractYear):
    class Meta:
        managed = False
        db_table = 'y2019'
        unique_together = (('object_id', 'survey_database'),)
class Y2020(AbstractYear):
    class Meta:
        managed = False
        db_table = 'y2020'
        unique_together = (('object_id', 'survey_database'),)
class Y2021(AbstractYear):
    class Meta:
        managed = False
        db_table = 'y2021'
        unique_together = (('object_id', 'survey_database'),)
class Y2022(AbstractYear):
    class Meta:
        managed = False
        db_table = 'y2022'
        unique_together = (('object_id', 'survey_database'),)
class Y2023(AbstractYear):
    class Meta:
        managed = False
        db_table = 'y2023'
        unique_together = (('object_id', 'survey_database'),)
class Y2024(AbstractYear):
    class Meta:
        managed = False
        db_table = 'y2024'
        unique_together = (('object_id', 'survey_database'),)
class Y2025(AbstractYear):
    class Meta:
        managed = False
        db_table = 'y2025'
        unique_together = (('object_id', 'survey_database'),)
class Y2026(AbstractYear):
    class Meta:
        managed = False
        db_table = 'y2026'
        unique_together = (('object_id', 'survey_database'),)
class Y2027(AbstractYear):
    class Meta:
        managed = False
        db_table = 'y2027'
        unique_together = (('object_id', 'survey_database'),)
class Y2028(AbstractYear):
    class Meta:
        managed = False
        db_table = 'y2028'
        unique_together = (('object_id', 'survey_database'),)
class Y2029(AbstractYear):
    class Meta:
        managed = False
        db_table = 'y2029'
        unique_together = (('object_id', 'survey_database'),)
class Y2030(AbstractYear):
    class Meta:
        managed = False
        db_table = 'y2030'
        unique_together = (('object_id', 'survey_database'),)
|
# -*- coding: utf-8 -*-
# Base imports
import os
import kivy
import gettext
import json
from os import listdir
from os.path import isfile, join
# Import library for html generate
from yattag import Doc
from yattag import indent
# Import for working with DB when new test was made
import psycopg2
# This will be used to send parameters when callbacks is called
from functools import partial
kivy.require("1.10.1")
# Basic import
from kivy.app import App
# Import config to set up kivy
from kivy.config import Config
# Import will be need
from kivy.core.window import Window
# UI elemtns import
from kivy.uix.popup import Popup
from kivy.uix.gridlayout import GridLayout
from kivy.uix.screenmanager import Screen, ScreenManager
from kivy.uix.floatlayout import FloatLayout
from kivy.uix.button import Button
from kivy.uix.scrollview import ScrollView
from kivy.uix.label import Label
from kivy.uix.textinput import TextInput
from kivy.uix.checkbox import CheckBox
# Variables for localization; both are set by changeLanguage().
lng = None
_ = None
# Kivy configuration: logging, desktop mode, window icon, FPS cap.
# NOTE(review): the "\\log" separator is Windows-specific — confirm the
# app only targets Windows.
Config.set("kivy", "log_dir", os.getcwd() + "\\log")
Config.set("kivy", "log_level", "warning")
Config.set("kivy", "log_maxfiles", 10)
Config.set("kivy", "desktop", 1)
Config.set("kivy", "window_icon", "icon.png")
Config.set("graphics", "max_fps", 60)
Config.write()
# Shared test state: question text -> answer texts, and
# question text -> correct/incorrect flags.
questions = {}
answers = {}
# Currently selected subject code (set by changeSubject()).
subject = None
# Generated PHP page text and concatenated answer-digit string.
php_file = ""
ans = ""
# Manager
sm = ScreenManager()
# All screens of the application.
langscreen = Screen(name="Lang")
subscreen = Screen(name="Subject")
makescreen = Screen(name="Making")
editscreen = Screen(name="Edit")
readyscreen = Screen(name="Ready")
def generateFile():
    """Generate the final PHP page into the module-global ``php_file``.

    The page embeds the current subject and test name as hidden form
    fields, one <fieldset> per question with a hidden per-question answer
    count, and checkbox inputs named "<question index>[]" whose value is
    the answer index.
    """
    # PHP prologue: gettext setup plus a DB lookup of the test title by
    # the page's file name (the test id).
    begin = (
        "<?php\ndefine('PROJECT_DIR', realpath('../'));\ndefine('LOCALE_DIR', PROJECT_DIR . '\\Locale');\ndefine('DEFAULT_LOCALE', 'en');\n\nrequire('../GetText/gettext.inc');\n\n$encoding = 'UTF-8';\n\n$locale = (isset($_COOKIE['lang'])) ? $_COOKIE['lang'] : DEFAULT_LOCALE;\n\nT_setlocale(LC_MESSAGES, $locale);\n\nT_bindtextdomain($locale, LOCALE_DIR);\nT_bind_textdomain_codeset($locale, $encoding);\nT_textdomain($locale);\n\nrequire('../postgresql.php');\n$number = basename(__FILE__, '.php');\n$title = '';\n$stmt = getTests('"
        + str(subject)
        + "');\nwhile ($row = $stmt->fetch(PDO::FETCH_ASSOC)) {\n if ($row['id'] == $number) {\n $title = $row['name'];\n break;\n }\n}\nrequire('../Templates/head.php');\n?>\n"
    )
    end = "\n<?php\nrequire('../Templates/foot.php');\n?>"
    # Doc().ttl() returns (doc, tag, text, line); text is unused here.
    # pylint: disable=unused-variable
    doc, tag, text, line = Doc().ttl()
    with tag("form", action="../Pages/checker", method="post", autocomplete="off"):
        # Hidden fields carry the subject and test name to the checker page.
        doc.line("input", "", type="hidden", name="Lang", value=str(subject))
        doc.line("input", "", type="hidden", name="Name", value=str(Make.name.text))
        # num is the running question index, used as the input name prefix.
        num = 0
        for i in questions:
            with tag("fieldset"):
                doc.line(
                    "input",
                    "",
                    type="hidden",
                    name="Count[]",
                    value=str(len(questions[i])),
                )
                doc.line("h2", i)
                with tag("ol"):
                    for j in range(len(questions[i])):
                        with tag("li"):
                            doc.line(
                                "input",
                                questions[i][j],
                                type="checkbox",
                                name=str(num) + "[]",
                                value=str(j),
                            )
            num += 1
        doc.stag("input", type="submit", text="send")
    global php_file
    php_file = begin + indent(doc.getvalue(), indentation=" ", newline="\r") + end
def readyTest(number, imp=False):
    """Write the generated PHP page for test ``number`` to the web tree.

    :param number: database id of the test; becomes the file name
    :param imp: True when called from the import flow, in which case
        ``php_file`` was already loaded from JSON and is not regenerated
    """
    if not imp:
        generateFile()
    # Target path: everything of cwd before "program", then
    # "<subject>/<number>.php" (the web tree lives next to the program dir).
    path = (
        os.getcwd()[0 : os.getcwd().find("program")]
        + str(subject)
        + "/"
        + str(number)
        + ".php"
    )
    # Context manager guarantees the handle is closed even if write() fails
    # (the original left the file open on error).
    with open(path, "wb") as out:
        out.write(php_file.encode("UTF-8"))
def export(*args):
    """Serialize the current test to ``tests/<name>.json``.

    Shows the "Ready" screen with "OK!" on success or a validation
    message (blank name / no questions) on failure.
    """
    Ready()
    changeScreen("Ready")
    if Make.name.text == "":
        Ready.label.text = _("Test name can't be blank!")
        Ready.layout.add_widget(Ready.back)
        return
    elif len(questions) == 0:
        Ready.label.text = _("You didn't configure questions!")
        Ready.layout.add_widget(Ready.back)
        return
    generateFile()
    to_export = {}
    to_export["Test_name"] = Make.name.text
    to_export["Test_description"] = Make.description.text
    to_export["subject"] = subject
    global ans
    # Rebuild the answer string from scratch: the original only appended,
    # so exporting twice duplicated every digit in the module global.
    ans = ""
    for i in answers.values():
        for j in i:
            ans += str(int(j))
    to_export["answer"] = ans
    to_export["file"] = php_file
    # Context manager closes the handle even if json.dumps/write fails
    # (the original leaked the handle on error).
    with open("tests/" + Make.name.text + ".json", "w") as out:
        out.write(json.dumps(to_export))
    Ready.label.text = "OK!"
def imp(*args):
    """Import every JSON test saved in ``tests/`` and store each one.

    For each file: restore the module globals (subject, ans, php_file) and
    the name/description inputs, then run lastScreen(True) to insert the
    test into the database and write its PHP page.
    """
    all_tests = [f for f in listdir("tests") if isfile(join("tests", f))]
    global subject, ans, php_file
    for i in all_tests:
        # "with" closes the handle; the original opened one per file and
        # never closed any of them.
        with open("tests/" + i, "r") as input_file:
            to_import = json.loads(input_file.read())
        subject = to_import["subject"]
        Make.name.text = to_import["Test_name"]
        Make.description.text = to_import["Test_description"]
        ans = to_import["answer"]
        php_file = to_import["file"]
        lastScreen(True)
def lastScreen(imp=False, *args):
    """Store the finished test in the database and write its PHP page.

    Shows the "Ready" screen with "OK!" on success or a diagnostic
    message on failure. When ``imp`` is True the globals (subject, ans,
    php_file) were already filled by imp(), so the answer string is not
    rebuilt and the PHP page is not regenerated.
    """
    Ready()
    changeScreen("Ready")
    # Validate user input before touching the database.
    if Make.name.text == "":
        Ready.label.text = _("Test name can't be blank!")
        Ready.layout.add_widget(Ready.back)
        return
    elif len(questions) == 0 and not imp:
        Ready.label.text = _("You didn't configure questions!")
        Ready.layout.add_widget(Ready.back)
        return
    conn = None
    try:
        conn = psycopg2.connect(
            dbname="Tests",
            user="TestingSystem",
            password="postgresql",
            host="localhost",
        )
    except Exception as e:
        if "could not connect to server" in str(e):
            Ready.label.text += _(
                "Check if server is running. Try again or ask for help."
            )
        else:
            Ready.label.text += str(e)
        Ready.layout.add_widget(Ready.back)
        return
    try:
        cursor = conn.cursor()
        global ans
        if not imp:
            # Rebuild from scratch so repeated clicks on "Ready" do not
            # append duplicate digits to the module global.
            ans = ""
            for i in answers.values():
                for j in i:
                    ans += str(int(j))
        # Parameterized values prevent SQL injection through the test name
        # or description (the original concatenated user text into the
        # statement). The table name cannot be a placeholder, but
        # ``subject`` only ever holds one of the fixed codes assigned by
        # changeSubject().
        cursor.execute(
            "INSERT INTO "
            + str(subject)
            + " (name, description, answer) values (%s, %s, %s);",
            (str(Make.name.text), str(Make.description.text), ans),
        )
        cursor.close()
    except Exception as e:
        if "duplicate key value violates unique" in str(e):
            Ready.label.text += _(
                "Test with this name already exists, change the name of the test"
            )
        else:
            Ready.label.text += (
                str(e)
                + "\n "
                + _("Check if server is running. Try again or ask for help.")
            )
        Ready.layout.add_widget(Ready.back)
        return
    conn.commit()
    number = 0
    try:
        cursor = conn.cursor()
        cursor.execute("SELECT id FROM " + str(subject))
        response = cursor.fetchall()
        # NOTE(review): assumes the last returned row holds the newest id;
        # an explicit ORDER BY would be safer — kept as-is to avoid
        # changing which file name readyTest() produces.
        number = int(response[-1][-1])
        cursor.close()
    except Exception as e:
        Ready.label.text += (
            str(e) + "\n " + _("Check if server is running. Try again or ask for help.")
        )
        Ready.layout.add_widget(Ready.back)
        return
    readyTest(number, imp)
    conn.close()
    Ready.label.text = "OK!"
def Ready():
    """(Re)build the result screen: a status label plus a Back button.

    Ready.back is created and bound here but only attached to the layout
    by callers on their error paths.
    """
    # Clear screen to prevent text-on-text situation.
    readyscreen.clear_widgets()
    Ready.layout = FloatLayout(size=(300, 300))
    Ready.label = Label(text="", size_hint=(0.5, 0.1), pos_hint={"x": 0.25, "y": 0.6})
    Ready.back = Button(
        text=_("Back"), size_hint=(0.5, 0.1), pos_hint={"x": 0.25, "y": 0.8}
    )
    Ready.back.bind(on_release=partial(changeScreen, "Making"))
    Ready.layout.add_widget(Ready.label)
    readyscreen.add_widget(Ready.layout)
def changeSubject(sub):
    """Map the localized subject button caption to its internal code.

    Sets the module-global ``subject`` used for table and path names; an
    unknown caption leaves it unchanged, exactly like the original chain.
    """
    global subject
    codes = {
        _("Math"): "Math",
        _("Russian"): "Rus",
        _("Belarussian"): "Bel",
        _("English"): "Eng",
        _("Geography"): "Geo",
        _("Informatics"): "Inf",
        _("Physics"): "Phy",
        _("Biology"): "Bio",
    }
    if sub in codes:
        subject = codes[sub]
def changeLanguage(lang):
    """Install the gettext translation matching the chosen language name.

    Rebinds the module globals ``lng`` and ``_``; an unrecognized name
    leaves ``lng`` untouched before install(), same as the original chain.
    """
    global lng
    global _
    locale_codes = {"English": "en", "Русский": "ru", "Беларуская": "be"}
    code = locale_codes.get(lang)
    if code is not None:
        lng = gettext.translation("main", localedir="locale", languages=[code])
    lng.install()
    _ = lng.gettext
def changeScreen(screen, *args):
    """Switch the ScreenManager to the screen with the given name.

    Extra positional args are accepted so this works directly as a Kivy
    event callback through functools.partial.
    """
    sm.current = screen
def select(button):
    """Language button callback: install the language, then show subjects."""
    changeLanguage(button.text)
    # Rebuild the subject screen so its labels use the new translations.
    subscreen.clear_widgets()
    Subject()
    changeScreen("Subject")
def subjectChange(button):
    """Subject button callback: remember the subject, then show the maker."""
    changeSubject(button.text)
    # Rebuild the making screen from scratch for the chosen subject.
    makescreen.clear_widgets()
    Make()
    changeScreen("Making")
def Lang():
    """First screen: language selection."""
    Lang.layout = FloatLayout()
    Lang.label = Label(
        text="Language/Мова/Язык", size_hint=(0.5, 0.1), pos_hint={"x": 0.25, "y": 0.6}
    )
    Lang.layout.add_widget(Lang.label)
    # One button per supported language, stacked top to bottom. Attribute
    # names lng1..lng3 are preserved for external access.
    button_specs = (
        ("lng1", "English", 0.5),
        ("lng2", "Беларуская", 0.4),
        ("lng3", "Русский", 0.3),
    )
    for attr, caption, y_pos in button_specs:
        btn = Button(
            text=caption, size_hint=(0.5, 0.1), pos_hint={"x": 0.25, "y": y_pos}
        )
        btn.bind(on_release=select)
        setattr(Lang, attr, btn)
        Lang.layout.add_widget(btn)
    langscreen.add_widget(Lang.layout)
def Subject():
    """Second screen: subject selection."""
    Subject.view = ScrollView(
        size_hint=(1, None), size=(Window.width, Window.height), bar_width=7
    )
    Subject.layout = GridLayout(cols=1, spacing=10, size_hint_y=None)
    Subject.layout.bind(minimum_height=Subject.layout.setter("height"))
    # Label
    Subject.label = Label(text=_("Choose subject"), size_hint_y=None, height=60)
    Subject.layout.add_widget(Subject.label)
    # One button per subject; attribute names sub1..sub8 are preserved so
    # other code can still reach the buttons.
    captions = (
        _("English"),
        _("Russian"),
        _("Belarussian"),
        _("Math"),
        _("Geography"),
        _("Informatics"),
        _("Physics"),
        _("Biology"),
    )
    for index, caption in enumerate(captions, start=1):
        btn = Button(text=caption, size_hint_y=None, height=60)
        btn.bind(on_release=subjectChange)
        setattr(Subject, "sub" + str(index), btn)
        Subject.layout.add_widget(btn)
    Subject.back = Button(text=_("Back"), size_hint_y=None, height=60)
    Subject.back.bind(on_release=partial(changeScreen, "Lang"))
    Subject.layout.add_widget(Subject.back)
    Subject.view.add_widget(Subject.layout)
    # Adding screen to ScreenManager
    subscreen.add_widget(Subject.view)
def readyQuest(*args):
    """Copy the editor widgets' state back into questions/answers, then
    return to the Making screen."""
    key = editQuest.quest.text
    for idx, field in enumerate(editQuest.input):
        questions[key][idx] = field.text
    for idx, box in enumerate(editQuest.check):
        answers[key][idx] = box.active
    changeScreen("Making")
def editQuest(inst):
    """Open the answer editor for the pressed question button.

    ``inst`` is the question Button: its text is the question itself and
    its id is the question's index into Make.variants.
    """
    # Clearing current view.
    editscreen.clear_widgets()
    # Scrollable root so long answer lists stay reachable.
    view = ScrollView(
        size_hint=(1, None), size=(Window.width, Window.height), bar_width=7
    )
    layout = GridLayout(cols=1, spacing=10, size_hint_y=None)
    layout.bind(minimum_height=layout.setter("height"))
    # Index of this question in Make.variants, stored on the button.
    # NOTE: this local shadows the builtin `id`.
    id = int(inst.id)
    # Back button.
    back = Button(text=_("Back"), size_hint_y=None, height=60)
    back.bind(on_release=partial(changeScreen, "Making"))
    # Question label.
    editQuest.quest = Label(text=inst.text, size_hint_y=None)
    layout.add_widget(back)
    layout.add_widget(editQuest.quest)
    # Widget lists read back later by readyQuest().
    editQuest.input = []
    editQuest.check = []
    editQuest.subgrid = []
    # First visit for this question: allocate empty answer texts.
    if not editQuest.quest.text in questions:
        questions[editQuest.quest.text] = [""] * len(Make.variants[id])
    # First visit: allocate the correct/incorrect flags, all False.
    if not editQuest.quest.text in answers:
        answers[editQuest.quest.text] = [False] * len(Make.variants[id])
    # Build one checkbox+label row and one text input per answer.
    for i in range(len(Make.variants[id])):
        # Layout to place checkbox and label in one line.
        subgrid = GridLayout(cols=2, spacing=0, size_hint_y=None, height=50)
        # Input for answer text, pre-filled with any saved value.
        inp = TextInput(
            height=50,
            size_hint_y=None,
            text=questions[editQuest.quest.text][i],
            write_tab=False,
        )
        # Checkbox for the correct/incorrect answer flag.
        check = CheckBox(size_hint_x=None, width=50, active=answers[editQuest.quest.text][i])
        # Label with the answer's 1-based number.
        lbl = Label(text=_("Answer number:") + " " + str(i + 1), height=50)
        editQuest.subgrid.append(subgrid)
        editQuest.input.append(inp)
        editQuest.check.append(check)
        subgrid.add_widget(check)
        subgrid.add_widget(lbl)
        layout.add_widget(subgrid)
        layout.add_widget(inp)
    view.add_widget(layout)
    editscreen.add_widget(view)
    # Ready button confirms the edits (readyQuest copies widget state back).
    ready = Button(text=_("Ready"), size_hint_y=None, height=60)
    ready.bind(on_release=readyQuest)
    layout.add_widget(ready)
    changeScreen("Edit")
def addQuestionWithAnswers(txt, num, *args):
    """Validate the popup fields and add a question button to the maker.

    ``txt`` is the question TextInput, ``num`` the answer-count TextInput.
    Silently does nothing when the count is not an integer or the question
    text is empty.
    """
    try:
        num = int(num.text)
    except ValueError:
        return
    if len(txt.text) == 0:
        return
    # Lazily create the per-question answer storage.
    if not hasattr(Make, "variants"):
        Make.variants = []
    # The button id is this question's index in Make.variants: len() is
    # taken before the append below, so it equals the new entry's index.
    btn = Button(text=txt.text, size_hint_y=None, height=60, id=str(len(Make.variants)))
    btn.bind(on_release=editQuest)
    # Adding place for answers.
    Make.variants.append([""] * num)
    # Re-append Export/Ready so they stay below the new question button.
    Make.layout.add_widget(btn)
    Make.layout.remove_widget(Make.ready)
    Make.layout.remove_widget(Make.export)
    Make.layout.add_widget(Make.export)
    Make.layout.add_widget(Make.ready)
    # Close both popups.
    addVariants.popup.dismiss()
    addQuest.popup.dismiss()
def addVariants(btn):
    """Popup for configuring a question with a fixed number of answers."""
    # Main layout.
    addVariants.layout = GridLayout(cols=1, rows=6, spacing=5)
    # Our popup with input fields.
    addVariants.popup = Popup(
        title=_("Configuring question"),
        content=addVariants.layout,
        size_hint=(None, None),
        size=(600, 400),
        auto_dismiss=False,
    )
    # Label which indicates where the question input is located.
    addVariants.label = Label(text=_("Write question here:"), size_hint=(1, 0.2))
    # Text input for the question.
    addVariants.text = TextInput(write_tab=False)
    # Label which indicates where the answer-count input is located.
    addVariants.number = Label(text=_("Number of answers:"), size_hint=(1, 0.2))
    # Text input for the number of answers (integers only).
    addVariants.num = TextInput(
        input_filter="int", size_hint=(1, 0.4), multiline=False, write_tab=False
    )
    # Close button.
    addVariants.button = Button(text=_("Close"), size_hint=(1, 0.5))
    addVariants.button.bind(on_release=addVariants.popup.dismiss)
    # Next button hands both inputs to addQuestionWithAnswers for validation.
    addVariants.nxt = Button(text=_("Next"), size_hint=(1, 0.5))
    addVariants.nxt.bind(
        on_release=partial(addQuestionWithAnswers, addVariants.text, addVariants.num)
    )
    # Adding all UI components to the layout.
    addVariants.layout.add_widget(addVariants.label)
    addVariants.layout.add_widget(addVariants.text)
    addVariants.layout.add_widget(addVariants.number)
    addVariants.layout.add_widget(addVariants.num)
    addVariants.layout.add_widget(addVariants.nxt)
    addVariants.layout.add_widget(addVariants.button)
    # Showing popup.
    addVariants.popup.open()
def addQuest(btn):
    """Popup where the user selects the type of question to add."""
    # Main layout.
    addQuest.layout = GridLayout(cols=1, spacing=5, size_hint_y=None)
    # Popup window.
    addQuest.popup = Popup(
        title=_("Choose type"),
        content=addQuest.layout,
        size_hint=(None, None),
        size=(400, 400),
        auto_dismiss=False,
    )
    # Close button.
    addQuest.button = Button(text=_("Close"), size_hint_y=None, height=40)
    addQuest.button.bind(on_release=addQuest.popup.dismiss)
    # The only question type currently offered: multiple-choice.
    addQuest.variants = Button(
        text=_("Question with answer variants"), size_hint_y=None, height=40
    )
    addQuest.variants.bind(on_release=addVariants)
    # Adding all to layout.
    addQuest.layout.add_widget(addQuest.variants)
    addQuest.layout.add_widget(addQuest.button)
    # Showing popup.
    addQuest.popup.open()
def Make():
    """Screen for configuring a test: metadata inputs plus question list."""
    # ScrollView, so you can see a lot of questions in window.
    Make.view = ScrollView(
        size_hint=(1, None), size=(Window.width, Window.height), bar_width=7
    )
    # Main layout where all UI objects will be placed.
    Make.layout = GridLayout(cols=1, spacing=10, size_hint_y=None)
    # Let the layout grow with its content (required for scrolling).
    Make.layout.bind(minimum_height=Make.layout.setter("height"))
    # Import button: loads every saved test from tests/ (see imp()).
    Make.imp = Button(text=_("Import"), size_hint_y=None, height=70)
    Make.imp.bind(on_release=imp)
    # Back button.
    Make.back = Button(text=_("Back"), size_hint_y=None, height=60)
    Make.back.bind(on_release=partial(changeScreen, "Subject"))
    # Label which indicates where test name input locates.
    Make.name_text = Label(text=_("Name of test"), size_hint_y=None, height=40)
    # Test name input.
    Make.name = TextInput(size_hint_y=None, height=40, write_tab=False)
    # Label which indicates where test description input locates.
    Make.description_text = Label(
        text=_("Test description"), size_hint_y=None, height=40
    )
    # Test description input.
    Make.description = TextInput(size_hint_y=None, height=60, write_tab=False)
    # "More" opens the add-question popup chain (addQuest -> addVariants).
    Make.new = Button(text=_("More"), size_hint_y=None, height=60)
    Make.new.bind(on_release=addQuest)
    # Ready button finishes the test (stores to DB, writes the PHP page).
    Make.ready = Button(text=_("Ready"), size_hint_y=None, height=60)
    Make.ready.bind(on_release=partial(lastScreen, False))
    # Export writes the test to a JSON file instead of the database.
    Make.export = Button(text=_("Export"), size_hint_y=None, height=60)
    Make.export.bind(on_release=export)
    # Adding all UI elements to UI. Order is important, for more info read Kivy docs.
    Make.layout.add_widget(Make.imp)
    Make.layout.add_widget(Make.back)
    Make.layout.add_widget(Make.name_text)
    Make.layout.add_widget(Make.name)
    Make.layout.add_widget(Make.description_text)
    Make.layout.add_widget(Make.description)
    Make.layout.add_widget(Make.new)
    Make.layout.add_widget(Make.export)
    Make.layout.add_widget(Make.ready)
    # Adding layout to view.
    Make.view.add_widget(Make.layout)
    # Adding view to screen.
    makescreen.add_widget(Make.view)
# Build the first screen immediately so the app can start on it.
Lang()
# Register every screen with the manager (MaketestApp.build() selects
# "Lang" explicitly, so registration order does not matter).
sm.add_widget(subscreen)
sm.add_widget(langscreen)
sm.add_widget(makescreen)
sm.add_widget(editscreen)
sm.add_widget(readyscreen)
class MaketestApp(App):
    """Main class. Default App class in Kivy."""
    title = "Making test"
    def build(self):
        """Return the root widget, starting on the language screen."""
        self.icon = "icon.png"
        sm.current = "Lang"
        return sm
    def on_pause(self):
        # Keep the app alive when the OS pauses it (mobile lifecycle).
        return True
if __name__ == "__main__":
    # Entry point: start the Kivy application loop.
    """Run!"""
    MaketestApp().run()
|
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Tests for Volume Code."""
import datetime
import enum
import io
import time
from unittest import mock
import castellan
from castellan.common import exception as castellan_exception
from castellan import key_manager
import ddt
import eventlet
import os_brick.initiator.connectors.iscsi
from oslo_concurrency import processutils
from oslo_config import cfg
from oslo_utils import imageutils
from taskflow.engines.action_engine import engine
from cinder.api import common
from cinder import context
from cinder import coordination
from cinder import db
from cinder import exception
from cinder.message import message_field
from cinder import objects
from cinder.objects import fields
from cinder.policies import volumes as vol_policy
from cinder import quota
from cinder.tests import fake_driver
from cinder.tests.unit.api.v2 import fakes as v2_fakes
from cinder.tests.unit import conf_fixture
from cinder.tests.unit import fake_constants as fake
from cinder.tests.unit import fake_snapshot
from cinder.tests.unit import fake_volume
from cinder.tests.unit.keymgr import fake as fake_keymgr
from cinder.tests.unit import utils as tests_utils
from cinder.tests.unit import volume as base
from cinder import utils
import cinder.volume
from cinder.volume import driver
from cinder.volume import manager as vol_manager
from cinder.volume import rpcapi as volume_rpcapi
import cinder.volume.targets.tgt
from cinder.volume import volume_types
# Shortcuts to the global quota engine and configuration object.
QUOTAS = quota.QUOTAS
CONF = cfg.CONF
# Encryptor class path referenced by encryption-related tests.
ENCRYPTION_PROVIDER = 'nova.volume.encryptors.cryptsetup.CryptsetupEncryptor'
# Extra config option(s) available for tests that register options.
fake_opt = [
    cfg.StrOpt('fake_opt1', default='fake', help='fake opts')
]
def create_snapshot(volume_id, size=1, metadata=None, ctxt=None,
                    **kwargs):
    """Create and persist a Snapshot object in CREATING status.

    :param volume_id: id of the volume the snapshot belongs to
    :param size: volume size recorded on the snapshot
    :param metadata: optional metadata dict (defaults to empty)
    :param ctxt: request context; admin context when omitted
    :param kwargs: extra Snapshot fields applied via update()
    :returns: the created objects.Snapshot
    """
    snap = objects.Snapshot(ctxt or context.get_admin_context())
    snap.volume_size = size
    snap.user_id = fake.USER_ID
    snap.project_id = fake.PROJECT_ID
    snap.volume_id = volume_id
    snap.status = fields.SnapshotStatus.CREATING
    # The original replaced None with {} and then guarded with
    # "if metadata is not None", which could never be False — the snapshot
    # always gets a metadata dict, so assign it unconditionally.
    snap.metadata = metadata or {}
    snap.update(kwargs)
    snap.create()
    return snap
class KeyObject(object):
    """Stand-in key whose encoded form is the fixed byte string b'asdf'."""
    def get_encoded(self):
        return b"asdf"
class KeyObject2(object):
    """Stand-in key whose encoded form is the fixed byte string b'qwert'."""
    def get_encoded(self):
        return b"qwert"
@ddt.ddt
class VolumeTestCase(base.BaseVolumeTestCase):
    def setUp(self):
        """Prepare fixtures: mock volume clearing and create the default
        and '__DEFAULT__' volume types used throughout these tests."""
        super(VolumeTestCase, self).setUp()
        # Avoid actually wiping volume data on delete paths.
        self.patch('cinder.volume.volume_utils.clear_volume', autospec=True)
        self.expected_status = 'available'
        self.service_id = 1
        self.user_context = context.RequestContext(user_id=fake.USER_ID,
                                                   project_id=fake.PROJECT_ID)
        elevated = context.get_admin_context()
        db.volume_type_create(elevated,
                              v2_fakes.fake_default_type_get(
                                  id=fake.VOLUME_TYPE2_ID))
        self.vol_type = db.volume_type_get_by_name(elevated, '__DEFAULT__')
        self._setup_volume_types()
    def _create_volume(self, context, **kwargs):
        """Create a test volume of the default volume type."""
        return tests_utils.create_volume(
            context,
            volume_type_id=volume_types.get_default_volume_type()['id'],
            **kwargs)
    @mock.patch('cinder.objects.service.Service.get_minimum_rpc_version')
    @mock.patch('cinder.objects.service.Service.get_minimum_obj_version')
    @mock.patch('cinder.rpc.LAST_RPC_VERSIONS', {'cinder-scheduler': '1.3'})
    @mock.patch('cinder.rpc.LAST_OBJ_VERSIONS', {'cinder-scheduler': '1.4'})
    def test_reset(self, get_min_obj, get_min_rpc):
        """reset() re-reads minimum RPC/object versions for the scheduler
        rpcapi client."""
        vol_mgr = vol_manager.VolumeManager()
        scheduler_rpcapi = vol_mgr.scheduler_rpcapi
        # Initial caps come from the patched LAST_*_VERSIONS dicts.
        self.assertEqual('1.3', scheduler_rpcapi.client.version_cap)
        self.assertEqual('1.4',
                         scheduler_rpcapi.client.serializer._base.version_cap)
        get_min_obj.return_value = objects.base.OBJ_VERSIONS.get_current()
        vol_mgr.reset()
        # After reset, caps come from the (mocked) service minimum versions.
        scheduler_rpcapi = vol_mgr.scheduler_rpcapi
        self.assertEqual(get_min_rpc.return_value,
                         scheduler_rpcapi.client.version_cap)
        self.assertEqual(get_min_obj.return_value,
                         scheduler_rpcapi.client.serializer._base.version_cap)
        self.assertIsNone(scheduler_rpcapi.client.serializer._base.manifest)
    @mock.patch('oslo_utils.importutils.import_object')
    def test_backend_availability_zone(self, mock_import_object):
        """A backend_availability_zone config value becomes the manager's AZ."""
        # NOTE(smcginnis): This isn't really the best place for this test,
        # but we don't currently have a pure VolumeManager test class. So
        # until we create a good suite for that class, putting here with
        # other tests that use VolumeManager.
        opts = {
            'backend_availability_zone': 'caerbannog'
        }
        def conf_get(option):
            # Serve only the options defined above; everything else is None.
            if option in opts:
                return opts[option]
            return None
        mock_driver = mock.Mock()
        mock_driver.configuration.safe_get.side_effect = conf_get
        mock_driver.configuration.extra_capabilities = 'null'
        def import_obj(*args, **kwargs):
            return mock_driver
        mock_import_object.side_effect = import_obj
        manager = vol_manager.VolumeManager(volume_driver=mock_driver)
        self.assertIsNotNone(manager)
        self.assertEqual(opts['backend_availability_zone'],
                         manager.availability_zone)
@mock.patch('cinder.volume.manager.VolumeManager._append_volume_stats',
mock.Mock())
@mock.patch.object(vol_manager.VolumeManager,
'update_service_capabilities')
def test_report_filter_goodness_function(self, mock_update):
manager = vol_manager.VolumeManager()
manager.driver.set_initialized()
myfilterfunction = "myFilterFunction"
mygoodnessfunction = "myGoodnessFunction"
expected = {'name': 'cinder-volumes',
'storage_protocol': 'iSCSI',
'cacheable': True,
'filter_function': myfilterfunction,
'goodness_function': mygoodnessfunction,
}
with mock.patch.object(manager.driver,
'get_volume_stats') as m_get_stats:
with mock.patch.object(manager.driver,
'get_goodness_function') as m_get_goodness:
with mock.patch.object(manager.driver,
'get_filter_function') as m_get_filter:
m_get_stats.return_value = {'name': 'cinder-volumes',
'storage_protocol': 'iSCSI',
}
m_get_filter.return_value = myfilterfunction
m_get_goodness.return_value = mygoodnessfunction
manager._report_driver_status(context.get_admin_context())
self.assertTrue(m_get_stats.called)
mock_update.assert_called_once_with(expected)
    def test_is_working(self):
        """is_working() tracks the driver's initialized flag."""
        # By default we have driver mocked to be initialized...
        self.assertTrue(self.volume.is_working())
        # ...lets switch it and check again!
        self.volume.driver._initialized = False
        self.assertFalse(self.volume.is_working())
def _create_min_max_size_dict(self, min_size, max_size):
return {volume_types.MIN_SIZE_KEY: min_size,
volume_types.MAX_SIZE_KEY: max_size}
    def _setup_volume_types(self):
        """Creates 2 types, one with size limits, one without."""
        spec_dict = self._create_min_max_size_dict(2, 4)
        sized_vol_type_dict = {'name': 'limit',
                               'extra_specs': spec_dict}
        db.volume_type_create(self.context, sized_vol_type_dict)
        self.sized_vol_type = db.volume_type_get_by_name(
            self.context, sized_vol_type_dict['name'])
        unsized_vol_type_dict = {'name': 'unsized', 'extra_specs': {}}
        # NOTE(review): this type is created with a fresh admin context but
        # fetched with self.context — presumably equivalent here; confirm.
        db.volume_type_create(context.get_admin_context(),
                              unsized_vol_type_dict)
        self.unsized_vol_type = db.volume_type_get_by_name(
            self.context, unsized_vol_type_dict['name'])
    @mock.patch('cinder.tests.unit.fake_notifier.FakeNotifier._notify')
    @mock.patch.object(QUOTAS, 'reserve')
    @mock.patch.object(QUOTAS, 'commit')
    @mock.patch.object(QUOTAS, 'rollback')
    def test_create_driver_not_initialized(self, reserve, commit, rollback,
                                           mock_notify):
        """create_volume with an uninitialized driver errors the volume."""
        self.volume.driver._initialized = False
        def fake_reserve(context, expire=None, project_id=None, **deltas):
            return ["RESERVATION"]
        def fake_commit_and_rollback(context, reservations, project_id=None):
            pass
        reserve.return_value = fake_reserve
        commit.return_value = fake_commit_and_rollback
        rollback.return_value = fake_commit_and_rollback
        volume = tests_utils.create_volume(
            self.context,
            availability_zone=CONF.storage_availability_zone,
            **self.volume_params)
        volume_id = volume['id']
        self.assertIsNone(volume['encryption_key_id'])
        self.assertRaises(exception.DriverNotInitialized,
                          self.volume.create_volume, self.context, volume)
        # The failed create must leave the volume in 'error' status.
        volume = db.volume_get(context.get_admin_context(), volume_id)
        self.assertEqual("error", volume.status)
        db.volume_destroy(context.get_admin_context(), volume_id)
    def test_create_driver_not_initialized_rescheduling(self):
        """With request_spec/filter_properties, a failed create reschedules."""
        self.volume.driver._initialized = False
        mock_delete = self.mock_object(self.volume.driver, 'delete_volume')
        volume = tests_utils.create_volume(
            self.context,
            availability_zone=CONF.storage_availability_zone,
            **self.volume_params)
        volume_id = volume['id']
        self.assertRaises(exception.DriverNotInitialized,
                          self.volume.create_volume,
                          self.context, volume,
                          {'volume_properties': self.volume_params},
                          {'retry': {'num_attempts': 1, 'host': []}})
        # NOTE(dulek): Volume should be rescheduled as we passed request_spec
        # and filter_properties, assert that it wasn't counted in
        # allocated_capacity tracking.
        self.assertEqual({'_pool0': {'allocated_capacity_gb': 0}},
                         self.volume.stats['pools'])
        # NOTE(dulek): As we've rescheduled, make sure delete_volume was
        # called.
        self.assertTrue(mock_delete.called)
        db.volume_destroy(context.get_admin_context(), volume_id)
def test_create_non_cinder_exception_rescheduling(self):
params = self.volume_params
del params['host']
volume = tests_utils.create_volume(
self.context,
availability_zone=CONF.storage_availability_zone,
**params)
volume_id = volume['id']
with mock.patch.object(self.volume.driver, 'create_volume',
side_effect=processutils.ProcessExecutionError):
self.assertRaises(processutils.ProcessExecutionError,
self.volume.create_volume,
self.context, volume,
{'volume_properties': params},
{'retry': {'num_attempts': 1, 'host': []}})
# NOTE(dulek): Volume should be rescheduled as we passed request_spec
# and filter_properties, assert that it wasn't counted in
# allocated_capacity tracking.
self.assertEqual({'_pool0': {'allocated_capacity_gb': 0}},
self.volume.stats['pools'])
db.volume_destroy(context.get_admin_context(), volume_id)
    @mock.patch('cinder.tests.unit.fake_notifier.FakeNotifier._notify')
    @mock.patch.object(QUOTAS, 'rollback')
    @mock.patch.object(QUOTAS, 'commit')
    @mock.patch.object(QUOTAS, 'reserve')
    def test_delete_driver_not_initialized(self, reserve, commit, rollback,
                                           mock_notify):
        """delete_volume with an uninitialized driver errors the volume."""
        self.volume.driver._initialized = False
        def fake_reserve(context, expire=None, project_id=None, **deltas):
            return ["RESERVATION"]
        def fake_commit_and_rollback(context, reservations, project_id=None):
            pass
        reserve.return_value = fake_reserve
        commit.return_value = fake_commit_and_rollback
        rollback.return_value = fake_commit_and_rollback
        volume = tests_utils.create_volume(
            self.context,
            availability_zone=CONF.storage_availability_zone,
            **self.volume_params)
        self.assertIsNone(volume['encryption_key_id'])
        self.assertRaises(exception.DriverNotInitialized,
                          self.volume.delete_volume, self.context, volume)
        # The failed delete must leave the volume in 'error_deleting'.
        volume = objects.Volume.get_by_id(self.context, volume.id)
        self.assertEqual("error_deleting", volume.status)
        volume.destroy()
    @mock.patch('cinder.utils.clean_volume_file_locks')
    @mock.patch('cinder.tests.unit.fake_notifier.FakeNotifier._notify')
    @mock.patch('cinder.quota.QUOTAS.rollback', new=mock.Mock())
    @mock.patch('cinder.quota.QUOTAS.commit', new=mock.Mock())
    @mock.patch('cinder.quota.QUOTAS.reserve', return_value=['RESERVATION'])
    def test_create_delete_volume(self, _mock_reserve, mock_notify,
                                  mock_clean):
        """Test volume can be created and deleted."""
        volume = tests_utils.create_volume(
            self.context,
            availability_zone=CONF.storage_availability_zone,
            **self.volume_params)
        volume_id = volume['id']
        self.assertIsNone(volume['encryption_key_id'])
        self.volume.create_volume(self.context, volume)
        # Create must emit start/end notifications.
        self.assert_notify_called(mock_notify,
                                  (['INFO', 'volume.create.start'],
                                   ['INFO', 'volume.create.end']),
                                  any_order=True)
        # Allocated capacity is tracked per pool.
        self.assertEqual({'_pool0': {'allocated_capacity_gb': 1}},
                         self.volume.stats['pools'])
        self.volume.delete_volume(self.context, volume)
        vol = db.volume_get(context.get_admin_context(read_deleted='yes'),
                            volume_id)
        self.assertEqual(vol['status'], 'deleted')
        self.assert_notify_called(mock_notify,
                                  (['INFO', 'volume.create.start'],
                                   ['INFO', 'volume.create.end'],
                                   ['INFO', 'volume.delete.start'],
                                   ['INFO', 'volume.delete.end']),
                                  any_order=True)
        # Deleting returns the capacity to the pool.
        self.assertEqual({'_pool0': {'allocated_capacity_gb': 0}},
                         self.volume.stats['pools'])
        self.assertRaises(exception.NotFound,
                          db.volume_get,
                          self.context,
                          volume_id)
        mock_clean.assert_called_once_with(volume_id, self.volume.driver)
    def test_create_delete_volume_with_metadata(self):
        """Test volume can be created with metadata and deleted."""
        test_meta = {'fake_key': 'fake_value'}
        volume = tests_utils.create_volume(self.context, metadata=test_meta,
                                           **self.volume_params)
        volume_id = volume['id']
        self.volume.create_volume(self.context, volume)
        # Metadata passed at creation must survive on the created volume.
        self.assertEqual(test_meta, volume.metadata)
        self.volume.delete_volume(self.context, volume)
        self.assertRaises(exception.NotFound,
                          db.volume_get,
                          self.context,
                          volume_id)
    @mock.patch('cinder.utils.clean_volume_file_locks')
    def test_delete_volume_frozen(self, mock_clean):
        """Deleting a volume on a frozen service is rejected."""
        service = tests_utils.create_service(self.context, {'frozen': True})
        volume = tests_utils.create_volume(self.context, host=service.host)
        self.assertRaises(exception.InvalidInput,
                          self.volume_api.delete, self.context, volume)
        # The delete never ran, so no lock cleanup should have happened.
        mock_clean.assert_not_called()
def test_delete_volume_another_cluster_fails(self):
    """Test delete of volume from another cluster fails."""
    # NOTE(review): despite the test name, the delete below succeeds:
    # the volume's host differs from this service but its cluster_name
    # matches, and clustered services address volumes by cluster.
    # Presumably the name refers to the host mismatch — confirm intent.
    self.volume.cluster = 'mycluster'
    volume = tests_utils.create_volume(self.context, status='available',
                                       size=1, host=CONF.host + 'fake',
                                       cluster_name=self.volume.cluster)
    self.volume.delete_volume(self.context, volume)
    self.assertRaises(exception.NotFound,
                      db.volume_get,
                      self.context,
                      volume.id)
@mock.patch('cinder.db.volume_metadata_update')
def test_create_volume_metadata(self, metadata_update):
    """create_volume_metadata delegates to the DB layer as user metadata."""
    fake_meta = {'fake_key': 'fake_value'}
    metadata_update.return_value = fake_meta
    vol = tests_utils.create_volume(self.context, **self.volume_params)
    result = self.volume_api.create_volume_metadata(self.context, vol,
                                                    fake_meta)
    self.assertEqual(fake_meta, result)
    metadata_update.assert_called_once_with(self.context, vol.id,
                                            fake_meta, False,
                                            common.METADATA_TYPES.user)
@ddt.data('maintenance', 'uploading')
def test_create_volume_metadata_maintenance(self, status):
    """Metadata creation is rejected while the volume is busy (ddt)."""
    metadata = {'fake_key': 'fake_value'}
    volume = tests_utils.create_volume(self.context, **self.volume_params)
    volume['status'] = status
    self.assertRaises(exception.InvalidVolume,
                      self.volume_api.create_volume_metadata,
                      self.context,
                      volume,
                      metadata)
def test_update_volume_metadata_with_metatype(self):
    """Test update volume metadata with different metadata type."""
    test_meta1 = {'fake_key1': 'fake_value1'}
    test_meta2 = {'fake_key1': 'fake_value2'}
    FAKE_METADATA_TYPE = enum.Enum('METADATA_TYPES', 'fake_type')
    volume = tests_utils.create_volume(self.context, metadata=test_meta1,
                                       **self.volume_params)
    self.volume.create_volume(self.context, volume)
    # update user metadata associated with the volume.
    result_meta = self.volume_api.update_volume_metadata(
        self.context,
        volume,
        test_meta2,
        False,
        common.METADATA_TYPES.user)
    self.assertEqual(test_meta2, result_meta)
    # create image metadata associated with the volume.
    result_meta = self.volume_api.update_volume_metadata(
        self.context,
        volume,
        test_meta1,
        False,
        common.METADATA_TYPES.image)
    self.assertEqual(test_meta1, result_meta)
    # update image metadata associated with the volume.
    result_meta = self.volume_api.update_volume_metadata(
        self.context,
        volume,
        test_meta2,
        False,
        common.METADATA_TYPES.image)
    self.assertEqual(test_meta2, result_meta)
    # update volume metadata with invalid metadata type.
    self.assertRaises(exception.InvalidMetadataType,
                      self.volume_api.update_volume_metadata,
                      self.context,
                      volume,
                      test_meta1,
                      False,
                      FAKE_METADATA_TYPE.fake_type)
def test_update_volume_metadata_maintenance(self):
    """Metadata updates are rejected while the volume is in maintenance."""
    test_meta1 = {'fake_key1': 'fake_value1'}
    FAKE_METADATA_TYPE = enum.Enum('METADATA_TYPES', 'fake_type')
    volume = tests_utils.create_volume(self.context, metadata=test_meta1,
                                       **self.volume_params)
    volume['status'] = 'maintenance'
    # The maintenance status check fires before metadata type validation.
    self.assertRaises(exception.InvalidVolume,
                      self.volume_api.update_volume_metadata,
                      self.context,
                      volume,
                      test_meta1,
                      False,
                      FAKE_METADATA_TYPE.fake_type)
@mock.patch('cinder.db.volume_update')
def test_update_with_ovo(self, volume_update):
    """Test update volume using oslo_versionedobject."""
    new_name = 'foobbar'
    vol = tests_utils.create_volume(self.context, **self.volume_params)
    self.volume_api.update(self.context, vol, {'display_name': new_name})
    # The in-memory object is updated and the change hits the DB layer.
    self.assertEqual(new_name, vol.display_name)
    volume_update.assert_called_once_with(self.context, vol.id,
                                          {'display_name': new_name})
def test_delete_volume_metadata_with_metatype(self):
    """Test delete volume metadata with different metadata type."""
    test_meta1 = {'fake_key1': 'fake_value1', 'fake_key2': 'fake_value2'}
    test_meta2 = {'fake_key1': 'fake_value1'}
    FAKE_METADATA_TYPE = enum.Enum('METADATA_TYPES', 'fake_type')
    volume = tests_utils.create_volume(self.context, metadata=test_meta1,
                                       **self.volume_params)
    volume_id = volume['id']
    self.volume.create_volume(self.context, volume)
    # delete user metadata associated with the volume.
    self.volume_api.delete_volume_metadata(
        self.context,
        volume,
        'fake_key2',
        common.METADATA_TYPES.user)
    self.assertEqual(test_meta2,
                     db.volume_metadata_get(self.context, volume_id))
    # create image metadata associated with the volume.
    result_meta = self.volume_api.update_volume_metadata(
        self.context,
        volume,
        test_meta1,
        False,
        common.METADATA_TYPES.image)
    self.assertEqual(test_meta1, result_meta)
    # delete image metadata associated with the volume.
    self.volume_api.delete_volume_metadata(
        self.context,
        volume,
        'fake_key2',
        common.METADATA_TYPES.image)
    # parse the result to build the dict.
    rows = db.volume_glance_metadata_get(self.context, volume_id)
    result = {}
    for row in rows:
        result[row['key']] = row['value']
    self.assertEqual(test_meta2, result)
    # delete volume metadata with invalid metadata type.
    self.assertRaises(exception.InvalidMetadataType,
                      self.volume_api.delete_volume_metadata,
                      self.context,
                      volume,
                      'fake_key1',
                      FAKE_METADATA_TYPE.fake_type)
@mock.patch('cinder.utils.clean_volume_file_locks')
def test_delete_volume_metadata_maintenance(self, mock_clean):
    """Test delete volume metadata in maintenance."""
    FAKE_METADATA_TYPE = enum.Enum('METADATA_TYPES', 'fake_type')
    test_meta1 = {'fake_key1': 'fake_value1', 'fake_key2': 'fake_value2'}
    volume = tests_utils.create_volume(self.context, metadata=test_meta1,
                                       **self.volume_params)
    volume['status'] = 'maintenance'
    # The maintenance status check fires before metadata type validation.
    self.assertRaises(exception.InvalidVolume,
                      self.volume_api.delete_volume_metadata,
                      self.context,
                      volume,
                      'fake_key1',
                      FAKE_METADATA_TYPE.fake_type)
    # No lock cleanup should happen for a rejected operation.
    mock_clean.assert_not_called()
def test_accept_transfer_maintenance(self):
    """Test accept transfer in maintenance."""
    meta = {'fake_key1': 'fake_value1', 'fake_key2': 'fake_value2'}
    volume = tests_utils.create_volume(self.context, metadata=meta,
                                       **self.volume_params)
    volume['status'] = 'maintenance'
    api = cinder.volume.api.API()
    # A volume under maintenance cannot change ownership.
    self.assertRaises(exception.InvalidVolume, api.accept_transfer,
                      self.context, volume, None, None)
@mock.patch.object(cinder.volume.api.API, 'list_availability_zones')
def test_create_volume_uses_default_availability_zone(self, mock_list_az):
    """Test setting availability_zone correctly during volume create."""
    mock_list_az.return_value = ({'name': 'az1', 'available': True},
                                 {'name': 'az2', 'available': True},
                                 {'name': 'default-az', 'available': True})
    volume_api = cinder.volume.api.API()
    # Test backwards compatibility, default_availability_zone not set
    self.override_config('storage_availability_zone', 'az2')
    volume = volume_api.create(self.context,
                               1,
                               'name',
                               'description',
                               volume_type=self.vol_type)
    self.assertEqual('az2', volume['availability_zone'])
    # default_availability_zone, once set, takes precedence over
    # storage_availability_zone.
    self.override_config('default_availability_zone', 'default-az')
    volume = volume_api.create(self.context,
                               1,
                               'name',
                               'description',
                               volume_type=self.vol_type)
    self.assertEqual('default-az', volume['availability_zone'])
def test_create_volume_with_default_type_misconfigured(self):
    """Test volume creation with non-existent default volume type."""
    api = cinder.volume.api.API()
    self.flags(default_volume_type='fake_type')
    # The configured default type does not exist, so creating a volume
    # without an explicit type must fail.
    self.assertRaises(exception.VolumeTypeDefaultMisconfiguredError,
                      api.create, self.context, 1, 'name', 'description')
@mock.patch('cinder.quota.QUOTAS.rollback', new=mock.MagicMock())
@mock.patch('cinder.quota.QUOTAS.commit', new=mock.MagicMock())
@mock.patch('cinder.quota.QUOTAS.reserve', return_value=["RESERVATION"])
def test_create_volume_with_volume_type(self, _mock_reserve):
    """Test volume creation with default volume type."""
    volume_api = cinder.volume.api.API()
    # Create volume with an explicit, non-encrypted volume type; no
    # encryption key should be allocated.
    volume = volume_api.create(self.context,
                               1,
                               'name',
                               'description',
                               volume_type=self.vol_type)
    self.assertIsNone(volume['encryption_key_id'])
    # Create default volume type
    vol_type = conf_fixture.def_vol_type
    db_vol_type = db.volume_type_get_by_name(context.get_admin_context(),
                                             vol_type)
    # Create volume with default volume type
    volume = volume_api.create(self.context,
                               1,
                               'name',
                               'description')
    self.assertEqual(db_vol_type.get('id'), volume['volume_type_id'])
    self.assertIsNone(volume['encryption_key_id'])
    # Create volume with specific volume type
    vol_type = 'test'
    db.volume_type_create(context.get_admin_context(),
                          {'name': vol_type, 'extra_specs': {}})
    db_vol_type = db.volume_type_get_by_name(context.get_admin_context(),
                                             vol_type)
    volume = volume_api.create(self.context,
                               1,
                               'name',
                               'description',
                               volume_type=db_vol_type)
    self.assertEqual(db_vol_type.get('id'), volume['volume_type_id'])
@mock.patch('cinder.quota.QUOTAS.rollback', new=mock.MagicMock())
@mock.patch('cinder.quota.QUOTAS.commit', new=mock.MagicMock())
@mock.patch('cinder.quota.QUOTAS.reserve', return_value=["RESERVATION"])
def test_create_volume_with_volume_type_size_limits(self, _mock_reserve):
    """Test that volume type size limits are enforced."""
    volume_api = cinder.volume.api.API()
    # 2 GB is within the sized type's allowed range.
    volume = volume_api.create(self.context,
                               2,
                               'name',
                               'description',
                               volume_type=self.sized_vol_type)
    self.assertEqual(self.sized_vol_type['id'], volume['volume_type_id'])
    # 1 GB violates the type's size limits (presumably below the
    # minimum -- see self.sized_vol_type's extra specs).
    self.assertRaises(exception.InvalidInput,
                      volume_api.create,
                      self.context,
                      1,
                      'name',
                      'description',
                      volume_type=self.sized_vol_type)
    # 5 GB violates the limits on the other side.
    self.assertRaises(exception.InvalidInput,
                      volume_api.create,
                      self.context,
                      5,
                      'name',
                      'description',
                      volume_type=self.sized_vol_type)
def test_create_volume_with_multiattach_volume_type(self):
    """Test volume creation with multiattach volume type."""
    elevated = context.get_admin_context()
    volume_api = cinder.volume.api.API()
    especs = dict(multiattach="<is> True")
    volume_types.create(elevated,
                        "multiattach-type",
                        especs,
                        description="test-multiattach")
    foo = objects.VolumeType.get_by_name_or_id(elevated,
                                               "multiattach-type")
    vol = volume_api.create(self.context,
                            1,
                            'admin-vol',
                            'description',
                            volume_type=foo)
    self.assertEqual(foo['id'], vol['volume_type_id'])
    # The multiattach extra spec must be reflected on the volume flag.
    self.assertTrue(vol['multiattach'])
def test_create_volume_with_multiattach_flag(self):
    """Tests creating a volume with multiattach=True but no special type.

    This tests the pre 3.50 microversion behavior of being able to create
    a volume with the multiattach request parameter regardless of a
    multiattach-capable volume type.
    """
    volume_api = cinder.volume.api.API()
    volume = volume_api.create(
        self.context, 1, 'name', 'description', multiattach=True,
        volume_type=self.vol_type)
    self.assertTrue(volume.multiattach)
def _fail_multiattach_policy_authorize(self, policy):
    """Side effect for context.authorize: deny only the multiattach policy."""
    if policy != vol_policy.MULTIATTACH_POLICY:
        return
    raise exception.PolicyNotAuthorized(action='Test')
def test_create_volume_with_multiattach_volume_type_not_authorized(self):
    """Test policy unauthorized create with multiattach volume type."""
    elevated = context.get_admin_context()
    volume_api = cinder.volume.api.API()
    especs = dict(multiattach="<is> True")
    volume_types.create(elevated,
                        "multiattach-type",
                        especs,
                        description="test-multiattach")
    foo = objects.VolumeType.get_by_name_or_id(elevated,
                                               "multiattach-type")
    # Deny only the multiattach policy; the create must then fail even
    # though the type itself is valid.
    with mock.patch.object(self.context, 'authorize') as mock_auth:
        mock_auth.side_effect = self._fail_multiattach_policy_authorize
        self.assertRaises(exception.PolicyNotAuthorized,
                          volume_api.create, self.context,
                          1, 'admin-vol', 'description',
                          volume_type=foo)
def test_create_volume_with_multiattach_flag_not_authorized(self):
    """Test policy unauthorized create with multiattach flag."""
    volume_api = cinder.volume.api.API()
    # Same policy denial as above, but via the request flag instead of
    # a multiattach-capable volume type.
    with mock.patch.object(self.context, 'authorize') as mock_auth:
        mock_auth.side_effect = self._fail_multiattach_policy_authorize
        self.assertRaises(exception.PolicyNotAuthorized,
                          volume_api.create, self.context, 1, 'name',
                          'description', multiattach=True)
@mock.patch.object(key_manager, 'API', fake_keymgr.fake_api)
def test_create_volume_with_encrypted_volume_type_multiattach(self):
    """Encrypted volume types must not be combined with multiattach."""
    ctxt = context.get_admin_context()
    cipher = 'aes-xts-plain64'
    key_size = 256
    control_location = 'front-end'
    db.volume_type_create(ctxt,
                          {'id': '61298380-0c12-11e3-bfd6-4b48424183be',
                           'name': 'LUKS',
                           'extra_specs': {'multiattach': '<is> True'}})
    db.volume_type_encryption_create(
        ctxt,
        '61298380-0c12-11e3-bfd6-4b48424183be',
        {'control_location': control_location,
         'provider': ENCRYPTION_PROVIDER,
         'cipher': cipher,
         'key_size': key_size})
    volume_api = cinder.volume.api.API()
    db_vol_type = db.volume_type_get_by_name(ctxt, 'LUKS')
    # The type is both encrypted and multiattach-capable -> rejected.
    self.assertRaises(exception.InvalidVolume,
                      volume_api.create,
                      self.context,
                      1,
                      'name',
                      'description',
                      volume_type=db_vol_type)
@ddt.data({'cipher': 'blowfish-cbc', 'algo': 'blowfish', 'length': 32},
          {'cipher': 'aes-xts-plain64', 'algo': 'aes', 'length': 256})
@ddt.unpack
@mock.patch.object(key_manager, 'API', fake_keymgr.fake_api)
def test_create_volume_with_encrypted_volume_types(
        self, cipher, algo, length):
    """Creating with an encrypted type allocates a matching key.

    For each ddt-supplied cipher, verifies the allocated key's size and
    algorithm and the volume's stored encryption metadata.
    """
    ctxt = context.get_admin_context()
    key_size = length
    control_location = 'front-end'
    db.volume_type_create(ctxt,
                          {'id': '61298380-0c12-11e3-bfd6-4b48424183be',
                           'name': 'LUKS'})
    db.volume_type_encryption_create(
        ctxt,
        '61298380-0c12-11e3-bfd6-4b48424183be',
        {'control_location': control_location,
         'provider': ENCRYPTION_PROVIDER,
         'cipher': cipher,
         'key_size': key_size})
    volume_api = cinder.volume.api.API()
    db_vol_type = db.volume_type_get_by_name(ctxt, 'LUKS')
    volume = volume_api.create(self.context,
                               1,
                               'name',
                               'description',
                               volume_type=db_vol_type)
    # NOTE: use a local alias rather than rebinding ``key_manager``,
    # which would shadow the module imported at file level (the same
    # module patched by the mock.patch.object decorator above).
    km = volume_api.key_manager
    key = km.get(self.context, volume['encryption_key_id'])
    # get_encoded() returns bytes; compare the key length in bits.
    self.assertEqual(key_size, len(key.get_encoded()) * 8)
    self.assertEqual(algo, key.algorithm)
    metadata = db.volume_encryption_metadata_get(self.context, volume.id)
    self.assertEqual(db_vol_type.get('id'), volume['volume_type_id'])
    self.assertEqual(cipher, metadata.get('cipher'))
    self.assertEqual(key_size, metadata.get('key_size'))
    self.assertIsNotNone(volume['encryption_key_id'])
def test_create_volume_with_provider_id(self):
    """A provider_id passed at creation survives volume create."""
    params = dict(self.volume_params, provider_id=fake.PROVIDER_ID)
    volume = tests_utils.create_volume(self.context, **params)
    self.volume.create_volume(self.context, volume)
    self.assertEqual(fake.PROVIDER_ID, volume['provider_id'])
def test_create_volume_with_admin_metadata(self):
    """admin_metadata returned by the driver is saved on the volume."""
    with mock.patch.object(
            self.volume.driver, 'create_volume',
            return_value={'admin_metadata': {'foo': 'bar'}}):
        volume = tests_utils.create_volume(self.user_context)
        self.volume.create_volume(self.user_context, volume)
        self.assertEqual({'foo': 'bar'}, volume['admin_metadata'])
@mock.patch.object(key_manager, 'API', new=fake_keymgr.fake_api)
def test_create_delete_volume_with_encrypted_volume_type(self):
    """Create and delete a volume of an encrypted type.

    The API delete is asynchronous, so poll until the status leaves
    'available'. The poll is bounded so a broken delete fails the test
    instead of hanging it forever (the previous loop had no timeout).
    """
    cipher = 'aes-xts-plain64'
    key_size = 256
    db.volume_type_create(self.context,
                          {'id': fake.VOLUME_TYPE_ID, 'name': 'LUKS'})
    db.volume_type_encryption_create(
        self.context, fake.VOLUME_TYPE_ID,
        {'control_location': 'front-end', 'provider': ENCRYPTION_PROVIDER,
         'cipher': cipher, 'key_size': key_size})
    db_vol_type = db.volume_type_get_by_name(self.context, 'LUKS')
    volume = self.volume_api.create(self.context,
                                    1,
                                    'name',
                                    'description',
                                    volume_type=db_vol_type)
    # An encryption key must have been allocated for the volume.
    self.assertIsNotNone(volume.get('encryption_key_id', None))
    self.assertEqual(db_vol_type.get('id'), volume['volume_type_id'])
    volume['host'] = 'fake_host'
    volume['status'] = 'available'
    db.volume_update(self.context, volume['id'], {'status': 'available'})
    self.volume_api.delete(self.context, volume)
    volume = objects.Volume.get_by_id(self.context, volume.id)
    deadline = time.time() + 60
    while volume.status == 'available':
        # Must wait for volume_api delete request to process enough to
        # change the volume status.
        self.assertLess(time.time(), deadline,
                        'timed out waiting for volume status change')
        time.sleep(0.5)
        volume.refresh()
    self.assertEqual('deleting', volume['status'])
    db.volume_destroy(self.context, volume['id'])
    self.assertRaises(exception.NotFound,
                      db.volume_get,
                      self.context,
                      volume['id'])
@mock.patch.object(key_manager, 'API', fake_keymgr.fake_api)
def test_delete_encrypted_volume_fail_deleting_key(self):
    """Key-manager failure during delete leaves status error_deleting."""
    cipher = 'aes-xts-plain64'
    key_size = 256
    db.volume_type_create(self.context,
                          {'id': fake.VOLUME_TYPE_ID, 'name': 'LUKS'})
    db.volume_type_encryption_create(
        self.context, fake.VOLUME_TYPE_ID,
        {'control_location': 'front-end', 'provider': ENCRYPTION_PROVIDER,
         'cipher': cipher, 'key_size': key_size})
    db_vol_type = db.volume_type_get_by_name(self.context, 'LUKS')
    volume = self.volume_api.create(self.context,
                                    1,
                                    'name',
                                    'description',
                                    volume_type=db_vol_type)
    volume_id = volume['id']
    volume['host'] = 'fake_host'
    volume['status'] = 'available'
    db.volume_update(self.context, volume_id, {'status': 'available'})
    # Any error from the key manager's delete aborts the volume delete.
    with mock.patch.object(
            self.volume_api.key_manager,
            'delete',
            side_effect=Exception):
        self.assertRaises(exception.InvalidVolume,
                          self.volume_api.delete,
                          self.context,
                          volume)
    volume = objects.Volume.get_by_id(self.context, volume_id)
    self.assertEqual("error_deleting", volume.status)
    volume.destroy()
@mock.patch.object(key_manager, 'API', fake_keymgr.fake_api)
def test_delete_encrypted_volume_key_not_found(self):
    """A missing encryption key does not block volume deletion."""
    cipher = 'aes-xts-plain64'
    key_size = 256
    db.volume_type_create(self.context,
                          {'id': fake.VOLUME_TYPE_ID, 'name': 'LUKS'})
    db.volume_type_encryption_create(
        self.context, fake.VOLUME_TYPE_ID,
        {'control_location': 'front-end', 'provider': ENCRYPTION_PROVIDER,
         'cipher': cipher, 'key_size': key_size})
    db_vol_type = db.volume_type_get_by_name(self.context, 'LUKS')
    volume = self.volume_api.create(self.context,
                                    1,
                                    'name',
                                    'description',
                                    volume_type=db_vol_type)
    volume_id = volume['id']
    volume['host'] = 'fake_host'
    volume['status'] = 'available'
    db.volume_update(self.context, volume_id, {'status': 'available'})
    # Unlike a generic failure, "key not found" is tolerated and the
    # delete proceeds to the 'deleting' state.
    with mock.patch.object(
            self.volume_api.key_manager,
            'delete',
            side_effect=castellan_exception.ManagedObjectNotFoundError(
                uuid=fake.ENCRYPTION_KEY_ID)):
        self.volume_api.delete(self.context, volume)
    volume = objects.Volume.get_by_id(self.context, volume_id)
    self.assertEqual("deleting", volume.status)
    volume.destroy()
@mock.patch('cinder.utils.clean_volume_file_locks')
def test_delete_busy_volume(self, mock_clean):
    """Test volume survives deletion if driver reports it as busy."""
    volume = tests_utils.create_volume(self.context, **self.volume_params)
    volume_id = volume['id']
    self.volume.create_volume(self.context, volume)
    with mock.patch.object(self.volume.driver, 'delete_volume',
                           side_effect=exception.VolumeIsBusy(
                               volume_name='fake')
                           ) as mock_del_vol:
        self.volume.delete_volume(self.context, volume)
        volume_ref = db.volume_get(context.get_admin_context(), volume_id)
        self.assertEqual(volume_id, volume_ref.id)
        # The volume is reset to 'available' rather than removed.
        self.assertEqual("available", volume_ref.status)
        mock_del_vol.assert_called_once_with(volume)
        # Locks must survive since the volume was not actually deleted.
        mock_clean.assert_not_called()
@mock.patch('cinder.utils.clean_volume_file_locks')
def test_unmanage_encrypted_volume_fails(self, mock_clean):
    """Unmanaging (unmanage_only delete) an encrypted volume is rejected."""
    volume = tests_utils.create_volume(
        self.context,
        encryption_key_id=fake.ENCRYPTION_KEY_ID,
        **self.volume_params)
    self.volume.create_volume(self.context, volume)
    manager = vol_manager.VolumeManager()
    self.assertRaises(exception.Invalid,
                      manager.delete_volume,
                      self.context,
                      volume,
                      unmanage_only=True)
    mock_clean.assert_not_called()
    # Clean up with a regular (managed) delete.
    self.volume.delete_volume(self.context, volume)
def test_unmanage_cascade_delete_fails(self):
    """unmanage_only combined with cascade must be rejected."""
    vol = tests_utils.create_volume(self.context, **self.volume_params)
    self.volume.create_volume(self.context, vol)
    mgr = vol_manager.VolumeManager()
    self.assertRaises(exception.Invalid,
                      mgr.delete_volume,
                      self.context,
                      vol,
                      unmanage_only=True,
                      cascade=True)
    # Clean up with a regular delete.
    self.volume.delete_volume(self.context, vol)
def test_get_volume_different_tenant(self):
    """Test can't get volume of another tenant when viewable_admin_meta."""
    volume = tests_utils.create_volume(self.context,
                                       **self.volume_params)
    volume_id = volume['id']
    self.volume.create_volume(self.context, volume)
    another_context = context.RequestContext('another_user_id',
                                             'another_project_id',
                                             is_admin=False)
    self.assertNotEqual(another_context.project_id,
                        self.context.project_id)
    volume_api = cinder.volume.api.API()
    # A non-admin from another project must not see the volume, even
    # when requesting viewable_admin_meta.
    self.assertRaises(exception.VolumeNotFound, volume_api.get,
                      another_context, volume_id, viewable_admin_meta=True)
    # The owner can still fetch it normally.
    self.assertEqual(volume_id,
                     volume_api.get(self.context, volume_id)['id'])
    self.volume.delete_volume(self.context, volume)
def test_get_all_limit_bad_value(self):
    """Test value of 'limit' is numeric and >= 0"""
    api = cinder.volume.api.API()
    # Non-numeric and negative limits are both invalid.
    for bad_limit in ("A", "-1"):
        self.assertRaises(exception.InvalidInput,
                          api.get_all,
                          self.context,
                          limit=bad_limit)
def test_get_all_tenants_volume_list(self):
    """Validate when the volume list for all tenants is returned"""
    volume_api = cinder.volume.api.API()
    with mock.patch.object(volume_api.db,
                           'volume_get_all_by_project') as by_project:
        with mock.patch.object(volume_api.db,
                               'volume_get_all') as get_all:
            db_volume = {'volume_type_id': fake.VOLUME_TYPE_ID,
                         'name': 'fake_name',
                         'host': 'fake_host',
                         'id': fake.VOLUME_ID}
            volume = fake_volume.fake_db_volume(**db_volume)
            by_project.return_value = [volume]
            get_all.return_value = [volume]
            # all_tenants=0 scopes the query to the caller's project.
            volume_api.get_all(self.context, filters={'all_tenants': '0'})
            self.assertTrue(by_project.called)
            by_project.called = False
            # Non-admins asking for all tenants still get only their own.
            self.context.is_admin = False
            volume_api.get_all(self.context, filters={'all_tenants': '1'})
            self.assertTrue(by_project.called)
            # check for volume list of all tenants
            self.context.is_admin = True
            volume_api.get_all(self.context, filters={'all_tenants': '1'})
            self.assertTrue(get_all.called)
@mock.patch('cinder.utils.clean_volume_file_locks')
def test_delete_volume_in_error_extending(self, mock_clean):
    """Test volume can be deleted in error_extending status."""
    # create a volume
    volume = tests_utils.create_volume(self.context, **self.volume_params)
    self.volume.create_volume(self.context, volume)
    # delete 'error_extending' volume
    db.volume_update(self.context, volume['id'],
                     {'status': 'error_extending'})
    self.volume.delete_volume(self.context, volume)
    self.assertRaises(exception.NotFound, db.volume_get,
                      self.context, volume['id'])
    mock_clean.assert_called_once_with(volume.id, self.volume.driver)
@mock.patch('cinder.utils.clean_volume_file_locks')
@mock.patch.object(db.sqlalchemy.api, 'volume_get',
                   side_effect=exception.VolumeNotFound(
                       volume_id='12345678-1234-5678-1234-567812345678'))
def test_delete_volume_not_found(self, mock_get_volume, mock_clean):
    """Test delete volume moves on if the volume does not exist."""
    volume_id = '12345678-1234-5678-1234-567812345678'
    volume = objects.Volume(self.context, status='available', id=volume_id)
    self.volume.delete_volume(self.context, volume)
    self.assertTrue(mock_get_volume.called)
    # Stale file locks are still cleaned up for the missing volume.
    mock_clean.assert_called_once_with(volume_id, self.volume.driver)
@mock.patch('cinder.volume.drivers.lvm.LVMVolumeDriver.'
            'create_volume_from_snapshot')
def test_create_volume_from_snapshot(self, mock_create_from_snap):
    """Test volume can be created from a snapshot."""
    volume_src = tests_utils.create_volume(self.context,
                                           **self.volume_params)
    self.volume.create_volume(self.context, volume_src)
    snapshot_id = create_snapshot(volume_src['id'],
                                  size=volume_src['size'])['id']
    snapshot_obj = objects.Snapshot.get_by_id(self.context, snapshot_id)
    self.volume.create_snapshot(self.context, snapshot_obj)
    volume_dst = tests_utils.create_volume(self.context,
                                           snapshot_id=snapshot_id,
                                           **self.volume_params)
    self.volume.create_volume(self.context, volume_dst)
    self.assertEqual(volume_dst['id'],
                     db.volume_get(
                         context.get_admin_context(),
                         volume_dst['id']).id)
    # The new volume must record the snapshot it came from.
    self.assertEqual(snapshot_id,
                     db.volume_get(context.get_admin_context(),
                                   volume_dst['id']).snapshot_id)
    # Tear down in dependency order: clone, snapshot, then source.
    self.volume.delete_volume(self.context, volume_dst)
    self.volume.delete_snapshot(self.context, snapshot_obj)
    self.volume.delete_volume(self.context, volume_src)
@mock.patch('cinder.volume.flows.api.create_volume.get_flow')
@mock.patch('cinder.objects.volume.Volume.get_by_id')
def test_create_volume_from_snapshot_with_types(
        self, _get_by_id, _get_flow):
    """Test volume create from snapshot with types including mismatch."""
    volume_api = cinder.volume.api.API()
    foo_type = fake_volume.fake_volume_type_obj(
        self.context,
        id=fake.VOLUME_TYPE_ID,
        name='foo',
        extra_specs={'volume_backend_name': 'dev_1'})
    # NOTE(review): this second type is also named 'foo' (likely a
    # copy/paste slip); the test only relies on the differing id and
    # backend name, so it still works -- confirm before "fixing".
    biz_type = fake_volume.fake_volume_type_obj(
        self.context,
        id=fake.VOLUME_TYPE2_ID,
        name='foo',
        extra_specs={'volume_backend_name': 'dev_2'})
    source_vol = fake_volume.fake_volume_obj(
        self.context,
        id=fake.VOLUME_ID,
        status='available',
        volume_size=10,
        volume_type_id=biz_type.id)
    source_vol.volume_type = biz_type
    snapshot = {'id': fake.SNAPSHOT_ID,
                'status': fields.SnapshotStatus.AVAILABLE,
                'volume_size': 10,
                'volume_type_id': biz_type.id}
    snapshot_obj = fake_snapshot.fake_snapshot_obj(self.context,
                                                   **snapshot)
    snapshot_obj.volume = source_vol
    # Make sure the case of specifying a type that
    # doesn't match the snapshots type fails
    self.assertRaises(exception.InvalidInput,
                      volume_api.create,
                      self.context,
                      size=1,
                      name='fake_name',
                      description='fake_desc',
                      volume_type=foo_type,
                      snapshot=snapshot_obj)
    # Make sure that trying to specify a type
    # when the snapshots type is None fails
    snapshot_obj.volume_type_id = None
    snapshot_obj.volume.volume_type_id = None
    snapshot_obj.volume.volume_type = None
    self.assertRaises(exception.InvalidInput,
                      volume_api.create,
                      self.context,
                      size=1,
                      name='fake_name',
                      description='fake_desc',
                      volume_type=foo_type,
                      snapshot=snapshot_obj)
    # Matching types must succeed.
    snapshot_obj.volume_type_id = foo_type.id
    snapshot_obj.volume.volume_type_id = foo_type.id
    snapshot_obj.volume.volume_type = foo_type
    volume_api.create(self.context, size=1, name='fake_name',
                      description='fake_desc', volume_type=foo_type,
                      snapshot=snapshot_obj)
@mock.patch('cinder.volume.flows.api.create_volume.get_flow')
@mock.patch('cinder.objects.volume.Volume.get_by_id')
def test_create_volume_from_source_with_types(
        self, _get_by_id, _get_flow):
    """Test volume create from source with types including mismatch."""
    volume_api = cinder.volume.api.API()
    foo_type = fake_volume.fake_volume_type_obj(
        self.context,
        id=fake.VOLUME_TYPE_ID,
        name='foo',
        extra_specs={'volume_backend_name': 'dev_1'})
    biz_type = fake_volume.fake_volume_type_obj(
        self.context,
        id=fake.VOLUME_TYPE2_ID,
        name='biz',
        extra_specs={'volume_backend_name': 'dev_2'})
    source_vol = fake_volume.fake_volume_obj(
        self.context,
        id=fake.VOLUME_ID,
        status='available',
        volume_size=0,
        volume_type_id=biz_type.id)
    source_vol.volume_type = biz_type
    # Requesting a type on a different backend than the source fails.
    self.assertRaises(exception.InvalidInput,
                      volume_api.create,
                      self.context,
                      size=1,
                      name='fake_name',
                      description='fake_desc',
                      volume_type=foo_type,
                      source_volume=source_vol)
    # Make sure that trying to specify a type
    # when the source type is None fails
    source_vol.volume_type_id = None
    source_vol.volume_type = None
    self.assertRaises(exception.InvalidInput,
                      volume_api.create,
                      self.context,
                      size=1,
                      name='fake_name',
                      description='fake_desc',
                      volume_type=foo_type,
                      source_volume=source_vol)
    # A matching source type must succeed.
    source_vol.volume_type_id = biz_type.id
    source_vol.volume_type = biz_type
    volume_api.create(self.context, size=1, name='fake_name',
                      description='fake_desc', volume_type=biz_type,
                      source_volume=source_vol)
@mock.patch('cinder.volume.flows.api.create_volume.get_flow')
@mock.patch('cinder.objects.volume.Volume.get_by_id')
def test_create_volume_from_source_with_same_backend(
        self, _get_by_id, _get_flow):
    """Test volume create from source with type mismatch same backend."""
    volume_api = cinder.volume.api.API()
    foo_type = fake_volume.fake_volume_type_obj(
        self.context,
        id=fake.VOLUME_TYPE_ID,
        name='foo',
        qos_specs_id=None,
        deleted=False,
        created_at=datetime.datetime(2015, 5, 8, 0, 40, 5, 408232),
        updated_at=None,
        extra_specs={'volume_backend_name': 'dev_1'},
        is_public=True,
        deleted_at=None,
        description=None)
    biz_type = fake_volume.fake_volume_type_obj(
        self.context,
        id=fake.VOLUME_TYPE2_ID,
        name='biz',
        qos_specs_id=None,
        deleted=False,
        created_at=datetime.datetime(2015, 5, 8, 0, 20, 5, 408232),
        updated_at=None,
        extra_specs={'volume_backend_name': 'dev_1'},
        is_public=True,
        deleted_at=None,
        description=None)
    source_vol = fake_volume.fake_volume_obj(
        self.context,
        id=fake.VOLUME_ID,
        status='available',
        volume_size=10,
        volume_type_id=biz_type.id)
    source_vol.volume_type = biz_type
    # Different types on the same backend ('dev_1') are allowed, so
    # this create must not raise.
    volume_api.create(self.context,
                      size=1,
                      name='fake_name',
                      description='fake_desc',
                      volume_type=foo_type,
                      source_volume=source_vol)
@mock.patch('cinder.volume.flows.api.create_volume.get_flow')
@mock.patch('cinder.objects.volume.Volume.get_by_id')
def test_create_from_source_and_snap_only_one_backend(
        self, _get_by_id, _get_flow):
    """Test create from source and snap with type mismatch one backend."""
    volume_api = cinder.volume.api.API()
    foo_type = fake_volume.fake_volume_type_obj(
        self.context,
        id=fake.VOLUME_TYPE_ID,
        name='foo',
        qos_specs_id=None,
        deleted=False,
        created_at=datetime.datetime(2015, 5, 8, 0, 40, 5, 408232),
        updated_at=None,
        extra_specs={'some_key': 3},
        is_public=True,
        deleted_at=None,
        description=None)
    biz_type = fake_volume.fake_volume_type_obj(
        self.context,
        id=fake.VOLUME_TYPE2_ID,
        name='biz',
        qos_specs_id=None,
        deleted=False,
        created_at=datetime.datetime(2015, 5, 8, 0, 20, 5, 408232),
        updated_at=None,
        extra_specs={'some_other_key': 4},
        is_public=True,
        deleted_at=None,
        description=None)
    source_vol = fake_volume.fake_volume_obj(
        self.context,
        id=fake.VOLUME_ID,
        status='available',
        volume_size=10,
        volume_type_id=biz_type.id)
    source_vol.volume_type = biz_type
    snapshot = {'id': fake.SNAPSHOT_ID,
                'status': fields.SnapshotStatus.AVAILABLE,
                'volume_size': 10,
                'volume_type_id': biz_type['id']}
    snapshot_obj = fake_snapshot.fake_snapshot_obj(self.context,
                                                   **snapshot)
    snapshot_obj.volume = source_vol
    # With a single deployed service there is nowhere else to schedule,
    # so the type mismatch is tolerated for both source and snapshot.
    with mock.patch('cinder.db.service_get_all') as mock_get_service, \
        mock.patch.object(volume_api,
                          'list_availability_zones') as mock_get_azs:
        mock_get_service.return_value = [
            {'host': 'foo',
             'uuid': 'a3a593da-7f8d-4bb7-8b4c-f2bc1e0b4824'}]
        mock_get_azs.return_value = {}
        volume_api.create(self.context,
                          size=1,
                          name='fake_name',
                          description='fake_desc',
                          volume_type=foo_type,
                          source_volume=source_vol)
        volume_api.create(self.context,
                          size=1,
                          name='fake_name',
                          description='fake_desc',
                          volume_type=foo_type,
                          snapshot=snapshot_obj)
def _test_create_from_source_snapshot_encryptions(
        self, is_snapshot=False):
    """Helper: create must fail when encryption differs between types.

    Exercises the source-volume path by default, or the snapshot path
    when is_snapshot is True.
    """
    volume_api = cinder.volume.api.API()
    foo_type = fake_volume.fake_volume_type_obj(
        self.context,
        id=fake.VOLUME_TYPE_ID,
        name='foo',
        extra_specs={'volume_backend_name': 'dev_1'})
    biz_type = fake_volume.fake_volume_type_obj(
        self.context,
        id=fake.VOLUME_TYPE2_ID,
        name='biz',
        extra_specs={'volume_backend_name': 'dev_1'})
    source_vol = fake_volume.fake_volume_obj(
        self.context,
        id=fake.VOLUME_ID,
        status='available',
        volume_size=1,
        volume_type_id=biz_type.id)
    source_vol.volume_type = biz_type
    snapshot = {'id': fake.SNAPSHOT_ID,
                'status': fields.SnapshotStatus.AVAILABLE,
                'volume_size': 1,
                'volume_type_id': biz_type['id']}
    snapshot_obj = fake_snapshot.fake_snapshot_obj(self.context,
                                                   **snapshot)
    snapshot_obj.volume = source_vol
    # Force the "encryption changed" answer so the API must refuse.
    with mock.patch.object(
            cinder.volume.volume_types,
            'volume_types_encryption_changed') as mock_encryption_changed:
        mock_encryption_changed.return_value = True
        self.assertRaises(exception.InvalidInput,
                          volume_api.create,
                          self.context,
                          size=1,
                          name='fake_name',
                          description='fake_desc',
                          volume_type=foo_type,
                          source_volume=(
                              source_vol if not is_snapshot else None),
                          snapshot=snapshot_obj if is_snapshot else None)
def test_create_from_source_encryption_changed(self):
    """Source-volume path: an encryption change must be rejected."""
    self._test_create_from_source_snapshot_encryptions()
def test_create_from_snapshot_encryption_changed(self):
    """Snapshot path: an encryption change must be rejected."""
    self._test_create_from_source_snapshot_encryptions(is_snapshot=True)
def _mock_synchronized(self, name, *s_args, **s_kwargs):
def inner_sync1(f):
def inner_sync2(*args, **kwargs):
self.called.append('lock-%s' % (name))
ret = f(*args, **kwargs)
self.called.append('unlock-%s' % (name))
return ret
return inner_sync2
return inner_sync1
def _fake_execute(self, *cmd, **kwargs):
    """No-op replacement for utils.execute used by the lock tests."""
    pass
@mock.patch.object(coordination.Coordinator, 'get_lock')
@mock.patch.object(fake_driver.FakeLoggingVolumeDriver,
                   'create_volume_from_snapshot')
def test_create_volume_from_snapshot_check_locks(
        self, mock_lvm_create, mock_lock):
    """The delete_snapshot/delete_volume locks are taken where expected."""
    orig_flow = engine.ActionEngine.run
    def mock_flow_run(*args, **kwargs):
        # ensure the lock has been taken
        mock_lock.assert_called_with('%s-delete_snapshot' % snap_id)
        # now proceed with the flow.
        ret = orig_flow(*args, **kwargs)
        return ret
    # create source volume
    src_vol = tests_utils.create_volume(self.context, **self.volume_params)
    # no lock
    self.volume.create_volume(self.context, src_vol)
    snap_id = create_snapshot(src_vol.id,
                              size=src_vol['size'])['id']
    snapshot_obj = objects.Snapshot.get_by_id(self.context, snap_id)
    # no lock
    self.volume.create_snapshot(self.context, snapshot_obj)
    dst_vol = tests_utils.create_volume(self.context,
                                        snapshot_id=snap_id,
                                        **self.volume_params)
    admin_ctxt = context.get_admin_context()
    # mock the flow runner so we can do some checks
    self.mock_object(engine.ActionEngine, 'run', mock_flow_run)
    # locked
    self.volume.create_volume(self.context, dst_vol,
                              request_spec={'snapshot_id': snap_id})
    mock_lock.assert_called_with('%s-delete_snapshot' % snap_id)
    self.assertEqual(dst_vol.id, db.volume_get(admin_ctxt, dst_vol.id).id)
    self.assertEqual(snap_id,
                     db.volume_get(admin_ctxt, dst_vol.id).snapshot_id)
    # locked
    self.volume.delete_volume(self.context, dst_vol)
    mock_lock.assert_called_with('%s-delete_volume' % dst_vol.id)
    # locked
    self.volume.delete_snapshot(self.context, snapshot_obj)
    mock_lock.assert_called_with('%s-delete_snapshot' % snap_id)
    # locked
    self.volume.delete_volume(self.context, src_vol)
    mock_lock.assert_called_with('%s-delete_volume' % src_vol.id)
    self.assertTrue(mock_lvm_create.called)
@mock.patch.object(coordination.Coordinator, 'get_lock')
def test_create_volume_from_volume_check_locks(self, mock_lock):
    """Verify the source volume's delete lock is held during cloning.

    Cloning must hold '<src_id>-delete_volume' while the create flow
    runs; plain creates take no lock, and each delete takes its own
    '<id>-delete_volume' lock.
    """
    # mock the synchroniser so we can record events
    self.mock_object(utils, 'execute', self._fake_execute)
    orig_flow = engine.ActionEngine.run
    def mock_flow_run(*args, **kwargs):
        # ensure the lock has been taken
        mock_lock.assert_called_with('%s-delete_volume' % src_vol_id)
        # now proceed with the flow.
        ret = orig_flow(*args, **kwargs)
        return ret
    # create source volume
    src_vol = tests_utils.create_volume(self.context, **self.volume_params)
    src_vol_id = src_vol['id']
    # no lock
    self.volume.create_volume(self.context, src_vol)
    self.assertEqual(0, mock_lock.call_count)
    dst_vol = tests_utils.create_volume(self.context,
                                        source_volid=src_vol_id,
                                        **self.volume_params)
    dst_vol_id = dst_vol['id']
    admin_ctxt = context.get_admin_context()
    # mock the flow runner so we can do some checks
    self.mock_object(engine.ActionEngine, 'run', mock_flow_run)
    # locked
    self.volume.create_volume(self.context, dst_vol,
                              request_spec={'source_volid': src_vol_id})
    mock_lock.assert_called_with('%s-delete_volume' % src_vol_id)
    self.assertEqual(dst_vol_id, db.volume_get(admin_ctxt, dst_vol_id).id)
    self.assertEqual(src_vol_id,
                     db.volume_get(admin_ctxt, dst_vol_id).source_volid)
    # locked
    self.volume.delete_volume(self.context, dst_vol)
    mock_lock.assert_called_with('%s-delete_volume' % dst_vol_id)
    # locked
    self.volume.delete_volume(self.context, src_vol)
    mock_lock.assert_called_with('%s-delete_volume' % src_vol_id)
def _raise_metadata_copy_failure(self, method, dst_vol):
    """Assert create_volume fails and errors out on metadata-copy failure.

    :param method: name of the db API to patch so it raises
                   MetadataCopyFailure (simulating the DB being down
                   while copying glance metadata).
    :param dst_vol: destination volume the create should fail for; it is
                    destroyed afterwards as cleanup.
    """
    # MetadataCopyFailure exception will be raised if DB service is Down
    # while copying the volume glance metadata
    with mock.patch.object(db, method) as mock_db:
        mock_db.side_effect = exception.MetadataCopyFailure(
            reason="Because of DB service down.")
        self.assertRaises(exception.MetadataCopyFailure,
                          self.volume.create_volume,
                          self.context,
                          dst_vol)
    # ensure that status of volume is 'error'
    vol = db.volume_get(self.context, dst_vol.id)
    self.assertEqual('error', vol['status'])
    # cleanup resource
    db.volume_destroy(self.context, dst_vol.id)
@mock.patch('cinder.utils.execute')
def test_create_volume_from_volume_with_glance_volume_metadata_none(
        self, mock_execute):
    """Cloning a bootable volume lacking glance metadata still succeeds.

    The clone ends up 'available' even though copying glance metadata
    finds none (GlanceMetadataNotFound is raised by the db helper).
    """
    # create source volume
    mock_execute.return_value = None
    src_vol = tests_utils.create_volume(self.context, **self.volume_params)
    src_vol_id = src_vol['id']
    self.volume.create_volume(self.context, src_vol)
    # set bootable flag of volume to True
    db.volume_update(self.context, src_vol['id'], {'bootable': True})
    # create volume from source volume
    dst_vol = tests_utils.create_volume(self.context,
                                        source_volid=src_vol_id,
                                        **self.volume_params)
    self.volume.create_volume(self.context, dst_vol)
    # no glance metadata exists on the source, so the copy helper raises
    self.assertRaises(exception.GlanceMetadataNotFound,
                      db.volume_glance_metadata_copy_from_volume_to_volume,
                      self.context, src_vol_id, dst_vol['id'])
    # ensure that status of volume is 'available'
    vol = db.volume_get(self.context, dst_vol['id'])
    self.assertEqual('available', vol['status'])
    # cleanup resource
    db.volume_destroy(self.context, src_vol_id)
    db.volume_destroy(self.context, dst_vol['id'])
@mock.patch('cinder.utils.execute')
def test_create_volume_from_volume_raise_metadata_copy_failure(
        self, mock_execute):
    """Clone of a bootable volume errors out if metadata copy fails."""
    # create source volume
    mock_execute.return_value = None
    src_vol = tests_utils.create_volume(self.context, **self.volume_params)
    src_vol_id = src_vol['id']
    self.volume.create_volume(self.context, src_vol)
    # set bootable flag of volume to True
    db.volume_update(self.context, src_vol['id'], {'bootable': True})
    # create volume from source volume
    dst_vol = tests_utils.create_volume(self.context,
                                        source_volid=src_vol_id,
                                        **self.volume_params)
    # helper asserts failure, 'error' status, and destroys dst_vol
    self._raise_metadata_copy_failure(
        'volume_glance_metadata_copy_from_volume_to_volume',
        dst_vol)
    # cleanup resource
    db.volume_destroy(self.context, src_vol_id)
@mock.patch('cinder.utils.execute')
def test_create_volume_from_snapshot_raise_metadata_copy_failure(
        self, mock_execute):
    """Create-from-snapshot errors out if glance metadata copy fails."""
    # create source volume
    mock_execute.return_value = None
    src_vol = tests_utils.create_volume(self.context, **self.volume_params)
    src_vol_id = src_vol['id']
    self.volume.create_volume(self.context, src_vol)
    # set bootable flag of volume to True
    db.volume_update(self.context, src_vol['id'], {'bootable': True})
    # create volume from snapshot
    snapshot_id = create_snapshot(src_vol['id'])['id']
    snapshot_obj = objects.Snapshot.get_by_id(self.context, snapshot_id)
    self.volume.create_snapshot(self.context, snapshot_obj)
    # ensure that status of snapshot is 'available'
    self.assertEqual(fields.SnapshotStatus.AVAILABLE, snapshot_obj.status)
    dst_vol = tests_utils.create_volume(self.context,
                                        snapshot_id=snapshot_id,
                                        **self.volume_params)
    # helper asserts failure, 'error' status, and destroys dst_vol
    self._raise_metadata_copy_failure(
        'volume_glance_metadata_copy_to_volume',
        dst_vol)
    # cleanup resource
    snapshot_obj.destroy()
    db.volume_destroy(self.context, src_vol_id)
@mock.patch('cinder.utils.execute')
def test_create_volume_from_snapshot_with_glance_volume_metadata_none(
        self, mock_execute):
    """Create-from-snapshot succeeds when the source has no metadata.

    The new volume becomes 'available' even though the snapshot carries
    no glance metadata to copy.
    """
    # create source volume
    mock_execute.return_value = None
    src_vol = tests_utils.create_volume(self.context, **self.volume_params)
    src_vol_id = src_vol['id']
    self.volume.create_volume(self.context, src_vol)
    # set bootable flag of volume to True
    db.volume_update(self.context, src_vol['id'], {'bootable': True})
    volume = db.volume_get(self.context, src_vol_id)
    # create snapshot of volume
    snapshot_id = create_snapshot(volume['id'])['id']
    snapshot_obj = objects.Snapshot.get_by_id(self.context, snapshot_id)
    self.volume.create_snapshot(self.context, snapshot_obj)
    # ensure that status of snapshot is 'available'
    self.assertEqual(fields.SnapshotStatus.AVAILABLE, snapshot_obj.status)
    # create volume from snapshot
    dst_vol = tests_utils.create_volume(self.context,
                                        snapshot_id=snapshot_id,
                                        **self.volume_params)
    self.volume.create_volume(self.context, dst_vol)
    # no glance metadata exists, so the copy helper raises
    self.assertRaises(exception.GlanceMetadataNotFound,
                      db.volume_glance_metadata_copy_to_volume,
                      self.context, dst_vol['id'], snapshot_id)
    # ensure that status of volume is 'available'
    vol = db.volume_get(self.context, dst_vol['id'])
    self.assertEqual('available', vol['status'])
    # cleanup resource
    snapshot_obj.destroy()
    db.volume_destroy(self.context, src_vol_id)
    db.volume_destroy(self.context, dst_vol['id'])
@ddt.data({'connector_class':
           os_brick.initiator.connectors.iscsi.ISCSIConnector,
           'rekey_supported': True,
           'already_encrypted': 'yes'},
          {'connector_class':
           os_brick.initiator.connectors.iscsi.ISCSIConnector,
           'rekey_supported': True,
           'already_encrypted': 'no'},
          {'connector_class':
           os_brick.initiator.connectors.rbd.RBDConnector,
           'rekey_supported': False,
           'already_encrypted': 'no'})
@ddt.unpack
@mock.patch('cinder.volume.volume_utils.delete_encryption_key')
@mock.patch('cinder.volume.flows.manager.create_volume.'
            'CreateVolumeFromSpecTask._setup_encryption_keys')
@mock.patch('cinder.db.sqlalchemy.api.volume_encryption_metadata_get')
@mock.patch('cinder.image.image_utils.qemu_img_info')
@mock.patch('cinder.volume.driver.VolumeDriver._detach_volume')
@mock.patch('cinder.volume.driver.VolumeDriver._attach_volume')
@mock.patch('cinder.volume.volume_utils.brick_get_connector_properties')
@mock.patch('cinder.utils.execute')
def test_create_volume_from_volume_with_enc(
        self, mock_execute, mock_brick_gcp, mock_at, mock_det,
        mock_qemu_img_info, mock_enc_metadata_get, mock_setup_enc_keys,
        mock_del_enc_key, connector_class=None, rekey_supported=None,
        already_encrypted=None):
    """Cloning an encrypted volume rekeys or LUKS-formats the clone.

    When the connector supports rekeying, an already-encrypted clone is
    rekeyed via 'cryptsetup luksChangeKey', while an unencrypted image
    is formatted with 'cryptsetup luksFormat'; connectors without rekey
    support (e.g. RBD) must touch neither cryptsetup nor the key setup.
    """
    # BUG FIX: 'already_encrypted' arrives as the ddt string 'yes'/'no'.
    # A bare truthiness test (`if already_encrypted:`) is True for 'no'
    # as well, which made every rekey case take the luksChangeKey branch
    # and left the luksFormat assertions dead.  Compare explicitly.
    src_is_encrypted = (already_encrypted == 'yes')
    # create source volume
    mock_execute.return_value = ('', '')
    mock_enc_metadata_get.return_value = {'cipher': 'aes-xts-plain64',
                                          'key_size': 256,
                                          'provider': 'luks'}
    mock_setup_enc_keys.return_value = (
        'qwert',
        'asdfg',
        fake.ENCRYPTION_KEY2_ID)
    params = {'status': 'creating',
              'size': 1,
              'host': CONF.host,
              'encryption_key_id': fake.ENCRYPTION_KEY_ID}
    src_vol = tests_utils.create_volume(self.context, **params)
    src_vol_id = src_vol['id']
    self.volume.create_volume(self.context, src_vol)
    db.volume_update(self.context,
                     src_vol['id'],
                     {'encryption_key_id': fake.ENCRYPTION_KEY_ID})
    # create volume from source volume
    params['encryption_key_id'] = fake.ENCRYPTION_KEY2_ID
    attach_info = {
        'connector': connector_class(None),
        'device': {'path': '/some/device/thing'}}
    mock_at.return_value = (attach_info, src_vol)
    img_info = imageutils.QemuImgInfo()
    if src_is_encrypted:
        # defaults to None when not encrypted
        img_info.encrypted = 'yes'
    img_info.file_format = 'raw'
    mock_qemu_img_info.return_value = img_info
    dst_vol = tests_utils.create_volume(self.context,
                                        source_volid=src_vol_id,
                                        **params)
    self.volume.create_volume(self.context, dst_vol)
    # ensure that status of volume is 'available'
    vol = db.volume_get(self.context, dst_vol['id'])
    self.assertEqual('available', vol['status'])
    # cleanup resource
    db.volume_destroy(self.context, src_vol_id)
    db.volume_destroy(self.context, dst_vol['id'])
    if rekey_supported:
        mock_setup_enc_keys.assert_called_once_with(
            mock.ANY,
            src_vol,
            {'key_size': 256,
             'provider': 'luks',
             'cipher': 'aes-xts-plain64'}
        )
        if src_is_encrypted:
            # existing LUKS header: passphrase is swapped in place
            mock_execute.assert_called_once_with(
                'cryptsetup', 'luksChangeKey',
                '/some/device/thing',
                '--force-password',
                log_errors=processutils.LOG_ALL_ERRORS,
                process_input='qwert\nasdfg\n',
                run_as_root=True)
        else:
            # raw image: a fresh LUKS container must be created
            mock_execute.assert_called_once_with(
                'cryptsetup', '--batch-mode', 'luksFormat',
                '--type', 'luks1',
                '--cipher', 'aes-xts-plain64', '--key-size', '256',
                '--key-file=-', '/some/device/thing',
                process_input='asdfg',
                run_as_root=True)
        mock_del_enc_key.assert_called_once_with(mock.ANY,  # context
                                                 mock.ANY,  # keymgr
                                                 fake.ENCRYPTION_KEY2_ID)
    else:
        mock_setup_enc_keys.assert_not_called()
        mock_execute.assert_not_called()
        mock_del_enc_key.assert_not_called()
    mock_at.assert_called()
    mock_det.assert_called()
@mock.patch('cinder.db.sqlalchemy.api.volume_encryption_metadata_get')
def test_setup_encryption_keys(self, mock_enc_metadata_get):
    """_setup_encryption_keys returns distinct passphrases and new key id."""
    key_mgr = fake_keymgr.fake_api()
    self.mock_object(castellan.key_manager, 'API', return_value=key_mgr)
    key_id = key_mgr.store(self.context, KeyObject())
    key2_id = key_mgr.store(self.context, KeyObject2())
    params = {'status': 'creating',
              'size': 1,
              'host': CONF.host,
              'encryption_key_id': key_id}
    vol = tests_utils.create_volume(self.context, **params)
    self.volume.create_volume(self.context, vol)
    db.volume_update(self.context,
                     vol['id'],
                     {'encryption_key_id': key_id})
    mock_enc_metadata_get.return_value = {'cipher': 'aes-xts-plain64',
                                          'key_size': 256,
                                          'provider': 'luks'}
    ctxt = context.get_admin_context()
    enc_info = {'encryption_key_id': key_id}
    # force the freshly-created key id so we can assert it is returned
    with mock.patch('cinder.volume.volume_utils.create_encryption_key',
                    return_value=key2_id):
        r = cinder.volume.flows.manager.create_volume.\
            CreateVolumeFromSpecTask._setup_encryption_keys(ctxt,
                                                            vol,
                                                            enc_info)
    (source_pass, new_pass, new_key_id) = r
    # the new passphrase must differ from the source volume's
    self.assertNotEqual(source_pass, new_pass)
    self.assertEqual(new_key_id, key2_id)
@mock.patch.object(key_manager, 'API', fake_keymgr.fake_api)
def test_create_volume_from_snapshot_with_encryption(self):
    """Test volume can be created from a snapshot of an encrypted volume"""
    ctxt = context.get_admin_context()
    cipher = 'aes-xts-plain64'
    key_size = 256
    db.volume_type_create(ctxt,
                          {'id': '61298380-0c12-11e3-bfd6-4b48424183be',
                           'name': 'LUKS'})
    db.volume_type_encryption_create(
        ctxt,
        '61298380-0c12-11e3-bfd6-4b48424183be',
        {'control_location': 'front-end', 'provider': ENCRYPTION_PROVIDER,
         'cipher': cipher, 'key_size': key_size})
    volume_api = cinder.volume.api.API()
    db_vol_type = db.volume_type_get_by_name(context.get_admin_context(),
                                             'LUKS')
    volume_src = volume_api.create(self.context,
                                   1,
                                   'name',
                                   'description',
                                   volume_type=db_vol_type)
    db.volume_update(self.context, volume_src['id'],
                     {'host': 'fake_host@fake_backend',
                      'status': 'available'})
    volume_src = objects.Volume.get_by_id(self.context, volume_src['id'])
    snapshot_ref = volume_api.create_snapshot_force(self.context,
                                                    volume_src,
                                                    'name',
                                                    'description')
    snapshot_ref['status'] = fields.SnapshotStatus.AVAILABLE
    # status must be available
    volume_dst = volume_api.create(self.context,
                                   1,
                                   'name',
                                   'description',
                                   snapshot=snapshot_ref)
    self.assertEqual(volume_dst['id'],
                     db.volume_get(
                         context.get_admin_context(),
                         volume_dst['id']).id)
    self.assertEqual(snapshot_ref['id'],
                     db.volume_get(context.get_admin_context(),
                                   volume_dst['id']).snapshot_id)
    # ensure encryption keys match
    self.assertIsNotNone(volume_src['encryption_key_id'])
    self.assertIsNotNone(volume_dst['encryption_key_id'])
    key_manager = volume_api.key_manager  # must use *same* key manager
    volume_src_key = key_manager.get(self.context,
                                     volume_src['encryption_key_id'])
    volume_dst_key = key_manager.get(self.context,
                                     volume_dst['encryption_key_id'])
    # both volumes must resolve to the same key material
    self.assertEqual(volume_src_key, volume_dst_key)
def test_create_volume_from_encrypted_volume(self):
    """Test volume can be created from an encrypted volume."""
    self.mock_object(key_manager, 'API', fake_keymgr.fake_api)
    cipher = 'aes-xts-plain64'
    key_size = 256
    volume_api = cinder.volume.api.API()
    ctxt = context.get_admin_context()
    db.volume_type_create(ctxt,
                          {'id': '61298380-0c12-11e3-bfd6-4b48424183be',
                           'name': 'LUKS'})
    db.volume_type_encryption_create(
        ctxt,
        '61298380-0c12-11e3-bfd6-4b48424183be',
        {'control_location': 'front-end', 'provider': ENCRYPTION_PROVIDER,
         'cipher': cipher, 'key_size': key_size})
    db_vol_type = db.volume_type_get_by_name(context.get_admin_context(),
                                             'LUKS')
    volume_src = volume_api.create(self.context,
                                   1,
                                   'name',
                                   'description',
                                   volume_type=db_vol_type)
    # mark the source usable as a clone source
    db.volume_update(self.context, volume_src['id'],
                     {'host': 'fake_host@fake_backend',
                      'status': 'available'})
    volume_src = objects.Volume.get_by_id(self.context, volume_src['id'])
    volume_dst = volume_api.create(self.context,
                                   1,
                                   'name',
                                   'description',
                                   source_volume=volume_src)
    self.assertEqual(volume_dst['id'],
                     db.volume_get(context.get_admin_context(),
                                   volume_dst['id']).id)
    self.assertEqual(volume_src['id'],
                     db.volume_get(context.get_admin_context(),
                                   volume_dst['id']).source_volid)
    # ensure encryption keys match
    self.assertIsNotNone(volume_src['encryption_key_id'])
    self.assertIsNotNone(volume_dst['encryption_key_id'])
    km = volume_api.key_manager  # must use *same* key manager
    volume_src_key = km.get(self.context,
                            volume_src['encryption_key_id'])
    volume_dst_key = km.get(self.context,
                            volume_dst['encryption_key_id'])
    # both volumes must resolve to the same key material
    self.assertEqual(volume_src_key, volume_dst_key)
def test_delete_invalid_status_fails(self):
    """Deleting a volume whose status is unrecognized must fail."""
    api = cinder.volume.api.API()
    self.volume_params['status'] = 'invalid1234'
    bad_volume = tests_utils.create_volume(
        self.context, **self.volume_params)
    self.assertRaises(
        exception.InvalidVolume, api.delete, self.context, bad_volume)
def test_create_volume_from_snapshot_fail_bad_size(self):
    """Test volume can't be created from snapshot with bad volume size."""
    api = cinder.volume.api.API()
    snap_fields = {
        'id': fake.SNAPSHOT_ID,
        'status': fields.SnapshotStatus.AVAILABLE,
        'volume_size': 10,
    }
    snap = fake_snapshot.fake_snapshot_obj(self.context, **snap_fields)
    # requested size (1) is smaller than the snapshot's size (10)
    self.assertRaises(
        exception.InvalidInput,
        api.create,
        self.context,
        size=1,
        name='fake_name',
        description='fake_desc',
        snapshot=snap)
def test_create_volume_from_snapshot_fail_wrong_az(self):
    """Test volume can't be created from snapshot in a different az."""
    volume_api = cinder.volume.api.API()
    def fake_list_availability_zones(enable_cache=False):
        # two AZs so the request can name one that differs from 'az2'
        return ({'name': 'nova', 'available': True},
                {'name': 'az2', 'available': True})
    self.mock_object(volume_api,
                     'list_availability_zones',
                     fake_list_availability_zones)
    volume_src = tests_utils.create_volume(self.context,
                                           availability_zone='az2',
                                           **self.volume_params)
    self.volume.create_volume(self.context, volume_src)
    snapshot = create_snapshot(volume_src['id'])
    self.volume.create_snapshot(self.context, snapshot)
    # without an explicit AZ the volume inherits the snapshot's AZ
    volume_dst = volume_api.create(self.context,
                                   size=1,
                                   name='fake_name',
                                   description='fake_desc',
                                   snapshot=snapshot)
    self.assertEqual('az2', volume_dst['availability_zone'])
    # explicitly requesting a different AZ must be rejected
    self.assertRaises(exception.InvalidInput,
                      volume_api.create,
                      self.context,
                      size=1,
                      name='fake_name',
                      description='fake_desc',
                      snapshot=snapshot,
                      availability_zone='nova')
def test_create_volume_with_invalid_exclusive_options(self):
    """Test volume create with multiple exclusive options fails."""
    api = cinder.volume.api.API()
    # snapshot, image and source volume are mutually exclusive sources
    self.assertRaises(
        exception.InvalidInput,
        api.create,
        self.context,
        1,
        'name',
        'description',
        snapshot=fake.SNAPSHOT_ID,
        image_id=fake.IMAGE_ID,
        source_volume=fake.VOLUME_ID)
def test_reserve_volume_success(self):
    """Reserving an available volume moves it to 'attaching'."""
    vol = tests_utils.create_volume(self.context, status='available')
    api = cinder.volume.api.API()
    api.reserve_volume(self.context, vol)
    refreshed = db.volume_get(self.context, vol.id)
    self.assertEqual('attaching', refreshed.status)
    db.volume_destroy(self.context, vol.id)
def test_reserve_volume_in_attaching(self):
    """Reserving a volume already in 'attaching' must fail."""
    self._test_reserve_volume_bad_status('attaching')
def test_reserve_volume_in_maintenance(self):
    """Reserving a volume in 'maintenance' must fail."""
    self._test_reserve_volume_bad_status('maintenance')
def _test_reserve_volume_bad_status(self, status):
    """Assert reserve_volume raises InvalidVolume for the given status."""
    vol = tests_utils.create_volume(self.context, status=status)
    api = cinder.volume.api.API()
    self.assertRaises(
        exception.InvalidVolume, api.reserve_volume, self.context, vol)
    db.volume_destroy(self.context, vol.id)
def test_attachment_reserve_with_bootable_volume(self):
    """Reserving a bootable, in-use, multiattach volume succeeds."""
    # test the private _attachment_reserve method with a bootable,
    # in-use, multiattach volume.
    instance_uuid = fake.UUID1
    volume = tests_utils.create_volume(self.context, status='in-use')
    tests_utils.attach_volume(self.context, volume.id, instance_uuid,
                              'attached_host', 'mountpoint', mode='rw')
    volume.multiattach = True
    volume.bootable = True
    attachment = self.volume_api._attachment_reserve(
        self.context, volume, instance_uuid)
    self.assertEqual(attachment.attach_status, 'reserved')
def test_attachment_reserve_conditional_update_attach_race(self):
    """Second instance racing to attach a non-multiattach volume fails."""
    # Tests a scenario where two instances are racing to attach the
    # same multiattach=False volume. One updates the volume status to
    # "reserved" but the other fails the conditional update which is
    # then validated to not be the same instance that is already attached
    # to the multiattach=False volume which triggers a failure.
    volume = tests_utils.create_volume(self.context)
    # Assert that we're not dealing with a multiattach volume and that
    # it does not have any existing attachments.
    self.assertFalse(volume.multiattach)
    self.assertEqual(0, len(volume.volume_attachment))
    # Attach the first instance which is OK and should update the volume
    # status to 'reserved'.
    self.volume_api._attachment_reserve(self.context, volume, fake.UUID1)
    # Try attaching a different instance to the same volume which should
    # fail.
    ex = self.assertRaises(exception.InvalidVolume,
                           self.volume_api._attachment_reserve,
                           self.context, volume, fake.UUID2)
    self.assertIn("status must be available or downloading", str(ex))
def test_attachment_reserve_with_instance_uuid_error_volume(self):
    """Attachment reserve on an 'error' volume with no attachments fails."""
    # Tests that trying to create an attachment (with an instance_uuid
    # provided) on a volume that's not 'available' or 'downloading' status
    # will fail if the volume does not have any attachments, similar to how
    # the volume reserve action works.
    volume = tests_utils.create_volume(self.context, status='error')
    # Assert that we're not dealing with a multiattach volume and that
    # it does not have any existing attachments.
    self.assertFalse(volume.multiattach)
    self.assertEqual(0, len(volume.volume_attachment))
    # Try attaching an instance to the volume which should fail based on
    # the volume status.
    ex = self.assertRaises(exception.InvalidVolume,
                           self.volume_api._attachment_reserve,
                           self.context, volume, fake.UUID1)
    self.assertIn("status must be available or downloading", str(ex))
def test_unreserve_volume_success_in_use(self):
    """Unreserving an attached 'attaching' volume yields 'in-use'."""
    vol = tests_utils.create_volume(self.context, status='attaching')
    tests_utils.attach_volume(self.context, vol.id, fake.INSTANCE_ID,
                              'attached_host', 'mountpoint', mode='rw')
    cinder.volume.api.API().unreserve_volume(self.context, vol)
    refreshed = db.volume_get(self.context, vol.id)
    self.assertEqual('in-use', refreshed.status)
def test_unreserve_volume_success_available(self):
    """Unreserving an unattached 'attaching' volume yields 'available'."""
    vol = tests_utils.create_volume(self.context, status='attaching')
    cinder.volume.api.API().unreserve_volume(self.context, vol)
    refreshed = db.volume_get(self.context, vol.id)
    self.assertEqual('available', refreshed.status)
def test_multi_node(self):
    """Placeholder for a future multi-node interaction test."""
    # TODO(termie): Figure out how to test with two nodes,
    # each of them having a different FLAG for storage_node
    # This will allow us to test cross-node interactions
    pass
def test_cannot_delete_volume_in_use(self):
    """Test volume can't be deleted in in-use status."""
    self._test_cannot_delete_volume('in-use')
def test_cannot_delete_volume_maintenance(self):
    """Test volume can't be deleted in maintenance status."""
    self._test_cannot_delete_volume('maintenance')
@mock.patch('cinder.utils.clean_volume_file_locks')
def _test_cannot_delete_volume(self, status, mock_clean):
    """Test volume can't be deleted in invalid stats."""
    # create a volume and assign to host
    volume = tests_utils.create_volume(self.context, CONF.host,
                                       status=status)
    # 'in-use' status raises InvalidVolume
    self.assertRaises(exception.InvalidVolume,
                      self.volume_api.delete,
                      self.context,
                      volume)
    # a rejected delete must not clean up the volume's file locks
    mock_clean.assert_not_called()
    # clean up
    self.volume.delete_volume(self.context, volume)
def test_force_delete_volume(self):
    """Test volume can be forced to delete."""
    # create a volume and assign to host
    self.volume_params['status'] = 'error_deleting'
    volume = tests_utils.create_volume(self.context, **self.volume_params)
    # 'error_deleting' volumes can't be deleted
    self.assertRaises(exception.InvalidVolume,
                      self.volume_api.delete,
                      self.context,
                      volume)
    # delete with force
    self.volume_api.delete(self.context, volume, force=True)
    # status is deleting
    volume = objects.Volume.get_by_id(context.get_admin_context(),
                                      volume.id)
    self.assertEqual('deleting', volume.status)
    # clean up
    self.volume.delete_volume(self.context, volume)
def test_cannot_force_delete_attached_volume(self):
    """Test volume can't be force delete in attached state."""
    vol = tests_utils.create_volume(
        self.context, CONF.host, status='in-use',
        attach_status=fields.VolumeAttachStatus.ATTACHED)
    # even force=True must not delete an attached volume
    self.assertRaises(exception.InvalidVolume,
                      self.volume_api.delete,
                      self.context, vol, force=True)
    db.volume_destroy(self.context, vol.id)
@mock.patch('cinder.utils.clean_volume_file_locks')
def test__revert_to_snapshot_generic_failed(self, mock_clean):
    """Copy failure during generic revert still cleans up the temp volume."""
    fake_volume = tests_utils.create_volume(self.context,
                                            status='available')
    fake_snapshot = tests_utils.create_snapshot(self.context,
                                                fake_volume.id)
    with mock.patch.object(
            self.volume.driver,
            '_create_temp_volume_from_snapshot') as mock_temp, \
            mock.patch.object(
                self.volume.driver,
                'delete_volume') as mock_driver_delete, \
            mock.patch.object(
                self.volume, '_copy_volume_data') as mock_copy:
        temp_volume = tests_utils.create_volume(self.context,
                                                status='available')
        # make the data copy blow up to exercise the error path
        mock_copy.side_effect = [exception.VolumeDriverException('error')]
        mock_temp.return_value = temp_volume
        self.assertRaises(exception.VolumeDriverException,
                          self.volume._revert_to_snapshot_generic,
                          self.context, fake_volume, fake_snapshot)
        mock_copy.assert_called_once_with(
            self.context, temp_volume, fake_volume)
        # the temporary volume and its locks must still be removed
        mock_driver_delete.assert_called_once_with(temp_volume)
        mock_clean.assert_called_once_with(temp_volume.id,
                                           self.volume.driver)
@mock.patch('cinder.utils.clean_volume_file_locks')
def test__revert_to_snapshot_generic(self, mock_clean):
    """Generic revert copies from a temp volume, then deletes it."""
    fake_volume = tests_utils.create_volume(self.context,
                                            status='available')
    fake_snapshot = tests_utils.create_snapshot(self.context,
                                                fake_volume.id)
    with mock.patch.object(
            self.volume.driver,
            '_create_temp_volume_from_snapshot') as mock_temp,\
            mock.patch.object(
                self.volume.driver, 'delete_volume') as mock_driver_delete,\
            mock.patch.object(
                self.volume, '_copy_volume_data') as mock_copy:
        temp_volume = tests_utils.create_volume(self.context,
                                                status='available')
        mock_temp.return_value = temp_volume
        self.volume._revert_to_snapshot_generic(
            self.context, fake_volume, fake_snapshot)
        mock_copy.assert_called_once_with(
            self.context, temp_volume, fake_volume)
        # the temporary volume and its locks must be removed afterwards
        mock_driver_delete.assert_called_once_with(temp_volume)
        mock_clean.assert_called_once_with(temp_volume.id,
                                           self.volume.driver)
@ddt.data({'driver_error': True},
          {'driver_error': False})
@ddt.unpack
def test__revert_to_snapshot(self, driver_error):
    """Driver revert is tried first; generic revert is the fallback.

    When the driver raises NotImplementedError the manager must fall
    back to _revert_to_snapshot_generic; otherwise the generic path is
    never invoked.
    """
    # NOTE: removed a dead statement that built a patcher via
    # mock.patch.object(self.volume, '_notify_about_snapshot_usage')
    # without ever starting it -- an un-started patcher is a no-op and
    # the same attribute is properly patched in the with-block below.
    with mock.patch.object(self.volume.driver,
                           'revert_to_snapshot') as driver_revert, \
            mock.patch.object(self.volume, '_notify_about_volume_usage'), \
            mock.patch.object(self.volume, '_notify_about_snapshot_usage'),\
            mock.patch.object(self.volume,
                              '_revert_to_snapshot_generic') as generic_revert:
        if driver_error:
            driver_revert.side_effect = [NotImplementedError]
        else:
            driver_revert.return_value = None
        self.volume._revert_to_snapshot(self.context, {}, {})
        driver_revert.assert_called_once_with(self.context, {}, {})
        if driver_error:
            generic_revert.assert_called_once_with(self.context, {}, {})
@ddt.data({},
          {'has_snapshot': True},
          {'use_temp_snapshot': True},
          {'use_temp_snapshot': True, 'has_snapshot': True})
@ddt.unpack
def test_revert_to_snapshot(self, has_snapshot=False,
                            use_temp_snapshot=False):
    """Revert flow: backup-snapshot creation/cleanup and final states.

    A backup snapshot is created only when the driver asks for a temp
    snapshot; it is deleted afterwards only if one was actually made.
    The volume/snapshot end 'available' and the volume keeps its size.
    """
    fake_volume = tests_utils.create_volume(self.context,
                                            status='reverting',
                                            project_id='123',
                                            size=2)
    fake_snapshot = tests_utils.create_snapshot(self.context,
                                                fake_volume['id'],
                                                status='restoring',
                                                volume_size=1)
    with mock.patch.object(self.volume,
                           '_revert_to_snapshot') as _revert,\
            mock.patch.object(self.volume,
                              '_create_backup_snapshot') as _create_snapshot,\
            mock.patch.object(self.volume,
                              'delete_snapshot') as _delete_snapshot, \
            mock.patch.object(self.volume.driver,
                              'snapshot_revert_use_temp_snapshot') as \
            _use_temp_snap:
        _revert.return_value = None
        _use_temp_snap.return_value = use_temp_snapshot
        if has_snapshot:
            _create_snapshot.return_value = {'id': 'fake_snapshot'}
        else:
            _create_snapshot.return_value = None
        self.volume.revert_to_snapshot(self.context, fake_volume,
                                       fake_snapshot)
        _revert.assert_called_once_with(self.context, fake_volume,
                                        fake_snapshot)
        if not use_temp_snapshot:
            _create_snapshot.assert_not_called()
        else:
            _create_snapshot.assert_called_once_with(self.context,
                                                     fake_volume)
        if use_temp_snapshot and has_snapshot:
            # backup snapshot is removed without touching quota
            _delete_snapshot.assert_called_once_with(
                self.context, {'id': 'fake_snapshot'}, handle_quota=False)
        else:
            _delete_snapshot.assert_not_called()
    fake_volume.refresh()
    fake_snapshot.refresh()
    self.assertEqual('available', fake_volume['status'])
    self.assertEqual('available', fake_snapshot['status'])
    # revert must not shrink the volume to the snapshot's size
    self.assertEqual(2, fake_volume['size'])
def test_revert_to_snapshot_failed(self):
    """A failed revert leaves the volume in 'error', snapshot available."""
    fake_volume = tests_utils.create_volume(self.context,
                                            status='reverting',
                                            project_id='123',
                                            size=2)
    fake_snapshot = tests_utils.create_snapshot(self.context,
                                                fake_volume['id'],
                                                status='restoring',
                                                volume_size=1)
    with mock.patch.object(self.volume,
                           '_revert_to_snapshot') as _revert, \
            mock.patch.object(self.volume,
                              '_create_backup_snapshot'), \
            mock.patch.object(self.volume,
                              'delete_snapshot') as _delete_snapshot:
        # make the actual revert fail
        _revert.side_effect = [exception.VolumeDriverException(
            message='fake_message')]
        self.assertRaises(exception.VolumeDriverException,
                          self.volume.revert_to_snapshot,
                          self.context, fake_volume,
                          fake_snapshot)
        _revert.assert_called_once_with(self.context, fake_volume,
                                        fake_snapshot)
        _delete_snapshot.assert_not_called()
    fake_volume.refresh()
    fake_snapshot.refresh()
    self.assertEqual('error', fake_volume['status'])
    self.assertEqual('available', fake_snapshot['status'])
    self.assertEqual(2, fake_volume['size'])
def test_cannot_revert_to_snapshot_in_use(self):
    """Test volume can't be reverted to snapshot in in-use status."""
    vol = tests_utils.create_volume(self.context, status='in-use')
    snap = tests_utils.create_snapshot(self.context, vol.id,
                                       status='available')
    self.assertRaises(exception.InvalidVolume,
                      self.volume_api.revert_to_snapshot,
                      self.context, vol, snap)
def test_cannot_delete_volume_with_snapshots(self):
    """Test volume can't be deleted with dependent snapshots."""
    volume = tests_utils.create_volume(self.context, **self.volume_params)
    self.volume.create_volume(self.context, volume)
    snapshot = create_snapshot(volume['id'], size=volume['size'])
    self.volume.create_snapshot(self.context, snapshot)
    self.assertEqual(
        snapshot.id, objects.Snapshot.get_by_id(self.context,
                                                snapshot.id).id)
    volume['status'] = 'available'
    volume['host'] = 'fakehost'
    volume_api = cinder.volume.api.API()
    # delete must be refused while the snapshot exists
    self.assertRaises(exception.InvalidVolume,
                      volume_api.delete,
                      self.context,
                      volume)
    # cleanup: remove the snapshot first, then the volume
    self.volume.delete_snapshot(self.context, snapshot)
    self.volume.delete_volume(self.context, volume)
def test_can_delete_errored_snapshot(self):
    """Test snapshot can be created and deleted."""
    volume = tests_utils.create_volume(self.context, CONF.host)
    # a snapshot stuck in ERROR must still be deletable via the API
    snapshot = create_snapshot(volume.id, size=volume['size'],
                               ctxt=self.context,
                               status=fields.SnapshotStatus.ERROR)
    self.volume_api.delete_snapshot(self.context, snapshot)
    self.assertEqual(fields.SnapshotStatus.DELETING, snapshot.status)
    self.volume.delete_volume(self.context, volume)
def test_create_snapshot_set_worker(self):
    """create_snapshot registers a worker entry on the volume."""
    volume = tests_utils.create_volume(self.context)
    snapshot = create_snapshot(volume.id, size=volume['size'],
                               ctxt=self.context,
                               status=fields.SnapshotStatus.CREATING)
    self.volume.create_snapshot(self.context, snapshot)
    # NOTE(review): set_worker is asserted as a mock, so it is presumably
    # patched in this test class's setUp -- confirm against the fixture.
    volume.set_worker.assert_called_once_with()
def test_cannot_delete_snapshot_with_bad_status(self):
    """Deleting a CREATING snapshot fails; ERROR snapshots can be deleted."""
    volume = tests_utils.create_volume(self.context, CONF.host)
    snapshot = create_snapshot(volume.id, size=volume['size'],
                               ctxt=self.context,
                               status=fields.SnapshotStatus.CREATING)
    self.assertRaises(exception.InvalidSnapshot,
                      self.volume_api.delete_snapshot,
                      self.context,
                      snapshot)
    # flipping the snapshot to ERROR makes it deletable
    snapshot.status = fields.SnapshotStatus.ERROR
    snapshot.save()
    self.volume_api.delete_snapshot(self.context, snapshot)
    self.assertEqual(fields.SnapshotStatus.DELETING, snapshot.status)
    self.volume.delete_volume(self.context, volume)
@mock.patch.object(QUOTAS, "rollback")
@mock.patch.object(QUOTAS, "commit")
@mock.patch.object(QUOTAS, "reserve", return_value=["RESERVATION"])
def _do_test_create_volume_with_size(self, size, *_unused_quota_mocks):
    """Create a volume with the given size and assert it is stored as int.

    :param size: requested size; may be an int or a numeric string.
    """
    volume_api = cinder.volume.api.API()
    volume = volume_api.create(self.context,
                               size,
                               'name',
                               'description',
                               volume_type=self.vol_type)
    # the API must coerce string sizes to int
    self.assertEqual(int(size), volume['size'])
def test_create_volume_int_size(self):
    """Test volume creation with int size."""
    self._do_test_create_volume_with_size(2)
    def test_create_volume_string_size(self):
        """Test volume creation with a numeric string size."""
        self._do_test_create_volume_with_size('2')
    @mock.patch.object(QUOTAS, "rollback")
    @mock.patch.object(QUOTAS, "commit")
    @mock.patch.object(QUOTAS, "reserve", return_value=["RESERVATION"])
    def test_create_volume_with_bad_size(self, *_unused_quota_mocks):
        """Test volume creation fails with a malformed size string."""
        volume_api = cinder.volume.api.API()

        self.assertRaises(exception.InvalidInput,
                          volume_api.create,
                          self.context,
                          '2Gb',
                          'name',
                          'description')
    def test_create_volume_with_float_fails(self):
        """Test volume creation with invalid float size."""
        volume_api = cinder.volume.api.API()
        # Fractional sizes are rejected; sizes must be whole numbers.
        self.assertRaises(exception.InvalidInput,
                          volume_api.create,
                          self.context,
                          '1.5',
                          'name',
                          'description')
    def test_create_volume_with_zero_size_fails(self):
        """Test volume creation fails with a zero size."""
        volume_api = cinder.volume.api.API()
        self.assertRaises(exception.InvalidInput,
                          volume_api.create,
                          self.context,
                          '0',
                          'name',
                          'description')
    def test_begin_detaching_fails_available(self):
        """Test begin_detaching only succeeds on attached in-use volumes."""
        volume_api = cinder.volume.api.API()
        volume = tests_utils.create_volume(self.context, status='available')
        # Volume status is 'available'.
        self.assertRaises(exception.InvalidVolume, volume_api.begin_detaching,
                          self.context, volume)
        db.volume_update(self.context, volume.id,
                         {'status': 'in-use',
                          'attach_status':
                              fields.VolumeAttachStatus.DETACHED})
        # Should raise an error since not attached
        self.assertRaises(exception.InvalidVolume, volume_api.begin_detaching,
                          self.context, volume)
        db.volume_update(self.context, volume.id,
                         {'attach_status':
                             fields.VolumeAttachStatus.ATTACHED})
        # Ensure when attached no exception raised
        volume_api.begin_detaching(self.context, volume)
        # A volume in maintenance mode cannot begin detaching either.
        volume_api.update(self.context, volume, {'status': 'maintenance'})
        self.assertRaises(exception.InvalidVolume, volume_api.begin_detaching,
                          self.context, volume)
        db.volume_destroy(self.context, volume.id)
    def test_begin_roll_detaching_volume(self):
        """Test begin_detaching and roll_detaching functions."""
        instance_uuid = '12345678-1234-5678-1234-567812345678'
        volume = tests_utils.create_volume(self.context, **self.volume_params)
        attachment = db.volume_attach(self.context,
                                      {'volume_id': volume['id'],
                                       'attached_host': 'fake-host'})
        db.volume_attached(self.context, attachment['id'], instance_uuid,
                           'fake-host', 'vdb')
        volume_api = cinder.volume.api.API()
        # begin_detaching moves the volume to 'detaching' ...
        volume_api.begin_detaching(self.context, volume)
        volume = volume_api.get(self.context, volume['id'])
        self.assertEqual("detaching", volume['status'])
        # ... and roll_detaching restores it to 'in-use'.
        volume_api.roll_detaching(self.context, volume)
        volume = volume_api.get(self.context, volume['id'])
        self.assertEqual("in-use", volume['status'])
    def test_volume_api_update(self):
        """Test updating a volume's display name through the volume API."""
        # create a raw vol
        volume = tests_utils.create_volume(self.context, **self.volume_params)
        # use volume.api to update name
        volume_api = cinder.volume.api.API()
        update_dict = {'display_name': 'test update name'}
        volume_api.update(self.context, volume, update_dict)
        # read changes from db
        vol = db.volume_get(context.get_admin_context(), volume['id'])
        self.assertEqual('test update name', vol['display_name'])
    def test_volume_api_update_maintenance(self):
        """Test that a volume in maintenance mode cannot be updated."""
        # create a raw vol
        volume = tests_utils.create_volume(self.context, **self.volume_params)
        volume['status'] = 'maintenance'
        # use volume.api to update name
        volume_api = cinder.volume.api.API()
        update_dict = {'display_name': 'test update name'}
        self.assertRaises(exception.InvalidVolume, volume_api.update,
                          self.context, volume, update_dict)
    def test_volume_api_get_list_volumes_image_metadata(self):
        """Test get_list_volumes_image_metadata in volume API."""
        ctxt = context.get_admin_context()
        # Two volumes, each with two glance metadata entries.
        db.volume_create(ctxt, {'id': 'fake1', 'status': 'available',
                                'host': 'test', 'provider_location': '',
                                'size': 1,
                                'volume_type_id': fake.VOLUME_TYPE_ID})
        db.volume_glance_metadata_create(ctxt, 'fake1', 'key1', 'value1')
        db.volume_glance_metadata_create(ctxt, 'fake1', 'key2', 'value2')
        db.volume_create(ctxt, {'id': 'fake2', 'status': 'available',
                                'host': 'test', 'provider_location': '',
                                'size': 1,
                                'volume_type_id': fake.VOLUME_TYPE_ID})
        db.volume_glance_metadata_create(ctxt, 'fake2', 'key3', 'value3')
        db.volume_glance_metadata_create(ctxt, 'fake2', 'key4', 'value4')
        volume_api = cinder.volume.api.API()
        # The result maps each volume id to its own metadata dict.
        results = volume_api.get_list_volumes_image_metadata(ctxt, ['fake1',
                                                                    'fake2'])
        expect_results = {'fake1': {'key1': 'value1', 'key2': 'value2'},
                          'fake2': {'key3': 'value3', 'key4': 'value4'}}
        self.assertEqual(expect_results, results)
    @mock.patch.object(QUOTAS, 'limit_check')
    @mock.patch.object(QUOTAS, 'reserve')
    def test_extend_attached_volume(self, reserve, limit_check):
        """Test extending an attached (in-use) volume at the API level."""
        volume = self._create_volume(self.context, size=2,
                                     status='available', host=CONF.host)
        volume_api = cinder.volume.api.API()

        # attached=True extension is rejected while the volume is available.
        self.assertRaises(exception.InvalidVolume,
                          volume_api._extend,
                          self.context,
                          volume, 3, attached=True)
        db.volume_update(self.context, volume.id, {'status': 'in-use'})
        volume.refresh()
        reserve.return_value = ["RESERVATION"]
        volume_api._extend(self.context, volume, 3, attached=True)
        volume.refresh()
        self.assertEqual('extending', volume.status)
        self.assertEqual('in-use', volume.previous_status)
        # Quota is reserved only for the delta (3 - 2 = 1 GiB).
        reserve.assert_called_once_with(self.context, gigabytes=1,
                                        gigabytes___DEFAULT__=1,
                                        project_id=volume.project_id)

        limit_check.side_effect = None
        reserve.side_effect = None
        db.volume_update(self.context, volume.id, {'status': 'in-use'})
        volume_api.scheduler_rpcapi = mock.MagicMock()
        volume_api.scheduler_rpcapi.extend_volume = mock.MagicMock()
        volume_api._extend(self.context, volume, 3, attached=True)
        request_spec = {
            'volume_properties': volume,
            'volume_type': self.vol_type,
            'volume_id': volume.id
        }
        # The extend request is handed off to the scheduler.
        volume_api.scheduler_rpcapi.extend_volume.assert_called_once_with(
            self.context, volume, 3, ["RESERVATION"], request_spec)

        # clean up
        self.volume.delete_volume(self.context, volume)
    @mock.patch.object(QUOTAS, 'limit_check')
    @mock.patch.object(QUOTAS, 'reserve')
    def test_extend_volume(self, reserve, limit_check):
        """Test volume can be extended at API level."""
        # create a volume and assign to host
        volume = self._create_volume(self.context, size=2,
                                     status='in-use', host=CONF.host)
        volume_api = cinder.volume.api.API()

        # Extend fails when status != available
        self.assertRaises(exception.InvalidVolume,
                          volume_api._extend,
                          self.context,
                          volume,
                          3)

        db.volume_update(self.context, volume.id, {'status': 'available'})
        volume.refresh()
        # Extend fails when new_size < orig_size
        self.assertRaises(exception.InvalidInput,
                          volume_api._extend,
                          self.context,
                          volume,
                          1)

        # Extend fails when new_size == orig_size
        self.assertRaises(exception.InvalidInput,
                          volume_api._extend,
                          self.context,
                          volume,
                          2)

        # works when new_size > orig_size
        reserve.return_value = ["RESERVATION"]
        volume_api._extend(self.context, volume, 3)
        volume.refresh()
        self.assertEqual('extending', volume.status)
        self.assertEqual('available', volume.previous_status)
        # Only the 1 GiB delta is reserved against the quota.
        reserve.assert_called_once_with(self.context, gigabytes=1,
                                        gigabytes___DEFAULT__=1,
                                        project_id=volume.project_id)

        # Test the quota exceeded
        db.volume_update(self.context, volume.id, {'status': 'available'})
        reserve.side_effect = exception.OverQuota(overs=['gigabytes'],
                                                  quotas={'gigabytes': 20},
                                                  usages={'gigabytes':
                                                          {'reserved': 5,
                                                           'in_use': 15}})
        self.assertRaises(exception.VolumeSizeExceedsAvailableQuota,
                          volume_api._extend, self.context,
                          volume, 3)
        db.volume_update(self.context, volume.id, {'status': 'available'})
        # Exceeding the per-volume limit maps to VolumeSizeExceedsLimit.
        limit_check.side_effect = exception.OverQuota(
            overs=['per_volume_gigabytes'], quotas={'per_volume_gigabytes': 2})
        self.assertRaises(exception.VolumeSizeExceedsLimit,
                          volume_api._extend, self.context,
                          volume, 3)

        # Test scheduler path
        limit_check.side_effect = None
        reserve.side_effect = None
        db.volume_update(self.context, volume.id, {'status': 'available'})
        volume_api.scheduler_rpcapi = mock.MagicMock()
        volume_api.scheduler_rpcapi.extend_volume = mock.MagicMock()
        volume_api._extend(self.context, volume, 3)
        request_spec = {
            'volume_properties': volume,
            'volume_type': self.vol_type,
            'volume_id': volume.id
        }
        volume_api.scheduler_rpcapi.extend_volume.assert_called_once_with(
            self.context, volume, 3, ["RESERVATION"], request_spec)

        # clean up
        self.volume.delete_volume(self.context, volume)
    @mock.patch.object(QUOTAS, 'limit_check')
    @mock.patch.object(QUOTAS, 'reserve')
    def test_extend_volume_with_volume_type_limit(self, reserve, limit_check):
        """Test extend is capped by the volume type's size limit."""
        volume_api = cinder.volume.api.API()
        volume = tests_utils.create_volume(
            self.context, size=2,
            volume_type_id=self.sized_vol_type['id'])

        volume_api.scheduler_rpcapi = mock.MagicMock()
        volume_api.scheduler_rpcapi.extend_volume = mock.MagicMock()
        # Extending within the type's limit succeeds ...
        volume_api._extend(self.context, volume, 3)
        # ... but extending beyond it is rejected.
        self.assertRaises(exception.InvalidInput,
                          volume_api._extend,
                          self.context,
                          volume,
                          5)
    def test_extend_volume_driver_not_initialized(self):
        """Test extend fails cleanly when the driver is not initialized."""
        # create a volume and assign to host
        fake_reservations = ['RESERVATION']
        volume = tests_utils.create_volume(self.context, size=2,
                                           status='available',
                                           host=CONF.host)
        self.volume.create_volume(self.context, volume)

        self.volume.driver._initialized = False

        self.assertRaises(exception.DriverNotInitialized,
                          self.volume.extend_volume,
                          self.context, volume, 3,
                          fake_reservations)

        volume.refresh()
        # The failure is reflected in the volume's status.
        self.assertEqual('error_extending', volume.status)

        # lets cleanup the mess.
        self.volume.driver._initialized = True
        self.volume.delete_volume(self.context, volume)
    def _test_extend_volume_manager_fails_with_exception(self, volume):
        """Helper: a driver failure during extend sets 'error_extending'.

        Verifies the size is left unchanged and that a user message is
        created recording the driver failure.
        """
        fake_reservations = ['RESERVATION']

        # Test driver exception
        with mock.patch.object(
                self.volume.driver, 'extend_volume',
                side_effect=exception.CinderException('fake exception')):
            with mock.patch.object(
                    self.volume.message_api, 'create') as mock_create:
                volume['status'] = 'extending'
                self.volume.extend_volume(self.context, volume, '4',
                                          fake_reservations)
                volume.refresh()
                # Size is unchanged and the failure is recorded.
                self.assertEqual(2, volume.size)
                self.assertEqual('error_extending', volume.status)
                mock_create.assert_called_once_with(
                    self.context,
                    message_field.Action.EXTEND_VOLUME,
                    resource_uuid=volume.id,
                    detail=message_field.Detail.DRIVER_FAILED_EXTEND)
    @mock.patch('cinder.compute.API')
    def _test_extend_volume_manager_successful(self, volume, nova_api):
        """Test volume can be extended at the manager level."""
        def fake_extend(volume, new_size):
            volume['size'] = new_size

        nova_extend_volume = nova_api.return_value.extend_volume
        fake_reservations = ['RESERVATION']
        orig_status = volume.status

        # Test driver success
        with mock.patch.object(self.volume.driver,
                               'extend_volume') as extend_volume:
            with mock.patch.object(QUOTAS, 'commit') as quotas_commit:
                # NOTE(review): this assigns return_value rather than
                # side_effect, so fake_extend is never actually executed --
                # looks unintentional, confirm before changing.
                extend_volume.return_value = fake_extend
                volume.status = 'extending'
                self.volume.extend_volume(self.context, volume, '4',
                                          fake_reservations)
                volume.refresh()
                self.assertEqual(4, volume.size)
                # The volume returns to its pre-extend status.
                self.assertEqual(orig_status, volume.status)
                quotas_commit.assert_called_with(
                    self.context,
                    ['RESERVATION'],
                    project_id=volume.project_id)
                if orig_status == 'in-use':
                    instance_uuids = [
                        attachment.instance_uuid
                        for attachment in volume.volume_attachment]
                    # Nova is notified so attached instances see the new size.
                    nova_extend_volume.assert_called_with(
                        self.context, instance_uuids, volume.id)
    def test_extend_volume_manager_available_fails_with_exception(self):
        """Driver failure while extending an available volume."""
        volume = tests_utils.create_volume(self.context, size=2,
                                           status='creating', host=CONF.host)
        self.volume.create_volume(self.context, volume)
        self._test_extend_volume_manager_fails_with_exception(volume)
        self.volume.delete_volume(self.context, volume)
    def test_extend_volume_manager_available_successful(self):
        """Successful extend of an available volume."""
        volume = tests_utils.create_volume(self.context, size=2,
                                           status='creating', host=CONF.host)
        self.volume.create_volume(self.context, volume)
        self._test_extend_volume_manager_successful(volume)
        self.volume.delete_volume(self.context, volume)
    def test_extend_volume_manager_in_use_fails_with_exception(self):
        """Driver failure while extending an attached (in-use) volume."""
        volume = tests_utils.create_volume(self.context, size=2,
                                           status='creating', host=CONF.host)
        self.volume.create_volume(self.context, volume)
        # Attach the volume to an instance before exercising the failure.
        instance_uuid = '12345678-1234-5678-1234-567812345678'
        attachment = db.volume_attach(self.context,
                                      {'volume_id': volume.id,
                                       'attached_host': 'fake-host'})
        db.volume_attached(self.context, attachment.id, instance_uuid,
                           'fake-host', 'vdb')
        volume.refresh()
        self._test_extend_volume_manager_fails_with_exception(volume)
        self.volume.detach_volume(self.context, volume.id, attachment.id)
        self.volume.delete_volume(self.context, volume)
    def test_extend_volume_manager_in_use_successful(self):
        """Successful extend of an attached (in-use) volume."""
        volume = tests_utils.create_volume(self.context, size=2,
                                           status='creating', host=CONF.host)
        self.volume.create_volume(self.context, volume)
        # Attach the volume to an instance before extending it.
        instance_uuid = '12345678-1234-5678-1234-567812345678'
        attachment = db.volume_attach(self.context,
                                      {'volume_id': volume.id,
                                       'attached_host': 'fake-host'})
        db.volume_attached(self.context, attachment.id, instance_uuid,
                           'fake-host', 'vdb')
        volume.refresh()
        self._test_extend_volume_manager_successful(volume)
        self.volume.detach_volume(self.context, volume.id, attachment.id)
        self.volume.delete_volume(self.context, volume)
@mock.patch('cinder.volume.rpcapi.VolumeAPI.extend_volume')
def test_extend_volume_with_volume_type(self, mock_rpc_extend):
elevated = context.get_admin_context()
project_id = self.context.project_id
db.volume_type_create(elevated, {'name': 'type', 'extra_specs': {}})
vol_type = db.volume_type_get_by_name(elevated, 'type')
volume_api = cinder.volume.api.API()
volume = volume_api.create(self.context, 100, 'name', 'description',
volume_type=vol_type)
try:
usage = db.quota_usage_get(elevated, project_id, 'gigabytes_type')
volumes_in_use = usage.in_use
except exception.QuotaUsageNotFound:
volumes_in_use = 0
self.assertEqual(100, volumes_in_use)
db.volume_update(self.context, volume.id, {'status': 'available'})
volume_api._extend(self.context, volume, 200)
mock_rpc_extend.called_once_with(self.context, volume, 200, mock.ANY)
try:
usage = db.quota_usage_get(elevated, project_id, 'gigabytes_type')
volumes_reserved = usage.reserved
except exception.QuotaUsageNotFound:
volumes_reserved = 0
self.assertEqual(100, volumes_reserved)
    def test_create_volume_from_sourcevol(self):
        """Test volume can be created from a source volume."""
        def fake_create_cloned_volume(volume, src_vref):
            pass

        self.mock_object(self.volume.driver, 'create_cloned_volume',
                         fake_create_cloned_volume)
        volume_src = tests_utils.create_volume(self.context,
                                               **self.volume_params)
        self.volume.create_volume(self.context, volume_src)
        volume_dst = tests_utils.create_volume(self.context,
                                               source_volid=volume_src['id'],
                                               **self.volume_params)
        self.volume.create_volume(self.context, volume_dst)
        volume_dst.refresh()
        # The clone becomes available once the driver call returns.
        self.assertEqual('available', volume_dst.status)
        self.volume.delete_volume(self.context, volume_dst)
        self.volume.delete_volume(self.context, volume_src)
    def test_create_volume_from_sourcevol_fail_bad_size(self):
        """Test cannot clone volume with bad volume size."""
        volume_src = tests_utils.create_volume(self.context,
                                               size=3,
                                               status='available',
                                               host=CONF.host)
        # A clone smaller than its source (1 GiB < 3 GiB) is rejected.
        self.assertRaises(exception.InvalidInput,
                          self.volume_api.create,
                          self.context,
                          size=1,
                          name='fake_name',
                          description='fake_desc',
                          source_volume=volume_src)
    @mock.patch('cinder.volume.api.API.list_availability_zones',
                return_value=({'name': 'nova', 'available': True},
                              {'name': 'az2', 'available': True}))
    def test_create_volume_from_sourcevol_fail_wrong_az(self, _mock_laz):
        """Test volume can't be cloned from an other volume in different az."""
        volume_api = cinder.volume.api.API()

        volume_src = self._create_volume(self.context,
                                         availability_zone='az2',
                                         **self.volume_params)
        self.volume.create_volume(self.context, volume_src)

        volume_src = db.volume_get(self.context, volume_src['id'])

        # Without an explicit AZ the clone inherits the source's AZ.
        volume_dst = volume_api.create(self.context,
                                       size=1,
                                       name='fake_name',
                                       description='fake_desc',
                                       source_volume=volume_src,
                                       volume_type=
                                       objects.VolumeType.get_by_name_or_id(
                                           self.context,
                                           self.vol_type['id']))
        self.assertEqual('az2', volume_dst['availability_zone'])

        # Requesting an AZ different from the source's is an error.
        self.assertRaises(exception.InvalidInput,
                          volume_api.create,
                          self.context,
                          size=1,
                          name='fake_name',
                          description='fake_desc',
                          source_volume=volume_src,
                          availability_zone='nova')
    @mock.patch('cinder.image.image_utils.qemu_img_info')
    def test_create_volume_from_sourcevol_with_glance_metadata(
            self, mock_qemu_info):
        """Test glance metadata can be correctly copied to new volume."""
        def fake_create_cloned_volume(volume, src_vref):
            pass

        self.mock_object(self.volume.driver, 'create_cloned_volume',
                         fake_create_cloned_volume)
        image_info = imageutils.QemuImgInfo()
        image_info.virtual_size = '1073741824'
        mock_qemu_info.return_value = image_info
        volume_src = self._create_volume_from_image()
        self.volume.create_volume(self.context, volume_src)
        volume_dst = tests_utils.create_volume(self.context,
                                               source_volid=volume_src['id'],
                                               **self.volume_params)
        self.volume.create_volume(self.context, volume_dst)
        self.assertEqual('available',
                         db.volume_get(context.get_admin_context(),
                                       volume_dst['id']).status)
        # TODO: review all tests in this file to make sure they are
        # using the defined db.api to access stuff rather than taking
        # shortcuts like the following (see LP Bug #1860817):
        # src_glancemeta = db.volume_get(context.get_admin_context(),
        # volume_src['id']).volume_glance_metadata
        src_glancemeta = db.volume_glance_metadata_get(
            context.get_admin_context(), volume_src['id'])
        dst_glancemeta = db.volume_glance_metadata_get(
            context.get_admin_context(), volume_dst['id'])
        # Every key copied to the clone must carry the source's value.
        for meta_src in src_glancemeta:
            for meta_dst in dst_glancemeta:
                if meta_dst.key == meta_src.key:
                    self.assertEqual(meta_src.value, meta_dst.value)
        self.volume.delete_volume(self.context, volume_src)
        self.volume.delete_volume(self.context, volume_dst)
    def test_create_volume_from_sourcevol_failed_clone(self):
        """Test src vol status will be restored by error handling code."""
        def fake_error_create_cloned_volume(volume, src_vref):
            db.volume_update(self.context, src_vref['id'], {'status': 'error'})
            raise exception.CinderException('fake exception')

        self.mock_object(self.volume.driver, 'create_cloned_volume',
                         fake_error_create_cloned_volume)
        volume_src = tests_utils.create_volume(self.context,
                                               **self.volume_params)
        self.assertEqual('creating', volume_src.status)
        self.volume.create_volume(self.context, volume_src)
        self.assertEqual('available', volume_src.status)
        volume_dst = tests_utils.create_volume(self.context,
                                               source_volid=volume_src['id'],
                                               **self.volume_params)
        self.assertEqual('creating', volume_dst.status)
        self.assertRaises(exception.CinderException,
                          self.volume.create_volume,
                          self.context,
                          volume_dst)
        # Source volume's status is still available and dst is set to error
        self.assertEqual('available', volume_src.status)
        self.assertEqual('error', volume_dst.status)
        self.volume.delete_volume(self.context, volume_dst)
        self.volume.delete_volume(self.context, volume_src)
    def test_clean_temporary_volume(self):
        """Test cleanup of the temporary volume created for migration."""
        def fake_delete_volume(ctxt, volume):
            volume.destroy()

        fake_volume = tests_utils.create_volume(self.context, size=1,
                                                host=CONF.host,
                                                migration_status='migrating')
        fake_new_volume = tests_utils.create_volume(self.context, size=1,
                                                    host=CONF.host)
        # 1. Only clean the db
        self.volume._clean_temporary_volume(self.context, fake_volume,
                                            fake_new_volume,
                                            clean_db_only=True)
        self.assertRaises(exception.VolumeNotFound,
                          db.volume_get, self.context,
                          fake_new_volume.id)

        # 2. Delete the backend storage
        fake_new_volume = tests_utils.create_volume(self.context, size=1,
                                                    host=CONF.host)
        with mock.patch.object(volume_rpcapi.VolumeAPI, 'delete_volume') as \
                mock_delete_volume:
            mock_delete_volume.side_effect = fake_delete_volume
            self.volume._clean_temporary_volume(self.context,
                                                fake_volume,
                                                fake_new_volume,
                                                clean_db_only=False)
            self.assertRaises(exception.VolumeNotFound,
                              db.volume_get, self.context,
                              fake_new_volume.id)

        # Check when the migrated volume is not in migration
        fake_new_volume = tests_utils.create_volume(self.context, size=1,
                                                    host=CONF.host)
        fake_volume.migration_status = 'non-migrating'
        fake_volume.save()
        # No cleanup happens when the source is not migrating; the new
        # volume survives with its migration_status cleared.
        self.volume._clean_temporary_volume(self.context, fake_volume,
                                            fake_new_volume)
        volume = db.volume_get(context.get_admin_context(),
                               fake_new_volume.id)
        self.assertIsNone(volume.migration_status)
    def test_check_volume_filters_true(self):
        """Test bootable as filter for true"""
        volume_api = cinder.volume.api.API()
        filters = {'bootable': 'TRUE'}

        # To convert filter value to True or False
        volume_api.check_volume_filters(filters)

        # Uppercase 'TRUE' is converted to boolean True.
        self.assertTrue(filters['bootable'])
    def test_check_volume_filters_false(self):
        """Test bootable as filter for false"""
        volume_api = cinder.volume.api.API()
        filters = {'bootable': 'false'}

        # To convert filter value to True or False
        volume_api.check_volume_filters(filters)

        # Lowercase 'false' is converted to boolean False.
        self.assertEqual(False, filters['bootable'])
    def test_check_volume_filters_invalid(self):
        """Test bootable as filter"""
        volume_api = cinder.volume.api.API()
        filters = {'bootable': 'invalid'}

        # To convert filter value to True or False
        volume_api.check_volume_filters(filters)

        # An unrecognized value is left truthy rather than rejected.
        self.assertTrue(filters['bootable'])
def test_update_volume_readonly_flag(self):
"""Test volume readonly flag can be updated at API level."""
# create a volume and assign to host
volume = tests_utils.create_volume(self.context,
admin_metadata={'readonly': 'True'},
**self.volume_params)
self.volume.create_volume(self.context, volume)
volume.status = 'in-use'
def sort_func(obj):
return obj['name']
volume_api = cinder.volume.api.API()
# Update fails when status != available
self.assertRaises(exception.InvalidVolume,
volume_api.update_readonly_flag,
self.context,
volume,
False)
volume.status = 'available'
# works when volume in 'available' status
volume_api.update_readonly_flag(self.context, volume, False)
volume.refresh()
self.assertEqual('available', volume.status)
admin_metadata = volume.volume_admin_metadata
self.assertEqual(1, len(admin_metadata))
self.assertEqual('readonly', admin_metadata[0]['key'])
self.assertEqual('False', admin_metadata[0]['value'])
# clean up
self.volume.delete_volume(self.context, volume)
    def test_secure_file_operations_enabled(self):
        """Test secure file operations setting for base driver.

        General, non network file system based drivers do not have
        anything to do with "secure_file_operations". This test verifies that
        calling the method always returns False.
        """
        ret_flag = self.volume.driver.secure_file_operations_enabled()
        self.assertFalse(ret_flag)
    @mock.patch.object(driver.BaseVD, 'secure_file_operations_enabled')
    def test_secure_file_operations_enabled_2(self, mock_secure):
        """Test the manager proxies the driver's secure-operations flag."""
        mock_secure.return_value = True
        vol = tests_utils.create_volume(self.context)
        result = self.volume.secure_file_operations_enabled(self.context,
                                                            vol)
        mock_secure.assert_called_once_with()
        self.assertTrue(result)
    @mock.patch('cinder.volume.flows.common.make_pretty_name',
                new=mock.MagicMock())
    @mock.patch('cinder.scheduler.rpcapi.SchedulerAPI.create_volume',
                return_value=None)
    @mock.patch('cinder.volume.flows.manager.create_volume.'
                'CreateVolumeFromSpecTask.execute',
                side_effect=exception.DriverNotInitialized())
    def test_create_volume_raise_rescheduled_exception(self, mock_execute,
                                                       mock_reschedule):
        """Test a reschedulable failure leaves the volume 'creating'."""
        # Create source volume
        test_vol = tests_utils.create_volume(self.context,
                                             **self.volume_params)
        test_vol_id = test_vol['id']
        self.assertRaises(exception.DriverNotInitialized,
                          self.volume.create_volume,
                          self.context, test_vol,
                          {'volume_properties': self.volume_params},
                          {'retry': {'num_attempts': 1, 'host': []}})
        self.assertTrue(mock_reschedule.called)
        volume = db.volume_get(context.get_admin_context(), test_vol_id)
        self.assertEqual('creating', volume['status'])
        # We increase the stats on entering the create method, but we must
        # have cleared them on reschedule.
        self.assertEqual({'_pool0': {'allocated_capacity_gb': 0}},
                         self.volume.stats['pools'])
    @mock.patch('cinder.volume.flows.manager.create_volume.'
                'CreateVolumeFromSpecTask.execute')
    def test_create_volume_raise_unrescheduled_exception(self, mock_execute):
        """Test a non-reschedulable failure puts the volume in 'error'."""
        # create source volume
        test_vol = tests_utils.create_volume(self.context,
                                             **self.volume_params)
        test_vol_id = test_vol['id']
        mock_execute.side_effect = exception.VolumeNotFound(
            volume_id=test_vol_id)
        self.assertRaises(exception.VolumeNotFound,
                          self.volume.create_volume,
                          self.context, test_vol,
                          {'volume_properties': self.volume_params,
                           'source_volid': fake.VOLUME_ID},
                          {'retry': {'num_attempts': 1, 'host': []}})
        volume = db.volume_get(context.get_admin_context(), test_vol_id)
        self.assertEqual('error', volume['status'])
        # Capacity stats are kept since no reschedule happened.
        self.assertEqual({'_pool0': {'allocated_capacity_gb': 1}},
                         self.volume.stats['pools'])
    @mock.patch('cinder.utils.api_clean_volume_file_locks')
    def test_cascade_delete_volume_with_snapshots(self, mock_api_clean):
        """Test volume deletion with dependent snapshots."""
        volume = tests_utils.create_volume(self.context, **self.volume_params)
        self.volume.create_volume(self.context, volume)
        snapshot = create_snapshot(volume['id'], size=volume['size'])
        self.volume.create_snapshot(self.context, snapshot)
        self.assertEqual(
            snapshot.id, objects.Snapshot.get_by_id(self.context,
                                                    snapshot.id).id)

        volume['status'] = 'available'
        volume['host'] = 'fakehost'

        volume_api = cinder.volume.api.API()

        # cascade=True removes the dependent snapshot with the volume.
        volume_api.delete(self.context,
                          volume,
                          cascade=True)
        mock_api_clean.assert_called_once_with(volume.id)
    @mock.patch('cinder.utils.api_clean_volume_file_locks')
    def test_cascade_delete_volume_with_snapshots_error(self, mock_api_clean):
        """Test cascade delete fails when a snapshot is not deletable."""
        volume = tests_utils.create_volume(self.context, **self.volume_params)
        self.volume.create_volume(self.context, volume)
        snapshot = create_snapshot(volume['id'], size=volume['size'])
        self.volume.create_snapshot(self.context, snapshot)
        self.assertEqual(
            snapshot.id, objects.Snapshot.get_by_id(self.context,
                                                    snapshot.id).id)

        # A snapshot still being created blocks the cascade delete.
        snapshot.update({'status': fields.SnapshotStatus.CREATING})
        snapshot.save()

        volume['status'] = 'available'
        volume['host'] = 'fakehost'

        volume_api = cinder.volume.api.API()

        self.assertRaises(exception.InvalidVolume,
                          volume_api.delete,
                          self.context,
                          volume,
                          cascade=True)
        mock_api_clean.assert_not_called()
    @mock.patch('cinder.utils.api_clean_volume_file_locks')
    def test_cascade_force_delete_volume_with_snapshots_error(self,
                                                              mock_api_clean):
        """Test volume force deletion with errored dependent snapshots."""
        volume = tests_utils.create_volume(self.context,
                                           host='fakehost')

        snapshot = create_snapshot(volume.id,
                                   size=volume.size,
                                   status=fields.SnapshotStatus.ERROR_DELETING)
        self.volume.create_snapshot(self.context, snapshot)

        volume_api = cinder.volume.api.API()

        # force=True lets the cascade proceed despite the errored snapshot.
        volume_api.delete(self.context, volume, cascade=True, force=True)

        snapshot = objects.Snapshot.get_by_id(self.context, snapshot.id)
        self.assertEqual('deleting', snapshot.status)

        volume = objects.Volume.get_by_id(self.context, volume.id)
        self.assertEqual('deleting', volume.status)
        mock_api_clean.assert_called_once_with(volume.id)
    def test_cascade_delete_volume_with_snapshots_in_other_project(self):
        """Test volume deletion with dependent snapshots in other project."""
        volume = tests_utils.create_volume(self.user_context,
                                           **self.volume_params)
        snapshot = create_snapshot(volume['id'], size=volume['size'],
                                   project_id=fake.PROJECT2_ID)
        self.volume.create_snapshot(self.context, snapshot)
        self.assertEqual(
            snapshot.id, objects.Snapshot.get_by_id(self.context,
                                                    snapshot.id).id)

        volume['status'] = 'available'
        volume['host'] = 'fakehost'

        volume_api = cinder.volume.api.API()

        # The owner cannot cascade-delete a snapshot from another project.
        self.assertRaises(exception.InvalidVolume,
                          volume_api.delete,
                          self.user_context,
                          volume,
                          cascade=True)
    @mock.patch.object(driver.BaseVD, 'get_backup_device')
    @mock.patch.object(driver.BaseVD, 'secure_file_operations_enabled')
    def test_get_backup_device(self, mock_secure, mock_get_backup):
        """Test get_backup_device returns a plain dict by default."""
        vol = tests_utils.create_volume(self.context)
        backup = tests_utils.create_backup(self.context, vol['id'])
        mock_secure.return_value = False
        mock_get_backup.return_value = (vol, False)
        result = self.volume.get_backup_device(self.context,
                                               backup)

        mock_get_backup.assert_called_once_with(self.context, backup)
        mock_secure.assert_called_once_with()
        expected_result = {'backup_device': vol, 'secure_enabled': False,
                           'is_snapshot': False}
        self.assertEqual(expected_result, result)
    @mock.patch.object(driver.BaseVD, 'get_backup_device')
    @mock.patch.object(driver.BaseVD, 'secure_file_operations_enabled')
    def test_get_backup_device_want_objects(self, mock_secure,
                                            mock_get_backup):
        """Test get_backup_device returns a BackupDeviceInfo object.

        Covers the want_objects=True variant of the call.
        """
        vol = tests_utils.create_volume(self.context)
        backup = tests_utils.create_backup(self.context, vol['id'])
        mock_secure.return_value = False
        mock_get_backup.return_value = (vol, False)
        result = self.volume.get_backup_device(self.context,
                                               backup, want_objects=True)

        mock_get_backup.assert_called_once_with(self.context, backup)
        mock_secure.assert_called_once_with()
        expected_result = objects.BackupDeviceInfo.from_primitive(
            {'backup_device': vol, 'secure_enabled': False,
             'is_snapshot': False},
            self.context)
        self.assertEqual(expected_result, result)
    @mock.patch('cinder.tests.fake_driver.FakeLoggingVolumeDriver.'
                'SUPPORTS_ACTIVE_ACTIVE', True)
    def test_set_resource_host_different(self):
        """_set_resource_host rewrites the host to this manager's host."""
        manager = vol_manager.VolumeManager(host='localhost-1@ceph',
                                            cluster='mycluster@ceph')
        volume = tests_utils.create_volume(self.user_context,
                                           host='localhost-2@ceph#ceph',
                                           cluster_name='mycluster@ceph')
        manager._set_resource_host(volume)
        volume.refresh()
        # The pool suffix (#ceph) is preserved while the host is replaced.
        self.assertEqual('localhost-1@ceph#ceph', volume.host)
    @mock.patch('cinder.tests.fake_driver.FakeLoggingVolumeDriver.'
                'SUPPORTS_ACTIVE_ACTIVE', True)
    def test_set_resource_host_equal(self):
        """_set_resource_host is a no-op when the host already matches."""
        manager = vol_manager.VolumeManager(host='localhost-1@ceph',
                                            cluster='mycluster@ceph')
        volume = tests_utils.create_volume(self.user_context,
                                           host='localhost-1@ceph#ceph',
                                           cluster_name='mycluster@ceph')
        with mock.patch.object(volume, 'save') as save_mock:
            manager._set_resource_host(volume)
            # No DB write occurs when nothing changes.
            save_mock.assert_not_called()
    def test_volume_attach_attaching(self):
        """Test volume_attach."""
        instance_uuid = '12345678-1234-5678-1234-567812345678'
        volume = tests_utils.create_volume(self.context, **self.volume_params)
        attachment = db.volume_attach(self.context,
                                      {'volume_id': volume['id'],
                                       'attached_host': 'fake-host'})
        # mark_attached=False leaves both statuses at 'attaching'.
        db.volume_attached(self.context, attachment['id'], instance_uuid,
                           'fake-host', 'vdb', mark_attached=False)
        volume_api = cinder.volume.api.API()
        volume = volume_api.get(self.context, volume['id'])
        self.assertEqual("attaching", volume['status'])
        self.assertEqual("attaching", volume['attach_status'])
    def test__append_volume_stats_with_pools(self):
        """Per-pool allocated capacity is merged into the driver's stats."""
        manager = vol_manager.VolumeManager()
        manager.stats = {'pools': {'pool1': {'allocated_capacity_gb': 20},
                                   'pool2': {'allocated_capacity_gb': 10}}}
        vol_stats = {'vendor_name': 'Open Source', 'pools': [
            {'pool_name': 'pool1', 'provisioned_capacity_gb': 31},
            {'pool_name': 'pool2', 'provisioned_capacity_gb': 21}]}
        manager._append_volume_stats(vol_stats)

        expected = {'vendor_name': 'Open Source', 'pools': [
            {'pool_name': 'pool1', 'provisioned_capacity_gb': 31,
             'allocated_capacity_gb': 20},
            {'pool_name': 'pool2', 'provisioned_capacity_gb': 21,
             'allocated_capacity_gb': 10}]}
        self.assertDictEqual(expected, vol_stats)
    def test__append_volume_stats_no_pools(self):
        """Backend-level stats receive the backend's allocated capacity."""
        manager = vol_manager.VolumeManager()
        manager.stats = {'pools': {'backend': {'allocated_capacity_gb': 20}}}
        vol_stats = {'provisioned_capacity_gb': 30}
        manager._append_volume_stats(vol_stats)

        expected = {'provisioned_capacity_gb': 30, 'allocated_capacity_gb': 20}
        self.assertDictEqual(expected, vol_stats)
    def test__append_volume_stats_no_pools_no_volumes(self):
        """Stats merge works for a freshly initialized (empty) manager."""
        manager = vol_manager.VolumeManager()
        # This is what gets set on c-vol manager's init_host method
        manager.stats = {'pools': {}, 'allocated_capacity_gb': 0}
        vol_stats = {'provisioned_capacity_gb': 30}
        manager._append_volume_stats(vol_stats)

        expected = {'provisioned_capacity_gb': 30, 'allocated_capacity_gb': 0}
        self.assertDictEqual(expected, vol_stats)
    def test__append_volume_stats_driver_error(self):
        """Malformed 'pools' data from the driver raises ProgrammingError."""
        manager = vol_manager.VolumeManager()
        self.assertRaises(exception.ProgrammingError,
                          manager._append_volume_stats, {'pools': 'bad_data'})
def test_default_tpool_size(self):
self.skipTest("Bug 1811663")
"""Test we can set custom tpool size."""
eventlet.tpool._nthreads = 10
self.assertListEqual([], eventlet.tpool._threads)
vol_manager.VolumeManager()
self.assertEqual(20, eventlet.tpool._nthreads)
self.assertListEqual([], eventlet.tpool._threads)
def test_tpool_size(self):
self.skipTest("Bug 1811663")
"""Test we can set custom tpool size."""
self.assertNotEqual(100, eventlet.tpool._nthreads)
self.assertListEqual([], eventlet.tpool._threads)
self.override_config('backend_native_threads_pool_size', 100,
group='backend_defaults')
vol_manager.VolumeManager()
self.assertEqual(100, eventlet.tpool._nthreads)
self.assertListEqual([], eventlet.tpool._threads)
eventlet.tpool._nthreads = 20
class VolumeTestCaseLocks(base.BaseVolumeTestCase):
    """Races between delete and create-from, exercised under real locks.

    MOCK_TOOZ = False makes the coordination locks real for these tests, so
    a create that depends on a resource being deleted blocks on the delete's
    lock and must then fail cleanly once the resource is gone.
    """
    # use the real tooz locking instead of the mocked coordination layer
    MOCK_TOOZ = False
    def test_create_volume_from_volume_delete_lock_taken(self):
        """A create-from-volume launched mid-delete fails with VolumeNotFound."""
        # create source volume
        src_vol = tests_utils.create_volume(self.context, **self.volume_params)
        src_vol_id = src_vol['id']
        # no lock
        self.volume.create_volume(self.context, src_vol)
        dst_vol = tests_utils.create_volume(self.context,
                                            source_volid=src_vol_id,
                                            **self.volume_params)
        orig_elevated = self.context.elevated
        # greenthreads spawned inside the mocked elevated(); collected so the
        # test can wait on them afterwards
        gthreads = []
        def mock_elevated(*args, **kwargs):
            # unset mock so it is only called once
            self.mock_object(self.context, 'elevated', orig_elevated)
            # we expect this to block and then fail
            t = eventlet.spawn(self.volume.create_volume,
                               self.context,
                               volume=dst_vol,
                               request_spec={'source_volid': src_vol_id})
            gthreads.append(t)
            return orig_elevated(*args, **kwargs)
        # mock something from early on in the delete operation and within the
        # lock so that when we do the create we expect it to block.
        self.mock_object(self.context, 'elevated', mock_elevated)
        # locked
        self.volume.delete_volume(self.context, src_vol)
        # we expect the volume create to fail with the following err since the
        # source volume was deleted while the create was locked. Note that the
        # volume is still in the db since it was created by the test prior to
        # calling manager.create_volume.
        with mock.patch('sys.stderr', new=io.StringIO()):
            self.assertRaises(exception.VolumeNotFound, gthreads[0].wait)
    def test_create_volume_from_snapshot_delete_lock_taken(self):
        """A create-from-snapshot launched mid-delete fails with SnapshotNotFound."""
        # create source volume
        src_vol = tests_utils.create_volume(self.context, **self.volume_params)
        # no lock
        self.volume.create_volume(self.context, src_vol)
        # create snapshot
        snap_id = create_snapshot(src_vol.id,
                                  size=src_vol['size'])['id']
        snapshot_obj = objects.Snapshot.get_by_id(self.context, snap_id)
        # no lock
        self.volume.create_snapshot(self.context, snapshot_obj)
        # create vol from snapshot...
        dst_vol = tests_utils.create_volume(self.context,
                                            snapshot_id=snap_id,
                                            source_volid=src_vol.id,
                                            **self.volume_params)
        orig_elevated = self.context.elevated
        # greenthreads spawned inside the mocked elevated()
        gthreads = []
        def mock_elevated(*args, **kwargs):
            # unset mock so it is only called once
            self.mock_object(self.context, 'elevated', orig_elevated)
            # We expect this to block and then fail
            t = eventlet.spawn(self.volume.create_volume, self.context,
                               volume=dst_vol,
                               request_spec={'snapshot_id': snap_id})
            gthreads.append(t)
            return orig_elevated(*args, **kwargs)
        # mock something from early on in the delete operation and within the
        # lock so that when we do the create we expect it to block.
        self.mock_object(self.context, 'elevated', mock_elevated)
        # locked
        self.volume.delete_snapshot(self.context, snapshot_obj)
        # we expect the volume create to fail with the following err since the
        # snapshot was deleted while the create was locked. Note that the
        # volume is still in the db since it was created by the test prior to
        # calling manager.create_volume.
        with mock.patch('sys.stderr', new=io.StringIO()):
            self.assertRaises(exception.SnapshotNotFound, gthreads[0].wait)
        # locked
        self.volume.delete_volume(self.context, src_vol)
        # make sure it is gone
        self.assertRaises(exception.VolumeNotFound, db.volume_get,
                          self.context, src_vol.id)
|
from __future__ import annotations
import typing
import attr
import enum
class TlFileLineType(enum.Enum):
    '''
    describes the two types of lines, either
    a comment or a line defining a type or function
    '''
    # a comment line
    COMMENT = "comment"
    # a line that defines a type or a function
    DEFINITION = "definition"
class TlClassTypeEnum(enum.Enum):
    ''' describes the `types` that the TL file has
    some are listed explicitly, so they are concrete, and others are implied
    by what the concrete types extend
    '''
    # a type listed explicitly in the TL file
    CONCRETE = "concrete"
    # a type only implied by what the concrete types extend
    ABSTRACT = "abstract"
class TlFileSectionType(enum.Enum):
    ''' describes the sections of the TL file
    '''
    # the section containing type definitions
    TYPES = "types"
    # the section containing function definitions
    FUNCTIONS = "functions"
@attr.s(auto_attribs=True, frozen=True)
class TlParameter:
    '''
    describes a parameter to either a type or function
    '''
    # name of the parameter
    param_name:str = attr.ib()
    # TL type name of the parameter
    param_type:str = attr.ib()
    # as of right now, only TlRootObject makes use of these
    required:bool = attr.ib(default=True)
    default_value:typing.Any = attr.ib(default=None)
@attr.s(auto_attribs=True, frozen=True)
class TlTypeDefinition:
    '''
    describes a description of a type defined in the TL file
    '''
    # name of the class/type
    class_name:str = attr.ib()
    # ordered parameters of the type
    parameters:typing.Sequence[TlParameter] = attr.ib()
    # name of the type this one extends, if any
    extends_from:typing.Optional[str] = attr.ib()
    # raw source line this definition was parsed from
    source_line:str = attr.ib()
    # line number of `source_line` in the TL file
    source_line_number:int = attr.ib()
    # whether the type is concrete or abstract (see TlClassTypeEnum)
    class_type:TlClassTypeEnum = attr.ib()
    # comments associated with this definition
    comments:typing.Sequence[TlComment] = attr.ib()
@attr.s(auto_attribs=True, frozen=True)
class TlFunctionDefinition:
    '''
    describes a description of a function defined in the TL file
    '''
    # name of the function
    function_name:str = attr.ib()
    # ordered parameters of the function
    parameters:typing.Sequence[TlParameter] = attr.ib()
    # TL type name the function returns
    return_type:str = attr.ib()
    # raw source line this definition was parsed from
    source_line:str = attr.ib()
    # line number of `source_line` in the TL file
    source_line_number:int = attr.ib()
    # comments associated with this definition
    # FIX: was `typing.sequence` (lowercase), which is not a valid attribute
    # of the typing module; because of the lazy annotations from
    # `from __future__ import annotations` it only blew up when the
    # annotation was resolved (e.g. via typing.get_type_hints).
    comments:typing.Sequence[TlComment] = attr.ib()
@attr.s(auto_attribs=True, frozen=True)
class TlComment:
    '''
    describes a comment in the TL file
    '''
    # the text of the comment
    comment_text:str = attr.ib()
    # line number the comment appears on.
    # FIX: annotated `str` before, contradicting the `source_line_number:int`
    # fields of TlTypeDefinition/TlFunctionDefinition; attrs does not enforce
    # the annotation, so this only corrects the declared type.
    source_line_number:int = attr.ib()
@attr.s(auto_attribs=True, frozen=True)
class TlFileDefinition:
    '''
    describes the definition of the entire TL file after parsing
    '''
    # every type parsed from the `types` section
    types:typing.Sequence[TlTypeDefinition] = attr.ib()
    # every function parsed from the `functions` section
    functions:typing.Sequence[TlFunctionDefinition] = attr.ib()
|
import logging
import requests
import boto3
from random import random
from django.core.management.base import BaseCommand
from django.db.models import Q
from botocore.exceptions import ClientError
from user.models import User
from django.conf import settings
from PIL import Image
from io import BytesIO
# Bounding box (width, height) that profile thumbnails are resized to fit.
SMALL_IMAGE_SIZE = 64, 64
# Region of the assets S3 bucket — TODO confirm for other deployments.
AWS_REGION = 'us-east-1'
# Module-level S3 client reused for every upload performed by this command.
client = boto3.client('s3', region_name=AWS_REGION)
# Use Django's configured 'django' logger.
logger = logging.getLogger('django')
class Command(BaseCommand):
    help = 'Script to resize all large profile images and reupload to S3.'

    def handle(self, *args, **options):
        """Resize each user's large profile image and upload it to S3.

        Only users that have a large image but no small one yet are
        processed; on success the user's ``small_profile_image_url`` is
        updated and saved.
        """
        try:
            users = User.objects.all().exclude(large_profile_image_url='').filter(
                Q(small_profile_image_url__isnull=True) |
                Q(small_profile_image_url__exact='')
            )
            for user in users:
                response = requests.get(user.large_profile_image_url)
                if response.status_code == 200:
                    image = Image.open(BytesIO(response.content))
                    image.thumbnail(SMALL_IMAGE_SIZE)
                    resized_image_content = BytesIO()
                    image.save(resized_image_content, format='PNG')
                    # random nonce keeps re-runs from overwriting earlier
                    # uploads and busts CDN caches
                    nonce = int(random() * 1000)
                    bucket = 'assets.bounties.network'
                    # FIX: the thumbnail is encoded as PNG above, so upload
                    # it under a .png key with an image/png content type.
                    # Previously the key said .jpg and the ContentType was
                    # copied from the *original* download (e.g. image/jpeg),
                    # so the stored object mislabeled its actual format.
                    key = '{}/userimages/{}-{}.png'.format(
                        settings.ENVIRONMENT, user.public_address, nonce)
                    try:
                        client.put_object(
                            Body=resized_image_content.getvalue(),
                            ContentType='image/png',
                            CacheControl='max-age=31536000',
                            Bucket=bucket,
                            ACL='public-read',
                            Key=key)
                        user.small_profile_image_url = 'https://{}/{}'.format(bucket, key)
                        user.save()
                        logger.info('uploaded resize profile image for: {}'.format(user.public_address))
                    except ClientError as e:
                        # log the S3 error but keep processing other users
                        logger.error(e.response['Error']['Message'])
        except Exception as e:
            # goes to rollbar
            logger.exception(e)
            raise e
|
#! /usr/bin/env python
class DataStore():
    """An agnostic data storage unit that holds and queries all data points, by all indices.

    Assumes no headers are included and that column 0 holds the gene/probe
    names for both the training and the testing data.

    Each data value is stored in a "record" dictionary keyed by every index
    useful for this data set::

        {'UniqueProbeID': ..., 'Patient_ID': ..., 'Status': ...,
         'DataSet': ..., 'Value': ...}

    Each index type has its own dictionary (PatientDict, ProbeDict,
    StatusDict, TrainvsTestDict) mapping an index value to the list of
    records relevant to it; e.g. self.PatientDict['23'] holds every record
    for patient 23 and self.TrainvsTestDict['TRAIN'] every training record.

    NOTE: modernized from Python 2 (has_key/iteritems/print statement) to
    Python 3 and deduplicated; external interface is unchanged.
    """

    def __init__(self, Training_Set_Lines, Test_Set_Lines, Patient_Status_Lines):
        """Parse the raw file lines and populate every index dictionary.

        The training and test files have the same number of lines; sorting
        both guarantees the same unique probe id is assigned to the same
        probe in both sets (verified by the original author).
        """
        Training_Set_Lines.sort()
        Test_Set_Lines.sort()  # now both lists are in the same order
        self.PatientDict = {}
        self.ProbeDict = {}
        self.StatusDict = {}
        self.TrainvsTestDict = {}
        self.ProbeNametoNumberDict = {}
        self.ProbeNumbertoNameDict = {}
        # patient id -> status, used to stamp each record with its status
        TempStatus = {}
        for line in Patient_Status_Lines:
            sline = line.split()
            TempStatus[sline[0].strip()] = sline[1].strip()
        # Consume the training data line by line and populate the indices.
        for UniqueProbeID, line in enumerate(Training_Set_Lines):
            DataSet = 'TRAIN'
            UniqueProbeID = str(UniqueProbeID + 1)
            sline = line.strip().split('\t')
            ProbeName = sline[0].strip()
            self.ProbeNametoNumberDict[ProbeName] = UniqueProbeID
            self.ProbeNumbertoNameDict[UniqueProbeID] = ProbeName
            for Index, Value in enumerate(sline[1:]):
                Patient_ID = str(Index + 1)  # training patients start at 1
                record = {'UniqueProbeID': UniqueProbeID,
                          'Patient_ID': Patient_ID,
                          'Status': TempStatus[Patient_ID],
                          'DataSet': DataSet,
                          'Value': Value.strip()}
                self._index_record(record)
        # Same for the testing data.  This loop must come after the training
        # loop: every probe id must already exist in ProbeDict.
        for UniqueProbeID, line in enumerate(Test_Set_Lines):
            DataSet = 'TEST'
            UniqueProbeID = str(UniqueProbeID + 1)
            sline = line.strip().split('\t')
            for Index, Value in enumerate(sline[1:]):
                Patient_ID = str(Index + 78)  # test patients start at 78
                if UniqueProbeID not in self.ProbeDict:
                    # A test probe that never appeared during training means
                    # the two files are out of sync -- bail out loudly, as
                    # the original implementation did.
                    import sys
                    print("UH-OH BIG TROUBLE.")
                    print("UniqueProbeID: %s" % UniqueProbeID)
                    sys.exit(89)
                record = {'UniqueProbeID': UniqueProbeID,
                          'Patient_ID': Patient_ID,
                          'Status': TempStatus[Patient_ID],
                          'DataSet': DataSet,
                          'Value': Value.strip()}
                self._index_record(record)

    def _index_record(self, record):
        """File *record* under all four indices (patient/probe/status/set)."""
        self.PatientDict.setdefault(record['Patient_ID'], []).append(record)
        self.ProbeDict.setdefault(record['UniqueProbeID'], []).append(record)
        self.StatusDict.setdefault(record['Status'], []).append(record)
        self.TrainvsTestDict.setdefault(record['DataSet'], []).append(record)

    def OutputSVMLightFormat(self, ProbeNames=None, DataSet='TRAIN'):
        """Return SVMLight-formatted lines for every patient in *DataSet*.

        ProbeNames, when given, is a dict (or set) of probe *names*; only
        records whose probe name appears in it are included.  Records whose
        value contains 'NaN' are always skipped.
        """
        OutList = []
        for Patient, DictList in self.PatientDict.items():
            if DictList[0]['DataSet'] != DataSet:
                # skip irrelevant dataset(s)
                continue
            Status = DictList[0]['Status']
            if Status == '0':
                OutLine = '-1 '
            elif Status == '1':
                OutLine = '1 '
            else:
                # FIX: the original fell through here and later crashed with
                # an UnboundLocalError; fail with a clear message instead.
                raise ValueError('unexpected patient status: %r' % Status)
            for Dict in DictList:
                if 'NaN' in Dict['Value']:
                    continue
                if (ProbeNames is not None and
                        self.ProbeNumbertoNameDict[Dict['UniqueProbeID']]
                        not in ProbeNames):
                    continue
                OutLine = OutLine + str(Dict['UniqueProbeID']) + ':' + str(Dict['Value']) + ' '
            OutList.append(OutLine)
        return OutList

    def SnarfAppendSVMAlphas(self, AlphaList):
        """Attach an 'Alpha' key to every record whose patient has an alpha.

        AlphaList is an iterable of (patient_id, alpha) pairs.
        """
        AlphaMap = dict(AlphaList)
        for Probe, RecordList in self.ProbeDict.items():
            for Record in RecordList:
                if Record['Patient_ID'] in AlphaMap:
                    Record['Alpha'] = AlphaMap[Record['Patient_ID']]

    def CalculateProbeInformativeness(self, Probes=None):
        """Return (probe_name, informativeness) pairs sorted ascending.

        A probe's informativeness is the mean, over its training records
        that carry an SVM alpha and a non-NaN value, of alpha * value signed
        by the class label (negative for status '0').  Requires
        SnarfAppendSVMAlphas to have been called first.
        """
        if Probes is None:
            Probes = self.ProbeDict.keys()
        InformativenessList = []
        for Probe in Probes:
            DeltaTotal = 0.0
            Numerator = 0.0
            for Record in self.ProbeDict[Probe]:
                if Record['DataSet'] != 'TRAIN':
                    continue
                if 'NaN' in Record['Value'] or 'Alpha' not in Record:
                    continue
                DeltaTotal += 1.0
                if Record['Status'] == '0':
                    Numerator -= Record['Alpha'] * float(Record['Value'])
                elif Record['Status'] == '1':
                    Numerator += Record['Alpha'] * float(Record['Value'])
            # A probe with no usable training records raises
            # ZeroDivisionError here, matching the original behaviour.
            InformativenessList.append(
                (self.ProbeNumbertoNameDict[Probe], Numerator / DeltaTotal))
        InformativenessList.sort(key=lambda pair: pair[1])
        return InformativenessList
def main():
    """Generate the required SVMLight-formatted train/test files.

    Expects command line options like::

        SimpleDb.py --train=training_set.tab --test=testing_set.tab \
                    --pats=patient_data.tab --tops=top100absfisher

    The --tops file is generated by 'calculate_fisher_criterion' in
    'fisher_criterion.py' with the 'ABS' parameter set to true.
    """
    import optparse
    parser = optparse.OptionParser()
    parser.add_option("--test")
    parser.add_option("--train")
    parser.add_option("--pats")
    parser.add_option("--tops")
    (options, args) = parser.parse_args()
    # Read the inputs; the [1:] slices drop each data file's header line.
    # FIX: use `with` blocks -- the original leaked every file handle.
    with open(options.tops) as f:
        Top100FishAbsolute = dict((line.strip(), '') for line in f)
    with open(options.train) as f:
        TrainingData = f.readlines()[1:]
    with open(options.test) as f:
        TestingData = f.readlines()[1:]
    with open(options.pats) as f:
        PatientStatusData = f.readlines()[1:]
    ExperimentalData = DataStore(TrainingData, TestingData, PatientStatusData)
    # Above is reading data in, below is outputting data.  The four output
    # files only differ in name, probe filter and dataset, so write them in
    # one loop instead of four copy-pasted stanzas.
    outputs = [
        ('train_4919.svm', None, 'TRAIN'),
        ('test_4919.svm', None, 'TEST'),
        ('test_100.svm', Top100FishAbsolute, 'TEST'),
        ('train_100.svm', Top100FishAbsolute, 'TRAIN'),
    ]
    for filename, probe_filter, dataset in outputs:
        svm_lines = ExperimentalData.OutputSVMLightFormat(probe_filter,
                                                          DataSet=dataset)
        with open(filename, 'w') as out:
            for line in svm_lines:
                out.write(line + '\n')
if __name__ == '__main__':
    main()
|
"""
IRC check.
"""
def select_irc_target(direction='forward'):
    """Collect the IRC jobs (for *direction*) that still need checking.

    A document qualifies when its irc_<direction>_status is one of
    job_launched, job_running or job_queueing.  Returns a list of the
    matching documents.
    """
    active_states = ["job_launched", "job_running", "job_queueing"]
    status_key = 'irc_{}_status'.format(direction)
    collection = db['qm_calculate_center']
    return list(collection.find({status_key: {"$in": active_states}}))
def check_irc_status(job_id):
    """Query PBS via ``qstat -f`` and map the job state to a status string.

    Returns "off_queue" when PBS no longer knows the job id, otherwise
    "job_running" (state R), "job_queueing" (state Q) or "job_launched".
    """
    proc = subprocess.Popen(['qstat', '-f', job_id],
                            stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE)
    out, err = proc.communicate()
    if "Unknown Job Id" in err.decode():
        return "off_queue"
    # PBS emits bytes, so decode before tokenizing.
    tokens = out.decode().strip().split()
    state = tokens[tokens.index('job_state') + 2]
    if state == 'R':
        return "job_running"
    if state == 'Q':
        return "job_queueing"
    return "job_launched"
def check_irc_content(target_path, direction='forward'):
    """Classify the result of irc_<direction>.out by inspecting its tail.

    Returns 'need opt' (after writing a <direction>_opt.in input for a
    follow-up geometry optimization), 'job_success', or one of several
    failure strings taken verbatim from the Q-Chem output.
    """
    reactant_path = path.join(target_path, 'reactant.xyz')
    irc_path = path.join(target_path, 'IRC/')
    # the config/ directory sits four levels above the IRC directory and
    # holds the opt_freq.lot level-of-theory template
    base_dir_path = path.join(path.dirname(path.dirname(
        path.dirname(path.dirname(irc_path)))), 'config')
    opt_lot = path.join(base_dir_path, 'opt_freq.lot')
    opt_name = '{}_opt.in'.format(direction)
    opt_in = path.join(irc_path, opt_name)
    irc_output = path.join(irc_path, 'irc_{}.out'.format(direction))
    with open(opt_lot) as f:
        config = [line.strip() for line in f]
    with open(reactant_path, 'r') as f1:
        lines = f1.readlines()
        # first line of an .xyz file is the atom count
        atom_number = int(lines[0])
    with open(irc_output, 'r') as f:
        f.seek(0, 2)
        fsize = f.tell()
        f.seek(max(fsize - 12280, 0), 0)  # Read last 12 kB of file
        lines = f.readlines()
    if lines[-2] == ' IRC backup failure\n' or lines[-2] == ' IRC failed final bisector step\n':
        with open(irc_output, 'r') as f:
            full_lines = f.readlines()
        # Sometimes irc success in forward(reverse) but fail in reverse(forward).
        # We wan't to catch the final structure to optimize.
        # But if "convergence criterion reached" in first direction fail in second that will cause reactant equal to product.
        if ' IRC -- convergence criterion reached.\n' in full_lines:
            # truncate at the first convergence marker, then walk backwards
            # to the last geometry block printed before it
            for idx, i in enumerate(full_lines):
                if i.startswith(' IRC -- convergence criterion reached.\n'):
                    break
            full_lines = full_lines[:idx]
            for idx2, j in enumerate(reversed(full_lines)):
                if j.startswith(' Standard Nuclear Orientation (Angstroms)\n'):
                    break
            geo = []
            # drop the atom index column, keep element symbol + coordinates
            for i in full_lines[-idx2 + 2: -idx2 + 2 + atom_number]:
                atom = i.split()[1:]
                geo.append(' '.join(atom))
            # write a Q-Chem $molecule input (charge 0, multiplicity 1)
            # followed by the level-of-theory template lines
            with open(opt_in, 'w') as f:
                f.write('$molecule\n{} {}\n'.format(0, 1))
                f.write('\n'.join(geo))
                f.write('\n$end\n\n')
                for line in config:
                    f.write(line + '\n')
        else:
            # no converged geometry anywhere: take the first geometry block
            # found in the tail instead
            for idx, i in enumerate(lines):
                if i.startswith(' Standard Nuclear Orientation (Angstroms)\n'):
                    break
            geo = []
            for i in lines[idx + 3: idx + 3 + atom_number]:
                atom = i.split()[1:]
                geo.append(' '.join(atom))
            with open(opt_in, 'w') as f:
                f.write('$molecule\n{} {}\n'.format(0, 1))
                f.write('\n'.join(geo))
                f.write('\n$end\n\n')
                for line in config:
                    f.write(line + '\n')
        return 'need opt'
    elif lines[-5] == ' * Thank you very much for using Q-Chem. Have a nice day. *\n':
        return 'job_success'
    elif lines[-2] == ' Bad initial gradient\n':
        return 'Bad initial gradient'
    elif lines[-2] == ' IRC --- Failed line search\n':
        return 'Failed line search'
    elif lines[-6] == ' Error in gen_scfman\n':
        return 'Error in gen_scfman'
    else:
        return 'unknown fail information'
def generate_irc_product_xyz(target, direction='forward'):
    """Write IRC/<direction>.xyz holding the final IRC endpoint geometry.

    Truncates irc_<direction>.out at a 'convergence criterion reached'
    marker and extracts the last 'Standard Nuclear Orientation' geometry
    block printed before it.
    """
    irc_path = path.join(target['path'], 'IRC')
    reactant_path = path.join(target['path'], 'reactant.xyz')
    output_name = 'irc_{}.out'.format(direction)
    output = path.join(irc_path, output_name)
    name = path.join(irc_path, '{}.xyz'.format(direction))
    with open(reactant_path, 'r') as f1:
        lines = f1.readlines()
        # first line of an .xyz file is the atom count
        atom_number = int(lines[0])
    with open(output, 'r') as f:
        full_lines = f.readlines()
        count = 1
        # NOTE(review): `count` starts at 1 and the break fires as soon as it
        # reaches 2, i.e. at the FIRST convergence marker; the counter looks
        # intended to find the second one -- confirm intent.
        for idx, i in enumerate(full_lines):
            if i.startswith(' IRC -- convergence criterion reached.\n'):
                count += 1
            if count == 2:
                break
        full_lines = full_lines[:idx]
        # walk backwards to the last geometry block before the marker
        for idx2, j in enumerate(reversed(full_lines)):
            if j.startswith(' Standard Nuclear Orientation (Angstroms)\n'):
                break
        geo = []
        # drop the atom index column, keep element symbol + coordinates
        for i in full_lines[-idx2 + 2: -idx2 + 2 + atom_number]:
            atom = i.split()[1:]
            geo.append(' '.join(atom))
        with open(name, 'w') as f:
            f.write(str(atom_number))
            f.write('\n\n')
            f.write('\n'.join(geo))
def check_irc_jobs():
    """Check all launched/running/queueing IRC jobs and update their status.

    Steps (performed for the forward direction, then the reverse one):
    1. select jobs to check
    2. check the job pbs-status, e.g., qstat -f "job_id"
    3. check job content
    4. update with new status
    """
    # FIX: the forward and reverse passes were two near-identical copies of
    # the same ~35 lines; they are now a single helper called per direction.
    for direction in ('forward', 'reverse'):
        _check_irc_jobs_for_direction(direction)

def _check_irc_jobs_for_direction(direction):
    """Run the select/qstat/content-check/update cycle for one direction."""
    qm_collection = db['qm_calculate_center']
    irc_status = 'irc_{}_status'.format(direction)
    # 1. select jobs to check
    for target in select_irc_target(direction=direction):
        job_id = target['irc_{}_jobid'.format(direction)]
        # 2. check the job pbs status
        new_status = check_irc_status(job_id)
        if new_status == "off_queue":
            # 3. the job left the queue; inspect its output content
            new_status = check_irc_content(target['path'], direction=direction)
        # 4. compare with the stored status (job_launched/job_running/...)
        # and only write back when something changed
        orig_status = target[irc_status]
        if orig_status != new_status:
            if new_status == 'job_success':
                generate_irc_product_xyz(target, direction=direction)
                update_field = {
                    irc_status: new_status, 'irc_equal': 'waiting for check'
                }
            elif new_status == 'need opt':
                opt_status = 'opt_{}_status'.format(direction)
                update_field = {
                    irc_status: new_status, opt_status: 'job_unrun'
                }
            else:
                update_field = {
                    irc_status: new_status
                }
            qm_collection.update_one(target, {"$set": update_field}, True)
"""
IRC success check.
This is to check whether irc forward and reverse direction equal to expectation.
"""
def select_irc_equal_target():
    """Collect targets whose forward AND reverse IRC runs finished and
    whose equality check is still pending.

    Returns a list of the matching documents.
    """
    finished = ['job_success', 'opt_success']
    query = {'$and': [
        {"irc_forward_status": {"$in": finished}},
        {'irc_reverse_status': {'$in': finished}},
        {'irc_equal': {'$in': ['waiting for check']}},
    ]}
    collection = db['qm_calculate_center']
    return list(collection.find(query))
def check_irc_equal():
    """Classify finished IRC forward/reverse pairs and queue follow-ups.

    For each pending target whose both directions succeeded, classify the
    endpoints via check_irc_equal_status(); classifications that identify a
    reactant/product pair also queue the energy job (energy_status set to
    job_unrun).
    """
    targets = select_irc_equal_target()
    qm_collection = db['qm_calculate_center']
    for target in targets:
        if target['irc_forward_status'] in ['job_success', 'opt_success'] and target['irc_reverse_status'] in ['job_success', 'opt_success']:
            new_status = check_irc_equal_status(target)
            orig_status = target['irc_equal']
            if orig_status != new_status:
                # these four outcomes mean reactant/product were identified,
                # so the follow-up energy calculation can be queued
                if new_status == 'forward equal to reactant and reverse equal to product' or new_status == 'reverse equal to reactant and forward equal to product' or new_status == 'reverse equal to reactant but forward does not equal to product' or new_status == 'forward equal to reactant but reverse does not equal to product':
                    update_field = {
                        'irc_equal': new_status, 'energy_status': 'job_unrun'
                    }
                    qm_collection.update_one(
                        target, {"$set": update_field}, True)
                else:
                    update_field = {
                        'irc_equal': new_status
                    }
                    qm_collection.update_one(
                        target, {"$set": update_field}, True)
        # NOTE(review): this branch assumes opt_forward_status and
        # opt_reverse_status always exist on the document; if either key is
        # missing this raises KeyError -- confirm against the schema.
        elif target['opt_reverse_status'] == 'job_fail' or target['opt_forward_status'] == 'job_fail':
            update_field = {
                'irc_equal': 'opt fail'
            }
            qm_collection.update_one(target, {"$set": update_field}, True)
def check_irc_equal_status(target):
    """Classify how the IRC endpoints relate to reactant and product.

    Compares the InChI keys of the reactant, the (SSM) product and the two
    IRC endpoint geometries and returns a descriptive status string used by
    check_irc_equal().
    """
    irc_path = path.join(target['path'], 'IRC/')
    reactant_path = path.join(target['path'], 'reactant.xyz')
    product_path = path.join(target['path'], 'ssm_product.xyz')
    forward_output = path.join(irc_path, 'forward.xyz')
    reverse_output = path.join(irc_path, 'reverse.xyz')
    # FIX: each inchiKey used to be recomputed for every single comparison
    # (up to ~20 write('inchiKey') calls); compute each of the four exactly
    # once -- the comparison logic below is unchanged.
    reactant = xyz_to_pyMol(reactant_path).write('inchiKey').strip()
    product = xyz_to_pyMol(product_path).write('inchiKey').strip()
    forward = xyz_to_pyMol(forward_output).write('inchiKey').strip()
    reverse = xyz_to_pyMol(reverse_output).write('inchiKey').strip()
    if forward == reverse:
        return 'forward equal to reverse'
    elif reactant == forward and reactant == reverse:
        return 'forward and reverse equal to reactant'
    elif product == forward and product == reverse:
        return 'forward and reverse equal to product'
    elif reactant == forward and product == reverse:
        return 'forward equal to reactant and reverse equal to product'
    elif reactant == reverse and product == forward:
        return 'reverse equal to reactant and forward equal to product'
    elif reactant == reverse and product != forward:
        return 'reverse equal to reactant but forward does not equal to product'
    elif reactant != reverse and product == forward:
        return 'reverse does not equal to reactant but forward equal to product'
    elif reactant == forward and product != reverse:
        return 'forward equal to reactant but reverse does not equal to product'
    elif reactant != forward and product == reverse:
        return 'forward does not equal to reactant but reverse equal to product'
    else:
        return 'unknown (Maybe both of them are not equal to reactant&product)'
"""
IRC opt check.
"""
def select_irc_opt_target(direction='forward'):
    """Collect the IRC optimization jobs (for *direction*) still in flight.

    A document qualifies when its opt_<direction>_status is one of
    opt_job_launched, opt_job_running or opt_job_queueing.  Returns a list
    of the matching documents.
    """
    pending_states = ["opt_job_launched", "opt_job_running", "opt_job_queueing"]
    status_key = 'opt_{}_status'.format(direction)
    collection = db['qm_calculate_center']
    return list(collection.find({status_key: {"$in": pending_states}}))
def check_irc_opt_job_status(job_id):
    """Query PBS via ``qstat -f`` for an optimization job and map its state.

    Returns "off_queue" when PBS no longer knows the job id, otherwise
    "opt_job_running" (state R), "opt_job_queueing" (state Q) or
    "opt_job_launched".
    """
    proc = subprocess.Popen(['qstat', '-f', job_id],
                            stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE)
    out, err = proc.communicate()
    if "Unknown Job Id" in err.decode():
        return "off_queue"
    # PBS emits bytes, so decode before tokenizing.
    tokens = out.decode().strip().split()
    state = tokens[tokens.index('job_state') + 2]
    if state == 'R':
        return "opt_job_running"
    if state == 'Q':
        return 'opt_job_queueing'
    return "opt_job_launched"
def check_irc_opt_content(dir_path, direction='forward'):
    """Extract the optimized geometry for *direction* from its Q-Chem output.

    Writes IRC/<direction>.xyz from <direction>_opt.out on success.
    Returns 'job_success' or 'job_fail'.
    """
    irc_path = path.join(dir_path, "IRC")
    xyzname = '{}.xyz'.format(direction)
    output = path.join(irc_path, xyzname)
    output_path = path.join(irc_path, '{}_opt.out'.format(direction))
    # (removed: an unused reactant.xyz path that was built with os.path,
    # inconsistent with the `path` alias used everywhere else)
    try:
        q = QChem(outputfile=output_path)
        q.create_geo_file(output)
        return 'job_success'
    except Exception:
        # FIX: was a bare `except:`, which would also swallow
        # KeyboardInterrupt/SystemExit; any parsing failure means the
        # optimization did not produce a usable geometry.
        return 'job_fail'
def check_irc_opt_jobs():
    """Check all in-flight IRC-optimization jobs and update their status.

    Steps (performed for the forward direction, then the reverse one):
    1. select jobs to check
    2. check the job pbs-status, e.g., qstat -f "job_id"
    3. check job content
    4. update with new status
    """
    qm_collection = db['qm_calculate_center']
    # FIX: the forward and reverse passes were two near-identical copies of
    # the same code; they are now a single loop over the direction.
    for direction in ('forward', 'reverse'):
        irc_opt_jobid = 'irc_{}_opt_jobid'.format(direction)
        irc_status = 'irc_{}_status'.format(direction)
        irc_opt_status = 'opt_{}_status'.format(direction)
        # 1. select jobs to check
        for target in select_irc_opt_target(direction=direction):
            # 2. check the job pbs status
            new_status = check_irc_opt_job_status(target[irc_opt_jobid])
            if new_status == "off_queue":
                # 3. the job left the queue; inspect its output content
                new_status = check_irc_opt_content(target['path'],
                                                   direction=direction)
            # 4. only write back when the status actually changed
            if target[irc_opt_status] != new_status:
                if new_status == 'job_success':
                    update_field = {
                        irc_status: 'opt_success',
                        irc_opt_status: new_status,
                        'irc_equal': 'waiting for check'
                    }
                else:
                    update_field = {
                        irc_status: 'opt_fail',
                        irc_opt_status: new_status
                    }
                qm_collection.update_one(target, {"$set": update_field}, True)
|
"""
Iniciar la aplicacion.
"""
import random
from dog_api import DogAPI
from podio import Podio
def main():
    """
    Run the dog-breed guessing game (CLI loop).

    Loads the breed list and the score podium, then repeatedly asks the
    player to identify the breed shown in a random dog image, scoring a
    percentage of correct answers per round.
    """
    ancho = 72
    print('=' * ancho)
    print('Jagua Maravichu'.center(ancho))
    print('=' * ancho)
    api = DogAPI()
    podio = Podio()
    # api.actualizar_razas()
    razas = api.obtener_razas()
    podio.cargar()
    while True:
        jugar = input("¿Querés jugar? (S/N): ")
        if jugar in ('N', 'n'):
            break
        nombre = input('¿Cuál es tu nombre?: ')
        cantidad = int(input('¿Cuántas adivinanzas querés hacer?: '))
        correctas = 0
        incorrectas = 0
        puntos = 0
        for i in range(cantidad):
            if not api.hay_conexion():
                print('No hay conexión a Internet </3')
                break
            print((' Pregunta No. ' +
                   str(i + 1) + ' ').center(ancho, '-'))
            imagen = api.obtener_imagen()
            print(imagen['url'])
            print('¿Qué raza es?')
            # BUGFIX: random.choices() samples WITH replacement, so a
            # distractor could duplicate the correct breed; then
            # opciones.index() could point at a different position than the
            # one the player chose, mis-scoring a correct answer. Sample
            # distinct wrong breeds instead.
            distractoras = [raza for raza in razas if raza != imagen['raza']]
            opciones = random.sample(distractoras, k=min(2, len(distractoras)))
            opciones.append(imagen['raza'])
            random.shuffle(opciones)
            for indice, opcion in enumerate(opciones):
                print(str(indice) + ') ' + opcion)
            respuesta = int(input("Escribí tu respuesta: "))
            if respuesta == opciones.index(imagen['raza']):
                print('Correcto!! Sabés mucho sobre razas caninas :D')
                correctas = correctas + 1
            else:
                print('Incorrecto. Tenés que tomar un curso sobre razas caninas D:')
                incorrectas = incorrectas + 1
        if cantidad > 0:
            # Score is the percentage of correct answers, 2 decimals.
            puntos = round(((correctas / cantidad) * 100), 2)
        print(nombre + ', tu puntaje final es: ' + str(puntos))
        podio.ingresar_puntaje(nombre, puntos)
        print(' Puntuaciones '.center(ancho, '='))
        podio.desplegar()
        podio.actualizar()
    print('Chauuu!! Nos vemos pronto woof woof')
# Run the game only when executed directly (not on import).
if __name__ == '__main__':
    main()
|
from collections import defaultdict
from typing import Iterator
from ..types.openapi import OpenApiSchema, OpenApiSpec
from ..types.python import (
PythonClassAttribute,
PythonEnumAttribute,
PythonImport,
)
from .constants import MODELS_PACKAGE
from .formatter import enum2attribute, property2attribute
from .reference import PythonContext
# Maps OpenAPI primitive type names to the Python builtin name emitted
# in generated attribute annotations.
BASIC_TYPES: dict[str, str] = {
    "integer": "int",
    "string": "str",
    "number": "float",
    "boolean": "bool",
}
def find_references(schema: OpenApiSchema) -> Iterator[str]:
    """
    Yield every reference this schema requires, recursing through nested
    type composition (array items, properties, additional properties).

    Yields both `$ref` strings and typing helper names ("List", "Dict",
    "Any") that the generated attribute types will need to import.
    """
    if schema.ref:
        yield schema.ref

    # A non-empty `items` means this is an array type: the generated
    # annotation needs typing.List, plus the references of the item
    # schema(s) themselves.
    if schema.items:
        yield "List"
        item_schemas = (
            schema.items if isinstance(schema.items, list) else [schema.items]
        )
        for item_schema in item_schemas:
            yield from find_references(item_schema)
    if isinstance(schema.additional_items, OpenApiSchema):
        yield from find_references(schema.additional_items)

    # Non-empty `properties` means this is an object type: recurse into
    # each declared property.
    for property_schema in schema.properties.values():
        yield from find_references(property_schema)
    if schema.additional_properties:
        yield "Dict"
    # additional_properties == True is treated as dict[str, Any]
    if schema.additional_properties is True:
        yield "Any"
    # additional_properties as a schema: recurse into its contents
    if isinstance(schema.additional_properties, OpenApiSchema):
        yield from find_references(schema.additional_properties)

    if schema.nullable:
        # TODO [KnownLimitation]: NULLABLE_TYPES
        # Need to add Optional to the imports if nullable is True
        pass
def update_model_imports(
    openapi_spec: OpenApiSpec, class_contexts: dict[str, PythonContext]
) -> None:
    """
    For every model context, discover which other models (and typing
    helpers) its schema references, and record the resulting import list
    on the context so the generator can emit the right import lines.
    """
    typing_helpers = ("Any", "List", "Dict", "Optional")
    for context in class_contexts.values():
        # Only model-package contexts carry component schemas.
        if context.package_name != MODELS_PACKAGE:
            continue
        schema = openapi_spec.components.schemas[context.openapi_name]
        grouped: dict[str, set[str]] = defaultdict(set)
        for reference in find_references(schema):
            if reference == context.ref:
                # A model may reference itself; no import needed.
                continue
            if reference in typing_helpers:
                grouped["typing"].add(reference)
            else:
                # Resolve the referenced model through the context table
                # to get its relative module path and class name.
                target = class_contexts[reference]
                grouped[f".{target.module_name}"].add(target.class_name)
        context.module_imports = [
            PythonImport(module_name, sorted(names))
            for module_name, names in grouped.items()
        ]
def find_attribute_type(
    *,
    class_contexts: dict[str, PythonContext],
    current_class_name: str,
    schema: OpenApiSchema,
) -> str:
    """
    Resolve the Python type annotation string for a property schema.

    Handles `$ref` (other generated models, with forward-reference
    quoting for self-references), basic scalar types, arrays, and
    additional-properties dicts.

    Raises:
        ValueError: if the schema cannot be mapped to a supported type.
    """
    # TODO [KnownLimitation]: NULLABLE_TYPES
    # Need to wrap the type in Optional if nullable is True
    if schema.ref:
        discovered_class_name = class_contexts[schema.ref].class_name
        if discovered_class_name == current_class_name:
            # Handle the case of forward references gracefully
            return f'"{discovered_class_name}"'
        else:
            return discovered_class_name
    # TODO [KnownLimitation]: MULTIPLE_SCHEMA_TYPES
    # Need to handle Union type if there are multiple types involved.
    # find_references should check to see if a union is needed
    elif basic_type := next(filter(None, map(BASIC_TYPES.get, schema.type)), None):
        return basic_type
    elif schema.items:
        if (
            schema.type is not None
            and len(schema.type) > 0
            and "array" not in schema.type
        ):
            raise ValueError(f"Invalid schema reference {current_class_name}: {schema}")
        # TODO [KnownLimitation]: MULTIPLE_ITEM_TYPES
        # Need to handle Union type in this event.
        # find_references should check to see if a union is needed
        if isinstance(schema.items, list):
            item = schema.items[0]
        else:
            item = schema.items
        discovered_type = find_attribute_type(
            class_contexts=class_contexts,
            current_class_name=current_class_name,
            schema=item,
        )
        return f"List[{discovered_type}]"
    # TODO [KnownLimitation]: PROPS_AND_ADDITIONAL_PROPS
    # Need to handle the case where there are both known and unknown prop types
    # TODO [KnownLimitation]: INNER_PROPS
    # Need to handle the case where a property has properties
    elif schema.properties:
        raise ValueError("Cannot handle nested properties")
    elif schema.additional_properties is True:
        return "Dict[str, Any]"
    elif isinstance(schema.additional_properties, OpenApiSchema):
        discovered_type = find_attribute_type(
            class_contexts=class_contexts,
            current_class_name=current_class_name,
            schema=schema.additional_properties,
        )
        return f"Dict[str, {discovered_type}]"
    else:
        # BUGFIX: the bare `raise ValueError` gave no clue which schema
        # failed to resolve; include the context in the message.
        raise ValueError(
            f"Unsupported schema for {current_class_name}: {schema}"
        )
def update_model_attributes(
    openapi_spec: OpenApiSpec, class_contexts: dict[str, PythonContext]
) -> None:
    """
    Populate each model context with either enum attributes (for string
    enums) or class attributes (for object schemas), validating that the
    component schema is a shape the generator supports.
    """
    for context in class_contexts.values():
        # Only model-package contexts carry component schemas.
        if context.package_name != MODELS_PACKAGE:
            continue
        schema = openapi_spec.components.schemas[context.openapi_name]

        # A schema cannot be both an enum and an object.
        if schema.enum and schema.properties:
            raise ValueError(
                f"Component Schema is not a valid enum or object type: {schema}"
            )
        # TODO [KnownLimitation]: TOP_LEVEL_ADDITIONAL_PROPS
        # Need to handle the case where the component schema has additional props
        if schema.additional_properties:
            raise ValueError(
                f"Component Schema is not a valid enum or object type: {schema}"
            )
        # TODO [KnownLimitation]: TOP_LEVEL_ARRAY
        # Need to handle the case where the component schema is an array
        if schema.items or schema.additional_items:
            raise ValueError(
                f"Component Schema is not a valid enum or object type: {schema}"
            )

        if schema.enum:
            # TODO [KnownLimitation]: NON_STRING_ENUM
            # Handle the case where enums are not strings.
            if schema.type != ["string"]:
                raise ValueError(f"Only string enum values are supported: {schema}")
            context.enum_attributes = [
                PythonEnumAttribute(
                    attribute_name=enum2attribute(value),
                    attribute_value=f'"{value}"',
                )
                for value in schema.enum
            ]
        else:
            context.class_attributes = [
                PythonClassAttribute(
                    attribute_name=property2attribute(property_name),
                    openapi_name=property_name,
                    attribute_type=find_attribute_type(
                        class_contexts=class_contexts,
                        current_class_name=context.class_name,
                        schema=property_schema,
                    ),
                    attribute_default="None",
                )
                for property_name, property_schema in schema.properties.items()
            ]
|
# Generated by Django 3.1.5 on 2021-02-10 18:31
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated by Django (3.1.5). Alters Snapshot.verified so the
    # column accepts NULL while keeping False as the default for new rows.

    dependencies = [
        ('core', '0003_auto_20210211_0226'),
    ]

    operations = [
        migrations.AlterField(
            model_name='snapshot',
            name='verified',
            field=models.BooleanField(default=False, null=True),
        ),
    ]
|
# Problem:
# Write a program that introduces a product name and checks whether it is a fruit or a vegetable.
# -> Fruit are: banana, apple, kiwi, cherry, lemon and grapes
# -> Vegetables are: tomato, cucumber, pepper and carrot
# -> All others are unknown ;
# Display "fruit", "vegetable" or "unknown" according to the introduced product.
# Classify the entered product by set membership instead of a long
# if/elif chain — one branch per category, same output per input.
product = input()
if product in ("banana", "apple", "kiwi", "cherry", "lemon", "grapes"):
    print("fruit")
elif product in ("tomato", "cucumber", "pepper", "carrot"):
    print("vegetable")
else:
    print("unknown")
|
import pandas as pd
from pandas import json_normalize
import json
import os
from pathlib import Path
# Convert every *.json file under ./json_files into a CSV with the
# flattened "articles" records, then collect the CSV paths.
path = "./json_files"
file_list = os.listdir(path)
print("file_list: {}".format(file_list))
path_fixed = "./json_files/csv_files"
# BUGFIX: the output directory may not exist on a fresh checkout, in
# which case the first df.to_csv() call raised FileNotFoundError.
os.makedirs(path_fixed, exist_ok=True)
for file in file_list:
    if file.endswith(".json"):
        print("file: {}".format(file))
        file_name = file.split(".")[0]
        print("file_name: {}".format(file_name))
        with open(path + "/" + file) as json_file:
            json_data = json.load(json_file)
            # Flatten the nested "articles" records into a table.
            df = json_normalize(json_data["articles"])
            print("df: {}".format(df))
            df.to_csv(path_fixed + "/" + file_name + ".csv")
# Sorted so the concatenation order is deterministic.
DATASET = sorted(Path(path_fixed).glob("*.csv"))
def dataframe_from_csv(target):
    """Read one CSV and strip whitespace from its column names."""
    frame = pd.read_csv(target)
    return frame.rename(columns=lambda name: name.strip())
def dataframe_from_csvs(targets):
    """Load every CSV in *targets* and concatenate them vertically."""
    frames = [dataframe_from_csv(target) for target in targets]
    return pd.concat(frames)
# Merge all generated CSVs into one frame and persist the result.
DATA = dataframe_from_csvs(DATASET)
print(DATA)
DATA.to_csv("DATA.csv")
#!/usr/bin/env python3
from texttable import Texttable
from .fetch_coins import fetch_crypto_data
from .args_parser import parse_args
def generate_table(headers, data):
    """
    Build a Texttable from the given column spec and coin records.

    headers: list of {'title': ..., 'key': ...} dicts; the '#' column has
    no key and is filled with the 1-based row number.
    data: list of coin records indexed by each header's 'key'.
    """
    table = Texttable(max_width=0)
    title_row = [column['title'] for column in headers]
    body_rows = []
    for position, record in enumerate(data):
        row = []
        for column in headers:
            if column['title'] == '#':
                row.append(position + 1)
            else:
                row.append(record[column['key']])
        body_rows.append(row)
    table.add_rows([title_row] + body_rows)
    return table
# Column layout for the detailed view; 'key' names the field read from
# each fetched coin record ('#' is the synthetic row-number column).
full_headers = [
    {'title': '#'},
    {'title': 'Name', 'key': 'name'},
    {'title': 'Price', 'key': 'price'},
    {'title': '24h %', 'key': 'today_change'},
    {'title': '7d %', 'key': 'week_change'},
    {'title': 'Market Cap', 'key': 'market_cap'},
    {'title': 'Volume', 'key': 'volume'}
]
# Reduced column layout selected by the --simple flag.
simple_headers = [
    {'title': '#'},
    {'title': 'Name', 'key': 'name'},
    {'title': 'Price', 'key': 'price'},
]
def main():
    """CLI entry point: fetch coin data and print it as a text table."""
    args = parse_args()
    data = fetch_crypto_data(vars(args))
    headers = simple_headers if args.simple else full_headers
    print('Currency: USD')
    print(generate_table(headers, data).draw())
# Allow use both as an importable module and as a script.
if __name__ == '__main__':
    main()
|
"""
Copyright 2014 Google Inc. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from __future__ import print_function
import argparse
import datetime
import errno
import logging
import os
import sys
import cleanup
import closure
from preprocess import Preprocess
def main(args):
  """Preprocess a font for use as a TachyFont.

  Cleans the input font, dumps closure/glyph data, packages the pieces
  into a JAR, and copies the JAR plus the cleaned font to output_dir.
  Each stage is skipped when its output is newer than its inputs,
  unless --force is given.

  Args:
    args: list, command line arguments.

  Raises:
    ValueError: if build directory cannot be created, or if --log names
      an unknown logging level.

  Returns:
    Status of the operation (0 on success, shell-style otherwise).
  """
  parser = argparse.ArgumentParser(prog='pyprepfnt')
  parser.add_argument('fontfile', help='Input font file')
  parser.add_argument('output_dir', help='Output directory')
  parser.add_argument('--force', default=False, action='store_true',
                      help='Force preprocessing even if the timestamps indicate'
                      ' it is not necessary')
  parser.add_argument('--hinting', default=False, action='store_true',
                      help='Retain hinting if set, else strip hinting')
  parser.add_argument('--reuse_clean', default=False, action='store_true',
                      help='Reuse the "clean" file if possible')
  parser.add_argument('--log', default='WARNING',
                      help='Set the logging level; eg, --log=INFO')
  parser.add_argument('--verbose', default=False, action='store_true',
                      help='Report internal operations')
  cmd_args = parser.parse_args(args)

  loglevel = getattr(logging, cmd_args.log.upper(), None)
  if not isinstance(loglevel, int):
    # BUGFIX: report the user-supplied level name; `loglevel` is always
    # None on this path, so the old message read "Invalid log level: None".
    raise ValueError('Invalid log level: %s' % cmd_args.log)
  log = logging.getLogger()
  logging_handler = logging.StreamHandler(sys.stdout)
  logging_handler.setLevel(loglevel)
  formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s')
  logging_handler.setFormatter(formatter)
  log.addHandler(logging_handler)
  log.setLevel(loglevel)

  verbose = cmd_args.verbose
  force_preprocessing = cmd_args.force
  log.debug('force_preprocessing = ' + str(force_preprocessing))

  fontfile = cmd_args.fontfile
  fonttime = os.path.getmtime(fontfile)
  # TODO(bstell) use Logger
  basename = os.path.basename(fontfile)
  log.info('preprocess %s = %d bytes' % (cmd_args.fontfile,
                                         os.path.getsize(cmd_args.fontfile)))
  filename, extension = os.path.splitext(basename)
  cur_time = datetime.datetime.now()
  build_dir = 'tmp-%s' % filename
  # Without --reuse_clean the build dir gets a timestamp+pid suffix so
  # concurrent runs never collide and nothing stale is reused.
  if not cmd_args.reuse_clean:
    build_dir = ('%s-%04d-%02d-%02d-%02d-%02d-%02d.%d' %
                 (build_dir, cur_time.year, cur_time.month, cur_time.day,
                  cur_time.hour, cur_time.minute, cur_time.second, os.getpid()))
  output_dir = cmd_args.output_dir
  log.debug('JAR file: ' + output_dir)
  try:
    os.makedirs(build_dir)
  except OSError as exception:
    # An already-existing dir is fine (expected with --reuse_clean);
    # anything else is fatal.
    if exception.errno != errno.EEXIST:
      log.error('failed to create build_dir (' + build_dir + ')')
      raise

  log.debug('if reuse_clean then we should compare the source font and final jar')
  cleanfile = filename + '_clean' + extension
  cleanfilepath = build_dir + '/' + cleanfile
  # Decide if we are building the cleaned up version of the font.
  rebuild_clean = not cmd_args.reuse_clean
  cleanfile_exists = os.path.isfile(cleanfilepath)
  if force_preprocessing or not cleanfile_exists:
    rebuild_clean = True
  else:
    cleantime = os.path.getmtime(cleanfilepath)
    if cleantime <= fonttime:
      rebuild_clean = True
  log.debug('rebuild_clean = ' + str(rebuild_clean))
  if rebuild_clean:
    log.debug('cleaned version: ' + cleanfilepath)
    cleanup.cleanup(fontfile, cmd_args.hinting, cleanfilepath, verbose)
    closure.dump_closure_map(cleanfilepath, build_dir)
  else:
    log.debug('reuse cleaned up version: ' + cleanfilepath)
  # Get the latest cleaned up font timestamp.
  cleantime = os.path.getmtime(cleanfilepath)

  # Decide if we are rebuilding the jar file.
  tachyfont_file = filename + '.TachyFont.jar'
  jarfilepath = build_dir + '/' + tachyfont_file
  rebuild_jar = False
  jarfile_exists = os.path.isfile(jarfilepath)
  log.debug('file %s exists: %s' % (jarfilepath, jarfile_exists))
  if force_preprocessing or not jarfile_exists:
    rebuild_jar = True
  else:
    jartime = os.path.getmtime(jarfilepath)
    if jartime <= cleantime:
      rebuild_jar = True
  log.debug('rebuild_jar = ' + str(rebuild_jar))
  if rebuild_jar:
    log.debug('start preprocess')
    preprocess = Preprocess(cleanfilepath, build_dir, verbose)
    log.debug('build base')
    preprocess.base_font()
    log.debug('dump cmap')
    preprocess.cmap_dump()
    log.debug('build glyph data')
    preprocess.serial_glyphs()
    log.debug('write sha-1 fingerprint')
    preprocess.sha1_fingerprint()
    log.debug('create jar file')
    sub_files = ('base closure_data closure_idx codepoints gids glyph_data '
                 'glyph_table sha1_fingerprint')
    jar_cmd = 'cd %s; jar cf %s %s' % (build_dir, tachyfont_file, sub_files)
    log.debug('jar_cmd: ' + jar_cmd)
    status = os.system(jar_cmd)
    log.debug('jar command status: ' + str(status))
    if status:
      log.error('jar command status: ' + str(status))
      return status
  else:
    log.debug('no need to rebuild intermediate jar file: ' + jarfilepath)
  # Get the latest cleaned up jar timestamp.
  jartime = os.path.getmtime(jarfilepath)

  # Decide if we are copying over the jar file.
  copy_jar = False
  jarcopy_filepath = output_dir + '/' + tachyfont_file
  jarcopy_exists = os.path.isfile(jarcopy_filepath)
  if force_preprocessing or not jarcopy_exists:
    copy_jar = True
  else:
    jarcopytime = os.path.getmtime(jarcopy_filepath)
    if jarcopytime <= jartime:
      copy_jar = True
  log.debug('copy_jar = ' + str(copy_jar))
  if copy_jar:
    log.debug('cp the files to the output directory')
    log.info('cleaned: %s = %d' % (cleanfile, os.path.getsize(cleanfilepath)))
    log.info('Jar: %s/%s' % (output_dir, tachyfont_file))
    cp_cmd = ('cp %s/%s %s/%s %s' %
              (build_dir, tachyfont_file, build_dir, cleanfile, output_dir))
    log.debug('cp_cmd: ' + cp_cmd)
    status = os.system(cp_cmd)
    log.debug('cp status ' + str(status))
    if status:
      log.error('cp status = ' + str(status))
      return status
  else:
    log.debug('the existing jar file is up to date: ' + jarfilepath)

  # Keep the build dir only when it is deterministic (--reuse_clean);
  # timestamped dirs would otherwise accumulate.
  if cmd_args.reuse_clean:
    log.debug('leaving the build directory: ' + build_dir)
    status = 0
  else:
    log.debug('cleanup the build directory')
    rm_cmd = ('rm -rf %s' % build_dir)
    log.debug('rm_cmd: ' + rm_cmd)
    status = os.system(rm_cmd)
    log.debug('rm status ' + str(status))
    if status:
      log.error('rm status = ' + str(status))
      return status

  log.debug('command status = ' + str(status))
  if status != 0:
    log.info('preprocessing FAILED')
  return status
# Script entry point: forward CLI args (minus the program name) and
# propagate the status code to the shell.
if __name__ == '__main__':
  cmd_status = main(sys.argv[1:])
  sys.exit(cmd_status)
|
import tensorflow as tf
from pplp.core import losses
from pplp.core import orientation_encoder
from pplp.core.pplp_fc_layers import ops
# Keys of the dictionary returned by build(); consumers look up the
# individual loss terms by these names.
KEY_CLASSIFICATION_LOSS = 'classification_loss'
KEY_REGRESSION_LOSS = 'regression_loss'
KEY_PPLP_LOSS = 'pplp_loss'
KEY_OFFSET_LOSS_NORM = 'offset_loss_norm'
KEY_ANG_LOSS_NORM = 'ang_loss_norm'
def build(model, prediction_dict):
    """Builds the loss for a variety of box representations

    Args:
        model: network model
        prediction_dict: prediction dictionary

    Returns:
        losses_output: loss dictionary

    Raises:
        ValueError: if the box representation or fc layers type is not
            supported.
    """
    avod_box_rep = model._config.avod_config.avod_box_representation
    fc_layers_type = model._config.layers_config.avod_config.WhichOneof('fc_layers')

    if avod_box_rep in ['box_3d', 'box_4ca']:
        if fc_layers_type == 'fusion_fc_layers':
            # Boxes with classification and offset output
            losses_output = _build_cls_off_loss(model, prediction_dict)
        else:
            # BUGFIX: previously this branch fell through with
            # losses_output unbound, raising UnboundLocalError at the
            # return below. Fail with a descriptive error instead.
            raise ValueError('Invalid fc layers type', fc_layers_type)
    elif avod_box_rep in ['box_8c', 'box_8co', 'box_4c']:
        losses_output = _build_cls_off_loss(model, prediction_dict)
    else:
        raise ValueError('Invalid box representation', avod_box_rep)

    return losses_output
def _get_cls_loss(model, cls_logits, cls_gt):
    """Calculates cross entropy loss for classification

    Args:
        model: network model
        cls_logits: predicted classification logits
        cls_gt: ground truth one-hot classification vector

    Returns:
        cls_loss: cross-entropy classification loss (scalar, averaged
            over the minibatch)
    """
    # Cross-entropy loss for classification, scaled by the configured
    # classification loss weight.
    weighted_softmax_classification_loss = \
        losses.WeightedSoftmaxLoss()
    cls_loss_weight = model._config.loss_config.cls_loss_weight
    cls_loss = weighted_softmax_classification_loss(
        cls_logits, cls_gt, weight=cls_loss_weight)

    # Normalize by the size of the minibatch (rows of cls_gt).
    with tf.variable_scope('cls_norm'):
        cls_loss = cls_loss / tf.cast(
            tf.shape(cls_gt)[0], dtype=tf.float32)

    # Add summary scalar during training
    if model._train_val_test == 'train':
        tf.summary.scalar('classification', cls_loss)

    return cls_loss
def _get_positive_mask(positive_selection, cls_softmax, cls_gt):
    """Gets the positive mask based on the ground truth box classifications

    Args:
        positive_selection: positive selection method
            (e.g. 'corr_cls', 'not_bkg')
        cls_softmax: prediction classification softmax scores
        cls_gt: ground truth classification one-hot vector

    Returns:
        positive_mask: boolean mask of anchors counted as positive
    """
    # Predicted class index per anchor.
    predicted_classes = tf.argmax(cls_softmax, axis=1)
    # Recover ground truth class indices from the one-hot encoding;
    # index 0 is background.
    gt_classes = tf.argmax(cls_gt, axis=1)
    non_background = tf.greater(gt_classes, 0)

    if positive_selection == 'corr_cls':
        # Positive only when the prediction matches a non-background GT.
        matches_gt = tf.equal(predicted_classes, gt_classes)
        return tf.logical_and(matches_gt, non_background)
    if positive_selection == 'not_bkg':
        return non_background
    raise ValueError('Invalid positive selection', positive_selection)
def _get_off_loss(model, offsets, offsets_gt,
                  cls_softmax, cls_gt):
    """Calculates the smooth L1 combined offset and angle loss, normalized by
    the number of positives

    Args:
        model: network model
        offsets: prediction offsets
        offsets_gt: ground truth offsets
        cls_softmax: prediction classification softmax scores
        cls_gt: classification ground truth one-hot vector

    Returns:
        final_reg_loss: combined offset and angle vector loss
        offset_loss_norm: normalized offset loss
    """
    # The _2output variant returns both a per-anchor total and a
    # per-element (per offset component) loss tensor.
    weighted_smooth_l1_localization_loss_2output = losses.WeightedSmoothL1Loss_2ouput()
    reg_loss_weight = model._config.loss_config.reg_loss_weight
    anchorwise_localization_loss, elementwise_localization_loss = weighted_smooth_l1_localization_loss_2output( # shape=(?,), ? is the number of anchor candidates.
        offsets, offsets_gt, weight=reg_loss_weight)
    # Only anchors selected as "positive" contribute to the regression loss.
    positive_mask = _get_positive_mask(model._positive_selection,
                                       cls_softmax, cls_gt)
    # Cast to float to get number of positives
    pos_classification_floats = tf.cast(
        positive_mask, tf.float32)
    # Apply mask to only keep regression loss for positive predictions
    pos_localization_loss = tf.reduce_sum(tf.boolean_mask(
        anchorwise_localization_loss, positive_mask))
    # Same masking for the per-element loss, then sum over anchors so we
    # get one loss value per offset component (10 components).
    valid_elementwise_localization_loss = tf.boolean_mask(
        elementwise_localization_loss, positive_mask, axis=0)
    pos_localization_loss_elementwise = tf.reduce_sum(valid_elementwise_localization_loss, axis=0)
    # Combine regression losses
    combined_reg_loss = pos_localization_loss
    with tf.variable_scope('reg_norm'):
        # Normalize by the number of positive/desired classes
        # only if we have any positives (avoid division by zero)
        num_positives = tf.reduce_sum(pos_classification_floats)
        pos_div_cond = tf.not_equal(num_positives, 0)
        offset_loss_norm = tf.cond(
            pos_div_cond,
            lambda: pos_localization_loss / num_positives,
            lambda: tf.constant(0.0))
        offset_loss_norm_elewise = tf.cond(
            pos_div_cond,
            lambda: tf.divide(pos_localization_loss_elementwise, num_positives),
            lambda: tf.constant([0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]))
        # Split the 10 normalized component losses for individual
        # summaries: 4 (x, y) corner pairs plus 2 heights.
        offset_x1, offset_y1, offset_x2, offset_y2, offset_x3, offset_y3, offset_x4, offset_y4, offset_h1, offset_h2 = tf.split(offset_loss_norm_elewise, [1, 1, 1, 1, 1, 1, 1, 1, 1, 1], axis=0)
        final_reg_loss = tf.cond(
            pos_div_cond,
            lambda: combined_reg_loss / num_positives,
            lambda: tf.constant(0.0))
    # Add summary scalars
    if model._train_val_test == 'train':
        tf.summary.scalar('localization', offset_loss_norm)
        tf.summary.scalar('offset_x1', offset_x1[0])
        tf.summary.scalar('offset_y1', offset_y1[0])
        tf.summary.scalar('offset_x2', offset_x2[0])
        tf.summary.scalar('offset_y2', offset_y2[0])
        tf.summary.scalar('offset_x3', offset_x3[0])
        tf.summary.scalar('offset_y3', offset_y3[0])
        tf.summary.scalar('offset_x4', offset_x4[0])
        tf.summary.scalar('offset_y4', offset_y4[0])
        tf.summary.scalar('offset_h1', offset_h1[0])
        tf.summary.scalar('offset_h2', offset_h2[0])
        tf.summary.scalar('regression_total', final_reg_loss)
        tf.summary.scalar('mb_num_positives', num_positives)
    return final_reg_loss, offset_loss_norm
def _build_cls_off_loss(model, prediction_dict):
    """Builds classification, offset, and angle vector losses.

    Args:
        model: network model
        prediction_dict: prediction dictionary

    Returns:
        losses_output: losses dictionary keyed by the module KEY_*
            constants
    """
    # Minibatch predictions.
    cls_logits = prediction_dict[model.PRED_MB_CLASSIFICATION_LOGITS]
    cls_softmax = prediction_dict[model.PRED_MB_CLASSIFICATION_SOFTMAX]
    offsets = prediction_dict[model.PRED_MB_OFFSETS]
    # Ground truth.
    cls_gt = prediction_dict[model.PRED_MB_CLASSIFICATIONS_GT]
    offsets_gt = prediction_dict[model.PRED_MB_OFFSETS_GT]

    with tf.variable_scope('pplp_losses'):
        with tf.variable_scope('classification'):
            cls_loss = _get_cls_loss(model, cls_logits, cls_gt)
        with tf.variable_scope('regression'):
            final_reg_loss, offset_loss_norm = _get_off_loss(
                model, offsets, offsets_gt,
                cls_softmax, cls_gt)
        with tf.variable_scope('pplp_loss'):
            # Total loss is classification plus regression.
            pplp_loss = cls_loss + final_reg_loss
            tf.summary.scalar('pplp_loss', pplp_loss)

    return {
        KEY_CLASSIFICATION_LOSS: cls_loss,
        KEY_REGRESSION_LOSS: final_reg_loss,
        KEY_PPLP_LOSS: pplp_loss,
        # Separate loss kept for plotting.
        KEY_OFFSET_LOSS_NORM: offset_loss_norm,
    }
|
# -*- coding: utf-8 -*-
from PyQt4 import QtCore,QtGui
from pineboolib import decorators
from pineboolib.fllegacy.FLSqlCursor import FLSqlCursor
from pineboolib.utils import DefFun
from pineboolib.fllegacy.FLRelationMetaData import FLRelationMetaData
from pineboolib.fllegacy.FLFieldMetaData import FLFieldMetaData
from pineboolib.fllegacy.FLTableMetaData import FLTableMetaData
class FLTableDB(QtGui.QWidget):
_tableView = None
_vlayout = None
_lineEdit = None
_comboBox_1 = None
_comboBox_2 = None
topWidget = QtGui.QWidget
showed = False
    def __init__(self, parent = None, action_or_cursor = None, *args):
        """Build the table widget and wire it to a cursor.

        action_or_cursor may be an FLSqlCursor, an action name (str), or
        None — in which case the nearest ancestor widget exposing a
        `_cursor` attribute supplies the cursor.
        """
        print("FLTableDB:", parent, action_or_cursor , args)
        # TODO: The search line-edit and combo required by QS code are still missing.
        super(FLTableDB,self).__init__(parent,*args)
        # TODO: Final initialization should happen later, on the first
        # show(); otherwise everything must be ready in the constructor.
        self._tableView = QtGui.QTableView()
        self._lineEdit = QtGui.QLineEdit()
        _label1 = QtGui.QLabel()
        _label2 = QtGui.QLabel()
        self._comboBox_1 = QtGui.QComboBox()
        self._comboBox_2 = QtGui.QComboBox()
        _label1.setText("Buscar")
        _label2.setText("en")
        self._vlayout = QtGui.QVBoxLayout()
        _hlayout = QtGui.QHBoxLayout()
        # Compact row/column defaults for the data grid.
        self._tableView._v_header = self._tableView.verticalHeader()
        self._tableView._v_header.setDefaultSectionSize(18)
        self._tableView._h_header = self._tableView.horizontalHeader()
        self._tableView._h_header.setDefaultSectionSize(70)
        # Search bar ("Buscar ... en ...") above the table view.
        _hlayout.addWidget(_label1)
        _hlayout.addWidget(self._lineEdit)
        _hlayout.addWidget(_label2)
        _hlayout.addWidget(self._comboBox_1)
        _hlayout.addWidget(self._comboBox_2)
        self._vlayout.addLayout(_hlayout)
        self._vlayout.addWidget(self._tableView)
        self.setLayout(self._vlayout)
        self._parent = parent
        # Walk up the widget tree until an ancestor with a `_cursor`
        # attribute is found (or the tree is exhausted).
        # NOTE(review): if parent is None, self._parent.parentWidget()
        # raises AttributeError on the first iteration — confirm callers
        # always pass a widget.
        while True:
            parent_cursor = getattr(self._parent,"_cursor", None)
            if parent_cursor: break
            new_parent = self._parent.parentWidget()
            if new_parent is None: break
            self._parent = new_parent
        print(self._parent)
        # Read-only grid with whole-row, single selection.
        self._tableView.setEditTriggers(QtGui.QAbstractItemView.NoEditTriggers)
        self._tableView.setSelectionMode(QtGui.QAbstractItemView.SingleSelection)
        self._tableView.setSelectionBehavior(QtGui.QAbstractItemView.SelectRows)
        self._tableView.setAlternatingRowColors(True)
        if action_or_cursor is None and parent_cursor:
            action_or_cursor = parent_cursor
        # Resolve the cursor: reuse an FLSqlCursor, build one from an
        # action name, or leave unset.
        if isinstance(action_or_cursor,FLSqlCursor):
            self._cursor = action_or_cursor
        elif isinstance(action_or_cursor,str):
            self._cursor = FLSqlCursor(action_or_cursor)
        else:
            self._cursor = None
        if self._cursor:
            self._tableView._h_header.setResizeMode(QtGui.QHeaderView.ResizeToContents)
            self._tableView.setModel(self._cursor._model)
            self._tableView.setSelectionModel(self._cursor.selection())
        self.tableRecords = self # internal table control
        # Populate the search combo boxes from the model's column headers
        # and connect their change signals. Possibly to be improved.
        if self._cursor:
            for column in range(self._cursor._model.columnCount()):
                self._comboBox_1.addItem(self._cursor._model.headerData(column, QtCore.Qt.Horizontal, QtCore.Qt.DisplayRole))
                self._comboBox_2.addItem(self._cursor._model.headerData(column, QtCore.Qt.Horizontal, QtCore.Qt.DisplayRole))
            self._comboBox_1.addItem("*")
            self._comboBox_2.addItem("*")
            self._comboBox_1.setCurrentIndex(0)
            self._comboBox_2.setCurrentIndex(1)
            self._comboBox_1.currentIndexChanged.connect(self.comboBox_putFirstCol)
            self._comboBox_2.currentIndexChanged.connect(self.comboBox_putSecondCol)
        self.sort = []
        # Defer loaded() so the event loop settles before switching the
        # header to interactive mode.
        self.timer_1 = QtCore.QTimer(self)
        self.timer_1.singleShot(100, self.loaded)
def __getattr__(self, name): return DefFun(self, name)
def loaded(self):
# Es necesario pasar a modo interactivo lo antes posible
# Sino, creamos un bug en el cierre de ventana: se recarga toda la tabla para saber el tamaño
print("FLTableDB: setting columns in interactive mode")
self._tableView._h_header.setResizeMode(QtGui.QHeaderView.Interactive)
    def cursor(self):
        """Return the active FLSqlCursor bound to this table.

        NOTE(review): the assert is stripped under python -O; callers rely
        on a cursor having been created during construction or initCursor().
        """
        assert self._cursor
        return self._cursor
    def obj(self):
        """Return self (compatibility accessor for legacy callers)."""
        return self
def comboBox_putFirstCol(self):
self.putFirstCol(str(self._comboBox_1.currentText()))
def comboBox_putSecondCol(self):
self.putSecondCol(str(self._comboBox_2.currentText()))
def putFirstCol(self, fN):
_oldPos= None
_oldFirst = self._tableView._h_header.logicalIndex(0)
for column in range(self._cursor._model.columnCount()):
if self._cursor._model.headerData(column, QtCore.Qt.Horizontal, QtCore.Qt.DisplayRole).lower() == fN.lower():
_oldPos = self._tableView._h_header.visualIndex(column)
if not self._comboBox_1.currentText() == fN:
self._comboBox_1.setCurrentIndex(column)
return False
break
if not _oldPos or fN == "*":
return False
else:
self._tableView._h_header.swapSections(_oldPos, 0)
self._comboBox_2.setCurrentIndex(_oldFirst)
return True
def putSecondCol(self, fN):
_oldPos= None
_oldSecond = self._tableView._h_header.logicalIndex(1)
for column in range(self._cursor._model.columnCount()):
if self._cursor._model.headerData(column, QtCore.Qt.Horizontal, QtCore.Qt.DisplayRole).lower() == fN.lower():
_oldPos = self._tableView._h_header.visualIndex(column)
break
if not _oldPos or fN == "*":
return False
if not self._comboBox_1.currentText() == fN:
self._tableView._h_header.swapSections(_oldPos, 1)
else:
self._comboBox_1.setCurrentIndex(_oldSecond)
return True
def setTableName(self, tableName):
self._tableName = tableName
if self.showed:
if self.topWidget:
self.initCursor()
else:
self.initFakeEditor()
def setForeignField(self, foreingField):
self._foreingField = foreingField
if self.showed:
if self.topWidget:
self.initCursor()
else:
self.initFakeEditor()
def setFieldRelation(self, fieldRelation):
self._fieldRelation = fieldRelation
if self.showed:
if self.topWidget:
self.initCursor()
else:
self.initFakeEditor()
    @decorators.NotImplementedWarn
    def initCursor(self):
        """(Unfinished port) Rebind this table's cursor and metadata.

        Mirrors the original C++ FLTableDB::initCursor: derives table
        metadata from the current cursor and, when a foreignField /
        fieldRelation pair is configured, creates a related FLSqlCursor.
        NOTE(review): several paths reference names that look wrong
        (sortField_ vs _sortField, local tMD vs self._tMD, cursorAux used
        before assignment); they are flagged inline. The decorator marks
        this method as not finished.
        """
        # Create the table if it does not exist yet (original comment).
        if not self._cursor: return False
        if not self._cursor._model: return False
        self._tMD = 0
        # NOTE(review): reads "_sortField" but writes "sortField_" below --
        # one of the two spellings is probably wrong.
        if not self._sortField: self._tMD = self._cursor._model.name()
        if self._tMD:
            self.sortField_ = self._tMD.value(self._cursor._currentregister, self._tMD.primaryKey())
        ownTMD = False
        if not self._tableName:
            #if not cursor_->db()->manager()->existsTable(tableName_)) {
            ownTMD = True
            #tMD = cursor_->db()->manager()->createTable(tableName_);
        else:
            ownTMD = True
            self._tMD = self._cursor._model._table.name
        if not self._tMD:
            return
        if not self._foreignField or not self._fieldRelation:
            # No master/detail relation configured: just point the cursor at
            # this widget's table if it is not already there.
            if not self._cursor._model:
                if ownTMD and self._tMD and not self._tMD.inCache():
                    self._tMD = None
                return
            if not self._cursor._model.name() == self._tableName:
                ctxt = self._cursor.context();
                self._cursor = FLSqlCursor(self._tableName)
                if self._cursor:
                    self._cursor.setContext(ctxt)
                cursorAux = 0
            if ownTMD and self._tMD and not self._tMD.inCache():
                self._tMD = None
            return
        else:
            cursorTopWidget = self.topWidget._cursor() # ::qt_cast<FLFormDB *>(topWidget)->cursor()
            if cursorTopWidget and not cursorTopWidget._model.name() == self._tableName:
                self._cursor = cursorTopWidget
        # NOTE(review): cursorAux may be unbound here -- it is only assigned
        # in the branch above; likely a latent NameError.
        if not self._tableName or not self._foreignField or not self._fieldRelation or cursorAux:
            if ownTMD and self._tMD and not self._tMD.inCache():
                # NOTE(review): assigns a local "tMD" instead of self._tMD.
                tMD = None
            return
        cursorAux = self._cursor
        curName = self._cursor._model.name()
        rMD = self._cursor._model.relation(self._foreignField,self._fieldRelation,self._tableName)
        testM1 = self._tMD.relation(self._fieldRelation, self._foreignField, curName)
        checkIntegrity = bool(False)
        if not rMD:
            if testM1:
                checkIntegrity = (testM1.cardinality() == FLRelationMetaData.RELATION_M1)
            fMD = FLTableMetaData(self._cursor._model.field(self._foreignField))
            if (fMD):
                tmdAux = self._cursor._model(self._tableName);
                if not tmdAux or tmdAux.isQuery():
                    checkIntegrity = False
                if tmdAux and not tmdAux.inCache(): # check inCache()
                    tmdAux = None
                rMD = FLRelationMetaData(self._tableName,self._fieldRelation, FLRelationMetaData.RELATION_1M, False, False, checkIntegrity)
                fMD.addRelationMD(rMD)
                print("FLTableDB : La relación entre la tabla del formulario %r y esta tabla %r de este campo no existe, pero sin embargo se han indicado los campos de relación( %r, %r )" % (curName, self._tableName, self._fieldRelation, self._foreignField))
                print("FLTableDB : Creando automáticamente %r.%r --1M--> %r.%r" % (curName, self._foreignField, self._tableName, self._fieldRelation))
            else:
                print("FLTableDB : El campo ( %r ) indicado en la propiedad foreignField no se encuentra en la tabla ( %r )" % (self._foreignField, curName))
            rMD = testM1
            if not rMD:
                # NOTE(review): "tMD" is unbound here; this branch would raise
                # NameError (probably meant self._tMD).
                fMD = FLFieldMetaData(tMD.field(self._fieldRelation))
                if (fMD):
                    rMD = FLRelationMetaData(curName,self._foreignField, FLRelationMetaData.RELATION_1M, False, False, False)
                    fMD.addRelationMD(rMD)
                    print("FLTableDB : Creando automáticamente %r.%r --1M--> %r.%r" % (self._tableName, self._fieldRelation, curName, self._foreignField))
                else:
                    print("FLTableDB : El campo ( %r ) indicado en la propiedad fieldRelation no se encuentra en la tabla ( %r )" % (self._fieldRelation, self._tableName))
        self._cursor = FLSqlCursor(self._tableName, True, self._cursor.db().connectionName(), cursorAux, rMD, self);
        if not self._cursor:
            self._cursor = cursorAux
            cursorAux = 0
        else:
            self._cursor.setContext(cursorAux.context())
            if self.showed:
                # NOTE(review): self.refresh() is CALLED here; Qt connect and
                # disconnect expect the callable reference (self.refresh).
                self.disconnect(cursorAux, QtCore.SIGNAL('newBuffer()'), self.refresh())
                self.connect(cursorAux,QtCore.SIGNAL('newBuffer()'), self.refresh())
        if cursorAux and self.topWidget.isA("FLFormSearchDB"):
            self.topWidget.setCaption(self._cursor._model.alias())
            self.topWidget.setCursor(self._cursor) #::qt_cast<FLFormSearchDB *>(topWidget)->setCursor(cursor_);
        # NOTE(review): "tMD" is again the (likely unbound) local name here.
        if ownTMD and tMD and not tMD.inCache():
            tMD = None
    @QtCore.pyqtSlot()
    def close(self):
        """Slot stub: log the close request (no teardown implemented yet)."""
        print("FLTableDB: close()")
    @QtCore.pyqtSlot()
    def refresh(self):
        """Slot: reload the underlying cursor's data."""
        # NOTE(review): debug print walks three parents up the widget tree.
        print("FLTableDB: refresh()", self.parent().parent().parent())
        self._cursor.refresh()
    @QtCore.pyqtSlot()
    def show(self):
        """Slot: log the show event, then run the base class show()."""
        print("FLTableDB: show event")
        super(FLTableDB, self).show()
    @QtCore.pyqtSlot()
    def insertRecord(self):
        """Slot: forward the insert action to the cursor."""
        self._cursor.insertRecord()
    @QtCore.pyqtSlot()
    def editRecord(self):
        """Slot: forward the edit action to the cursor."""
        self._cursor.editRecord()
    @QtCore.pyqtSlot()
    def deleteRecord(self):
        """Slot: forward the delete action to the cursor."""
        self._cursor.deleteRecord()
    @QtCore.pyqtSlot()
    def browseRecord(self):
        """Slot: forward the browse (read-only open) action to the cursor."""
        self._cursor.browseRecord()
    @QtCore.pyqtSlot()
    def copyRecord(self):
        """Slot: forward the copy action to the cursor."""
        self._cursor.copyRecord()
|
import simplejson as json
from glob import glob
from bs4util.spantable import TableFrame, frame_keys, describe_frame
from collections import OrderedDict
from itertools import chain
from copy import deepcopy
import logging
log = logging.getLogger()
def show_log():
    """Emit one info and one debug record so logger wiring can be checked."""
    print("log = %s" % log)
    log.info("info! %s" % __name__)
    log.debug("debug! %s" % __name__)
def find_files(dirpath, prefix, skip=True):
    '''Finds files to test in a given directory.

    Returns a (todo, skipped) pair of sorted path lists; paths containing
    'SKIP' go to the second list when *skip* is true, which is otherwise
    empty.
    '''
    paths = sorted(glob("%s/%s*.html" % (dirpath, prefix)))
    if not skip:
        return paths, []
    todo, skipped = [], []
    for path in paths:
        (skipped if 'SKIP' in path else todo).append(path)
    return todo, skipped
def read_file(path):
    """Return the full text content of the file at *path*.

    BUGFIX: the previous '"\\n".join(f.readlines())' doubled every newline,
    because readlines() already keeps the trailing newline on each line.
    """
    with open(path, "rt") as f:
        return f.read()
def load_spec(block):
    '''Loads an (ordered) spec dict from a raw JSON string.'''
    raw = json.loads(block)
    spec = OrderedDict((key, raw[key]) for key in frame_keys() if key in raw)
    # Cast the dims member to a tuple, if present, for compatibility with
    # the .dims attribute on STDF objects.
    if 'dims' in spec:
        spec['dims'] = tuple(spec['dims'])
    return spec
def describe_spec(spec):
    '''Yields one human-readable line per spec entry.'''
    for key, value in spec.items():
        if key == 'dims':
            yield "spec.dims = %s; keys = %s" % (value, list(spec))
        else:
            yield "spec.%s = %s" % (key, value)
def extract_test(soup):
    '''Pulls the (frame, spec) pair embedded in a test page's soup.'''
    first_table = soup.find_all('table')[0]
    raw_spec = soup.find_all('pre')[0].string
    return TableFrame(first_table), load_spec(raw_spec)
def test_dims(frame, spec):
    '''Asserts that the frame's dims match the spec's expected dims.'''
    expected = spec['dims']
    log.debug("frame.dims: got = %s" % str(frame.dims))
    log.debug("frame.dims: exp = %s" % str(expected))
    assert frame.dims == expected, "frame.dims - mismatch"
def test_frame_rows(frame, spec):
    '''Asserts that the frame's full row list matches the spec.'''
    actual = list(frame.rows())
    log.debug("frame.rows: got = %s" % actual)
    log.debug("frame.rows: exp = %s" % spec['rows'])
    assert actual == spec['rows'], "frame.rows - mismatch"
# Note that key is expected to be in spec, but spec[key] may be None.
def test_section_rows(frame, spec, key):
    """Assert that frame.<key>'s rows equal the expected spec[key] rows.

    *key* must be present in spec, but spec[key] may be None.
    """
    # Idiom fix: getattr() instead of calling __getattribute__ directly.
    section = getattr(frame, key)
    rows = list(section.rows())
    log.debug("frame.%s.rows: got = %s" % (key, rows))
    log.debug("frame.%s.rows: exp = %s" % (key, spec[key]))
    assert rows == spec[key], "frame.%s.rows - mismatch" % key
# Note that key is expected to be in spec, but spec[key] may be None.
def test_section(frame, spec, key):
    """Check one frame section against spec[key].

    Both present -> compare rows; a presence mismatch fails; both absent
    -> implicit pass. *key* must be in spec, but spec[key] may be None.
    """
    # Idiom fix: getattr() instead of calling __getattribute__ directly.
    section = getattr(frame, key)
    log.debug("frame.%s got = %s" % (key, section))
    log.debug("frame.%s exp = %s" % (key, spec[key]))
    if section is not None and spec[key] is not None:
        test_section_rows(frame, spec, key)
    elif section is not None:
        assert False, "frame.%s present where null expected" % key
    elif spec[key] is not None:
        assert False, "frame.%s is null where non-null" % key
    else:
        # If we get here, both frame and spec members are null.
        # This means the spec (correctly) requested that the frame
        # not be present. So, implicitly a passing result.
        log.debug("frame.%s is null, as expected" % key)
def describe_pair(frame, spec):
    '''Yields description lines for the spec followed by the frame.'''
    return chain (describe_spec(spec),describe_frame(frame))
def test_pair(frame, spec):
    '''Runs every check the spec requests against the frame.'''
    for line in describe_pair(frame, spec):
        log.debug(line)
    for key in spec:
        if key == 'dims':
            test_dims(frame, spec)
        elif key == 'rows':
            test_frame_rows(frame, spec)
        else:
            test_section(frame, spec, key)
|
"""
Views for the Jetpack application
"""
import commonware.log
import os
import shutil
import codecs
import tempfile
import urllib2
import time
import waffle
from contextlib import closing
from simplejson import JSONDecodeError
from statsd import statsd
from zipfile import ZipFile, ZIP_DEFLATED
from django.contrib import messages
from django.core.urlresolvers import reverse
from django.core.cache import cache
from django.db import transaction
from django.views.static import serve
from django.shortcuts import get_object_or_404
from django.http import (HttpResponseRedirect, HttpResponse,
HttpResponseForbidden, HttpResponseServerError,
Http404, HttpResponseBadRequest) # , QueryDict
from django.contrib.auth.decorators import login_required
from django.core.paginator import Paginator, EmptyPage, InvalidPage
from django.core.exceptions import ValidationError, NON_FIELD_ERRORS
from django.db import IntegrityError, transaction
from django.db.models import Q, ObjectDoesNotExist
from django.views.decorators.cache import never_cache
from django.views.decorators.http import require_POST
from django.views.decorators.csrf import csrf_exempt
from django.template.defaultfilters import escape
from django.conf import settings
from django.utils import simplejson
from django.forms.fields import URLField
from tasks import zip_source
from base.shortcuts import get_object_with_related_or_404
from utils import validator
from utils.helpers import pathify, render, render_json
from utils.exceptions import parse_validation_messages
from jetpack.package_helpers import (get_package_revision,
create_package_from_xpi)
from jetpack.models import (Package, PackageRevision, Module, Attachment, SDK,
EmptyDir, EDITABLE_EXTENSIONS)
from jetpack.errors import (FilenameExistException, DependencyException,
IllegalFilenameException, KeyNotAllowed)
from person.models import Profile
log = commonware.log.getLogger('f.jetpack')
def browser(request, page_number=1, type_id=None, username=None):
    """
    Display a list of addons or libraries with pages
    Filter based on the request (type_id, username).
    """
    # calculate which template to use
    template_suffix = ''
    packages = Package.objects.active()
    author = None
    # BUGFIX: previously undefined (NameError) when no type filter was given.
    other_packages_number = 0
    if username:
        try:
            profile = Profile.objects.get_user_by_username_or_nick(username)
        except ObjectDoesNotExist:
            raise Http404
        author = profile.user
        packages = packages.filter(author__pk=author.pk)
        template_suffix = '%s_user' % template_suffix
    if type_id:
        other_type = 'l' if type_id == 'a' else 'a'
        # Perf fix: count() issues SELECT COUNT(*) instead of loading rows.
        other_packages_number = packages.filter(type=other_type).count()
        packages = packages.filter(type=type_id)
        template_suffix = '%s_%s' % (template_suffix,
                                     settings.PACKAGE_PLURAL_NAMES[type_id])
    packages = packages.sort_recently_active()
    limit = request.GET.get('limit', settings.PACKAGES_PER_PAGE)
    try:
        pager = Paginator(
            packages,
            per_page=limit,
            orphans=1
        ).page(page_number)
    except (EmptyPage, InvalidPage):
        raise Http404
    # BUGFIX: guard against author being None (no username filter);
    # previously this raised AttributeError.
    packages = {}
    if author is not None:
        packages = {
            "all_public_addons":
                author.packages_originated.addons().active().count(),
            "all_public_libraries":
                author.packages_originated.libraries().active().count()}
    return render(request,
            'package_browser%s.html' % template_suffix, {
                'pager': pager,
                'single': False,
                'author': author,
                'person': author,
                'packages': packages,
                'other_packages_number': other_packages_number
            })
def view_or_edit(request, pk=None, id_number=None, type_id=None,
                 revision_number=None, version_name=None, latest=False):
    """
    Edit if user is the author, otherwise view

    Resolves the requested PackageRevision, hides deleted/disabled
    packages from everyone but permitted viewers, then dispatches to
    edit() for the author (when editing is available) or view().
    """
    revision = get_package_revision(pk, id_number, type_id,
                                    revision_number, version_name, latest)
    # Site-wide read-only switch disables editing for everybody.
    edit_available = not waffle.switch_is_active('read_only')
    if revision.package.deleted:
        edit_available = False
        # Deleted packages are only visible to users allowed to view them.
        if not request.user.is_authenticated():
            raise Http404
        try:
            Package.objects.active_with_deleted(viewer=request.user).get(
                pk=revision.package.pk)
        except ObjectDoesNotExist:
            raise Http404
    if not revision.package.active:
        # Disabled packages: same visibility rule as deleted ones.
        if not request.user.is_authenticated():
            raise Http404
        try:
            Package.objects.active_with_disabled(viewer=request.user).get(
                pk=revision.package.pk)
        except ObjectDoesNotExist:
            raise Http404
    if (edit_available
            and request.user.is_authenticated()
            and request.user.pk == revision.author.pk):
        return edit(request, revision)
    else:
        return view(request, revision)
@login_required
def edit(request, revision):
    """
    Edit package - only for the author
    """
    if request.user.pk != revision.author.pk:
        # Non-authors are silently bounced to the read-only view.
        messages.info(request,
                      "Not sufficient priviliges to edit the source. "
                      "You've been redirected to view mode.")
        return HttpResponseRedirect(
            reverse(
                "jp_%s_revision_details" % revision.package.get_type_name(),
                args=[revision.package.id_number, revision.revision_number])
        )
    deps = revision.dependencies.all()
    dep_count = len(deps)
    sdk_list = None
    if revision.package.is_addon():
        # The SDK itself counts as one extra library for add-ons.
        dep_count += 1
        sdk_list = SDK.objects.exclude_disabled(current=revision.sdk.version)
    context = {
        'revision': revision,
        'item': revision.package,
        'single': True,
        'libraries': deps,
        'library_counter': dep_count,
        'readonly': False,
        'edit_mode': True,
        'sdk_list': sdk_list,
    }
    return render(request,
                  "%s_edit.html" % revision.package.get_type_name(), context)
def view(request, revision):
    """
    Show package - read only
    """
    deps = revision.dependencies.all()
    # The SDK itself counts as one extra library for add-ons.
    dep_count = len(deps) + (1 if revision.package.is_addon() else 0)
    # JSON payload feeding the file-tree widget.
    tree = simplejson.dumps({'Lib': revision.get_modules_tree(),
                             'Data': revision.get_attachments_tree(),
                             'Plugins': revision.get_dependencies_tree()})
    return render(request,
                  "%s_view.html" % revision.package.get_type_name(), {
                      'revision': revision,
                      'libraries': deps,
                      'library_counter': dep_count,
                      'readonly': True,
                      'tree': tree
                  })
def download_module(request, pk):
    """
    return a JSON with all module info
    """
    module = get_object_with_related_or_404(Module, pk=pk)
    if module.can_view(request.user):
        return HttpResponse(module.get_json())
    log.warning("[security] Attempt to download private module (%s) by "
                "non-owner (%s)" % (pk, request.user))
    return HttpResponseForbidden('You are not the author of this module.')
def get_module(request, revision_id, filename):
    """
    return a JSON with all module info
    """
    try:
        revision = PackageRevision.objects.get(pk=revision_id)
        mod = revision.modules.get(filename=filename)
    # BUGFIX: "except A, B:" only caught A and bound it to the name B;
    # a parenthesized tuple is required to catch both exception types.
    except (PackageRevision.DoesNotExist, Module.DoesNotExist):
        log_msg = 'No such module %s' % filename
        log.error(log_msg)
        raise Http404
    if not mod.can_view(request.user):
        log_msg = ("[security] Attempt to download private module (%s) by "
                   "non-owner (%s)" % (mod, request.user))
        log.warning(log_msg)
        return HttpResponseForbidden('You are not the author of this module.')
    return HttpResponse(mod.get_json())
@transaction.commit_on_success
@login_required
def copy(request, revision_id):
"""
Copy package - create a duplicate of the Package, set user as author
"""
source = get_object_with_related_or_404(PackageRevision, pk=revision_id)
log.debug('[copy: %s] Copying started from (%s)' % (revision_id, source))
# save package
try:
package = source.package.copy(request.user)
except IntegrityError, err:
log.critical(("[copy: %s] Package copy failed") % revision_id)
return HttpResponseForbidden('You already have a %s with that name' %
escape(source.package.get_type_name()))
# save revision with all dependencies
source.save_new_revision(package)
copied = source
del source
log.info('[copy: %s] Copied to %s, (%s)' % (revision_id, copied.pk,
copied.full_name))
return render_json(request,
"json/%s_copied.json" % package.get_type_name(),
{'revision': copied})
@login_required
def disable(request, pk):
    """
    Disable Package and return confirmation
    """
    package = get_object_or_404(Package, pk=pk)
    if request.user.pk == package.author.pk:
        package.disable()
        return render_json(request,
                           "json/package_disabled.json",
                           {'package': package})
    log.warning('User %s wanted to disable not his own Package %s.' % (
        request.user, pk))
    return HttpResponseForbidden(
        'You are not the author of this %s' % escape(
            package.get_type_name()))
@login_required
def activate(request, pk):
    """
    Undelete Package and return confirmation
    """
    package = get_object_or_404(Package, pk=pk)
    if request.user.pk == package.author.pk:
        package.enable()
        return render_json(request,
                           "json/package_activated.json",
                           {'package': package})
    log.warning("[security] Attempt to activate package (%s) by "
                "non-owner (%s)" % (pk, request.user))
    return HttpResponseForbidden(
        'You are not the author of this %s' % escape(
            package.get_type_name()))
@login_required
def delete(request, pk):
    """
    Delete Package and return confirmation
    """
    package = get_object_or_404(Package, pk=pk)
    if request.user.pk == package.author.pk:
        package.delete()
        return render_json(request, "json/package_deleted.json")
    log.warning("[security] Attempt to delete package (%s) by "
                "non-owner (%s)" % (pk, request.user))
    return HttpResponseForbidden(
        'You are not the author of this %s' % escape(
            package.get_type_name()))
@require_POST
@login_required
def add_module(request, revision_id):
"""
Add new module to the PackageRevision
"""
revision = get_object_with_related_or_404(PackageRevision, pk=revision_id)
if request.user.pk != revision.author.pk:
log_msg = ("[security] Attempt to add a module to package (%s) by "
"non-owner (%s)" % (id_number, request.user))
log.warning(log_msg)
return HttpResponseForbidden(
'You are not the author of this %s' % escape(
revision.package.get_type_name()))
filename = request.POST.get('filename')
mod = Module(
filename=filename,
author=request.user,
code="""// %s.js - %s's module
// author: %s""" % (filename, revision.package.full_name,
request.user.get_profile())
)
try:
mod.save()
revision.module_add(mod)
except FilenameExistException, err:
mod.delete()
return HttpResponseForbidden(escape(str(err)))
return render_json(request,
"json/module_added.json",
{'revision': revision, 'module': mod})
@require_POST
@login_required
def rename_module(request, revision_id):
    """
    Rename a module in a PackageRevision
    """
    revision = get_object_with_related_or_404(PackageRevision, pk=revision_id)
    if request.user.pk != revision.author.pk:
        log.warning("[security] Attempt to rename a module to package (%s) by "
                    "non-owner (%s)" % (revision_id, request.user))
        return HttpResponseForbidden('You are not the author of this Package')
    old_name = request.POST.get('old_filename')
    new_name = request.POST.get('new_filename')
    if old_name == 'main':
        return HttpResponseForbidden(
            'Sorry, you cannot change the name of the main module.'
        )
    if not revision.validate_module_filename(new_name):
        return HttpResponseForbidden(
            ('Sorry, there is already a module in your add-on '
             'with the name "%s". Each module in your add-on '
             'needs to have a unique name.') % new_name
        )
    # Locate the module to rename (last match wins, as before).
    target = None
    for candidate in revision.modules.all():
        if candidate.filename == old_name:
            target = candidate
    if target is None:
        log.warning('Attempt to rename a non existing module %s from %s.' % (
            old_name, revision_id))
        return HttpResponseForbidden(
            'There is no such module in %s' % escape(
                revision.package.full_name))
    target.filename = new_name
    revision.add_commit_message('module renamed')
    revision.update(target)
    return render_json(request,
                       "json/module_renamed.json",
                       {'revision': revision, 'module': target})
@require_POST
@login_required
def remove_module(request, revision_id):
    """
    Remove module from PackageRevision

    Accepts a comma-separated 'filename' POST parameter; whole paths are
    removed through modules_remove_by_path.
    """
    revision = get_object_with_related_or_404(PackageRevision, pk=revision_id)
    if request.user.pk != revision.author.pk:
        log.warning("[security] Attempt to remove a module from package (%s) "
                    "by non-owner (%s)" % (revision_id, request.user))
        return HttpResponseForbidden('You are not the author of this Package')
    filenames = request.POST.get('filename').split(',')
    revision.add_commit_message('module removed')
    try:
        removed_modules, removed_dirs = revision.modules_remove_by_path(
            filenames)
    except Module.DoesNotExist:
        log.warning('Attempt to delete a non existing module(s) %s from %s.' % (
            str(filenames), revision_id))
        return HttpResponseForbidden(
            'There is no such module in %s' % escape(
                revision.package.full_name))
    return render_json(request,
                       "json/module_removed.json",
                       {'revision': revision,
                        'removed_modules': simplejson.dumps(removed_modules),
                        'removed_dirs': simplejson.dumps(removed_dirs)})
@require_POST
@login_required
def add_folder(request, revision_id):
" adds an EmptyDir to a revision "
revision = get_object_with_related_or_404(PackageRevision, pk=revision_id)
if request.user.pk != revision.author.pk:
log_msg = ("[security] Attempt to add a folder to revision (%s) by "
"non-owner (%s)" % (revision_id, request.user))
log.warning(log_msg)
return HttpResponseForbidden('You are not the author of this Package')
foldername, root = (
request.POST.get('name', ''),
request.POST.get('root_dir'))
dir = EmptyDir(name=foldername, author=request.user, root_dir=root)
try:
dir.save()
revision.folder_add(dir)
except FilenameExistException, err:
dir.delete()
return HttpResponseForbidden(escape(str(err)))
return render_json(request,
"json/folder_added.json",
{'revision': revision, 'folder': dir})
@require_POST
@login_required
def remove_folder(request, revision_id):
    """Remove an EmptyDir from a revision.

    When no EmptyDir row exists, falls back to removing a real 'data'
    directory (attachments) via attachment_rmdir.
    """
    revision = get_object_with_related_or_404(PackageRevision, pk=revision_id)
    if request.user.pk != revision.author.pk:
        log_msg = ("[security] Attempt to remove a folder from revision (%s) "
                   "by non-owner (%s)" % (revision_id, request.user))
        log.warning(log_msg)
        return HttpResponseForbidden('You are not the author of this Package')
    foldername, root = (
        pathify(request.POST.get('name', '')),
        request.POST.get('root_dir'))
    try:
        folder = revision.folders.get(name=foldername, root_dir=root)
    except EmptyDir.DoesNotExist:
        # No EmptyDir: the folder may be a non-empty attachment directory.
        response = None
        if root == 'data':
            response = revision.attachment_rmdir(foldername)
        if not response:
            log_msg = 'Attempt to delete a non existing folder %s from %s.' % (
                foldername, revision_id)
            log.warning(log_msg)
            return HttpResponseForbidden(
                'There is no such folder in %s' % escape(
                    revision.package.full_name))
        # attachment_rmdir returns the updated revision plus removal lists.
        revision, removed_attachments, removed_emptydirs = response
        return render_json(request,
                'json/%s_rmdir.json' % root, {
                'revision': revision, 'path': foldername,
                'removed_attachments': simplejson.dumps(removed_attachments),
                'removed_dirs': simplejson.dumps(removed_emptydirs),
                'foldername': foldername})
    else:
        # Plain EmptyDir row: detach it from the revision.
        revision.folder_remove(folder)
    return render_json(request,
                       "json/folder_removed.json",
                       {'revision': revision, 'folder': folder})
@require_POST
@login_required
def switch_sdk(request, revision_id):
    """Switch the SDK used to create the XPI (sdk id taken from POST)."""
    revision = get_object_with_related_or_404(PackageRevision, pk=revision_id)
    if request.user.pk != revision.author.pk:
        return HttpResponseForbidden('You are not the author of this Add-on')
    sdk = get_object_or_404(SDK, id=request.POST.get('id', None))
    log.info('Addon %s (%s) switched from Add-on Kit version %s to %s' % (
        revision.package.full_name, revision.package.id_number,
        revision.sdk.version, sdk.version))
    revision.sdk = sdk
    revision.add_commit_message('Switched to Add-on Kit %s' % sdk.version)
    revision.save()
    return render_json(request,
                       "json/sdk_switched.json",
                       {'revision': revision, 'sdk': sdk,
                        'sdk_lib': revision.get_sdk_revision()})
@require_POST
@login_required
def upload_attachment(request, revision_id):
""" Upload new attachment to the PackageRevision
"""
revision = get_object_with_related_or_404(PackageRevision, pk=revision_id)
log.debug(revision)
if request.user.pk != revision.author.pk:
log_msg = ("[security] Attempt to upload attachment to package (%s) "
"by non-owner (%s)" % (revision_id, request.user))
log.warning(log_msg)
return HttpResponseForbidden(
'You are not the author of this %s' % escape(
revision.package.get_type_name()))
f = request.FILES.get('upload_attachment')
filename = request.META.get('HTTP_X_FILE_NAME')
if not f:
log_msg = 'Path not found: %s, revision: %s.' % (
filename, revision_id)
log.error(log_msg)
return HttpResponseServerError('Path not found.')
content = f.read()
# try to force UTF-8 code, on error continue with original data
try:
content = unicode(content, 'utf-8')
except:
pass
try:
attachment = revision.attachment_create_by_filename(
request.user, filename, content)
except ValidationError, e:
return HttpResponseForbidden(
'Validation errors.\n%s' % parse_validation_messages(e))
except Exception, e:
return HttpResponseForbidden(str(e))
return render_json(request,
"json/attachment_added.json",
{'revision': revision, 'attachment': attachment})
@require_POST
@login_required
def upload_attachments(request, id_number, type_id,
revision_number=None, version_name=None):
""" Upload new attachments to the PackageRevision
"""
revision = get_package_revision(None, id_number, type_id, revision_number,
version_name)
if request.user.pk != revision.author.pk:
log_msg = ("[security] Attempt to upload attachment to package (%s) "
"by non-owner (%s)" % (id_number, request.user))
log.warning(log_msg)
return HttpResponseForbidden(
'You are not the author of this %s' % escape(
revision.package.get_type_name()))
content = request.raw_post_data
filename = request.META.get('HTTP_X_FILE_NAME')
if not filename:
log_msg = 'Path not found: %s, package: %s.' % (
filename, id_number)
log.error(log_msg)
return HttpResponseServerError('Path not found.')
try:
attachment = revision.attachment_create_by_filename(
request.user, filename, content)
except ValidationError, e:
return HttpResponseForbidden(
'Validation errors.\n%s' % parse_validation_messages(e))
except Exception, e:
return HttpResponseForbidden(str(e))
return render_json(request,
"json/attachment_added.json",
{'revision': revision, 'attachment': attachment})
@require_POST
@login_required
def add_empty_attachment(request, id_number, type_id,
revision_number=None, version_name=None):
""" Add new empty attachment to the PackageRevision
"""
revision = get_package_revision(None, id_number, type_id, revision_number,
version_name)
if request.user.pk != revision.author.pk:
log_msg = ("[security] Attempt to add attachment to package (%s) by "
"non-owner (%s)" % (id_number, request.user))
log.warning(log_msg)
return HttpResponseForbidden(
'You are not the author of this %s' % escape(
revision.package.get_type_name()))
filename = request.POST.get('filename', False)
if not filename:
log_msg = 'Path not found: %s, package: %s.' % (
filename, id_number)
log.error(log_msg)
return HttpResponseServerError('Path not found.')
try:
attachment = revision.attachment_create_by_filename(request.user,
filename, '')
except ValidationError, e:
return HttpResponseForbidden(
'Validation errors.\n%s' % parse_validation_messages(e))
except Exception, e:
return HttpResponseForbidden(str(e))
return render_json(request,
"json/attachment_added.json",
{'revision': revision, 'attachment': attachment})
@require_POST
@login_required
def revision_add_attachment(request, pk):
    """Add attachment, download if necessary

    When a 'url' is POSTed, the content is downloaded (with size and
    encoding checks); otherwise an empty attachment named 'filename' is
    created on the revision.
    """
    revision = get_object_or_404(PackageRevision, pk=pk)
    if request.user.pk != revision.author.pk:
        log_msg = ("[security] Attempt to add attachment to package (%s) by "
                   "non-owner (%s)" % (revision.package, request.user))
        log.warning(log_msg)
        return HttpResponseForbidden(
            'You are not the author of this %s' % escape(
                revision.package.get_type_name()))
    url = request.POST.get('url', None)
    filename = request.POST.get('filename', None)
    if not filename or filename == "":
        log.error('Trying to create an attachment without name')
        return HttpResponseBadRequest('Path not found.')
    content = ''
    if url:
        log.info(('[%s] Preparing to download %s as an attachment of '
                  'PackageRevision %d') % (filename, url, revision.pk))
        # validate url
        field = URLField(verify_exists=True)
        encoding = request.POST.get('force_contenttype', False)
        try:
            url = field.clean(url)
        except ValidationError, err:
            log.warning('[%s] Invalid url provided\n%s' % (url,
                '\n'.join(err.messages)))
            raise Http404()
        except Exception, err:
            log.warning('[%s] Exception raised\n%s' % (url, str(err)))
            raise Http404()
        try:
            att = urllib2.urlopen(url, timeout=settings.URLOPEN_TIMEOUT)
        except Exception, err:
            log.warning('[%s] Exception raised by opening url\n%s' % (url, str(err)))
            raise Http404()
        # validate filesize
        att_info = att.info()
        if 'content-length' in att_info.dict:
            att_size = int(att_info.dict['content-length'])
            if att_size > settings.ATTACHMENT_MAX_FILESIZE:
                log.warning('[%s] File is too big (%db)' % (url, att_size))
                return HttpResponseBadRequest("Loading attachment failed\n"
                                              "File is too big")
        # download attachment's content
        log.debug('[%s] Downloading' % url)
        try:
            # Read one byte past the limit so oversize bodies lacking a
            # content-length header can be detected below.
            content = att.read(settings.ATTACHMENT_MAX_FILESIZE + 1)
        except Exception, err:
            log.warning('[%s] Exception raised by reading url\n%s' % (url, str(err)))
            raise Http404()
        # work out the contenttype
        basename, ext = os.path.splitext(filename)
        unicode_contenttypes = ('utf-8',)
        ext = ext.split('.')[1].lower() if ext else None
        if not encoding:
            encoding = att.headers['content-type'].split('charset=')[-1]
        # Editable files are forced to utf-8 regardless of the header.
        if encoding not in unicode_contenttypes and ext in EDITABLE_EXTENSIONS:
            log.info('[%s] Forcing the "utf-8" encoding from '
                     '"%s"' % (url, encoding))
            encoding = 'utf-8'
        # convert to unicode if needed
        if encoding in unicode_contenttypes:
            content = unicode(content, encoding)
        if len(content) >= settings.ATTACHMENT_MAX_FILESIZE + 1:
            log.warning('[%s] Downloaded file is too big' % url)
            return HttpResponseBadRequest("Loading attachment failed\n"
                                          "File is too big")
        log.info('[%s] Downloaded %db, encoding: %s' % (url, len(content),
                 encoding))
        att.close()
    try:
        attachment = revision.attachment_create_by_filename(
            request.user, filename, content)
    except ValidationError, err:
        log.warning("[%s] Validation error.\n%s" % (filename, str(err)))
        return HttpResponseForbidden(
            'Validation error.\n%s' % parse_validation_messages(err))
    except Exception, err:
        log.warning("[%s] Exception raised\n%s" % (filename, str(err)))
        return HttpResponseForbidden(str(err))
    return render_json(request,
                       "json/attachment_added.json",
                       {'revision': revision, 'attachment': attachment})
@require_POST
@login_required
@transaction.commit_on_success
def rename_attachment(request, revision_id):
    """
    Rename an attachment in a PackageRevision.

    POST params:
        uid          -- primary key of the attachment to rename
        new_filename -- new base name for the attachment
        new_ext      -- new extension (optional; keeps the old one if absent)

    Only the revision's author may rename; everyone else gets a 403.
    """
    revision = get_object_with_related_or_404(PackageRevision, pk=revision_id)
    if request.user.pk != revision.author.pk:
        log_msg = ("[security] Attempt to rename attachment in revision (%s) "
                   "by non-owner (%s)" % (revision_id, request.user))
        log.warning(log_msg)
        return HttpResponseForbidden('You are not the author of this Package')
    uid = request.POST.get('uid', '').strip()
    try:
        attachment = revision.attachments.get(pk=uid)
    except Exception:
        # A bad or missing uid lands here.  Catch Exception rather than a
        # bare except so SystemExit/KeyboardInterrupt still propagate.
        log_msg = ('Attempt to rename a non existing attachment. attachment: '
                   '%s, revision: %s.' % (uid, revision))
        log.warning(log_msg)
        return HttpResponseForbidden(
            'There is no such attachment in %s' % escape(
                revision.package.full_name))
    new_name = request.POST.get('new_filename')
    new_ext = request.POST.get('new_ext') or attachment.ext
    if not revision.validate_attachment_filename(new_name, new_ext):
        # Bug fix: report the extension that was actually requested
        # (new_ext), not the attachment's current one.
        return HttpResponseForbidden(
            ('Sorry, there is already an attachment in your add-on '
             'with the name "%s.%s". Each attachment in your add-on '
             'needs to have a unique name.') % (new_name, new_ext)
        )
    attachment.filename = new_name
    attachment.ext = new_ext
    try:
        attachment = revision.update(attachment)
    except ValidationError as err:
        return HttpResponseForbidden(str(err))
    return render_json(request,
            "json/attachment_renamed.json",
            {'revision': revision, 'attachment': attachment})
@require_POST
@login_required
def rmdir(request, pk, target, path):
    """
    Remove a directory from a PackageRevision.

    ``target`` selects the tree: 'data' (attachments) or 'lib' (modules).
    Removing module directories is not supported yet.
    """
    revision = get_object_or_404(PackageRevision, pk=pk)
    if target not in ['data', 'lib']:
        # Bug fix: the original returned the HttpResponseForbidden *class*
        # instead of calling it, which is not a valid view response.
        return HttpResponseForbidden('Unsupported target')
    if target == 'lib':
        return HttpResponseForbidden('not supported yet')
    # Only target == 'data' can reach this point, so the original
    # conditional expression collapses to a single call.
    revision.attachment_rmdir(path)
    return render_json(request,
            '%s_rmdir.json' % target, {'revision': revision, 'path': path})
@require_POST
@login_required
def remove_attachment(request, revision_id):
    """
    Remove an attachment from a PackageRevision.

    POST params:
        uid -- primary key of the attachment to remove

    Only the revision's author may remove attachments.  A missing
    attachment yields a 404 from the lookup helper.
    """
    revision = get_object_with_related_or_404(PackageRevision, pk=revision_id)
    if request.user.pk != revision.author.pk:
        log_msg = ('[security] Attempt to remove attachment from revision '
                   '(%s) by non-owner (%s)' % (revision_id, request.user))
        log.warning(log_msg)
        return HttpResponseForbidden('You are not the author of this Package')
    uid = request.POST.get('uid', '').strip()
    # get_object_with_related_or_404 raises Http404 when the attachment
    # does not exist, so the original "if not attachment" fallback was
    # unreachable dead code and has been removed.
    attachment = get_object_with_related_or_404(Attachment,
            pk=uid, revisions=revision)
    revision.attachment_remove(attachment)
    return render_json(request,
            "json/attachment_removed.json",
            {'revision': revision, 'attachment': attachment})
def download_attachment(request, uid):
    """
    Serve an attachment from a PackageRevision as a file download.

    Private attachments may only be fetched by users allowed to view them;
    anyone else gets a 403.
    """
    attachment = get_object_with_related_or_404(Attachment, id=uid)
    if not attachment.can_view(request.user):
        log.warning("[security] Attempt to download private attachment (%s) by "
                    "non-owner (%s)" % (uid, request.user))
        return HttpResponseForbidden(
            'You are not the author of this attachment.')
    response = serve(request, attachment.path,
                     settings.UPLOAD_DIR, show_indexes=False)
    # Suggest the original filename/extension to the browser.
    response['Content-Disposition'] = 'filename=%s.%s' % (
        attachment.filename, attachment.ext)
    return response
@require_POST
@login_required
def save(request, revision_id, type_id=None):
    """
    Save package metadata and module/attachment contents in one request.

    Accepts any subset of these POST parameters: ``full_name``, ``jid``,
    ``version_name``, ``package_description``, ``package_extra_json``,
    ``revision_message``, plus one key per module filename and one key per
    attachment uid carrying new content.  Only the revision's author may
    save; everyone else gets a 403.

    @TODO: check how dynamic module loading affects save
    """
    revision = get_object_with_related_or_404(PackageRevision, pk=revision_id)
    if request.user.pk != revision.author.pk:
        log_msg = ("[security] Attempt to save package (%s) by "
                   "non-owner (%s)" % (revision.pk,
                   request.user))
        log.warning(log_msg)
        return HttpResponseForbidden('You are not the author of this Package')
    # Flags tracking which objects need an explicit save at the end.
    save_revision = False
    save_package = False
    start_version_name = revision.version_name
    start_revision_message = revision.message
    response_data = {}
    package_full_name = request.POST.get('full_name', False)
    jid = request.POST.get('jid', None)
    version_name = request.POST.get('version_name', False)
    # validate jid
    if jid and not validator.is_valid(
        'alphanum_plus', jid):
        return HttpResponseForbidden(escape(
            validator.get_validation_message('alphanum_plus')))
    # validate package_full_name and version_name
    if version_name and not validator.is_valid(
        'alphanum_plus', version_name):
        return HttpResponseForbidden(escape(
            validator.get_validation_message('alphanum_plus')))
    # here we're checking if the *current* full_name is different than the
    # revision's full_name
    if package_full_name and package_full_name != revision.package.full_name:
        try:
            revision.set_full_name(package_full_name)
        except ValidationError:
            return HttpResponseForbidden(escape(
                validator.get_validation_message('alphanum_plus_space')))
        except IntegrityError:
            return HttpResponseForbidden(
                'You already have a %s with that name' % escape(
                    revision.package.get_type_name())
            )
        else:
            save_package = True
            save_revision = True
            response_data['full_name'] = package_full_name
    package_description = request.POST.get('package_description', False)
    if package_description:
        save_package = True
        revision.package.description = package_description
        response_data['package_description'] = package_description
    extra_json = request.POST.get('package_extra_json')
    if extra_json is not None:
        # None means it wasn't submitted. We want to accept blank strings.
        save_revision = True
        try:
            revision.set_extra_json(extra_json, save=False)
        except JSONDecodeError:
            return HttpResponseBadRequest(
                'Extra package properties were invalid JSON.')
        except IllegalFilenameException, e:
            return HttpResponseBadRequest(str(e))
        except KeyNotAllowed, e:
            return HttpResponseForbidden(str(e))
        response_data['package_extra_json'] = extra_json
    # Collect only the modules/attachments whose content actually changed.
    changes = []
    for mod in revision.modules.all():
        if request.POST.get(mod.filename, False):
            code = request.POST[mod.filename]
            if mod.code != code:
                mod.code = code
                changes.append(mod)
    for att in revision.attachments.all():
        uid = str(att.pk)
        if request.POST.get(uid):
            att.data = request.POST[uid]
            if att.changed():
                changes.append(att)
    attachments_changed = {}
    if save_revision or changes:
        try:
            revision.save()
        except ValidationError, err:
            return HttpResponseForbidden(
                'Validation error.\n%s' % parse_validation_messages(err))
    if changes:
        # attachments_changed is exposed to the template via locals() below.
        attachments_changed = simplejson.dumps(
            revision.updates(changes, save=False))
    revision_message = request.POST.get('revision_message', False)
    if revision_message and revision_message != start_revision_message:
        revision.message = revision_message
        # save revision message without changing the revision
        super(PackageRevision, revision).save()
        response_data['revision_message'] = revision_message
    if version_name and version_name != start_version_name \
        and version_name != revision.package.version_name:
        # save_package is cleared here, so set_version presumably persists
        # the package itself -- TODO confirm against the model.
        save_package = False
        try:
            revision.set_version(version_name)
        except Exception, err:
            return HttpResponseForbidden(escape(err.__str__()))
    if jid:
        # The jid must be globally unique across all packages.
        try:
            Package.objects.get(jid=jid)
        except Package.DoesNotExist:
            pass
        else:
            return HttpResponseForbidden(('Package with JID "%s" already '
                'exists in the Builder') % jid)
        revision.package.jid = jid
        response_data['jid'] = jid
        save_package = True
    if save_package:
        revision.package.save()
    response_data['name'] = revision.package.name
    response_data['version_name'] = revision.get_version_name_only()
    if save_revision or changes:
        revision.update_commit_message(True)
    # NOTE: render_json receives locals(), so every local defined above is
    # visible to the template.
    return render_json(request, "json/package_saved.json", locals())
@login_required
@transaction.commit_on_success
def create(request, type_id):
    """
    Create a new Package (Add-on or Library) owned by the requesting user.

    Reads optional ``full_name`` and ``description`` from POST and
    redirects to the latest-revision page of the new package.
    """
    package = Package(
        author=request.user,
        full_name=request.POST.get("full_name", None),
        description=request.POST.get("description", ""),
        type=type_id)
    package.save()
    return HttpResponseRedirect(reverse('jp_latest', args=[package.pk]))
@require_POST
@login_required
def upload_xpi(request):
    """
    Upload an XPI and create an Add-on (plus any bundled Libraries).

    Expects the file under the 'xpi' key in request.FILES.  The upload is
    written to a temporary directory, handed to create_package_from_xpi,
    and the temporary files are always removed afterwards.
    """
    try:
        xpi = request.FILES['xpi']
    except KeyError:
        log.warning('No file "xpi" posted')
        return HttpResponseForbidden('No xpi supplied.')
    temp_dir = tempfile.mkdtemp()
    path = os.path.join(temp_dir, xpi.name)
    try:
        xpi_file = codecs.open(path, mode='wb+')
        try:
            for chunk in xpi.chunks():
                xpi_file.write(chunk)
        finally:
            xpi_file.close()
        try:
            addon = create_package_from_xpi(path, request.user)
        except Exception as err:
            log.warning("Bad file %s" % str(err))
            return HttpResponseForbidden('Wrong file')
    finally:
        # Bug fix: the temporary directory used to leak whenever
        # create_package_from_xpi raised; clean up on every path.
        # rmtree removes the uploaded file too, so no separate os.remove.
        shutil.rmtree(temp_dir, ignore_errors=True)
    # NOTE: an unreachable JSON {'reload': ...} response that followed the
    # return was removed; restore it once the front-end supports
    # interactive upload.
    return HttpResponseRedirect(addon.get_absolute_url())
@login_required
def library_autocomplete(request):
    """
    'Live' search of libraries by name.

    GET params:
        q     -- the query string
        limit -- maximum number of results (configured default if absent
                 or not a number)
    """
    from search.helpers import package_query
    from elasticutils import F
    q = request.GET.get('q')
    try:
        limit = int(request.GET.get('limit'))
    except (TypeError, ValueError):
        # Missing or non-numeric limit: fall back to the configured
        # default instead of a bare except that could hide other errors.
        limit = settings.LIBRARY_AUTOCOMPLETE_LIMIT
    # Exclude the two built-in addon-kit packages from results.
    ids = (settings.MINIMUM_PACKAGE_ID, settings.MINIMUM_PACKAGE_ID - 1)
    notAddonKit = ~(F(id_number=ids[0]) | F(id_number=ids[1]))
    # Public libraries, plus the requesting user's private ones.
    onlyMyPrivateLibs = (F(active=True) | F(author=request.user.id))
    try:
        qs = (Package.search().query(or_=package_query(q)).filter(type='l')
              .filter(notAddonKit).filter(onlyMyPrivateLibs))
        found = qs[:limit]
    except Exception:
        log.exception('Library autocomplete error')
        found = []
    return render_json(request,
            'json/library_autocomplete.json', {'libraries': found})
@require_POST
@login_required
def assign_library(request, revision_id):
    """Assign (link) a library to the package revision.

    POST params:
        id_number          -- id_number of the library package
        use_latest_version -- optional flag selecting which library
                              revision gets linked
    """
    revision = get_object_with_related_or_404(PackageRevision, pk=revision_id)
    if request.user.pk != revision.author.pk:
        log_msg = ("[security] Attempt to assign library to revision (%s) by "
                   "non-owner (%s)" % (revision_id, request.user))
        log.warning(log_msg)
        return HttpResponseForbidden('You are not the author of this Package')
    # TODO: make linking work with library_id instead of id number
    library = get_object_or_404(
        Package, type='l', id_number=request.POST['id_number'])
    # NOTE(review): 'use_latest_version' selecting library.version while the
    # default selects library.latest looks inverted -- confirm against the
    # Package model before changing; behavior is kept as-is here.
    if request.POST.get('use_latest_version', False):
        lib_revision = library.version
    else:
        lib_revision = library.latest
    try:
        revision.dependency_add(lib_revision)
    except Exception as err:
        return HttpResponseForbidden(str(err))
    # Simplification: the original conditional returned the same URL on
    # both branches (while comparing user.pk against the revision's pk),
    # so it collapses to a single call.
    lib_revision_url = lib_revision.get_absolute_url()
    return render_json(request,
            'json/library_assigned.json', {
                'revision': revision,
                'library': library,
                'lib_revision': lib_revision,
                'lib_revision_url': lib_revision_url})
@require_POST
@login_required
def remove_library(request, revision_id):
    """Remove a library dependency (id_number from POST) from the revision."""
    revision = get_object_with_related_or_404(PackageRevision, pk=revision_id)
    if request.user.pk != revision.author.pk:
        log.warning("[security] Attempt to remove library from revision (%s) by "
                    "non-owner (%s)" % (revision_id, request.user))
        return HttpResponseForbidden(
            'You are not the author of this %s' % escape(
                revision.package.get_type_name()))
    # TODO: make unlinking work with library_id instead of id number
    lib_id_number = request.POST.get('id_number')
    library = get_object_or_404(Package, id_number=lib_id_number)
    try:
        revision.dependency_remove_by_id_number(lib_id_number)
    except Exception as err:
        return HttpResponseForbidden(escape(str(err)))
    return render_json(request,
                       'json/dependency_removed.json',
                       {'revision': revision, 'library': library})
@require_POST
@login_required
def update_library(request, revision_id):
    """Update a library dependency to a specific revision (both from POST)."""
    revision = get_object_with_related_or_404(PackageRevision, pk=revision_id)
    if request.user.pk != revision.author.pk:
        log.warning("[security] Attempt to update library in revision (%s) by "
                    "non-owner (%s)" % (revision_id, request.user))
        return HttpResponseForbidden(
            'You are not the author of this %s' % escape(
                revision.package.get_type_name()))
    # TODO: make updating work with library_id instead of id number
    target_revision = get_object_or_404(
        PackageRevision,
        pk=request.POST.get('revision'),
        package__id_number=request.POST.get('id_number'))
    try:
        revision.dependency_update(target_revision)
    except DependencyException as err:
        return HttpResponseForbidden(escape(str(err)))
    return render_json(request,
                       'json/library_updated.json', {
                           'revision': revision,
                           'library': target_revision.package,
                           'lib_revision': target_revision})
@login_required
def latest_dependencies(request, revision_id):
    """Return, as JSON, the dependency revisions that are out of date."""
    revision = get_object_with_related_or_404(PackageRevision, pk=revision_id)
    outdated = revision.get_outdated_dependency_versions()
    return render_json(
        request, 'json/latest_dependencies.json', {'revisions': outdated})
@never_cache
def get_revisions_list_html(request, revision_id):
    """Render the revision list shown in the modal window."""
    current = get_object_with_related_or_404(PackageRevision, pk=revision_id)
    if not current.package.can_view(request.user):
        raise Http404
    context = {
        'package': current.package,
        'revisions': current.package.revisions.all(),
        'revision_number': int(current.revision_number),
        'current': current,
    }
    return render(request, '_package_revisions_list.html', context)
@never_cache
def get_latest_revision_number(request, package_id):
    """Return, as JSON, the latest revision number for the given package."""
    package = get_object_or_404(Package, id_number=package_id)
    if not package.can_view(request.user):
        raise Http404
    payload = {'revision_number': package.latest.revision_number}
    return HttpResponse(simplejson.dumps(payload))
@never_cache
def get_revision_modules_list(request, pk):
    """Return, as JSON, the names of all modules exported to the XPI."""
    revision = get_object_or_404(PackageRevision, pk=pk)
    names = revision.get_module_names()
    return HttpResponse(simplejson.dumps(names),
                        mimetype="application/json")
@never_cache
def get_revision_conflicting_modules_list(request, pk):
    """Return, as JSON, the module names that conflict on XPI export."""
    revision = get_object_or_404(PackageRevision, pk=pk)
    names = revision.get_conflicting_module_names()
    return HttpResponse(simplejson.dumps(names),
                        mimetype="application/json")
def _get_zip_cache_key(request, hashtag):
session = request.session.session_key
return 'zip:timing:queued:%s:%s' % (hashtag, session)
@csrf_exempt
@require_POST
def prepare_zip(request, revision_id):
    """
    Queue asynchronous creation of a downloadable zip of the revision.

    The archive is built in the background and later fetched via
    ``get_zip``.  Requires an alphanumeric 'hashtag' POST parameter
    identifying the job.
    """
    revision = get_object_with_related_or_404(PackageRevision, pk=revision_id)
    if (not revision.package.active and request.user != revision.package.author):
        # pretend the package doesn't exist as it's private
        raise Http404()
    hashtag = request.POST.get('hashtag')
    if not hashtag:
        # Bug fix: the two concatenated literals were missing a separating
        # space, producing "updated!We have" in the user-facing message.
        return HttpResponseForbidden('Add-on Builder has been updated! '
                'We have updated this part of the application. Please '
                'empty your cache and reload to get changes.')
    if not validator.is_valid('alphanum', hashtag):
        log.warning('[security] Wrong hashtag provided')
        return HttpResponseBadRequest("{'error': 'Wrong hashtag'}")
    log.info('[zip:%s] Addon added to queue' % hashtag)
    # Remember when the job was queued so get_zip can report total timing.
    tqueued = time.time()
    tkey = _get_zip_cache_key(request, hashtag)
    cache.set(tkey, tqueued, 120)
    # Kick off the asynchronous zip build.
    zip_source(pk=revision.pk, hashtag=hashtag, tqueued=tqueued)
    return HttpResponse('{"delayed": true}')
@never_cache
def get_zip(request, hashtag, filename):
    """
    Serve a previously prepared zip (see prepare_zip) as a download.
    """
    if not validator.is_valid('alphanum', hashtag):
        log.warning('[security] Wrong hashtag provided')
        return HttpResponseForbidden("{'error': 'Wrong hashtag'}")
    path = os.path.join(settings.XPI_TARGETDIR, '%s.zip' % hashtag)
    log.info('[zip:%s] Downloading Addon from %s' % (filename, path))
    # Report queue-to-download timing if the queue timestamp is still cached.
    tend = time.time()
    tqueued = cache.get(_get_zip_cache_key(request, hashtag))
    if tqueued:
        ttotal = (tend - tqueued) * 1000
        statsd.timing('zip.total', ttotal)
        total = '%dms' % ttotal
    else:
        total = 'n/a'
    log.info('[zip:%s] Downloading Add-on (%s)' % (hashtag, total))
    response = serve(request, path, '/', show_indexes=False)
    response['Content-Disposition'] = ('attachment; '
                                       'filename="%s.zip"' % filename)
    return response
@never_cache
def check_zip(r, hashtag):
    """Report (as JSON) whether the zip for ``hashtag`` has been built yet."""
    if not validator.is_valid('alphanum', hashtag):
        log.warning('[security] Wrong hashtag provided')
        return HttpResponseForbidden("{'error': 'Wrong hashtag'}")
    path = os.path.join(settings.XPI_TARGETDIR, '%s.zip' % hashtag)
    ready = os.path.isfile(path)
    return HttpResponse('{"ready": true}' if ready else '{"ready": false}')
@never_cache
def all_zip(request, pk):
    """Zip every revision of a package and serve the combined archive."""
    if not pk:
        log.critical("[zip] No package_id provided")
        # Bug fix: a Django view must return an HttpResponse; the original
        # returned None here, causing a server error downstream.
        return HttpResponseBadRequest("No package_id provided")
    package = Package.objects.get(pk=pk)
    zips = []
    # Zip all revisions of the package
    for revision in package.revisions.all():
        zips.append(revision.zip_source(hashtag=revision.get_cache_hashtag()))
    # Zip all zipped revisions into one file
    zip_targetname = "package-%d.zip" % package.pk
    zip_targetpath = os.path.join(settings.XPI_TARGETDIR, zip_targetname)
    with closing(ZipFile(zip_targetpath, 'w', ZIP_DEFLATED)) as z:
        for fn in zips:
            z.write(fn, os.path.basename(fn))
    log.info('[zipall:%s] Downloading All zipped' % pk)
    response = serve(request, zip_targetpath, '/', show_indexes=False)
    response['Content-Disposition'] = ('attachment; filename="%s"' % zip_targetname)
    return response
|
'''Autogenerated by xml_generate script, do not edit!'''
from OpenGL import platform as _p, arrays
# Code generation uses this
from OpenGL.raw.GL import _types as _cs
# End users want this...
from OpenGL.raw.GL._types import *
from OpenGL.raw.GL import _errors
from OpenGL.constant import Constant as _C
import ctypes
_EXTENSION_NAME = 'GL_ARB_shading_language_include'
# Wraps a stub below into a real GL entry point for this extension,
# attaching PyOpenGL's standard error checker.
def _f( function ):
    return _p.createFunction( function,_p.PLATFORM.GL,'GL_ARB_shading_language_include',error_checker=_errors._error_checker)
# Extension enum constants.
GL_NAMED_STRING_LENGTH_ARB=_C('GL_NAMED_STRING_LENGTH_ARB',0x8DE9)
GL_NAMED_STRING_TYPE_ARB=_C('GL_NAMED_STRING_TYPE_ARB',0x8DEA)
GL_SHADER_INCLUDE_ARB=_C('GL_SHADER_INCLUDE_ARB',0x8DAE)
# Entry-point stubs: the 'pass' bodies never run -- @_p.types records each
# signature and @_f replaces the stub with the created platform function.
@_f
@_p.types(None,_cs.GLuint,_cs.GLsizei,ctypes.POINTER( ctypes.POINTER( _cs.GLchar )),arrays.GLintArray)
def glCompileShaderIncludeARB(shader,count,path,length):pass
@_f
@_p.types(None,_cs.GLint,arrays.GLcharArray)
def glDeleteNamedStringARB(namelen,name):pass
@_f
@_p.types(None,_cs.GLint,arrays.GLcharArray,_cs.GLsizei,arrays.GLintArray,arrays.GLcharArray)
def glGetNamedStringARB(namelen,name,bufSize,stringlen,string):pass
@_f
@_p.types(None,_cs.GLint,arrays.GLcharArray,_cs.GLenum,arrays.GLintArray)
def glGetNamedStringivARB(namelen,name,pname,params):pass
@_f
@_p.types(_cs.GLboolean,_cs.GLint,arrays.GLcharArray)
def glIsNamedStringARB(namelen,name):pass
@_f
@_p.types(None,_cs.GLenum,_cs.GLint,arrays.GLcharArray,_cs.GLint,arrays.GLcharArray)
def glNamedStringARB(type,namelen,name,stringlen,string):pass
|
from threading import Thread
import time
from hallo.events import EventMessage
def test_threads_simple(hallo_getter):
    """The 'active threads' command should report a thread count."""
    test_hallo = hallo_getter({"hallo_control"})
    test_hallo.function_dispatcher.dispatch(
        EventMessage(test_hallo.test_server, None, test_hallo.test_user, "active threads")
    )
    replies = test_hallo.test_server.get_send_data(1, test_hallo.test_user, EventMessage)
    text = replies[0].text.lower()
    assert "i have" in text
    assert "active threads" in text
def test_threads_increase(hallo_getter):
    """Spawning extra threads should increase the reported thread count."""
    test_hallo = hallo_getter({"hallo_control"})

    def reported_threads():
        # Ask hallo for its thread count and parse the number out of the
        # "I have N active threads" response.
        test_hallo.function_dispatcher.dispatch(
            EventMessage(test_hallo.test_server, None, test_hallo.test_user, "active threads")
        )
        data = test_hallo.test_server.get_send_data(1, test_hallo.test_user, EventMessage)
        return int(data[0].text.lower().split("active")[0].split("have")[1].strip())

    before = reported_threads()
    # Launch 10 threads
    for _ in range(10):
        Thread(target=time.sleep, args=(10,)).start()
    # Run function again
    after = reported_threads()
    assert after > before, "Thread count should have increased"
|
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
# ----------------------------------------------------------------------------
# If you submit this package back to Spack as a pull request,
# please first remove this boilerplate and all FIXME comments.
#
# This is a template package file for Spack. We've put "FIXME"
# next to all the things you'll want to change. Once you've handled
# them, you can save this file and test your package like this:
#
# spack install angsd
#
# You can edit this file again by typing:
#
# spack edit angsd
#
# See the Spack documentation for more information on packaging.
# ----------------------------------------------------------------------------
from spack import *
class AngsdLubio(Package):
    """Angsd is a program for analysing NGS data. The software can handle a
    number of different input types from mapped reads to imputed genotype
    probabilities. Most methods take genotype uncertainty into account
    instead of basing the analysis on called genotypes. This is especially
    useful for low and medium depth data."""
    homepage = "https://github.com/ANGSD/angsd"
    git = "https://github.com/ANGSD/angsd.git"
    # Known versions; sha256 pins the release tarballs.
    version('master', branch='master')
    version('2019-11-05', commit='d22308aa5a3f3b5a1e1759c5770b5a0c7c95a226')
    version('0.921', sha256='8892d279ce1804f9e17fe2fc65a47e5498e78fc1c1cb84d2ca2527fd5c198772')
    version('0.919', sha256='c2ea718ca5a5427109f4c3415e963dcb4da9afa1b856034e25c59c003d21822a')
    depends_on('htslib')
    # 0.919 is declared incompatible with htslib 1.6 and newer.
    conflicts('^htslib@1.6:', when='@0.919')
    def setup_run_environment(self, env):
        # Expose the installed R scripts via R_LIBS at run time.
        env.set('R_LIBS', self.prefix.R)
    def edit(self, spec, prefix):
        # NOTE(review): this class derives from plain Package, whose install
        # flow never invokes edit(); confirm whether MakefilePackage was
        # intended -- otherwise these Makefile fixups are never applied.
        makefile = FileFilter('Makefile')
        # NOTE(review): FileFilter.filter treats the first argument as a
        # regex; '$(realpath $(HTSSRC))' contains unescaped metacharacters,
        # so verify these substitutions match (string=True may be needed).
        makefile.filter('HTS_INCDIR=$(realpath $(HTSSRC))', 'HTS_INCDIR=$(realpath $(HTSSRC))/include')
        makefile.filter('HTS_LIBDIR=$(realpath $(HTSSRC))', 'HTS_LIBDIR=$(realpath $(HTSSRC))/lib')
    def install(self, spec, prefix):
        make()
        mkdirp(prefix.bin)
        # Install the main binary plus the bundled R scripts and resources.
        install('angsd', join_path(prefix.bin))
        install_tree('R', prefix.R)
        install_tree('RES', prefix.RES)
        install_tree('scripts', prefix.scripts)
|
import xml.etree.ElementTree as ET
from ltr.judgments import _judgments_by_qid
def fold_whoopsies(whoopsies1, whoopsies2):
    """Merge ``whoopsies2`` into ``whoopsies1`` in place.

    The result is ordered by query id, then by descending whoops
    magnitude, so the biggest whoops for each query come first.  Returns
    the (mutated) first list.
    """
    whoopsies1 += whoopsies2
    # Negating the magnitude gives the same descending-within-qid order as
    # the original "1000 - magnitude" trick, without the magic constant.
    whoopsies1.sort(key=lambda w: (w.qid, -w.magnitude()))
    return whoopsies1
def dedup_whoopsies(sortedWhoopsies):
    """Given whoopsies sorted by qid then descending magnitude, keep only
    the first -- i.e. the worst -- whoopsie for each query."""
    worst = []
    # -1 matches the original sentinel: a leading qid of -1 is skipped.
    prev_qid = -1
    for whoopsie in sortedWhoopsies:
        if whoopsie.qid != prev_qid:
            worst.append(whoopsie)
            prev_qid = whoopsie.qid
    return worst
class MARTModel:
    """An ensemble of weighted regression trees parsed from a ranklib XML
    model (LambdaMART, or Random Forests with exactly one bag)."""

    def __init__(self, ranklib_xml, features):
        """ Create a MART model from a ranklib_ensemble
        (string w/ ranklib xml model output)
        using `features` - array of named features
        where the 0th item is ranklib feature 1
        [{'name': 'release_date'}, ...] """
        # Validate the "##"-prefixed header: only LambdaMART, or Random
        # Forests with a single bag, are supported.
        valid = False
        linesSplit = ranklib_xml.split('\n')
        if linesSplit[0] == "## LambdaMART":
            print("Whoopsies on LAMBDAMart")
            valid = True
        if linesSplit[0] == "## Random Forests":
            if linesSplit[1] == "## No. of bags = 1":
                print("RF with 1 bag")
                valid = True
        if (not valid):
            raise ValueError("Whoopsies only support LambdaMART of Random Forest of bags=1")
        # Strip the leading comment header so the remainder parses as XML.
        headerAt = 0
        for line in linesSplit:
            if len(line) > 0 and line[0] == '#':
                headerAt += 1
            else:
                break;
        print("Header At %s" % headerAt)
        validXml = '\n'.join(ranklib_xml.split('\n')[headerAt:])
        lambdaModel = ET.fromstring(validXml)
        # List of tuples (weight, root split)
        self.trees = []
        for node in lambdaModel:
            self.trees.append((float(node.attrib['weight']),
                               Split(node[0], features)) )

    def __str__(self):
        # Concatenate each tree's pseudo-code rendering (weighted outputs).
        rVal = ""
        for tree in self.trees:
            weight = tree[0]
            tree = tree[1]
            rVal += tree.treeString(weight=weight)
            rVal += "\n\n"
        return rVal

    def whoopsies(self):
        """ After eval, what are the most glaring
        query-doc inconsistencies in the provided judgments
        over the whole ensemble """
        whoopsQueries = {}
        # One slot per tree, filled with each query's worst whoopsie.
        # NOTE(review): this single perTreeWhoops list object is shared by
        # every QueryWhoopsie created below -- confirm the aliasing is
        # intended.
        perTreeWhoops = [None for _ in self.trees]
        for treeNo, tree in enumerate(self.trees):
            treeWhoopsies = tree[1].whoopsies()
            for whoops in dedup_whoopsies(treeWhoopsies):
                if whoops.qid not in whoopsQueries:
                    whoopsQueries[whoops.qid] = QueryWhoopsie(qid=whoops.qid,
                                                              totalMagnitude=0,
                                                              minGrade=0,
                                                              count=0,
                                                              maxGrade=0,
                                                              perTreeWhoops=perTreeWhoops)
                whoopsQueries[whoops.qid].count += 1
                whoopsQueries[whoops.qid].totalMagnitude += whoops.magnitude()
                whoopsQueries[whoops.qid].minGrade = whoops.minGrade
                whoopsQueries[whoops.qid].maxGrade = whoops.maxGrade
                whoopsQueries[whoops.qid].perTreeWhoops[treeNo] = whoops
        return whoopsQueries

    def eval(self, judgments):
        # Push every judgment down each tree, accumulating stats at leaves.
        for tree in self.trees:
            # weight = tree[0]
            tree = tree[1]
            tree.eval(judgments)
class QueryWhoopsie:
    """Aggregated whoopsie statistics for one query across all trees."""

    def __init__(self, qid, totalMagnitude,
                 count, maxGrade, minGrade,
                 perTreeWhoops):
        self.qid = qid
        self.count = count
        self.totalMagnitude = totalMagnitude
        self.maxGrade = maxGrade
        self.minGrade = minGrade
        self.perTreeWhoops = perTreeWhoops

    def perTreeReport(self):
        """One ';'-joined summary entry per tree ('<None>' when a tree had
        no whoopsie for this query)."""
        parts = []
        for treeNo, whoops in enumerate(self.perTreeWhoops):
            if whoops is None:
                parts.append("<None>")
                continue
            parts.append("tree:%s=>%s(%s)-%s(%s)" % (
                treeNo, whoops.minGrade, whoops.minGradeDocId,
                whoops.maxGrade, whoops.maxGradeDocId))
        return ";".join(parts)
class Whoopsie:
    """A per-leaf inconsistency: judgments for one query that received the
    same leaf output despite carrying different grades."""

    def __init__(self, qid, judgList,
                 minGrade, maxGrade,
                 minGradeDocId, maxGradeDocId,
                 output):
        self.qid = qid
        self.judgList = judgList
        self.minGrade = minGrade
        self.maxGrade = maxGrade
        self.minGradeDocId = minGradeDocId
        self.maxGradeDocId = maxGradeDocId
        self.output = output

    def magnitude(self):
        """Grade spread of this whoopsie (max grade minus min grade)."""
        return self.maxGrade - self.minGrade
class EvalReport:
    """Summary of the judgments that landed on a single leaf split."""

    def __init__(self, split):
        """Build a report for ``split``; the split must be a leaf."""
        if split.output is None:
            raise ValueError("Split not a leaf")
        self.split = split
        self.count = len(split.evals)
        self.whoopsies = []
        self.computeWhoopsies()

    def computeWhoopsies(self):
        """Collect queries whose judgments disagree on grade at this leaf,
        sorted with the biggest grade spread first."""
        judgmentsByQid = _judgments_by_qid(self.split.evals)
        found = []
        for qid, judgList in judgmentsByQid.items():
            if len(judgList) <= 1:
                continue
            # min()/max() return the first occurrence on ties, matching the
            # original strict-comparison scan.
            lowest = min(judgList, key=lambda j: j.grade)
            highest = max(judgList, key=lambda j: j.grade)
            if lowest.grade != highest.grade:
                found.append(Whoopsie(qid=qid, judgList=judgList,
                                      minGrade=lowest.grade,
                                      maxGrade=highest.grade,
                                      minGradeDocId=lowest.docId,
                                      maxGradeDocId=highest.docId,
                                      output=self.split.output))
        found.sort(key=lambda w: w.maxGrade - w.minGrade, reverse=True)
        self.whoopsies = found

    def __str__(self):
        summaries = ["qid:%s:%s(%s)-%s(%s)" % (w.qid, w.minGrade,
                                               w.minGradeDocId,
                                               w.maxGrade,
                                               w.maxGradeDocId)
                     for w in self.whoopsies]
        return "%s/%s/%s" % (self.count, len(self.whoopsies),
                             ";".join(summaries))

    def __repr__(self):
        return str(self)
class Split:
    """One node of a ranklib regression tree.

    A node is either an internal split (feature name, threshold, left and
    right children) or a leaf (an output value).  During eval() each leaf
    collects the judgments that land on it so the tree's behavior can be
    analyzed afterwards (see whoopsies()).
    """

    def __init__(self, splitEl, features):
        """Parse a <split> XML element into a (sub)tree.

        Args:
            splitEl: an xml.etree Element for this split node.
            features: list where the 0th item corresponds to ranklib
                feature 1; each item is a dict with a 'name' key.
        """
        self.feature = None     # Name of the feature
        self.featureOrd = None  # ONE BASED, feature ord in the ranklib model
        self.featureIdx = None  # Zero BASED - use for lookups
        self.threshold = None
        self.value = None
        self.left = None
        self.right = None
        self.output = None      # None for internal nodes, float for leaves
        self.evalReport = None
        self.evals = []
        for el in splitEl:
            if (el.tag == 'feature'):
                self.featureOrd = int(el.text.strip())
                self.featureIdx = self.featureOrd - 1
                self.feature = features[self.featureIdx]['name']
            elif (el.tag == 'threshold'):
                self.threshold = float(el.text.strip())
            elif (el.tag == 'split' and 'pos' in el.attrib):
                if el.attrib['pos'] == 'right':
                    self.right = Split(splitEl=el, features=features)
                elif el.attrib['pos'] == 'left':
                    self.left = Split(splitEl=el, features=features)
                else:
                    raise ValueError("Unrecognized Split Pos {}".format(el.attrib['pos']))
            elif (el.tag == 'output'):
                self.output = float(el.text.strip())

    def _is_leaf(self):
        """True when this node is a leaf.

        Bug fix: compare against None instead of relying on truthiness --
        a leaf whose output is exactly 0.0 was previously misclassified as
        an internal node throughout this class.
        """
        return self.output is not None

    def clearEvals(self):
        """ Clear the eval state of this node and every descendant. """
        if self._is_leaf():
            self.evals = []
            self.evalReport = None
        else:
            # Bug fix: the original used elif between the children, so on
            # internal nodes only the right subtree was ever cleared.
            if self.right:
                self.right.clearEvals()
            if self.left:
                self.left.clearEvals()

    def _evalAppend(self, judgment):
        """ For model/feature analysis purposes, evaluate the model with
        the 'judgment', placing at each leaf the obj so we can analyze
        how well the model is classifying items.

        Args:
            - judgment: some Python object with a features attribute
                        which is a list of floating point numbers where
                        0th corresponds to Ranklib's '1'th
        """
        if self._is_leaf():
            self.evals.append(judgment)
            return
        featureVal = judgment.features[self.featureIdx]
        if featureVal > self.threshold:
            assert self.right is not None
            self.right._evalAppend(judgment)
        else:
            assert self.left is not None
            self.left._evalAppend(judgment)

    def _computeEvalStats(self):
        # Leaves build their EvalReport; internal nodes recurse into both
        # children.
        if self._is_leaf():
            self.evalReport = EvalReport(self)
            return
        else:
            assert self.right is not None
            assert self.left is not None
            self.right._computeEvalStats()
            self.left._computeEvalStats()

    def eval(self, judgments):
        """Drop every judgment down the tree and compute per-leaf stats."""
        self.clearEvals()
        for judgment in judgments:
            self._evalAppend(judgment)
        self._computeEvalStats()

    def whoopsies(self):
        """ Return all the whoopsies from the child nodes in
        a list of whoopsies ordered first by qid, then by
        magnitude descending. IE (1,4),(1,3),(2,2),(2,0)..."""
        if self._is_leaf():
            if self.evalReport is None:
                return []
            return self.evalReport.whoopsies
        else:
            assert self.right is not None
            assert self.left is not None
            rWhoopsies = self.right.whoopsies()
            lWhoopsies = self.left.whoopsies()
            return fold_whoopsies(lWhoopsies, rWhoopsies)

    def treeString(self, weight=1.0, nestLevel=0):
        """Render this subtree as indented Python-like pseudo-code."""
        def idt(nestLevel):
            return (" ") * 2 * nestLevel
        rVal = ""
        if self.feature:
            rVal += idt(nestLevel)
            rVal += "if %s > %s:\n" % (self.feature, self.threshold)
            assert self.right is not None
            assert self.left is not None
            if self.right:
                rVal += self.right.treeString(weight=weight,
                                              nestLevel=nestLevel+1)
            if self.left:
                rVal += idt(nestLevel)
                rVal += "else:\n"
                rVal += self.left.treeString(weight=weight,
                                             nestLevel=nestLevel+1)
        if self._is_leaf():
            rVal += idt(nestLevel)
            rVal += "<= %.4f" % (self.output * weight)
            if self.evalReport:
                rVal += "(%s)" % self.evalReport
            rVal += "\n"
        return rVal
def dump_model(modelName, features):
    """ Print a model as pythonesque pseudo-code.

    Args:
        - modelName: name of the model; XML is read from
          data/<modelName>_model.txt
        - features: list of features, 0th item corresponding to Ranklib
          feature 1, each an object with a 'name' entry
    """
    with open('data/{}_model.txt'.format(modelName)) as f:
        ensembleXml = f.read()
    for weight, tree in MARTModel(ensembleXml, features).trees:
        print(tree.treeString(weight=weight))
def eval_model(modelName, features, judgments):
    """ Evaluate a model against a list of judgments and return the
    (evaluated) model. """
    judgmentList = list(judgments)
    with open('data/{}_model.txt'.format(modelName)) as f:
        model = MARTModel(f.read(), features)
    model.eval(judgmentList)
    return model
|
# -*- coding: utf-8 -*-
"""
Microsoft-Windows-ServiceTriggerPerfEventProvider
GUID : 6545939f-3398-411a-88b7-6a8914b8cec7
"""
from construct import Int8sl, Int8ul, Int16ul, Int16sl, Int32sl, Int32ul, Int64sl, Int64ul, Bytes, Double, Float32l, Struct
from etl.utils import WString, CString, SystemTime, Guid
from etl.dtyp import Sid
from etl.parsers.etw.core import Etw, declare, guid
@declare(guid=guid("6545939f-3398-411a-88b7-6a8914b8cec7"), event_id=1, version=0)
class Microsoft_Windows_ServiceTriggerPerfEventProvider_1_0(Etw):
    """Event 1 (version 0) of Microsoft-Windows-ServiceTriggerPerfEventProvider."""
    # Wire layout of the event payload: two UTF-16 strings.
    pattern = Struct(
        "TriggerSubType" / WString,
        "TriggerData" / WString
    )
@declare(guid=guid("6545939f-3398-411a-88b7-6a8914b8cec7"), event_id=2, version=0)
class Microsoft_Windows_ServiceTriggerPerfEventProvider_2_0(Etw):
    """Event 2 (version 0) of Microsoft-Windows-ServiceTriggerPerfEventProvider."""
    # Wire layout of the event payload: two UTF-16 strings.
    pattern = Struct(
        "TriggerSubType" / WString,
        "TriggerData" / WString
    )
@declare(guid=guid("6545939f-3398-411a-88b7-6a8914b8cec7"), event_id=3, version=0)
class Microsoft_Windows_ServiceTriggerPerfEventProvider_3_0(Etw):
    """Event 3 (version 0) of Microsoft-Windows-ServiceTriggerPerfEventProvider."""
    # Wire layout of the event payload: two UTF-16 strings.
    pattern = Struct(
        "TriggerSubType" / WString,
        "TriggerData" / WString
    )
|
"""
Script for extracting an analyzing a GraphMCF with a cleft
(v3) 24.03.16 - Modification for not to compute filaments
Input: - Density map tomogram
- Segmentation tomogram
Output: - GraphMCF
"""
__author__ = 'Antonio Martinez-Sanchez'

# ################ Package import

import time
import pyseg as ps
import scipy as sp
import os
import numpy as np
# The original try/except (import pickle as pickle / import pickle) was a dead
# 2to3 artifact of the old cPickle fallback: both branches were identical.
import pickle

########## Global variables

# Name under which the segmentation labels are attached to the output graph
SEG_NAME = 'cleft_seg'

# Labels (voxel values) expected in the segmentation tomogram
CLEFT_LBL = 1
PST_MB_LBL = 2
PRE_MB_LBL = 3
PST_CITO_LBL = 4
PRE_CITO_LBL = 5

########################################################################################
# PARAMETERS
########################################################################################

####### Input data

ROOT_PATH = '/home/martinez/workspace/disperse/data/psd_an1/in/zd/bin2'
# ROOT_PATH = '/home/martinez/workspace/disperse/data/psd_an1/in/test'

# Original density map
input_tomo_l = (ROOT_PATH+'/syn_14_14_bin2_rot_crop2.fits',
                )

# Segmentation tomograms (pairwise with input_tomo_l)
input_seg_l = (ROOT_PATH+'/syn_14_14_bin2_crop2_clft_seg.fits',
               )

####### Output data

output_dir = '/home/martinez/workspace/disperse/data/psd_an1/zd/pst_4/cleft_batch'
# output_dir = '/home/martinez/workspace/disperse/data/psd_an1/test/mb_graph'

####### GraphMCF

s_sig = 1.5        # sigma for Gaussian pre-smoothing
csig = 0.01        # DisPerSe persistence threshold factor
ang_rot = None  # -96.9174
ang_tilt = None  # 60
nstd = 3           # sigma for contrast enhancement
smooth = 3         # skeleton smoothing factor
res = 0.684  # nm/pix
DILATE_NITER = 2  # pix

####### Graph density thresholds

v_den = 0.008  # nm^3
ve_ratio = 2
v_prop = None  # ps.globals.STR_FIELD_VALUE # In None topological simplification
e_prop = ps.globals.STR_FIELD_VALUE_EQ  # ps.globals.STR_VERT_DST
v_mode = None  # 'low'
e_mode = 'low'
prop_topo = ps.globals.STR_FIELD_VALUE_EQ  # None is ps.globals.STR_FIELD_VALUE

######## Masking thresholds

max_len = 15  # nm
########################################################################################
# MAIN ROUTINE
########################################################################################

# Print initial message
print('Extracting GraphMCF for clefts in tomograms.')
print('\tAuthor: ' + __author__)
print('\tDate: ' + time.strftime("%c") + '\n')
print('Options:')
print('\tDisPerSe persistence threshold (csig): ' + str(csig))
if ang_rot is not None:
    print('Missing wedge edge compensation (rot, tilt): (' + str(ang_rot) + ', ' + str(ang_tilt) + ')')
print('\tSigma for gaussian pre-processing: ' + str(s_sig))
print('\tSigma for contrast enhancement: ' + str(nstd))
print('\tSkeleton smoothing factor: ' + str(smooth))
print('\tData resolution: ' + str(res) + ' nm/pixel')
print('\tOutput directory: ' + output_dir)
print('Graph density thresholds:')
if v_prop is None:
    print('\tTarget vertex density (membrane) ' + str(v_den) + ' vertex/nm^3 for topological simplification')
else:
    print('\tTarget vertex density (membrane) ' + str(v_den) + ' vertex/nm^3 for property ' + v_prop + ' with mode ' + v_mode)
print('\tTarget edge/vertex ratio (non membrane) ' + str(ve_ratio) + ' for property ' + e_prop + ' with mode ' + e_mode)
print('')

# Loop for processing the input data
print('Running main loop: ')
for (in_tomo, in_seg) in zip(input_tomo_l, input_seg_l):

    # Derive the output file stems from the input tomogram name
    print('\tComputing paths for ' + in_tomo + ' ...')
    path, stem_tomo = os.path.split(in_tomo)
    stem_pkl, _ = os.path.splitext(stem_tomo)
    input_file = output_dir + '/' + stem_pkl + '_g' + str(s_sig) + '.fits'
    _, stem = os.path.split(input_file)
    stem, _ = os.path.splitext(stem)

    print('\tLoading input data: ' + stem_tomo)
    tomo = ps.disperse_io.load_tomo(in_tomo)
    seg = ps.disperse_io.load_tomo(in_seg)

    # Mask out voxels farther than max_len from the membranes/cleft labels
    # plus a DILATE_NITER-wide border. (np.bool/np.int/np.float were plain
    # aliases of the builtins, removed from modern NumPy, so builtins are
    # used instead.)
    print('\tComputing mask for DisPerSe...')
    maskh = (seg == PST_MB_LBL) + (seg == PRE_MB_LBL) + (seg == CLEFT_LBL)
    tomod = ps.disperse_io.seg_dist_trans(maskh.astype(bool)) * res
    maskh = np.ones(shape=seg.shape, dtype=int)
    maskh[DILATE_NITER:-DILATE_NITER, DILATE_NITER:-DILATE_NITER, DILATE_NITER:-DILATE_NITER] = 0
    mask = np.asarray(tomod > (max_len + 2*DILATE_NITER*res), dtype=int)
    maskh += mask
    mask = np.asarray(maskh > 0, dtype=float)
    input_msk = output_dir + '/' + stem + '_mask.fits'
    ps.disperse_io.save_numpy(mask.transpose(), input_msk)
    mask_mbs = ((seg == PST_MB_LBL) + (seg == PRE_MB_LBL)).astype(bool)

    # Pre-process the density map: Gaussian smoothing + contrast enhancement
    print('\tSmoothing input tomogram (s=' + str(s_sig) + ')...')
    density = sp.ndimage.filters.gaussian_filter(tomo, s_sig)
    density = ps.globals.cont_en_std(density, nstd=nstd, lb=0, ub=1)
    ps.disperse_io.save_numpy(density, output_dir + '/' + stem + '.vti')
    ps.disperse_io.save_numpy(density.transpose(), input_file)

    print('\tInitializing DisPerSeg...')
    work_dir = output_dir + '/disperse'
    disperse = ps.disperse_io.DisPerSe(input_file, work_dir)
    disperse.clean_work_dir()
    # Manifolds for descending fields with the inverted image
    disperse.set_manifolds('J0a')
    # Down skeleton
    disperse.set_dump_arcs(-1)
    # disperse.set_nsig_cut(nsig)
    # Persistence cut scaled by the std. dev. of the density inside the mask
    rcut = round(density[mask.astype(bool)].std()*csig, 4)
    print('\tPersistence cut thereshold set to: ' + str(rcut) + ' grey level')
    disperse.set_cut(rcut)
    disperse.set_mask(input_msk)
    disperse.set_smooth(smooth)
    print('\tRunning DisPerSe...')
    disperse.mse(no_cut=False, inv=False)
    skel = disperse.get_skel()
    manifolds = disperse.get_manifolds(no_cut=False, inv=False)

    # Build the GraphMCF for the membrane
    print('\tBuilding MCF graph...')
    graph = ps.mb.MbGraphMCF(skel, manifolds, density, seg)
    graph.set_resolution(res)
    graph.build_from_skel(basic_props=False)
    graph.filter_self_edges()
    graph.filter_repeated_edges()

    print('\tFiltering nodes close to mask border...')
    mask = sp.ndimage.morphology.binary_dilation(mask, iterations=DILATE_NITER)
    for v in graph.get_vertices_list():
        x, y, z = graph.get_vertex_coords(v)
        if mask[int(round(x)), int(round(y)), int(round(z))]:
            graph.remove_vertex(v)

    print('\tBuilding geometry...')
    graph.build_vertex_geometry()

    print('\tCLAHE on filed_value_inv property...')
    graph.compute_edges_length(ps.globals.SGT_EDGE_LENGTH, 1, 1, 1, False)
    graph.clahe_field_value(max_geo_dist=50, N=256, clip_f=100., s_max=4.)

    print('\tComputing vertices and edges properties...')
    graph.compute_vertices_dst()
    graph.compute_edge_filamentness()
    graph.add_prop_inv(prop_topo, edg=True)
    graph.compute_edge_affinity()

    print('\tApplying general thresholds...')
    if ang_rot is not None:
        print('\tDeleting edges in MW area...')
        graph.filter_mw_edges(ang_rot, ang_tilt)

    print('\tComputing graph global statistics in membranes (before simplification)...')
    nvv, nev, nepv = graph.compute_global_stat(mask=mask_mbs)
    print('\t\t-Vertex density: ' + str(round(nvv,5)) + ' nm^3')
    print('\t\t-Edge density: ' + str(round(nev,5)) + ' nm^3')
    print('\t\t-Edge/Vertex ratio: ' + str(round(nepv,5)))

    # Vertex-level simplification using the membranes as reference region
    print('\tGraph density simplification for vertices with membranes as reference...')
    if prop_topo != ps.globals.STR_FIELD_VALUE:
        print('\t\tProperty used: ' + prop_topo)
        graph.set_pair_prop(prop_topo)
    try:
        graph.graph_density_simp_ref(mask=np.asarray(mask_mbs, dtype=int), v_den=v_den,
                                     v_prop=v_prop, v_mode=v_mode)
    except ps.pexceptions.PySegInputWarning as e:
        print('WARNING: graph density simplification failed:')
        print('\t-' + e.get_message())

    # Edge-level simplification, one segmented compartment at a time
    print('\tGraph density simplification for edges in post membrane...')
    mask_pst = (seg == PST_MB_LBL) * (~mask)
    nvv, nev, nepv = graph.compute_global_stat(mask=mask_pst)
    if nepv > ve_ratio:
        e_den = nvv * ve_ratio
        hold_e_prop = e_prop
        graph.graph_density_simp_ref(mask=np.asarray(mask_pst, dtype=int), e_den=e_den,
                                     e_prop=hold_e_prop, e_mode=e_mode, fit=True)

    print('\tGraph density simplification for edges in pre membrane...')
    mask_pre = (seg == PRE_MB_LBL) * (~mask)
    nvv, nev, nepv = graph.compute_global_stat(mask=mask_pre)
    if nepv > ve_ratio:
        e_den = nvv * ve_ratio
        hold_e_prop = e_prop
        graph.graph_density_simp_ref(mask=np.asarray(mask_pre, dtype=int), e_den=e_den,
                                     e_prop=hold_e_prop, e_mode=e_mode, fit=True)

    print('\tGraph density simplification for edges in the PSD...')
    mask_psd = (seg == PST_CITO_LBL) * (~mask)
    nvv, nev, nepv = graph.compute_global_stat(mask=mask_psd)
    if nepv > ve_ratio:
        e_den = nvv * ve_ratio
        hold_e_prop = e_prop
        graph.graph_density_simp_ref(mask=np.asarray(mask_psd, dtype=int), e_den=e_den,
                                     e_prop=hold_e_prop, e_mode=e_mode, fit=True)
    else:
        # Fixed message: report the demanded ratio (ve_ratio) — it previously
        # printed nepv twice — and close the parenthesis.
        print('\tWARNING: demanded ratio ' + str(ve_ratio) +
              ' could not be achieved (current is ' + str(nepv) + ')')

    print('\tGraph density simplification for edges in the AZ...')
    mask_az = (seg == PRE_CITO_LBL) * (~mask)
    nvv, nev, nepv = graph.compute_global_stat(mask=mask_az)
    if nepv > ve_ratio:
        e_den = nvv * ve_ratio
        hold_e_prop = e_prop
        graph.graph_density_simp_ref(mask=np.asarray(mask_az, dtype=int), e_den=e_den,
                                     e_prop=hold_e_prop, e_mode=e_mode, fit=True)
    else:
        print('\tWARNING: demanded ratio ' + str(ve_ratio) +
              ' could not be achieved (current is ' + str(nepv) + ')')

    print('\tGraph density simplification for edges in the Cleft...')
    mask_clft = (seg == CLEFT_LBL) * (~mask)
    nvv, nev, nepv = graph.compute_global_stat(mask=mask_clft)
    if nepv > ve_ratio:
        e_den = nvv * ve_ratio
        hold_e_prop = e_prop
        graph.graph_density_simp_ref(mask=np.asarray(mask_clft, dtype=int), e_den=e_den,
                                     e_prop=hold_e_prop, e_mode=e_mode, fit=True)
    else:
        print('\tWARNING: demanded ratio ' + str(ve_ratio) +
              ' could not be achieved (current is ' + str(nepv) + ')')

    print('\tComputing graph global statistics (after simplification)...')
    nvv, nev_mb, nepv_mb = graph.compute_global_stat(mask=mask)
    _, nev_pst, nepv_pst = graph.compute_global_stat(mask=mask_pst)
    _, nev_pre, nepv_pre = graph.compute_global_stat(mask=mask_pre)
    _, nev_psd, nepv_psd = graph.compute_global_stat(mask=mask_psd)
    _, nev_az, nepv_az = graph.compute_global_stat(mask=mask_az)
    _, nev_cl, nepv_cl = graph.compute_global_stat(mask=mask_clft)
    print('\t\t-Vertex density: ' + str(round(nvv,5)) + ' nm^3')
    print('\t\t-Edge density (pst-membrane):' + str(round(nev_pst,5)) + ' nm^3')
    print('\t\t-Edge density (pre-membrane):' + str(round(nev_pre,5)) + ' nm^3')
    print('\t\t-Edge density (PSD):' + str(round(nev_psd,5)) + ' nm^3')
    print('\t\t-Edge density (AZ):' + str(round(nev_az,5)) + ' nm^3')
    print('\t\t-Edge density (Cleft):' + str(round(nev_cl,5)) + ' nm^3')
    print('\t\t-Edge/Vertex ratio (pst-membrane): ' + str(round(nepv_pst,5)))
    # Fixed label: this line reports the pre-membrane ratio (it was
    # mislabelled 'pst-membrane').
    print('\t\t-Edge/Vertex ratio (pre-membrane): ' + str(round(nepv_pre,5)))
    print('\t\t-Edge/Vertex ratio (PSD): ' + str(round(nepv_psd,5)))
    print('\t\t-Edge/Vertex ratio (AZ): ' + str(round(nepv_az,5)))
    print('\t\t-Edge/Vertex ratio (Cleft): ' + str(round(nepv_cl,5)))

    print('\tComputing graph properties (2)...')
    # graph.compute_edge_curvatures()
    graph.compute_edges_length(ps.globals.SGT_EDGE_LENGTH, 1, 1, 1, False)
    graph.compute_vertices_dst()
    graph.compute_edge_filamentness()
    graph.compute_edge_affinity()

    print('\tAdding segmentation...')
    graph.add_scalar_field_nn(seg, SEG_NAME)

    print('\tSaving intermediate graphs...')
    ps.disperse_io.save_vtp(graph.get_vtp(av_mode=True, edges=True),
                            output_dir + '/' + stem + '_edges.vtp')
    ps.disperse_io.save_vtp(graph.get_vtp(av_mode=False, edges=True),
                            output_dir + '/' + stem + '_edges_2.vtp')
    ps.disperse_io.save_vtp(graph.get_scheme_vtp(nodes=True, edges=True),
                            output_dir + '/' + stem + '_sch.vtp')

    print('\tPickling the graph as: ' + stem_pkl + '.pkl')
    # ps.disperse_io.save_numpy(density, output_dir + '/' + stem + '.vti')
    graph.pickle(output_dir + '/' + stem_pkl + '.pkl')

print('Terminated. (' + time.strftime("%c") + ')')
|
"""Top-level package for cookiecutter_example."""

# Package metadata consumed by packaging/docs tooling.
__author__ = """Max Nikoi van der Merwe"""
__email__ = 'maxnvdm@yahoo.com'
__version__ = '0.1.0'
|
from typing import List
from aiomailserver.core.service import MailService
from aiomailserver.auth.base import MailUser
class BaseMailbox(MailService):
    """Base mailbox service: concrete backends override storage/retrieval."""

    async def store_message(self, user: MailUser, message):
        """Persist *message* into *user*'s mailbox. Must be overridden."""
        raise NotImplementedError()

    async def folders(self, user: MailUser) -> List[str]:
        """Return the user's folder names; the base implementation has none."""
        return []

    async def folder_content(self, user: MailUser, folder_name: str):
        """List the messages contained in *folder_name*. Must be overridden."""
        raise NotImplementedError()

    async def get_message(self, user: MailUser, folder_name: str,
                          message_id: str):
        """Fetch one message by id from *folder_name*. Must be overridden."""
        raise NotImplementedError()
|
from django.db import models
from django_countries.fields import CountryField
from django.utils import timezone
# Create your models here.
class Group(models.Model):
    ''' A group is an organisation that receives an invitation. '''

    def __str__(self):
        return self.display_name

    # Short unique code shown to guests as their "Confirmation Code".
    pnr = models.CharField(
        max_length=6,
        unique=True,
        verbose_name="Confirmation Code",
    )
    # Optional postal address, one conventional component per field.
    address_1 = models.CharField(
        max_length=80,
        blank=True,
        verbose_name="Address Line 1",
    )
    address_2 = models.CharField(
        max_length=80,
        blank=True,
        verbose_name="Address Line 2",
    )
    address_city = models.CharField(
        max_length=80,
        blank=True,
        verbose_name="City or Suburb",
    )
    address_state_province = models.CharField(
        max_length=80,
        blank=True,
        verbose_name="State or Province",
    )
    address_postal_code = models.CharField(
        max_length=80,
        blank=True,
        verbose_name="Postal Code",
    )
    address_country = CountryField(
        blank=True,
        verbose_name="Country"
    )
    telephone = models.CharField(
        max_length=20,
        blank=True,
    )
    # 4 chars to allow for ICAO if absolutely necessary
    home_airport = models.CharField(
        max_length=4,
        blank=True,
    )
    # Human-readable group name; used by __str__ above.
    display_name = models.CharField(max_length=255)
    # Events this group is invited to.
    events = models.ManyToManyField(
        "Event",
        blank=True,
    )
class Person(models.Model):
    """An individual guest; belongs to a Group and carries RSVP state."""

    def __str__(self):
        return self.name

    # RSVP state machine values and their display labels.
    RSVP_UNKNOWN = 1
    RSVP_ATTENDING = 2
    RSVP_NOT_ATTENDING = 3
    RSVP_CHOICES = (
        (RSVP_UNKNOWN, "No Response"),
        (RSVP_ATTENDING, "Attending"),
        (RSVP_NOT_ATTENDING, "Not attending"),
    )

    name = models.CharField(max_length=255)
    email = models.EmailField()
    # NOTE(review): positional ForeignKey without on_delete — Django < 2.0
    # signature; confirm the targeted Django version before upgrading.
    group = models.ForeignKey(Group)
    rsvp_status = models.IntegerField(
        choices=RSVP_CHOICES,
        default=RSVP_UNKNOWN,
    )
    # Boolean flag on the name; no default declared — TODO confirm intent.
    name_flagged = models.BooleanField()
    dietary_restrictions = models.TextField(blank=True)
class Event(models.Model):
    """A single invitable event: venue, time window and description."""

    def __str__(self):
        return self.name

    # Unique machine-friendly key for template/tag lookups.
    short_name = models.CharField(
        max_length=20,
        help_text="This is used to look up an event, e.g. by the "
        "group_has_event tag.",
        unique=True,
    )
    name = models.CharField(
        max_length=255,
        unique=True,
    )
    date_time = models.DateTimeField()      # event start
    end_date_time = models.DateTimeField()  # event end
    venue = models.CharField(max_length=255)
    address = models.TextField()
    directions_url = models.CharField(max_length=255)
    description = models.TextField()
class Mailout(models.Model):
    """An email campaign tied to an Event (subject + plain-text body)."""

    def __str__(self):
        return self.name

    name = models.CharField(max_length=255)
    # NOTE(review): positional ForeignKey without on_delete — Django < 2.0 style.
    event = models.ForeignKey(Event)
    subject = models.CharField(max_length=255)
    plain_text = models.TextField()
class MailSent(models.Model):
    """Delivery record: which Mailout went to which Person, and when."""

    def __str__(self):
        return "%s sent to %s" % (self.mailout, self.recipient)

    recipient = models.ForeignKey(Person)
    mailout = models.ForeignKey(Mailout)
    # Defaults to the moment the record is created (timezone-aware now).
    datestamp = models.DateTimeField(default=timezone.now)
|
from trsbuts.UTSConnection import UTSConnection
class SearchProductDefinitionService:
    """Client for the UTS "tibbiCihaz" (medical device) product query REST services."""

    def __init__(self):
        # Base REST path for medical-device product queries.
        self._servicepath = "/UTS/rest/tibbiCihaz"

    # PRODUCT QUERY SERVICE
    # Queries product information by primary product number (UNO) and the
    # manufacturer's UTS company number.
    def urunsorgula(self, uno: str):
        """Query a single product by primary product number.

        Returns the raw service response, or "" when *uno* is empty.
        """
        servicedata = "{"
        if uno != "":
            servicedata = servicedata + "\"UNO\":\"" + uno + "\""
            c: UTSConnection = UTSConnection()
            return c.connect(
                self._servicepath + "/urunSorgula",
                servicedata + "}"
            )
        else:
            return ""

    # QUERY-ALL-PRODUCTS SERVICE
    # Lets companies query their own products in bulk (paged, by date range).
    def butunurunlerisorgula(self, sayfaBuyuklugu, sayfaIndeksi, baslangicTarihi, bitisTarihi):
        """Page through the company's products between two dates.

        Returns the raw service response. (Fixed: the original built the
        request payload but never sent it, implicitly returning None.)
        """
        servicepath = self._servicepath + "/tibbiCihazSorgula"
        servicedata = "{"
        servicedata = servicedata + "\"sayfaBuyuklugu\":" + sayfaBuyuklugu
        servicedata = servicedata + ",\"sayfaIndeksi\":" + sayfaIndeksi
        servicedata = servicedata + ",\"baslangicTarihi\":\"" + baslangicTarihi + "\""
        servicedata = servicedata + ",\"bitisTarihi\":\"" + bitisTarihi + "\""
        servicedata = servicedata + "}"
        c: UTSConnection = UTSConnection()
        return c.connect(servicepath, servicedata)
|
#!/usr/bin/env python
# coding: utf-8
# In[2]:
#packages
import pandas as pd
import os.path
import csv
# In[6]:
#Saving tweet data in csv
#Saving tweet data in csv
def saveTweetData(topicName, tweetId, createdAt, text, retweetCount, likeCount, isQuote, place, geo, hashtags,
                  userMentions, source, sourceUrl, userId, userName, userScreenName, userCreatedAt, userFollowersCount,
                  userDescription, userVerified, userProtected, userLocation, userFriendsCount, userStatusesCount,
                  userListedCount, rawData):
    """Append one tweet's fields to data/<topicName>.csv.

    On first use the file is created by pandas (header row plus its default
    index column); later calls append a csv row whose leading cell mimics
    that index column.
    """
    fileName = "data/" + topicName + ".csv"
    if os.path.isfile(fileName):
        # newline='' is required by the csv module: without it the writer
        # emits blank interleaved rows on Windows.
        with open(fileName, 'a', encoding='UTF-8', newline='') as csvFile:
            writer = csv.writer(csvFile)
            writer.writerow(["", tweetId, createdAt, text, retweetCount, likeCount, isQuote, place, geo, hashtags,
                             userMentions, source, sourceUrl, userId, userName, userScreenName, userCreatedAt,
                             userFollowersCount, userDescription, userVerified, userProtected, userLocation,
                             userFriendsCount, userStatusesCount, userListedCount, rawData])
    else:
        # First write: let pandas emit the header so later appends line up
        # with the same columns.
        tweetDict = {'tweetId': tweetId, 'createdAt': createdAt, 'text': text, 'retweetCount': retweetCount,
                     'likeCount': likeCount, 'isQuote': isQuote, 'place': place, 'geo': geo, 'hashtags': hashtags,
                     'userMentions': userMentions, 'source': source, 'sourceUrl': sourceUrl, 'userId': userId,
                     'userName': userName, 'userScreenName': userScreenName, 'userCreatedAt': userCreatedAt,
                     'userFollowersCount': userFollowersCount, 'userDescription': userDescription,
                     'userVerified': userVerified, 'userProtected': userProtected, 'userLocation': userLocation,
                     'userFriendsCount': userFriendsCount, 'userStatusesCount': userStatusesCount,
                     'userListedCount': userListedCount, 'rawData': rawData}
        tweetDf = pd.DataFrame(tweetDict, index=[0])
        tweetDf.to_csv(fileName)
# In[ ]:
|
# Copyright (c) 2016 Intel, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from cinderclient import api_versions
from cinderclient import exceptions as exc
from cinderclient.tests.unit import utils
from cinderclient.tests.unit.v3 import fakes
from cinderclient.v3 import volume_backups_restore
class VolumesTest(utils.TestCase):
    """Client tests for v3 backup update and restore calls.

    NOTE(review): despite the class name, these cases exercise the backups
    and restores managers — confirm whether a rename is wanted.
    """

    def test_update(self):
        # Backup rename succeeds at microversion 3.9 and issues a PUT.
        cs = fakes.FakeClient(api_version=api_versions.APIVersion('3.9'))
        b = cs.backups.get('1234')
        backup = b.update(name='new-name')
        cs.assert_called(
            'PUT', '/backups/1234',
            {'backup': {'name': 'new-name'}})
        self._assert_request_id(backup)

    def test_pre_version(self):
        # Before microversion 3.9 the update method is rejected client-side.
        cs = fakes.FakeClient(api_version=api_versions.APIVersion('3.8'))
        b = cs.backups.get('1234')
        self.assertRaises(exc.VersionNotFoundForAPIMethod,
                          b.update, name='new-name')

    def test_restore(self):
        # Plain restore POSTs to the backup's restore action.
        cs = fakes.FakeClient(api_version=api_versions.APIVersion('3.0'))
        backup_id = '76a17945-3c6f-435c-975b-b5685db10b62'
        info = cs.restores.restore(backup_id)
        cs.assert_called('POST', '/backups/%s/restore' % backup_id)
        self.assertIsInstance(info,
                              volume_backups_restore.VolumeBackupsRestore)
        self._assert_request_id(info)

    def test_restore_with_name(self):
        # Restoring into a new named volume sends the name and a null volume_id.
        cs = fakes.FakeClient(api_version=api_versions.APIVersion('3.0'))
        backup_id = '76a17945-3c6f-435c-975b-b5685db10b62'
        name = 'restore_vol'
        info = cs.restores.restore(backup_id, name=name)
        expected_body = {'restore': {'volume_id': None, 'name': name}}
        cs.assert_called('POST', '/backups/%s/restore' % backup_id,
                         body=expected_body)
        self.assertIsInstance(info,
                              volume_backups_restore.VolumeBackupsRestore)
|
alist = []  # module-level header list handed to the second start_response call

def app(env, start_response):
    """Minimal WSGI app exercising bjoern's start_response error path.

    It first starts a '200 alright' response, then deliberately evaluates the
    undefined name ``a`` to raise a NameError, and calls start_response a
    second time with a '500 error' status and the exc_info triple — testing
    that the server accepts a re-issued start_response with exc_info.
    """
    start_response('200 alright', [])
    try:
        a  # intentionally undefined: always raises NameError
    except:
        import sys
        x = sys.exc_info()
        # Second call with exc_info, per the WSGI spec's error-override rule.
        start_response('500 error', alist, x)
    return [b'hello']

import bjoern
bjoern.run(app, '0.0.0.0', 8080)
|
import numpy as np
import tensorflow as tf
from .Layer import Layer
from .initializers import zeros
class RNN(Layer):
    """Simple (Elman-style) recurrent layer unrolled with tf.scan.

    The forward pass transposes the input with perm=[1, 0, 2] before
    scanning, i.e. it assumes x is (batch, time, input_dim) — TODO confirm
    with callers.
    """

    def __init__(self, output_dim,
                 input_dim=None,
                 initializer='glorot_uniform',
                 recurrent_initializer='orthogonal',
                 recurrent_activation='tanh',
                 length_of_sequences=None,
                 return_sequence=False,
                 initial_state=None,
                 rng=None):
        # NOTE(review): rng is accepted but never stored or used here.
        super().__init__()
        self.output_dim = output_dim
        self.input_dim = input_dim
        self.initializer = initializer
        self.recurrent_initializer = recurrent_initializer
        self.recurrent_activation = \
            self.activation_initializer(recurrent_activation)
        self._length_of_sequences = length_of_sequences
        self._return_sequence = return_sequence  # True: emit all timesteps
        self._initial_state = initial_state
        self._use_mask = False

    @property
    def input_shape(self):
        # (time, features); the batch dimension is not part of the shape.
        return (self._length_of_sequences, self.input_dim)

    def compile(self):
        """Create the trainable variables: W (input), W_recurrent (hidden), b."""
        input_dim = self.input_dim
        output_dim = self.output_dim
        initializer = self.initializer
        recurrent_initializer = self.recurrent_initializer
        self.W = self.kernel_initializer(initializer,
                                         shape=(input_dim, output_dim),
                                         name='W')
        self.W_recurrent = \
            self.kernel_initializer(recurrent_initializer,
                                    shape=(output_dim, output_dim),
                                    name='W_recurrent')
        self.b = zeros((output_dim), name='b')
        self.params = [self.W, self.W_recurrent, self.b]

    def forward(self, x, **kwargs):
        '''
        # Arguments
            mask: Tensor. Mask for padded value.
            recurrent: boolean (default True).
                Whether to loop the input sequence.
            initial_state: (default None). Override self._initial_state.
        '''
        def _recurrent(state, elems):
            # One timestep: h = act(x W + state W_recurrent + b).
            # With a mask, masked positions carry the previous state through.
            if not self._use_mask:
                x = elems
            else:
                x = elems[0]
                mask = elems[1]
            h = self.recurrent_activation(tf.matmul(x, self.W)
                                          + tf.matmul(state, self.W_recurrent)
                                          + self.b)
            if not self._use_mask:
                return h
            else:
                mask = mask[:, np.newaxis]
                return mask * h + (1 - mask) * state

        mask = kwargs['mask'] if 'mask' in kwargs else None
        self._use_mask = True if mask is not None else False
        recurr = kwargs['recurrent'] if 'recurrent' in kwargs else True
        if 'initial_state' in kwargs:
            initial_state = kwargs['initial_state']
        else:
            initial_state = self._initial_state
        if initial_state is None:
            # Batch-sized zero state: multiplying one input slice by a zero
            # matrix yields zeros of shape (batch, output_dim).
            initial_state = \
                tf.matmul(x[:, 0, :],
                          tf.zeros((self.input_dim, self.output_dim)))

        if not recurr:
            # Single-step application (x is a single timestep, not a sequence).
            if mask is None:
                states = _recurrent(initial_state, x)
            else:
                states = _recurrent(initial_state, [x, mask])
            return states
        else:
            # Unroll over the time axis with tf.scan (time-major layout).
            if mask is None:
                states = tf.scan(fn=_recurrent,
                                 elems=tf.transpose(x, perm=[1, 0, 2]),
                                 initializer=initial_state)
            else:
                mask = tf.transpose(mask)
                states = tf.scan(fn=_recurrent,
                                 elems=[tf.transpose(x, perm=[1, 0, 2]), mask],
                                 initializer=initial_state)
            if self._return_sequence is True:
                # Back to (batch, time, output_dim).
                return tf.transpose(states, perm=[1, 0, 2])
            else:
                return states[-1]
|
from pathlib import Path
import pytest
from tabler import HTML, Table
from ...test_tools import TablerTestTools
class TestHTML:
    """Tests for the tabler HTML table type."""

    def test_open(self):
        # The HTML table type is write-only: opening must raise.
        with pytest.raises(NotImplementedError):
            Table("", table_type=HTML())

    def test_write(self, tmpdir):
        # Writing a basic table must byte-match the checked-in expected.html.
        table = TablerTestTools.basic_table()
        filepath = str(Path(str(tmpdir)) / "testfile.html")
        table.write(filepath, table_type=HTML())
        with open(str(Path(__file__).parent / "expected.html"), "r") as f:
            expected = f.read()
        with open(filepath, "r") as f:
            assert f.read() == expected
|
"""
Tests for the package structure and import names.
Pyro - Python Remote Objects. Copyright by Irmen de Jong (irmen@razorvine.net).
"""
import unittest
import Pyro4
import Pyro4.constants
import Pyro4.core
import Pyro4.errors
import Pyro4.naming
import Pyro4.nsc
import Pyro4.socketutil
import Pyro4.threadutil
import Pyro4.util
class TestPackage(unittest.TestCase):
    """Check that Pyro4's top-level package re-exports the expected names."""

    def testPyro4(self):
        self.assertEqual(Pyro4.core.Daemon, Pyro4.Daemon)
        self.assertEqual(Pyro4.core.Proxy, Pyro4.Proxy)
        self.assertEqual(Pyro4.core.URI, Pyro4.URI)
        self.assertEqual(Pyro4.core.callback, Pyro4.callback)
        # NOTE(review): 'async' became a reserved keyword in Python 3.7, so
        # this line is a SyntaxError there — this test targets older
        # Python/Pyro4 releases; confirm the supported versions.
        self.assertEqual(Pyro4.core.async, Pyro4.async)
        self.assertEqual(Pyro4.core.batch, Pyro4.batch)
        self.assertEqual(Pyro4.naming.locateNS, Pyro4.locateNS)
        self.assertEqual(Pyro4.naming.resolve, Pyro4.resolve)

if __name__ == "__main__":
    unittest.main()
|
"""Parser for future statements"""
from . import ast
from .visitor import ASTVisitor
def is_future(stmt):
    """Return true if statement is a well-formed future statement"""
    # Only `from ... import ...` nodes can be future statements.
    if not isinstance(stmt, ast.From):
        return 0
    return 1 if stmt.modname == "__future__" else 0
class FutureParser(ASTVisitor):
    """Collect the __future__ features enabled at the top of a module."""

    # Feature names this compiler recognizes in __future__ imports.
    features = ("nested_scopes", "generators", "division",
                "absolute_import", "with_statement", "print_function",
                "unicode_literals")

    def __init__(self):
        ASTVisitor.__init__(self)
        self.found = {}  # set of enabled feature names (dict used as a set)

    def visitModule(self, node):
        # Future statements are only legal in a prefix of the module body,
        # so scanning stops at the first non-future statement.
        stmt = node.node
        for s in stmt.nodes:
            if not self.check_stmt(s):
                break

    def check_stmt(self, stmt):
        """Record stmt's features; return 1 iff it is a future statement."""
        if is_future(stmt):
            for name, asname in stmt.names:
                if name in self.features:
                    self.found[name] = 1
                else:
                    raise SyntaxError, \
                          "future feature %s is not defined" % name
            # Mark as validated so BadFutureParser skips it later.
            stmt.valid_future = 1
            return 1
        return 0

    def get_features(self):
        """Return list of features enabled by future statements"""
        return self.found.keys()
class BadFutureParser(ASTVisitor):
    """Check for invalid future statements"""

    def visitFrom(self, node):
        # Nodes already validated by FutureParser carry valid_future.
        if hasattr(node, 'valid_future'):
            return
        if node.modname != "__future__":
            return
        # A __future__ import appearing after non-future code is illegal.
        raise SyntaxError, "invalid future statement " + repr(node)
|
import numpy as np
import unittest
from feudal_networks.policies.feudal_batch_processor import FeudalBatchProcessor, FeudalBatch
from feudal_networks.algos.policy_optimizer import Batch
class TestFeudalBatchProcessor(unittest.TestCase):
    """Tests for FeudalBatchProcessor's c-step padded batch processing."""

    def test_simple_c_1(self):
        # simple case ignoring the fact that the different list have
        # elements with different types
        c = 1
        fbp = FeudalBatchProcessor(c)
        obs = [1,2]
        a = [1,2]
        returns = [1,2]
        terminal = False
        g = [1,2]
        s = [1,2]
        features = [1,2]
        b = Batch(obs, a, returns, terminal, g, s, features)
        fb = fbp.process_batch(b)
        # With c=1, only the first element is emitted; the last is held back
        # until the next batch provides its lookahead.
        np.testing.assert_array_equal(fb.obs, [1])
        np.testing.assert_array_equal(fb.a, [1])
        np.testing.assert_array_equal(fb.returns, [1])
        np.testing.assert_array_equal(fb.s_diff, [1])
        np.testing.assert_array_equal(fb.ri, [0])
        np.testing.assert_array_equal(fb.gsum, [2])
        np.testing.assert_array_equal(fb.features, [1])
        obs = [3,4]
        a = [3,4]
        returns = [3,4]
        terminal = False
        g = [3,4]
        s = [3,4]
        features = [3,4]
        b = Batch(obs, a, returns, terminal, g, s, features)
        fb = fbp.process_batch(b)
        # The held-back element (2) is emitted together with 3.
        np.testing.assert_array_equal(fb.obs, [2,3])
        np.testing.assert_array_equal(fb.a, [2,3])
        np.testing.assert_array_equal(fb.returns, [2,3])
        np.testing.assert_array_equal(fb.s_diff, [1,1])
        self.assertEqual(len(fb.ri), 2)
        np.testing.assert_array_equal(fb.gsum, [3, 5])
        np.testing.assert_array_equal(fb.features, [2,3])
        obs = [5]
        a = [5]
        returns = [5]
        terminal = True
        g = [5]
        s = [5]
        features = [5]
        b = Batch(obs, a, returns, terminal, g, s, features)
        fb = fbp.process_batch(b)
        # Terminal batch: everything remaining is flushed; the last s_diff
        # is 0 because there is no later state.
        np.testing.assert_array_equal(fb.obs, [4,5])
        np.testing.assert_array_equal(fb.a, [4,5])
        np.testing.assert_array_equal(fb.returns, [4,5])
        np.testing.assert_array_equal(fb.s_diff, [1,0])
        self.assertEqual(len(fb.ri), 2)
        np.testing.assert_array_equal(fb.gsum, [7,9])
        np.testing.assert_array_equal(fb.features, [4,5])

    def test_simple_c_2(self):
        # simple case ignoring the fact that the different list have
        # elements with different types
        c = 2
        obs = [1,2]
        a = [1,2]
        returns = [1,2]
        terminal = False
        g = [1,2]
        s = [1,2]
        features = [1,2]
        b = Batch(obs, a, returns, terminal, g, s, features)
        fbp = FeudalBatchProcessor(c)
        fb = fbp.process_batch(b)
        # With c=2 and only two elements, nothing can be emitted yet.
        np.testing.assert_array_equal(fb.obs, [])
        np.testing.assert_array_equal(fb.a, [])
        np.testing.assert_array_equal(fb.returns, [])
        np.testing.assert_array_equal(fb.s_diff, [])
        np.testing.assert_array_equal(fb.ri, [])
        np.testing.assert_array_equal(fb.gsum, [])
        np.testing.assert_array_equal(fb.features, [])
        obs = [3,4]
        a = [3,4]
        returns = [3,4]
        terminal = False
        g = [3,4]
        s = [3,4]
        features = [3,4]
        b = Batch(obs, a, returns, terminal, g, s, features)
        fb = fbp.process_batch(b)
        # Elements 1 and 2 now have a full c-step lookahead.
        np.testing.assert_array_equal(fb.obs, [1,2])
        np.testing.assert_array_equal(fb.a, [1,2])
        np.testing.assert_array_equal(fb.returns, [1,2])
        np.testing.assert_array_equal(fb.s_diff, [2,2])
        self.assertEqual(len(fb.ri), 2)
        np.testing.assert_array_equal(fb.gsum, [3,4])
        np.testing.assert_array_equal(fb.features, [1,2])
        obs = [5]
        a = [5]
        returns = [5]
        terminal = True
        g = [5]
        s = [5]
        features = [5]
        b = Batch(obs, a, returns, terminal, g, s, features)
        fb = fbp.process_batch(b)
        # Terminal: flush 3, 4 and 5 with truncated lookaheads.
        np.testing.assert_array_equal(fb.obs, [3,4,5])
        np.testing.assert_array_equal(fb.a, [3,4,5])
        np.testing.assert_array_equal(fb.returns, [3,4,5])
        np.testing.assert_array_equal(fb.s_diff, [2,1,0])
        self.assertEqual(len(fb.ri), 3)
        np.testing.assert_array_equal(fb.gsum, [6,9,12])
        np.testing.assert_array_equal(fb.features, [3,4,5])

    def test_simple_terminal_on_start(self):
        # A first batch that is already terminal must be fully emitted.
        c = 2
        fbp = FeudalBatchProcessor(c)
        obs = [1,2]
        a = [1,2]
        returns = [1,2]
        terminal = True
        g = [1,2]
        s = [1,2]
        features = [1,2]
        b = Batch(obs, a, returns, terminal, g, s, features)
        fb = fbp.process_batch(b)
        np.testing.assert_array_equal(fb.obs, [1,2])
        np.testing.assert_array_equal(fb.a, [1,2])
        np.testing.assert_array_equal(fb.returns, [1,2])
        np.testing.assert_array_equal(fb.s_diff, [1,0])
        self.assertEqual(len(fb.ri), 2)
        np.testing.assert_array_equal(fb.gsum, [3,4])
        np.testing.assert_array_equal(fb.features, [1,2])

    def test_intrinsic_reward_and_gsum_calculation(self):
        c = 2
        fbp = FeudalBatchProcessor(c)
        obs = a = returns = features = [None, None, None]
        terminal = True
        s = [np.array([2,1]), np.array([1,2]), np.array([2,3])]
        g = [np.array([1,1]), np.array([2,2]), np.array([3,3])]
        # NOTE(review): the other tests call Batch(obs, a, returns, terminal,
        # g, s, features); here s and g are passed in swapped positions —
        # confirm whether this is intentional or an argument-order bug.
        b = Batch(obs, a, returns, terminal, s, g, features)
        fb = fbp.process_batch(b)
        # Expected intrinsic reward for the last step (mean cosine similarity).
        last_ri = (1. + 1. / np.sqrt(2)) / 2
        np.testing.assert_array_almost_equal(fb.ri, [0,0,last_ri])
        np.testing.assert_array_equal(fb.gsum,
            [np.array([3,3]), np.array([4,4]), np.array([6,6])])

if __name__ == '__main__':
    unittest.main()
|
"""Summarise hazard data
Get OD data and process it
Author: Raghav Pant
Date: April 20, 2018
"""
import configparser
import csv
import glob
import os
import fiona
import fiona.crs
import rasterio
from sqlalchemy import create_engine
import subprocess as sp
import psycopg2
import osgeo.ogr as ogr
import pandas as pd
import copy
import ast
from osgeo import gdal
import geopandas as gpd
from shapely.geometry import Point
from geoalchemy2 import Geometry, WKTElement
import numpy as np
from vtra.utils import load_config
from vtra.dbutils import *
import vtra.transport_network_creation as tnc
def main():
    """Estimate crop commodity flows over Vietnam's transport networks.

    Loads SPAM crop-production rasters and the 2008 OD workbook, splits OD
    totals into modal and commodity fractions, assigns crop production points
    to provinces and to each mode's nearest network nodes, routes the
    resulting OD tonnages over each mode's igraph network, and writes flow
    statistics to Excel/CSV files and PostGIS tables.
    """
    # Create the database connections: psycopg2 for raw SQL, SQLAlchemy for
    # pandas/GeoPandas `to_sql`.
    conf = load_config()
    try:
        conn = psycopg2.connect(**conf['database'])
    except psycopg2.Error:
        print ("I am unable to connect to the database")
        # BUG FIX: the original swallowed the error and crashed later with an
        # unrelated NameError on `conn`; fail fast instead.
        raise
    curs = conn.cursor()
    # BUG FIX: str.format with named placeholders requires keyword arguments;
    # the original passed the dict positionally, which raises KeyError.
    engine = create_engine('postgresql://{user}:{password}@{host}:{port}/{database}'.format(
        **conf['database']
    ))
    crop_data_path = os.path.join(conf['paths']['data'], 'crop_data')
    od_data_file = os.path.join(conf['paths']['data'], 'od_data', 'OD_transport_data_2008_v2.xlsx')
    # Step 2: create the OD proportions for the different modes.
    # First get the modal shares.
    modes = ['road','rail','air','water']
    mode_cols = ['road','rail','air','inland','coastal']
    new_mode_cols = ['o','d','road','rail','air','water']
    crop_names = ['rice','cash','cass','teas','maiz','rubb','swpo','acof','rcof','pepp']
    # Node and edge tables are ordered so that index m refers to the same mode.
    mode_table = ['airport_nodes','waternodes','railnetworknodes','road2009nodes']
    mode_edge_tables = ['airport_edges','wateredges','railnetworkedges','road2009edges']
    # Seed one "<edge table>_flows" result table per mode with edge ids + geometries.
    mode_flow_tables = []
    for mo in mode_edge_tables:
        fl_table = mo + '_flows'
        mode_flow_tables.append(fl_table)
        drop_postgres_table_psycopg2(fl_table,conn)
        with conn.cursor() as cur:
            sql_query = "create table {0} as select edge_id,geom from {1}".format(fl_table,mo)
            cur.execute(sql_query)
            conn.commit()
    # Get the modal shares.
    od_data_modes = pd.read_excel(
        od_data_file,
        sheet_name = 'mode'
    ).fillna(0)
    # od_data_modes.columns = map(str.lower, od_data_modes.columns)
    o_id_col = 'o'
    d_id_col = 'd'
    od_data_modes['total'] = od_data_modes[mode_cols].sum(axis=1)
    for m in mode_cols:
        # NOTE(review): .replace(np.inf, 0) acts on the 'total' column before
        # the division, so zero totals still produce NaN ratios; the
        # fillna(0) below cleans those up -- confirm that is the intent.
        od_data_modes[m] = od_data_modes[m]/od_data_modes['total'].replace(np.inf, 0)
    od_data_modes['water'] = od_data_modes['inland'] + od_data_modes['coastal']
    od_data_modes = od_data_modes.fillna(0)
    # od_data_modes.to_csv('mode_frac.csv',index = False)
    od_fracs = od_data_modes[new_mode_cols]
    # Per-OD commodity fractions (share of each commodity moving o -> d),
    # merged onto the modal shares.
    od_data_com = pd.read_excel(od_data_file,sheet_name = 'goods').fillna(0)
    crop_cols = ['rice','indust-cro']
    for cr in crop_cols:
        od_data_com_sums = od_data_com.groupby(['o','d']).agg({cr: 'sum'})
        od_com_frac = od_data_com_sums.groupby(level=0).apply(lambda x: x/float(x.sum()))
        od_com_frac = od_com_frac.reset_index(level=['o', 'd'])
        od_fracs = pd.merge(od_fracs,od_com_frac,how='left', on=['o','d'])
    del od_data_com,od_data_com_sums,od_com_frac
    od_fracs = od_fracs.fillna(0)
    # od_fracs.to_csv('od_fracs.csv')
    # Process one SPAM production raster per crop.
    for file in os.listdir(crop_data_path):
        if file.endswith(".tif") and 'spam_p' in file.lower().strip():
            fpath = os.path.join(crop_data_path, file)
            crop_name = [cr for cr in crop_names if cr in file.lower().strip()][0]
            raster_in = fpath
            outCSVName = 'crop_concentrations.csv'
            crop_table = crop_name + '_production'
            # Convert the raster to x,y,value points.
            os.system('gdal2xyz.py -csv '+raster_in+' '+ outCSVName)
            # Load points and convert to a geodataframe with coordinates.
            load_points = pd.read_csv(outCSVName,header=None,names=['x','y','crop_prod'],index_col=None)
            load_points = load_points[load_points['crop_prod'] > 0]
            # load_points.to_csv('crop_concentrations.csv', index = False)
            geometry = [Point(xy) for xy in zip(load_points.x, load_points.y)]
            load_points = load_points.drop(['x', 'y'], axis=1)
            crs = {'init': 'epsg:4326'}
            points_gdp = gpd.GeoDataFrame(load_points, crs=crs, geometry=geometry)
            points_gdp['geom'] = points_gdp['geometry'].apply(lambda x: WKTElement(x.wkt, srid=4326))
            # drop the geometry column as it is now duplicative
            points_gdp.drop('geometry', 1, inplace=True)
            # points_gdp = points_gdp.rename(columns={'geometry':'geom'}).set_geometry('geom')
            del load_points
            print ('created geopandas dataframe from the points')
            points_gdp.to_sql(crop_table, engine, if_exists = 'replace', schema = 'public', index = True,dtype={'geom': Geometry('POINT', srid= 4326)})
            del points_gdp
            # Add a serial gid field to the crop table for later joins.
            with conn.cursor() as cur:
                sql_query = "alter table {0} add column gid serial".format(crop_table)
                cur.execute(sql_query)
                conn.commit()
            print ('Done with loading crop table to database')
            # Step 1: assign the closest region to each crop node.
            nd_table = crop_table
            regional_table = 'province_level_stats'
            dummy_table = 'dummy_table'
            nd_id = 'gid'
            nd_gm = 'geom'
            regional_gm = 'geom'
            nd_attr = ['gid']
            regional_attr = ['name_eng','provinceid','od_id']
            regional_attr_type = ['character varying', 'integer','integer']
            drop_postgres_table_psycopg2(dummy_table,conn)
            # nodes_polygons_nearest_psycopg2(dummy_table,nd_table,regional_table,nd_attr,regional_attr,nd_gm,regional_gm,cur,conn)
            nodes_polygons_within_nearest_psycopg2(dummy_table,nd_table,regional_table,nd_attr,nd_id,regional_attr,nd_gm,regional_gm,conn)
            print ('Done with assigning attributes to the crop table')
            add_columns_to_table_psycopg2(nd_table,dummy_table,regional_attr,regional_attr_type,nd_id,conn)
            print ('Done with adding columns to the crop table')
            # Step 2: assign the crop to the closest transport mode node.
            # mode_table = ['road2009nodes','railwaynetworknodes','airport_nodes','waternodes']
            # mode_edge_tables = ['road2009edges','railwaynetworkedges','airport_edges','wateredges']
            # modes = ['road','rail','air','water']
            modes = ['air','water','rail','road']
            mode_id = 'node_id'
            crop_id = 'gid'
            mode_crop_m = 'od_id'
            crop_mode_m = 'od_id'
            crop_prod = 'crop_prod'
            od_id = 'od_id'
            od_id_type = 'integer'
            o_id_col = 'o'
            d_id_col = 'd'
            # Network column names used to build each mode's graph.
            eid = 'edge_id'
            nfid = 'node_f_id'
            ntid = 'node_t_id'
            spid = 'speed'
            gmid = 'geom'
            o_id_col = 'o'
            d_id_col = 'd'
            # Get the node and edge flows for each mode.
            excel_writer = pd.ExcelWriter('vietnam_flow_stats_' + crop_name + '.xlsx')
            for m in range(len(mode_table)):
                # Aggregate crop production onto this mode's nodes, per OD region.
                drop_postgres_table_psycopg2(dummy_table,conn)
                nodes_polygons_aggregations(dummy_table,mode_table[m],crop_table,mode_id,crop_id,mode_crop_m,crop_mode_m,crop_prod,nd_gm,regional_gm,conn)
                add_columns_to_table_psycopg2(dummy_table,mode_table[m],[od_id],[od_id_type],mode_id,conn)
                # (node, region, production, share-of-region-production) tuples.
                od_nodes_regions = []
                with conn.cursor() as cur:
                    sql_query = '''select {0}, {1}, {2}, {3}/(sum({4}) over (Partition by {5})) from {6}
                    '''.format(mode_id,od_id,crop_prod,crop_prod,crop_prod,od_id,dummy_table)
                    cur.execute(sql_query)
                    read_layer = cur.fetchall()
                if read_layer:
                    for row in read_layer:
                        n = row[0]
                        r = row[1]
                        c = float(row[2])
                        p = float(row[3])
                        if p > 0:
                            od_nodes_regions.append((n,r,c,p))
                # Build the igraph network for this mode.
                all_net_dict = {'edge':[],'from_node':[],'to_node':[],'distance':[],'speed':[],'travel_cost':[]}
                all_net_dict = tnc.create_network_dictionary(all_net_dict,mode_edge_tables[m],eid,nfid,ntid,spid,'geom',curs,conn)
                od_net = tnc.create_igraph_topology(all_net_dict)
                # Route the OD flows.
                net_dict = {'Origin_id':[],'Destination_id':[],'Origin_region':[],'Destination_region':[],'Tonnage':[],'edge_path':[],'node_path':[]}
                ofile = 'network_od_flows_' + crop_name + modes[m] + '.csv'
                output_file = open(ofile,'w')
                wr = csv.writer(output_file, delimiter=',', quoting=csv.QUOTE_MINIMAL)
                wr.writerow(net_dict.keys())
                crop_mode = modes[m]+ '_' + crop_name
                if crop_name in ('rice', 'cereal', 'wheat'):
                    od_fracs[crop_mode] = od_fracs[modes[m]]*od_fracs['rice']
                else:
                    od_fracs[crop_mode] = od_fracs[modes[m]]*od_fracs['indust-cro']
                od_flows = list(zip(od_fracs[o_id_col].values.tolist(),od_fracs[d_id_col].values.tolist(),od_fracs[crop_mode].values.tolist()))
                origins = list(set(od_fracs[o_id_col].values.tolist()))
                destinations = list(set(od_fracs[d_id_col].values.tolist()))
                dflows = []
                # print (od_flows)
                for o in origins:
                    for d in destinations:
                        fval = [fl for (org,des,fl) in od_flows if org == o and des == d]
                        if len(fval) == 1 and fval[0] > 0:
                            # NOTE(review): origin nodes are weighted by absolute
                            # production (item[2]) while destination nodes use the
                            # regional share (item[3]) -- confirm this is intended.
                            o_matches = [(item[0],item[2]) for item in od_nodes_regions if item[1] == o]
                            if len(o_matches) > 0:
                                for o_vals in o_matches:
                                    o_val = 1.0*fval[0]*o_vals[1]
                                    o_node = o_vals[0]
                                    d_matches = [(item[0],item[3]) for item in od_nodes_regions if item[1] == d]
                                    if len(d_matches) > 0:
                                        for d_vals in d_matches:
                                            # Annual tonnage -> daily tonnage.
                                            od_val = 1.0*o_val*d_vals[1]/365
                                            d_node = d_vals[0]
                                            if od_val > 0 and o_node != d_node:
                                                # od_net = tnc.add_igraph_costs(od_net,t_val,0)
                                                orgn_node = od_net.vs['node'].index(o_node)
                                                dest_node = od_net.vs['node'].index(d_node)
                                                # n_pth = od_net.get_shortest_paths(orgn_node,to = dest_node, weights = 'travel_cost', mode = 'OUT', output='vpath')[0]
                                                e_pth = od_net.get_shortest_paths(orgn_node,to = dest_node, weights = 'travel_cost', mode = 'OUT', output='epath')[0]
                                                # n_list = [od_net.vs[n]['node'] for n in n_pth]
                                                e_list = [od_net.es[n]['edge'] for n in e_pth]
                                                # cst = sum([od_net.es[n]['cost'] for n in e_pth])
                                                net_dict = {'Origin_id':o_node,'Destination_id':d_node,'Origin_region':o,'Destination_region':d,
                                                            'Tonnage':od_val,'edge_path':e_list,'node_path':[o_node,d_node]}
                                                wr.writerow(net_dict.values())
                                                dflows.append((str([o_node,d_node]),str(e_list),od_val))
                        print (o,d,fval,modes[m],crop_name)
                # BUG FIX: the CSV handle was never closed in the original.
                output_file.close()
                node_table = modes[m] + '_node_flows'
                edge_table = modes[m] + '_edge_flows'
                # dom_flows = pd.read_csv(ofile).fillna(0)
                dom_flows = pd.DataFrame(dflows,columns = ['node_path', 'edge_path','Tonnage'])
                flow_node_edge = dom_flows.groupby(['node_path', 'edge_path'])['Tonnage'].sum().reset_index()
                n_dict = {}
                e_dict = {}
                n_dict,e_dict = get_node_edge_flows(flow_node_edge,n_dict,e_dict)
                node_list = get_id_flows(n_dict)
                df = pd.DataFrame(node_list, columns = ['node_id',crop_name])
                df.to_excel(excel_writer,node_table,index = False)
                excel_writer.save()
                edge_list = get_id_flows(e_dict)
                df = pd.DataFrame(edge_list, columns = ['edge_id',crop_name])
                df.to_excel(excel_writer,edge_table,index = False)
                excel_writer.save()
                # Push this crop's edge flows into the mode's flow table.
                if df.empty:
                    add_zeros_columns_to_table_psycopg2(mode_flow_tables[m], [crop_name],['double precision'],conn)
                else:
                    df.to_sql('dummy_flows', engine, if_exists = 'replace', schema = 'public', index = False)
                    add_columns_to_table_psycopg2(mode_flow_tables[m], 'dummy_flows', [crop_name],['double precision'], 'edge_id',conn)
    curs.close()
    conn.close()
# Entry point: run the full crop flow-mapping pipeline.
if __name__ == '__main__':
    main()
|
'''
Generate stylesheet for stimuli version 2
'''
def get_interval_style(index: int, duration: int, delay: int) -> str:
    '''
    Get style for a particular interval (element class)
    :params:
        - index (int): interval number (becomes the .interval{index} class)
        - duration (int): time taken to play the animation, in milliseconds
        - delay (int): time to wait before playing the animation, in milliseconds
    :return:
        - style for a particular interval (str)
    '''
    # One CSS rule per interval; the page schedules successive spins of
    # #stimulus1 by toggling the interval class.
    return (
        f'#stimulus1.interval{index} {{\n'
        f'    animation-name: spin-accelerate;\n'
        f'    animation-duration: {duration}ms;\n'
        f'    animation-timing-function: linear;\n'
        f'    animation-delay: {delay}ms;\n'
        f'}}\n'
    )
def generate_v2_style(
    stylesheet_path: str,
    num_intervals: int,
    initial_duration: int,
    duration_decay: int,
):
    '''
    Generate stylesheet for stimuli version 2
    :params:
        - stylesheet_path (str): path to stylesheet to be generated
        - num_intervals (int): number of times the animation should be played
        - initial_duration (int): time taken to play the animation, in milliseconds
        - duration_decay (int): the reduction in duration required at each interval, in milliseconds
    :return:
        - void (None)
    '''
    duration = initial_duration
    delay = 0
    with open(stylesheet_path, 'w') as stylesheet:
        header_comment = '/* This stylesheet is autogenerated by generate_v2_style.py */\n\n'
        stylesheet.write(header_comment)
        # Single keyframe shared by every interval rule.
        keyframe = (
            '@keyframes spin-accelerate {\n'
            '    100% {\n'
            '        transform: rotate(360deg);\n'
            '    }\n'
            '}\n'
        )
        stylesheet.write(keyframe)
        # Each interval starts when the previous one ends (delay accumulates)
        # and plays slightly faster (duration decays).
        for interval in range(num_intervals):
            stylesheet.write('\n')
            interval_style = get_interval_style(interval, duration, delay)
            stylesheet.write(interval_style)
            # order of code below matters
            delay += duration
            duration -= duration_decay
        # Final interval spins forever at the last (fastest) speed.
        last_interval_style = (
            f'\n'
            f'#stimulus1.interval{num_intervals} {{\n'
            f'    animation-name: spin-accelerate;\n'
            f'    animation-duration: {duration}ms;\n'
            f'    animation-timing-function: linear;\n'
            f'    animation-delay: {delay}ms;\n'
            f'    animation-iteration-count: infinite;\n'
            f'}}\n'
        )
        stylesheet.write(last_interval_style)
if __name__ == '__main__':
    # NOTE(review): hard-coded absolute output path; adjust per machine or
    # promote to a CLI argument.
    stylesheet_path = '/home/burntice/0_repositories/HP3603/stylesheets/v2.css'
    generate_v2_style(
        stylesheet_path,
        num_intervals=60,
        initial_duration=1000,
        duration_decay=10,
    )
|
import signal
import sys
import time
import pyupm_grove as grove
import pyupm_ttp223 as ttp223
import pyupm_i2clcd as lcd
import dweepy
# create the button object using GPIO pin 8
button = grove.GroveButton(8)
# create the TTP223 touch sensor object using GPIO pin 7
touch = ttp223.TTP223(7)
# Initialize Jhd1313m1 at 0x3E (LCD_ADDRESS) and 0x62 (RGB_ADDRESS)
myLcd = lcd.Jhd1313m1(0, 0x3E, 0x62)
myLcd.setCursor(0,0)
# RGB Blue
#myLcd.setColor(53, 39, 249)
# Poll twice a second: the button increments the counter, the touch sensor
# decrements it; the value is shown on the LCD and pushed to dweet.io under
# the thing name "OscarOrdaz".
count = 0
while 1:
    if button.value():
        count = count +1
    if touch.isPressed():
        count = count - 1
    myLcd.setCursor(1,2)
    myLcd.write("%6d"%count)
    dato={}
    dato["envio1"]=count
    dweepy.dweet_for("OscarOrdaz",dato)
    time.sleep(.5)
# Delete the button object (unreachable: the loop above never exits)
del button
# Delete the touch sensor object
del touch
|
import numpy as np
# from scipy.integrate import trapz
import numba
from tqdm import trange
# ==========================================
@numba.jit(nopython=True)
def phi(x, pe):
    '''
    Effective potential energy
    Input:
        x = numpy array of positions, [-0.5, 0.5]
        pe = scalar, peclet number
    Return:
        numpy array of the potential evaluated for each x
    '''
    # Linear potential tilted by the Peclet number; the commented branch kept
    # the opposite sign convention.
    #if pe > 0: return pe*(x + 0.5)
    return pe*(0.5 - x)
@numba.jit(nopython=True)
def psi_s(x, pe):
    '''
    Stationary probability density
    Input:
        x = numpy array of positions, [-0.5, 0.5]
        pe = scalar, peclet number
    Return:
        numpy array of the probability density evaluated at each x
    '''
    # Boltzmann weight of the effective potential ...
    p = np.exp(-phi(x, pe))
    # ... with the closed-form normalization constant over x in [-0.5, 0.5].
    Z = pe / (1 - np.exp(-pe))
    return p * Z
# ==========================================
@numba.jit
def psi_odd(x, xp, t, tp, pe, n, tolerance=1e-6):
    '''
    Compute the eigenfunctions \psi(x)\psi(x') for odd integers
    (the original docstring said "even" -- this sums N = 1, 3, 5, ...)
    Input:
        x : numpy array of x positions, [-0.5, 0.5]
        xp : scalar [-0.5, 0.5] indicates the initial condition
        t : scalar > tp, time point
        tp : scalar, initial time point
        pe : scalar, peclet number
        n : scalar, maximum number of eigenfunctions to compute, default 100
        tolerance : scalar, stop once a term's peak magnitude falls below this
    Returns:
        numpy array evaluating the eigenfunction sum
    '''
    psi = 0
    w = 1
    N = 1  # odd mode indices: 1, 3, 5, ...
    while N <= n:
        kn = N * np.pi               # mode wavenumber
        ln = kn**2 / pe + pe/4.      # mode decay rate (eigenvalue)
        norm = 2. / (pe**2 + 4*kn**2)
        tmp = pe * np.cos(kn*x) + 2 * kn * np.sin(kn * x)
        tmp = tmp * (pe * np.cos(kn * xp) + 2 * kn * np.sin(kn * xp))
        psi_add = norm * tmp * np.exp(-ln*(t-tp))
        # Terms shrink as N grows; truncate once the contribution is negligible.
        if abs(psi_add.max()) < tolerance: break
        psi += psi_add
        w += 1
        N = 2 * w - 1
    return psi
# ==========================================
@numba.jit
def psi_even(x, xp, t, tp, pe, n, tolerance=1e-6):
    '''
    Compute the eigenfunctions \psi(x)\psi(x') for even integers
    Input:
        x : numpy array of x positions, [-0.5, 0.5]
        xp : scalar [-0.5, 0.5] indicates the initial condition
        t : scalar > tp, time point
        tp : scalar, initial time point
        pe : scalar, peclet number
        n : scalar, maximum number of eigenfunctions to compute, default 100
        tolerance : scalar, stop once a term's peak magnitude falls below this
    Returns:
        numpy array evaluating the eigenfunction sum
    '''
    psi = 0
    w = 1
    N = 2  # even mode indices: 2, 4, 6, ...
    while N <= n:
        kn = N * np.pi               # mode wavenumber
        ln = kn**2 / pe + pe/4.      # mode decay rate (eigenvalue)
        norm = 2. / (pe**2 + 4.*kn**2)
        tmp = 2*kn*np.cos(kn*x) - pe*np.sin(kn*x)
        tmp = tmp * (2*kn*np.cos(kn*xp) - pe*np.sin(kn*xp))
        psi_add = norm * tmp * np.exp(-ln*(t-tp))
        # Terms shrink as N grows; truncate once the contribution is negligible.
        if abs(psi_add.max()) < tolerance: break
        psi += psi_add
        w += 1
        N = 2*w
    return psi
# ==========================================
@numba.jit
def transition_prob(x, xp, t, tp, pe, n=100):
    '''
    Compute the transition probability of a particle located at xp at t=tp which is subject to a constant force and diffusion until time t.
    Input:
        x : numpy array of x positions, [-0.5, 0.5]
        xp : scalar [-0.5, 0.5] indicates the initial condition
        t : scalar > tp, time point
        tp : scalar, initial time point
        pe : scalar, peclet number
        n : scalar, number of eigenfunctions to compute, default 100
    Returns:
        numpy array of length x.size, transition probabilities for each x
    '''
    # eigenfunction equations: odd + even mode sums, weighted by the
    # potential factor exp((phi(xp) - phi(x)) / 2).
    const = np.exp(-0.5*phi(x, pe) + 0.5*phi(xp, pe))
    U = const*psi_odd(x, xp, t, tp, pe, n)
    U += const*psi_even(x, xp, t, tp, pe, n)
    # add stationary solution
    U += psi_s(x, pe)
    return U
# ==========================================
@numba.jit
def W(x, wo, t, tp=0, pe=1.0, n=100):
    '''
    Compute the probability density of a particle's location from its time zero distribution.
    Inputs:
        x : numpy array of x positions, [-0.5, 0.5]
        wo : numpy array of length x.size, probability of the particle's location at each x for t = 0
        t : scalar > 0, time point of interest
        tp : scalar, initial time point
        pe : scalar, peclet number
        n : scalar, number of eigenfunctions to compute, default 100
    Returns:
        numpy array of length x.size, probabilities for each x
    '''
    density = np.zeros(x.size)
    propagator = np.zeros(shape=(x.size, x.size))
    # Row i of the propagator holds the transition probabilities from x[i].
    for src in range(x.size):
        propagator[src, :] = transition_prob(x, x[src], t, tp, pe, n=n)
    # Marginalize over the initial position with the trapezoid rule.
    for dst in range(density.size):
        density[dst] = np.trapz(propagator[:, dst] * wo, x)
    return density
# ==========================================
# tools
@numba.jit(nopython=True)
def get_timescale(pe):
    # Slowest relaxation time 1/lambda_1, where lambda_1 = pi^2/pe + pe/4
    # (the N = 1 decay rate used in psi_odd).
    numerator = 4 * pe
    denominator = 4 * np.pi ** 2 + pe ** 2
    return numerator / denominator
# ==========================================
# initial distributions to try
def gauss(x, mu, sig):
    '''
    Gaussian bump normalized on the sampled grid, for -0.5 <= x <= 0.5.

    Note: `sig` enters as a variance-like scale, exp(-(x - mu)^2 / (2 * sig)).
    '''
    unnormalized = np.exp(-0.5 * (x - mu)**2 / sig)
    # Normalize with the trapezoid rule so the density integrates to one.
    return unnormalized / np.trapz(unnormalized, x)
def uniform(x):
    '''
    Uniform density over the sampled range of x (e.g. x in [-0.5, 0.5]).
    '''
    span = x.max() - x.min()
    return np.ones(x.size) / span
|
# -*- encoding: utf-8 -*-
"""
@version: 0.01
@author: Tony Qiu
@contact: tony.qiu@liulishuo.com
@file: las.py
@time: 2018/04/19 午後5:56
"""
def max_ascend_len(ary):
    """Return the length of the longest strictly-ascending run in ``ary``.

    A run is a maximal stretch of consecutive elements in which each element
    is strictly greater than its predecessor. An empty sequence yields 0.

    BUG FIX: the original never recorded the run that reaches the end of the
    sequence, so [1, 2, 3] crashed on max([]) and [2, 1, 4, 3, 1, 5, 6]
    returned 2 instead of 3.
    """
    if not ary:
        return 0
    longest = 1
    current = 1
    for idx in range(1, len(ary)):
        if ary[idx] > ary[idx - 1]:
            current += 1
            longest = max(longest, current)
        else:
            # Descent (or tie) ends the current run.
            current = 1
    return longest
# Demo: print the longest strictly-ascending run of a sample sequence.
a = [2,1,4,3,1,5,6]
print(max_ascend_len(a))
|
#
# For licensing see accompanying LICENSE file.
# Copyright (C) 2022 Apple Inc. All Rights Reserved.
#
import sys
sys.path.append("..")
import torch
from options.opts import get_training_arguments
from cvnets import get_model
from loss_fn import build_loss_fn
from utils.tensor_utils import create_rand_tensor
from utils import logger
def test_model(*args, **kwargs):
    """Build a cvnets model from CLI training options and smoke-test it.

    Profiles the model, runs a forward pass on a random input and -- when the
    output looks like classification logits -- a backward pass against random
    targets. *args/**kwargs are accepted but unused.
    """
    opts = get_training_arguments()
    model = get_model(opts)
    loss_fn = build_loss_fn(opts)
    inp = create_rand_tensor(opts)
    # Optionally exercise the channels_last memory format; fall back to
    # contiguous if the input cannot be converted.
    if getattr(opts, "common.channels_last", False):
        inp = inp.to(memory_format=torch.channels_last)
        model = model.to(memory_format=torch.channels_last)
        if not inp.is_contiguous(memory_format=torch.channels_last):
            logger.warning(
                "Unable to convert input to channels_last format. Setting model to contiguous format"
            )
            model = model.to(memory_format=torch.contiguous_format)
    # FLOPs computed using model.profile_model and fvcore can be different because
    # model.profile_model ignore some of the operations (e.g., addition) while
    # fvcore accounts for all operations (e.g., addition)
    model.profile_model(inp)
    model.eval()
    out = model(inp)
    try:
        # compute flops using FVCore also
        from fvcore.nn import FlopCountAnalysis

        flop_analyzer = FlopCountAnalysis(model.eval(), inp)
        flop_analyzer.unsupported_ops_warnings(False)
        flop_analyzer.uncalled_modules_warnings(False)
        total_flops = flop_analyzer.total()
        print(
            "Flops computed using FVCore for an input of size={} are {:>8.3f} G".format(
                list(inp.shape), total_flops / 1e9
            )
        )
    except ModuleNotFoundError:
        # fvcore is an optional dependency; skip the cross-check if absent.
        pass
    try:
        # Treat dim 1 of the output as class logits and do a dummy backward pass.
        n_classes = out.shape[1]
        pred = torch.argmax(out, dim=1)
        targets = torch.randint(0, n_classes, size=pred.shape)
        loss = loss_fn(None, out, targets)
        loss.backward()
        print(model)
        print(loss_fn)
        print("Random Input : {}".format(inp.shape))
        print("Random Target: {}".format(targets.shape))
        print("Random Output: {}".format(out.shape))
    except:
        # NOTE(review): bare except -- presumably to tolerate models whose
        # output is not classification logits, but it also hides real bugs;
        # consider narrowing the exception types.
        print(model)
if __name__ == "__main__":
test_model()
|
# str.format() demos: empty placeholder vs. fixed-point format spec.
price = 49
txt = "The price is {} dollars"
print(txt.format(price))
txt = "The price is {:.2f} dollars"
print(txt.format(price))
# Multiple unnamed placeholders are filled in argument order.
quantity = 3
itemno = 567
price = 49
myorder = "I want {} pieces of item number {} for {:.2f} dollars."
print(myorder.format(quantity, itemno, price))
# Index numbers make the argument-to-placeholder mapping explicit.
quantity = 3
itemno = 567
price = 49
myorder = "I want {0} pieces of item number {1} for {2:.2f} dollars."
print(myorder.format(quantity, itemno, price))
from os import mkdir
from os.path import join, exists
import cv2
def extract_frames(video_filename):
    """Decode every frame of a video file.

    Keyword Arguments:
    @param video_filename: The input video file name
    @type video_filename: str
    @returns: The decoded frames, in playback order
    @rtype: list
    """
    capture = cv2.VideoCapture(video_filename)
    frames = []
    # read() returns (success, frame); stop at the first failed read (EOF).
    grabbed, frame = capture.read()
    while grabbed:
        frames.append(frame)
        grabbed, frame = capture.read()
    capture.release()
    return frames
def get_match_score(d1, d2):
    """Calculates the match score between two SIFT descriptors

    Keyword Arguments:
    @param d1: The first SIFT descriptor
    @type d1: ndarray
    @param d2: The second SIFT descriptor
    @type d2: ndarray
    @returns: The match score between the two SIFT descriptors
    @rtype: float
    """
    matcher = cv2.BFMatcher()
    candidate_pairs = matcher.knnMatch(d1, d2, k=2)
    # Lowe's ratio test: count matches whose best candidate is clearly
    # better than the runner-up.
    return sum(
        1
        for best, second in candidate_pairs
        if best.distance < 0.70 * second.distance
    )
def get_unique_frames(images, thr):
    """From a set of images it returns the unique ones.
    Images can be cropped, rotated or at different resolutions.
    Keyword Arguments:
    @param images: The input set of images
    @type images: list
    @param thr: The threshold to decide if two frames are the same
    @type thr: int
    @returns: The unique images
    @rtype: list
    """
    u_images = []           # frames judged unique so far
    last_unique_des = None  # descriptors of the most recently kept frame
    prev_d = None           # descriptors of the immediately preceding frame
    for i, img in enumerate(images):
        print('Processing frame number: %d' % i)
        sift = cv2.xfeatures2d.SIFT_create()
        gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
        # Blur to suppress noise/compression artifacts before detection.
        gray = cv2.blur(gray, (5, 5))
        _, des = sift.detectAndCompute(gray, None)
        if prev_d is None:
            # First frame is unique by definition.
            u_images.append(img)
            last_unique_des = des
        else:
            # Similarity to the *previous* frame, not the last unique one.
            score = get_match_score(prev_d, des)
            print(score)
            if score <= thr:
                # Dissimilar enough: treat as a new unique frame.
                # NOTE(review): last_unique_des is not refreshed here, so the
                # elif below still compares against an older frame's
                # descriptor count -- confirm this is intentional.
                u_images.append(img)
            elif last_unique_des.shape[0] < des.shape[0]:
                # Same content but richer in features: keep the better copy.
                u_images[-1] = img
                last_unique_des = des
        prev_d = des
    return u_images # 108 correct number
def save_images(images, out_dir):
    """Write a batch of images to disk as numbered JPEG files.

    Keyword Arguments:
    @param images: The images
    @type images: list
    @param out_dir: Output directory (created if missing)
    @type out_dir: str
    """
    if not exists(out_dir):
        mkdir(out_dir)
    for index, image in enumerate(images):
        target = join(out_dir, 'image_%d.jpg' % index)
        cv2.imwrite(target, image)
|
import os
from functools import partial
from typing import List, Tuple
import haiku as hk
import jax
import jax.numpy as jnp
import numpy as np
from jax.experimental import optix
from rljax.algorithm.sac import SAC
from rljax.network import ContinuousQFunction, SACDecoder, SACEncoder, SACLinear, StateDependentGaussianPolicy
from rljax.util import fake_action, fake_state, load_params, optimize, preprocess_state, save_params, soft_update, weight_decay
class SAC_AE(SAC):
    """Soft Actor-Critic with a regularized autoencoder (SAC+AE) for pixels.

    A convolutional encoder is shared by the critic (trained end-to-end) and
    the actor (gradients stopped through the encoder); a decoder is trained
    with an image-reconstruction loss plus RAE-style latent and decoder
    weight-decay penalties.

    BUG FIX: the autoencoder update was gated on ``update_interval_actor``;
    ``update_interval_ae`` was stored in ``__init__`` but never used.
    """

    name = "SAC+AE"

    def __init__(
        self,
        num_agent_steps,
        state_space,
        action_space,
        seed,
        max_grad_norm=None,
        gamma=0.99,
        nstep=1,
        num_critics=2,
        buffer_size=10 ** 6,
        use_per=False,
        batch_size=128,
        start_steps=1000,
        update_interval=1,
        tau=0.01,
        tau_ae=0.05,
        fn_actor=None,
        fn_critic=None,
        lr_actor=1e-3,
        lr_critic=1e-3,
        lr_ae=1e-3,
        lr_alpha=1e-4,
        units_actor=(1024, 1024),
        units_critic=(1024, 1024),
        log_std_min=-10.0,
        log_std_max=2.0,
        d2rl=False,
        init_alpha=0.1,
        adam_b1_alpha=0.5,
        feature_dim=50,
        lambda_latent=1e-6,
        lambda_weight=1e-7,
        update_interval_actor=2,
        update_interval_ae=1,
        update_interval_target=2,
    ):
        # The encoder expects 84x84 images with uint8-range pixel values.
        assert len(state_space.shape) == 3 and state_space.shape[:2] == (84, 84)
        assert (state_space.high == 255).all()
        if d2rl:
            self.name += "-D2RL"

        if fn_critic is None:

            def fn_critic(x, a):
                # Define without linear layer (the shared SACLinear is applied
                # separately in _calculate_value_list).
                return ContinuousQFunction(
                    num_critics=num_critics,
                    hidden_units=units_critic,
                    d2rl=d2rl,
                )(x, a)

        if fn_actor is None:

            def fn_actor(x):
                # Define with linear layer.
                x = SACLinear(feature_dim=feature_dim)(x)
                return StateDependentGaussianPolicy(
                    action_space=action_space,
                    hidden_units=units_actor,
                    log_std_min=log_std_min,
                    log_std_max=log_std_max,
                    clip_log_std=False,
                    d2rl=d2rl,
                )(x)

        # Dummy tensors used to initialize the haiku modules.
        fake_feature = jnp.empty((1, feature_dim))
        fake_last_conv = jnp.empty((1, 39200))  # flattened encoder conv output
        if not hasattr(self, "fake_args_critic"):
            self.fake_args_critic = (fake_feature, fake_action(action_space))
        if not hasattr(self, "fake_args_actor"):
            self.fake_args_actor = (fake_last_conv,)

        super(SAC_AE, self).__init__(
            num_agent_steps=num_agent_steps,
            state_space=state_space,
            action_space=action_space,
            seed=seed,
            max_grad_norm=max_grad_norm,
            gamma=gamma,
            nstep=nstep,
            num_critics=num_critics,
            buffer_size=buffer_size,
            use_per=use_per,
            batch_size=batch_size,
            start_steps=start_steps,
            update_interval=update_interval,
            tau=tau,
            fn_actor=fn_actor,
            fn_critic=fn_critic,
            lr_actor=lr_actor,
            lr_critic=lr_critic,
            lr_alpha=lr_alpha,
            init_alpha=init_alpha,
            adam_b1_alpha=adam_b1_alpha,
        )

        # Encoder.
        self.encoder = hk.without_apply_rng(hk.transform(lambda s: SACEncoder(num_filters=32, num_layers=4)(s)))
        self.params_encoder = self.params_encoder_target = self.encoder.init(next(self.rng), fake_state(state_space))
        # Linear layer for critic and decoder.
        self.linear = hk.without_apply_rng(hk.transform(lambda x: SACLinear(feature_dim=feature_dim)(x)))
        self.params_linear = self.params_linear_target = self.linear.init(next(self.rng), fake_last_conv)
        # Decoder.
        self.decoder = hk.without_apply_rng(hk.transform(lambda x: SACDecoder(state_space, num_filters=32, num_layers=4)(x)))
        self.params_decoder = self.decoder.init(next(self.rng), fake_feature)
        opt_init, self.opt_ae = optix.adam(lr_ae)
        self.opt_state_ae = opt_init(self.params_ae)
        # Re-define the optimizer for critic (it now covers encoder + linear).
        opt_init, self.opt_critic = optix.adam(lr_critic)
        self.opt_state_critic = opt_init(self.params_entire_critic)
        # Other parameters.
        self._update_target_ae = jax.jit(partial(soft_update, tau=tau_ae))
        self.lambda_latent = lambda_latent
        self.lambda_weight = lambda_weight
        self.update_interval_actor = update_interval_actor
        self.update_interval_ae = update_interval_ae
        self.update_interval_target = update_interval_target

    def select_action(self, state):
        """Deterministic action for evaluation."""
        last_conv = self._preprocess(self.params_encoder, state[None, ...])
        action = self._select_action(self.params_actor, last_conv)
        return np.array(action[0])

    def explore(self, state):
        """Stochastic action for data collection."""
        last_conv = self._preprocess(self.params_encoder, state[None, ...])
        action = self._explore(self.params_actor, last_conv, next(self.rng))
        return np.array(action[0])

    @partial(jax.jit, static_argnums=0)
    def _preprocess(
        self,
        params_encoder: hk.Params,
        state: np.ndarray,
    ) -> jnp.ndarray:
        """Encode raw pixels into the flattened conv representation."""
        return self.encoder.apply(params_encoder, state)

    def update(self, writer=None):
        """One learning step: critic every call; actor/alpha, autoencoder and
        target networks on their own intervals."""
        self.learning_step += 1
        weight, batch = self.buffer.sample(self.batch_size)
        state, action, reward, done, next_state = batch

        # Update critic (encoder + linear + critic heads end-to-end).
        self.opt_state_critic, params_entire_critic, loss_critic, abs_td = optimize(
            self._loss_critic,
            self.opt_critic,
            self.opt_state_critic,
            self.params_entire_critic,
            self.max_grad_norm,
            params_critic_target=self.params_entire_critic_target,
            params_actor=self.params_actor,
            log_alpha=self.log_alpha,
            state=state,
            action=action,
            reward=reward,
            done=done,
            next_state=next_state,
            weight=weight,
            **self.kwargs_critic,
        )
        self.params_encoder = params_entire_critic["encoder"]
        self.params_linear = params_entire_critic["linear"]
        self.params_critic = params_entire_critic["critic"]

        # Update priority.
        if self.use_per:
            self.buffer.update_priority(abs_td)

        # Update actor and alpha.
        if self.learning_step % self.update_interval_actor == 0:
            self.opt_state_actor, self.params_actor, loss_actor, mean_log_pi = optimize(
                self._loss_actor,
                self.opt_actor,
                self.opt_state_actor,
                self.params_actor,
                self.max_grad_norm,
                params_critic=self.params_entire_critic,
                log_alpha=self.log_alpha,
                state=state,
                **self.kwargs_actor,
            )
            self.opt_state_alpha, self.log_alpha, loss_alpha, _ = optimize(
                self._loss_alpha,
                self.opt_alpha,
                self.opt_state_alpha,
                self.log_alpha,
                None,
                mean_log_pi=mean_log_pi,
            )

        # Update autoencoder.
        # BUG FIX: this was gated on update_interval_actor in the original.
        if self.learning_step % self.update_interval_ae == 0:
            self.opt_state_ae, params_ae, loss_ae, _ = optimize(
                self._loss_ae,
                self.opt_ae,
                self.opt_state_ae,
                self.params_ae,
                self.max_grad_norm,
                state=state,
                key=next(self.rng),
            )
            self.params_encoder = params_ae["encoder"]
            self.params_linear = params_ae["linear"]
            self.params_decoder = params_ae["decoder"]

        # Update target network.
        if self.learning_step % self.update_interval_target == 0:
            self.params_encoder_target = self._update_target_ae(self.params_encoder_target, self.params_encoder)
            self.params_linear_target = self._update_target_ae(self.params_linear_target, self.params_linear)
            self.params_critic_target = self._update_target(self.params_critic_target, self.params_critic)

        if writer and self.learning_step % 1000 == 0:
            writer.add_scalar("loss/critic", loss_critic, self.learning_step)
            writer.add_scalar("loss/actor", loss_actor, self.learning_step)
            writer.add_scalar("loss/ae", loss_ae, self.learning_step)
            writer.add_scalar("loss/alpha", loss_alpha, self.learning_step)
            writer.add_scalar("stat/alpha", jnp.exp(self.log_alpha), self.learning_step)
            writer.add_scalar("stat/entropy", -mean_log_pi, self.learning_step)

    @partial(jax.jit, static_argnums=0)
    def _calculate_value_list(
        self,
        params_critic: hk.Params,
        last_conv: np.ndarray,
        action: np.ndarray,
    ) -> List[jnp.ndarray]:
        """Apply the shared linear layer, then every critic head."""
        feature = self.linear.apply(params_critic["linear"], last_conv)
        return self.critic.apply(params_critic["critic"], feature, action)

    @partial(jax.jit, static_argnums=0)
    def _loss_critic(
        self,
        params_critic: hk.Params,
        params_critic_target: hk.Params,
        params_actor: hk.Params,
        log_alpha: jnp.ndarray,
        state: np.ndarray,
        action: np.ndarray,
        reward: np.ndarray,
        done: np.ndarray,
        next_state: np.ndarray,
        weight: np.ndarray or List[jnp.ndarray],
        *args,
        **kwargs,
    ) -> Tuple[jnp.ndarray, jnp.ndarray]:
        # Encode pixels; gradients flow through the current-state encoding
        # only (stop_gradient on the next state).
        last_conv = self.encoder.apply(params_critic["encoder"], state)
        next_last_conv = jax.lax.stop_gradient(self.encoder.apply(params_critic["encoder"], next_state))
        return super(SAC_AE, self)._loss_critic(
            params_critic=params_critic,
            params_critic_target=params_critic_target,
            params_actor=params_actor,
            log_alpha=log_alpha,
            state=last_conv,
            action=action,
            reward=reward,
            done=done,
            next_state=next_last_conv,
            weight=weight,
            *args,
            **kwargs,
        )

    @partial(jax.jit, static_argnums=0)
    def _loss_actor(
        self,
        params_actor: hk.Params,
        params_critic: hk.Params,
        log_alpha: jnp.ndarray,
        state: np.ndarray,
        *args,
        **kwargs,
    ) -> Tuple[jnp.ndarray, jnp.ndarray]:
        # The actor never backpropagates into the encoder.
        last_conv = jax.lax.stop_gradient(self.encoder.apply(params_critic["encoder"], state))
        return super(SAC_AE, self)._loss_actor(
            params_actor=params_actor,
            params_critic=params_critic,
            log_alpha=log_alpha,
            state=last_conv,
            *args,
            **kwargs,
        )

    @partial(jax.jit, static_argnums=0)
    def _loss_ae(
        self,
        params_ae: hk.Params,
        state: np.ndarray,
        key: jnp.ndarray,
    ) -> Tuple[jnp.ndarray, jnp.ndarray]:
        # Preprocess states.
        target = preprocess_state(state, key)
        # Reconstruct states through encoder -> linear -> decoder.
        last_conv = self.encoder.apply(params_ae["encoder"], state)
        feature = self.linear.apply(params_ae["linear"], last_conv)
        reconst = self.decoder.apply(params_ae["decoder"], feature)
        # MSE for reconstruction errors.
        loss_reconst = jnp.square(target - reconst).mean()
        # L2 penalty of latent representations following RAE.
        loss_latent = 0.5 * jnp.square(feature).sum(axis=1).mean()
        # Weight decay for the decoder.
        loss_weight = weight_decay(params_ae["decoder"])
        return loss_reconst + self.lambda_latent * loss_latent + self.lambda_weight * loss_weight, None

    @property
    def params_ae(self):
        # Parameters trained by the autoencoder loss.
        return {
            "encoder": self.params_encoder,
            "linear": self.params_linear,
            "decoder": self.params_decoder,
        }

    @property
    def params_entire_critic(self):
        # Parameters trained by the critic loss.
        return {
            "encoder": self.params_encoder,
            "linear": self.params_linear,
            "critic": self.params_critic,
        }

    @property
    def params_entire_critic_target(self):
        # Slow-moving targets matching params_entire_critic.
        return {
            "encoder": self.params_encoder_target,
            "linear": self.params_linear_target,
            "critic": self.params_critic_target,
        }

    def save_params(self, save_dir):
        """Persist SAC params plus the encoder/linear/decoder params."""
        super().save_params(save_dir)
        save_params(self.params_encoder, os.path.join(save_dir, "params_encoder.npz"))
        save_params(self.params_linear, os.path.join(save_dir, "params_linear.npz"))
        save_params(self.params_decoder, os.path.join(save_dir, "params_decoder.npz"))

    def load_params(self, save_dir):
        """Restore all params; targets are reset to the loaded online params."""
        super().load_params(save_dir)
        self.params_encoder = self.params_encoder_target = load_params(os.path.join(save_dir, "params_encoder.npz"))
        self.params_linear = self.params_linear_target = load_params(os.path.join(save_dir, "params_linear.npz"))
        self.params_decoder = load_params(os.path.join(save_dir, "params_decoder.npz"))
|
import discord
import os
import random
from dotenv import load_dotenv
from datetime import datetime
load_dotenv()  # read DISCORD_TOKEN (and friends) from a local .env file
TOKEN = os.getenv("DISCORD_TOKEN")
# Module-level mutable state shared by the MyClient event handlers.
groupList = []
groupSize = 0
grpBool = False
now = datetime.now()
# NOTE(review): computed once at import time — every log line written with
# this value carries the bot's start-up time, not the time of the event.
current_time = now.strftime("[ %d.%m.%y | %H:%M:%S ] ")
class MyClient(discord.Client):
    """Group Shuffler bot.

    Commands (sent as plain channel messages):
      * ``grp <size>``  - start a selection with groups of ``size`` members
      * ``in`` / ``$in`` - join the current selection
      * ``$out``         - leave the current selection
      * ``end`` / ``$end`` - shuffle the pool and announce the groups
    """

    @staticmethod
    def _timestamp():
        # BUG FIX: the module-level `current_time` is frozen at import time;
        # log entries need the time of the event, not the start-up time.
        return datetime.now().strftime("[ %d.%m.%y | %H:%M:%S ] ")

    async def on_ready(self):
        """Announce start-up on stdout and in the log file."""
        print("{} logged in.".format(self.user.name))
        # `with` guarantees the log file is closed even if a write fails.
        with open("logs.txt", "a") as f:
            f.write("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~")
            f.write('\n')
            f.write(self._timestamp() + "{} is online now..".format(self.user.name))
            f.write('\n')

    async def on_message(self, message):
        """Dispatch the shuffle-bot commands found in `message.content`."""
        global groupSize, grpBool
        with open("logs.txt", "a") as f:
            if message.content.startswith("grp"):
                await message.delete()
                grpBool = True
                try:
                    groupSize = message.content.split(' ')[1]
                    size = int(groupSize)
                except (IndexError, ValueError):
                    # BUG FIX: "grp" without a numeric argument used to raise
                    # IndexError/ValueError inside the handler.
                    await message.channel.send("Please enter a group size >0 ", delete_after=4)
                    return
                if size > 0:
                    print("--------------------------------------------------------------------------------------")
                    print("Groupsize for each group: " + str(groupSize))
                    f.write("------------------------------------------------------------------------------------")
                    f.write('\n')
                    f.write(self._timestamp() + "Group size for each group: " + str(groupSize))
                    f.write('\n')
                    await message.channel.send("Group Shuffler Bot: type: [$in] to enter OR [$out] to delete the entry"
                                               , delete_after=10)
                    await message.channel.send("To end the selection and get the groups type [$end]"
                                               , delete_after=10)
                else:
                    await message.channel.send("Please enter a group size >0 ", delete_after=4)
            # BUG FIX: the help text advertises "$in" but the old check only
            # matched "in"; accept both spellings.
            if message.content.startswith(("in", "$in")):
                await message.delete()
                if grpBool:
                    if str(message.author) not in groupList:
                        groupList.append(str(message.author))
                        print("Added " + str(message.author) + " to list")
                        print("Current group pool: " + str(groupList))
                        await message.channel.send("Added: " + str(message.author), delete_after=10)
                        f.write(self._timestamp() + "Added:" + str(message.author))
                        f.write('\n')
                else:
                    await message.channel.send("Please create a group selection first.", delete_after=10)
                    f.write(self._timestamp() + "Please create a group selection first. @" + str(message.author))
                    f.write('\n')
            if message.content.startswith("$out"):
                await message.delete()
                if grpBool:
                    if str(message.author) in groupList:
                        groupList.remove(str(message.author))
                        print("Removed " + str(message.author) + "from list")
                        await message.channel.send("Removed " + str(message.author) + " from list", delete_after=10)
                        f.write(self._timestamp() + "Removed:" + str(message.author) + " from list")
                        f.write('\n')
                else:
                    await message.channel.send("Please create a group selection first.", delete_after=10)
            # BUG FIX: the help text advertises "$end"; accept both spellings.
            if message.content.startswith(("end", "$end")):
                await message.delete()
                size = int(groupSize)
                if not grpBool or size <= 0:
                    # BUG FIX: "end" before "grp" left groupSize at 0, and the
                    # zero-length slice below made the while-loop spin forever.
                    await message.channel.send("Please create a group selection first.", delete_after=10)
                    return
                groupListFinal = groupList.copy()
                random.shuffle(groupListFinal)
                count = 1
                f.write(self._timestamp() + "Shuffling...")
                f.write('\n')
                groupDict = {}
                # Members that do not fill a complete group are dropped,
                # matching the original behavior — TODO confirm intended.
                while len(groupListFinal) >= size:
                    pair = groupListFinal[0:size]
                    for entry in pair:
                        groupListFinal.remove(entry)
                    await message.channel.send("Group " + str(count) + ": " + str(pair))
                    groupDict[count] = str(pair)
                    f.write(self._timestamp() + "Group " + str(count) + ": " + str(pair))
                    f.write('\n')
                    count = count + 1
                print(groupDict)
                groupList.clear()
                grpBool = False
                f.write("------------------------------------------------------------------------------------")
                f.write('\n')
                print("--------------------------------------------------------------------------------------")
client = MyClient()  # module-level so the handlers above can reference it
client.run(TOKEN)  # blocking call: connects to the gateway and dispatches events
|
import random
from operator import add
# Convergence threshold: value iteration stops once the summed absolute
# change of the value table drops below this.
MIN_DELTA = 1e-4
class GridMDP(object):
    """Grid-world Markov decision process.

    States are (row, col) tuples; obstacle cells are excluded from the state
    space. Movement is stochastic: the intended direction is taken with
    probability ``forward`` and slips left/right/backward otherwise.
    """

    def __init__(self, metadata):
        """Build the MDP from a metadata dict (dimensions, obstacles,
        transition distribution, terminal rewards, discount, ...)."""
        self.width = metadata['width']
        self.height = metadata['height']
        self.initial_value = metadata['initial_value']
        self.obstacles = metadata['obstacles']
        self.living_cost = metadata['living_cost']
        self.discount = metadata['discount']
        self.transition_distribution = metadata['transition_distribution']
        # Terminal states map to their rewards; the list keeps them handy.
        self.rewards = {tuple(term['state']): term['reward'] for term in metadata['terminals']}
        self.terminals = list(self.rewards.keys())
        self._init_grid()
        # Enumerate the state space: every non-obstacle cell.
        self.states = {
            (row, col)
            for row in range(self.height)
            for col in range(self.width)
            if self.grid[row][col] is not None
        }
        # One-tile moves; the list order matters for index-based rotation.
        self.actions = [(1, 0), (0, 1), (-1, 0), (0, -1)]
        self.num_actions = len(self.actions)
        # Uniform initial values; random initial policy.
        self.values = {state: self.initial_value for state in self.states}
        self.policy = {state: random.choice(self.actions) for state in self.states}

    def R(self, state):
        """Reward of `state`: the terminal reward, else the living cost."""
        return self.rewards[state] if state in self.terminals else self.living_cost

    def _init_grid(self):
        """(Re)build the value grid; obstacle cells are marked with None."""
        self.grid = [[self.initial_value] * self.width for _ in range(self.height)]
        for row, col in self.obstacles:
            self.grid[row][col] = None

    def _shift(self, state, action):
        # Apply `action` as a (d_row, d_col) offset; stay put when the
        # destination is off-grid or an obstacle.
        candidate = (state[0] + action[0], state[1] + action[1])
        return candidate if candidate in self.states else state

    def _rotate(self, action, offset):
        # Rotate `action` by `offset` quarter-turns within self.actions.
        return self.actions[(self.actions.index(action) + offset) % self.num_actions]

    def _move_forward(self, state, action):
        return self._shift(state, action)

    def _move_backward(self, state, action):
        return self._shift(state, self._rotate(action, 2))

    def _move_left(self, state, action):
        return self._shift(state, self._rotate(action, -1))

    def _move_right(self, state, action):
        return self._shift(state, self._rotate(action, 1))

    def allowed_actions(self, state):
        """Actions available in `state`; terminals only allow the no-op None."""
        return [None] if state in self.terminals else self.actions

    def next_state_distribution(self, state, action):
        """Return (probability, next_state) pairs for taking `action`."""
        if action is None:
            return [(0.0, state)]
        dist = self.transition_distribution
        return [
            (dist['forward'], self._move_forward(state, action)),
            (dist['left'], self._move_left(state, action)),
            (dist['right'], self._move_right(state, action)),
            (dist['backward'], self._move_backward(state, action)),
        ]

    def update_values(self, values):
        self.values = values

    def update_policy(self, policy):
        self.policy = policy

    def clear(self):
        """Reset grid, values and policy to their initial configuration."""
        self._init_grid()
        for state in self.states:
            self.values[state] = self.initial_value
            self.policy[state] = random.choice(self.actions)
def _expected_value(state, action, values, mdp):
return sum([prob * values[new_state] for prob, new_state in mdp.next_state_distribution(state, action)])
def values_converged(new_values, old_values):
    """True when the total absolute value change is below MIN_DELTA."""
    total_change = sum(abs(new_values[state] - old_values[state])
                       for state in new_values)
    return total_change < MIN_DELTA
def policy_converged(new_policy, old_policy):
    """True when every state keeps the same action across the two policies."""
    return all(old_policy[state] == new_policy[state] for state in new_policy)
def value_iteration(initial_values, mdp, num_iter=100):
    """Run Bellman-backup value iteration for up to `num_iter` sweeps.

    Each sweep works on a copy of the value table so that updates within a
    sweep do not affect one another; iteration stops early once the values
    converge (see values_converged).
    """
    values = initial_values
    for _ in range(num_iter):
        new_values = dict(values)
        for state in mdp.states:
            best = max(
                _expected_value(state, action, values, mdp)
                for action in mdp.allowed_actions(state)
            )
            new_values[state] = mdp.R(state) + mdp.discount * best
        if values_converged(new_values, values):
            break
        # Carry the updated table into the next sweep.
        values = new_values
    return values
def policy_extraction(values, mdp):
    """Extract the greedy policy with respect to `values`.

    Only expected successor values are compared: R(state) and the discount
    are constant for a fixed state, so they cannot change the argmax.
    """
    policy = {}
    for state in mdp.states:
        actions = mdp.allowed_actions(state)
        expected_values = [_expected_value(state, action, values, mdp) for action in actions]
        best_idx, _ = max(enumerate(expected_values), key=lambda ev: ev[1])
        # BUG FIX: index into the allowed-action list, not mdp.actions.
        # For terminal states allowed_actions() is [None], so the old
        # `mdp.actions[action_idx]` wrongly returned mdp.actions[0].
        policy[state] = actions[best_idx]
    return policy
def policy_evaluation(policy, values, mdp, num_iter=50):
    """Iterative policy evaluation: Bellman expectation backups, in place.

    Mutates and returns `values` after `num_iter` sweeps under `policy`.
    """
    for _ in range(num_iter):
        for state in mdp.states:
            expected = _expected_value(state, policy[state], values, mdp)
            values[state] = mdp.R(state) + mdp.discount * expected
    return values
def policy_iteration(initial_policy, mdp, num_iter=100):
    """Alternate policy evaluation and greedy improvement.

    Returns (policy, values) once the policy is stable or after `num_iter`
    improvement rounds.
    """
    policy = initial_policy
    values = {state: 0 for state in mdp.states}
    for _ in range(num_iter):
        new_policy = dict(policy)
        values = policy_evaluation(policy, values, mdp)
        unchanged_policy = True
        for state in mdp.states:
            actions = mdp.allowed_actions(state)
            expected_values = [_expected_value(state, action, values, mdp) for action in actions]
            best_idx, _ = max(enumerate(expected_values), key=lambda ev: ev[1])
            # BUG FIX: pick from the allowed-action list, not mdp.actions —
            # terminal states only allow None, and the old indexing mapped
            # them to mdp.actions[0] instead.
            action = actions[best_idx]
            if action != new_policy[state]:
                new_policy[state] = action
                unchanged_policy = False
        policy = new_policy
        if unchanged_policy:
            break
    return policy, values
|
import os
import uuid
import logging
import requests
from werkzeug.routing import Rule
from flask import Flask, request, Response, redirect, url_for, render_template
logging.basicConfig(level=logging.DEBUG)
# URL prefix under which this proxy is mounted (prepended to every route).
SUBPATH = os.getenv("SUBPATH", "/proxy")
# SECURITY NOTE(review): credentials and the target host are hard-coded in
# source; move them to environment variables or a secrets store.
SPLUNK_USERNAME = "andrewb"
SPLUNK_PASSWORD = "PkbdAGSdCJa8"
SPLUNK_BASE_URL = "http://37.48.244.187:8000"
app = Flask(__name__)
app.config.update(
    DEBUG=True,
    SECRET_KEY=uuid.uuid4().hex  # regenerated per process: sessions reset on restart
)
# Maps the `id_` query parameter of /login to a Splunk app/dashboard pair.
DASHBOARDS = {'Vodafone': {'app': 'qrytical', 'name': 'combo_dashboard'}}
LOGGER = app.logger
# Prefix every registered rule with SUBPATH.
app.url_rule_class = lambda path, **options: Rule(SUBPATH + path, **options)
# Shared session keeps the Splunk auth/CSRF cookies between requests.
session = requests.Session()
@app.route("/en-US/", defaults={"path": ""})
@app.route("/en-US/<path:path>", methods=("GET",))
def proxy(path: str):
    """Reverse-proxy GET requests to the Splunk web UI.

    Text responses get their absolute "/en-US" links rewritten so they
    resolve through this proxy; binary payloads pass through untouched.
    """
    # PERF/BUG FIX: fetch once and keep the raw bytes — the old code issued
    # the same upstream request a second time whenever UTF-8 decoding failed.
    raw = session.get(
        f"{SPLUNK_BASE_URL}/en-US/{path}", params=request.args
    ).content
    try:
        data = raw.decode("utf-8")
        data = data.replace("/en-US", f"{SUBPATH}/en-US")
        if path.endswith("dashboard.js"):
            data = data.replace('make_url("/static/build/pages/enterprise")',
                                f'"{SUBPATH}" + make_url("/static/build/pages/enterprise")')
        elif path.endswith("common.js"):
            data = data.replace('splunkUtil.make_url',
                                f'"{SUBPATH}" + splunkUtil.make_url')
    except UnicodeDecodeError:
        # Binary payload (images, fonts, ...): serve the bytes unchanged.
        data = raw
    # BUG FIX: the old code set Content-Length to len() of a *str*, which
    # undercounts multi-byte UTF-8 bodies — werkzeug computes the correct
    # header itself. Also "text/json" is non-standard; use application/json.
    if path.endswith(".js"):
        return Response(data, mimetype="text/javascript")
    if path.endswith(".css"):
        return Response(data, mimetype="text/css")
    if path.endswith(".html"):
        return Response(data, mimetype="text/html")
    if path.endswith(".png"):
        return Response(data, mimetype="image/png")
    if (path.endswith(".json")
            or request.args.get("output_mode") in ("json", "json_cols")):
        return Response(data, mimetype="application/json")
    return data
@app.route("/en-US/", defaults={"path": ""}, methods=("POST",))
@app.route("/en-US/<path:path>", methods=("POST",))
def proxy_splunkd(path):
    """Forward POST requests (search jobs, form posts) to Splunk.

    BUG FIX: the first route rule had no `methods`, so it defaulted to GET
    and collided with proxy()'s GET handling of the same URL.
    """
    headers = {
        "X-Requested-With": "XMLHttpRequest",
        # Replay the CSRF token Splunk issued into our session's cookie jar.
        "X-Splunk-Form-Key": session.cookies["splunkweb_csrf_token_8000"],
        "Accept": "text/javascript, text/html, application/xml, text/xml, */*",
        "Content-Type": "application/x-www-form-urlencoded; charset=UTF-8",
    }
    response = session.post(
        f"{SPLUNK_BASE_URL}/en-US/{path}",
        data=request.values.to_dict(),
        verify=False,
        headers=headers,
    )
    return Response(response.content, status=response.status_code)
@app.route("/login")
def login():
    """Log into Splunk via insecurelogin, then redirect to the dashboard
    selected by the `id_` query parameter."""
    dashboard = DASHBOARDS[request.args["id_"]]
    return_to = f"app/{dashboard['app']}/{dashboard['name']}"
    credentials = {
        "username": SPLUNK_USERNAME,
        "password": SPLUNK_PASSWORD
    }
    LOGGER.debug("Logging in...")
    response = session.get(f"{SPLUNK_BASE_URL}/en-US/account/insecurelogin", params=credentials)
    LOGGER.debug("Logged in.")
    if response.status_code != 200:
        # Surface the upstream failure on the error page.
        return redirect(url_for("error", message=response.content, status=response.status_code))
    LOGGER.debug("Redirecting...")
    return redirect(f"https://localhost:443{SUBPATH}/en-US/{return_to}")
@app.route("/ping")
def ping():
    """Liveness probe: always answers OK."""
    return "OK"
@app.route("/error")
def error():
    """Render the error page from the `message`/`status` query parameters."""
    message = request.args["message"]
    # BUG FIX: the status was read from the "message" parameter.
    status = request.args["status"]
    return render_template("error.html", message=message, status=status)
if __name__ == "__main__":
    # NOTE(review): the port is passed as a string; Flask tolerates it, but
    # the documented parameter type is int (port=5000).
    app.run(host="0.0.0.0", port="5000")
|
from pypadre.core.model.computation.run import Run
from pypadre.pod.backend.i_padre_backend import IPadreBackend
from pypadre.pod.repository.local.file.generic.i_file_repository import File
from pypadre.pod.repository.local.file.run_repository import RunFileRepository
from pypadre.pod.repository.serializer.serialiser import JSonSerializer
# Descriptor for a run's metadata file stored inside the git repository.
META_FILE = File("metadata.json", JSonSerializer)


class RunGitlabRepository(RunFileRepository):
    """Run repository backed by a GitLab project instead of the local FS.

    Listing/fetching is delegated to the experiment-level gitlab backend;
    run files live under ``executions/runs`` inside the repository.
    """

    def __init__(self, backend: IPadreBackend):
        super().__init__(backend=backend)
        # The experiment backend owns the git repository we read from.
        self._gitlab_backend = self.backend.experiment

    def list(self, search, offset=0, size=100):
        """List runs, forcing the search into the runs folder of the repo."""
        if search is None:
            search = {self._gitlab_backend.RELATIVE_PATH: 'executions/runs'}
        else:
            search[self._gitlab_backend.RELATIVE_PATH] = 'executions/runs'
        return self._gitlab_backend.list(search, offset, size, caller=self)

    def get(self, uid):
        """Fetch a single run by uid from the runs folder."""
        return self._gitlab_backend.get(uid, rpath='executions/runs', caller=self)

    def _get_by_repo(self, repo, path=''):
        """Reconstruct a Run from its metadata file and its parent execution.

        The execution lives two path segments above the run directory.
        """
        metadata = self._gitlab_backend.get_file(repo, META_FILE, path=path)
        execution_path = '/'.join(path.split('/')[:-2])
        execution = self.parent._get_by_repo(repo, path=execution_path)
        run = Run(execution=execution, metadata=metadata)
        return run

    def update(self, run: Run, commit_message: str):
        # Updates are committed at the parent (execution) level.
        self.parent.update(run.parent, commit_message=commit_message)

    def _put(self, obj, *args, directory: str, merge=False, **kwargs):
        # Write locally via the file repository, then commit upstream.
        super()._put(obj, *args, directory=directory, merge=merge, **kwargs)
        self.parent.update(obj.parent, commit_message="Added a new run or updated an existing one to the experiment.")
|
import discord
from discord import message
from discord.ext import commands
from datetime import datetime
class admins(commands.Cog):
    """Moderation cog: ban/unban commands with embed feedback."""

    def __init__(self, client):
        self.client = client

    # Ban command
    @commands.command(name="ban", help="Bane alguém do seu servidor. ```R>ban <ID ou marque a pessoa> (Motivo)```",aliases=['Ban'])
    @commands.cooldown(2, 5, commands.BucketType.guild)
    @commands.has_permissions(ban_members=True)
    async def ban(self, context, member: discord.Member, *, reason = None):
        """Ban `member` from the guild, replying with a success/failure embed."""
        if context.author.bot:
            await context.message.delete()
            return
        elif member == context.author:
            await context.message.delete()
            await context.send('Você não pode se banir Bobinho!')
            return
        # BUG FIX: removed the dead `member == discord.NotFound` branch — a
        # Member is never equal to an exception class, and an unknown member
        # makes the converter fail before this body even runs.
        # Embeds
        embed_ban = discord.Embed(
            title=f'O {member} foi banido!',
            description=f'Motivo: {reason}',
            color=discord.Color.green(),
            timestamp= datetime.utcnow())
        embed_ban.set_footer(text=f'ID: {member.id}, Autor do Banimento: {context.author}')
        embed_error = discord.Embed(
            title=f'Algo deu errado!',
            description=f'Verfique se o bot está com cargo maior que {member.mention}, para poder banir',
            color=discord.Color.red(),
            timestamp=datetime.utcnow())
        embed_error.set_footer(text=f'ID: {member.id}')
        # Ban process
        try:
            await member.ban(reason=reason)
            await context.message.delete()
            await context.send(embed=embed_ban)
        except discord.HTTPException:
            # BUG FIX: narrowed from a bare `except:` (which even swallowed
            # KeyboardInterrupt); API failures such as missing permissions or
            # role hierarchy raise discord.HTTPException.
            await context.message.delete()
            await context.send(embed=embed_error)
            return

    @commands.command(name="unban", help="desbane alguém do seu servidor. ```R>unban <ID da pessoa> (Motivo)```", aliases=['Unban'])
    @commands.has_permissions(ban_members=True)
    async def unban(self, context, user: discord.User, *, reason=None):
        """Unban `user` by ID, replying with a success/failure embed."""
        if context.author.bot:
            await context.message.delete()
            return
        elif user == context.author:
            await context.send(
                f'Você não pode se desbanir {context.author.mention}... *Até porque você não está banido em primeiro lugar!*')
            return
        guild = context.guild
        # Embeds
        embed_unban = discord.Embed(
            title=f'O {user} foi desbanido!',
            description=f'Motivo: {reason}',
            color=discord.Color.green(),
            timestamp=datetime.utcnow())
        embed_unban.set_footer(text=f'ID: {user.id}')
        embed_error = discord.Embed(
            title=f'Algo deu errado!',
            description=f'Verfique se: O "{user}" está na lista de banimentos',
            color=discord.Color.red(),
            timestamp=datetime.utcnow())
        embed_error.set_footer(text=f'ID: {user.id}')
        # Unban process
        try:
            await guild.unban(user=user, reason=reason)
            await context.message.delete()
            await context.send(embed=embed_unban)
        except discord.HTTPException:
            # BUG FIX: narrowed from a bare `except:`.
            await context.message.delete()
            await context.send(embed=embed_error)
def setup(client):
    # Entry point used by discord.py's load_extension to register this cog.
    client.add_cog(admins(client))
from __future__ import division
from __future__ import print_function
import numpy as np
from data import preproc as pp
import pandas as pd
import os
class Sample:
    """A single dataset sample: ground-truth text plus the image file path."""

    def __init__(self, gtText, filePath):
        # Attribute names are part of the interface used by DataLoader.
        self.filePath = filePath
        self.gtText = gtText
class Batch:
    """A training batch: stacked image array plus the ground-truth texts."""

    def __init__(self, gtTexts, imgs):
        self.gtTexts = gtTexts
        # Stack per-sample images into one (batch, ...) ndarray.
        self.imgs = np.stack(imgs, axis=0)
class DataLoader:
    "loads data which corresponds to IAM format"

    # GENERALIZATION: the CSV location used to be hard-coded inside __init__;
    # it is kept here as the default so existing callers keep working.
    DEFAULT_CSV_PATH = '/home/austin/Documents/Github/SimpleHTR/words_csv/2020-06-03 11:39:42.000901.csv'

    def __init__(self, filePath, batchSize, imgSize, maxTextLen, train = True, csvPath=None):
        """Load samples from a CSV with 'file_name'/'truth' columns.

        Args:
            filePath: root directory the per-sample image paths are joined to.
            batchSize: number of samples served per batch.
            imgSize: target image size handed to the preprocessor.
            maxTextLen: maximum CTC-encodable label length (see truncateLabel).
            train: if True, getNext() iterates the training split, else validation.
            csvPath: dataset CSV to read; defaults to DEFAULT_CSV_PATH.
        """
        # will we augment the data in any way?
        self.dataAugmentation = False
        # iteration cursor over the active split
        self.currIdx = 0
        self.batchSize = batchSize
        # X & Y dimensions of the preprocessed images
        self.imgSize = imgSize
        self.samples = []
        self.filePath = filePath
        self.maxTextLen = maxTextLen
        self.partitionNames = ['trainSample', 'validationSample']
        self.train = train
        df = pd.read_csv(csvPath if csvPath is not None else self.DEFAULT_CSV_PATH)
        chars = set()
        for _, row in df.iterrows():
            fileName = row['file_name']
            gtText = row['truth']
            # accumulate the dataset's alphabet
            chars = chars.union(set(gtText))
            self.samples.append(Sample(gtText, fileName))
        # split into training and validation set: 95% - 5%
        splitIdx = int(0.95 * len(self.samples))
        trainSamples = self.samples[:splitIdx]
        validationSamples = self.samples[splitIdx:]
        # put words into lists
        trainWords = [x.gtText for x in trainSamples]
        validationWords = [x.gtText for x in validationSamples]
        self.img_partitions = [trainSamples, validationSamples]
        self.word_partitions = [trainWords, validationWords]
        # number of randomly chosen samples per epoch for training
        self.numTrainSamplesPerEpoch = 25000
        # list of all chars in dataset
        self.charList = sorted(chars)
        self.train_steps = int(np.ceil(len(self.word_partitions[0]) / self.batchSize))
        self.valid_steps = int(np.ceil(len(self.word_partitions[1]) / self.batchSize))

    def truncateLabel(self, text):
        """Truncate `text` so its CTC-encoded cost fits within maxTextLen.

        Repeated letters cost double because CTC must insert a blank symbol
        between them; a too-long label makes ctc_loss return an infinite
        gradient.
        """
        cost = 0
        for i in range(len(text)):
            if i != 0 and text[i] == text[i - 1]:
                cost += 2
            else:
                cost += 1
            if cost > self.maxTextLen:
                return text[:i]
        return text

    def getIteratorInfo(self):
        "current batch index and overall number of batches"
        return (self.currIdx // self.batchSize + 1, len(self.samples) // self.batchSize)

    def hasNext(self):
        "True while at least one full batch remains"
        return self.currIdx + self.batchSize <= len(self.samples)

    def getNext(self):
        "load and preprocess the next batch from the active split"
        split = 0 if self.train else 1
        batchRange = range(self.currIdx, self.currIdx + self.batchSize)
        gtTexts = [self.img_partitions[split][i].gtText for i in batchRange]
        imgs = [
            pp.preprocess(os.path.join(self.filePath, self.img_partitions[split][i].filePath), self.imgSize)
            for i in batchRange
        ]
        self.currIdx += self.batchSize
        return Batch(gtTexts, imgs)
|
# -*- coding: utf-8 -*-
r"""
Navier-Stokes equations for incompressible fluid flow in 2D.
Find :math:`\ul{u}`, :math:`p` such that:
.. math::
\int_{\Omega} \nu\ \nabla \ul{v} : \nabla \ul{u}
+ \int_{\Omega} ((\ul{u} \cdot \nabla) \ul{u}) \cdot \ul{v}
- \int_{\Omega} p\ \nabla \cdot \ul{v}
= 0
\;, \quad \forall \ul{v} \;,
\int_{\Omega} q\ \nabla \cdot \ul{u}
= 0
\;, \quad \forall q \;.
The mesh is created by ``gen_block_mesh()`` function.
View the results using::
$ ./postproc.py user_block.vtk -b
"""
from __future__ import absolute_import
from sfepy.discrete.fem.meshio import UserMeshIO
from sfepy.mesh.mesh_generators import gen_block_mesh
# Mesh dimensions (x, y extents of the rectangular block).
dims = [0.1, 0.1]
# Mesh resolution (vertices per axis): increase to improve accuracy.
shape = [51, 51]
def mesh_hook(mesh, mode):
    """
    Generate the block mesh on 'read'; 'write' is a no-op (the mesh is
    generated, not stored).
    """
    if mode != 'read':
        return None
    return gen_block_mesh(dims, shape, [0, 0], name='user_block',
                          verbose=False)
filename_mesh = UserMeshIO(mesh_hook)
regions = {
'Omega' : 'all',
'Left' : ('vertices in (x < -0.0499)', 'facet'),
'Right' : ('vertices in (x > 0.0499)', 'facet'),
'Bottom' : ('vertices in (y < -0.0499)', 'facet'),
'Top' : ('vertices in (y > 0.0499)', 'facet'),
'Walls' : ('r.Left +v r.Right +v r.Bottom', 'facet'),
}
materials = {
'fluid' : ({'viscosity' : 1.00e-2},),
}
fields = {
'velocity': ('real', 'vector', 'Omega', 2),
'pressure': ('real', 'scalar', 'Omega', 1),
}
variables = {
'u' : ('unknown field', 'velocity', 0),
'v' : ('test field', 'velocity', 'u'),
'p' : ('unknown field', 'pressure', 1),
'q' : ('test field', 'pressure', 'p'),
}
ebcs = {
'1_Walls' : ('Walls', {'u.all' : 0.0}),
'0_Driven' : ('Top', {'u.0' : 1.0, 'u.1' : 0.0}),
'Pressure' : ('Bottom', {'p.0' : 0.0}),
}
integrals = {
'i' : 4,
}
equations = {
'balance' :
"""+ dw_div_grad.i.Omega(fluid.viscosity, v, u)
+ dw_convect.i.Omega(v, u)
- dw_stokes.i.Omega(v, p) = 0""",
'incompressibility' :
"""dw_stokes.i.Omega(u, q) = 0""",
}
solvers = {
'ls' : ('ls.scipy_direct', {}),
'newton' : ('nls.newton', {
'i_max' : 15,
'eps_a' : 1e-10,
'eps_r' : 1.0,
}),
}
|
# Copyright (C) 2019-2021, François-Guillaume Fernandez.
# This program is licensed under the Apache License version 2.
# See LICENSE or go to <https://www.apache.org/licenses/LICENSE-2.0.txt> for full license details.
import pytest
import torch
from torch.nn import Linear
from torch.nn.functional import cross_entropy, log_softmax
from holocron.nn import functional as F
from holocron import nn
def _test_loss_function(loss_fn, same_loss=0., multi_label=False):
    """Shared checks for holocron functional losses.

    Verifies the loss value on a fixed prediction, class-weight rescaling,
    ignore_index handling, backprop, and the three reduction modes.

    Args:
        loss_fn: the functional loss under test
        same_loss: expected per-sample loss for the fixed prediction below
        multi_label: if True, targets are one-hot / probability tensors
    """
    num_batches = 2
    num_classes = 4
    # 4 classes
    x = torch.ones(num_batches, num_classes)
    x[:, 0, ...] = 10
    x.requires_grad_(True)
    # Identical target
    if multi_label:
        target = torch.zeros_like(x)
        target[:, 0] = 1.
    else:
        target = torch.zeros(num_batches, dtype=torch.long)
    assert abs(loss_fn(x, target).item() - same_loss) < 1e-3
    assert torch.allclose(
        loss_fn(x, target, reduction='none'),
        same_loss * torch.ones(num_batches, dtype=x.dtype),
        atol=1e-3
    )
    # Check that class rescaling works
    x = torch.rand(num_batches, num_classes, requires_grad=True)
    if multi_label:
        target = torch.rand(x.shape)
    else:
        target = (num_classes * torch.rand(num_batches)).to(torch.long)
    weights = torch.ones(num_classes)
    assert loss_fn(x, target).item() == loss_fn(x, target, weight=weights).item()
    # Check that ignore_index works
    assert loss_fn(x, target).item() == loss_fn(x, target, ignore_index=num_classes).item()
    # Ignore an index we are certain to be in the target
    if multi_label:
        ignore_index = torch.unique(target.argmax(dim=1))[0].item()
    else:
        ignore_index = torch.unique(target)[0].item()
    # NOTE(review): the right-hand side lacks .item(); the comparison still
    # works on the 0-dim tensor, but .item() would be consistent.
    assert loss_fn(x, target).item() != loss_fn(x, target, ignore_index=ignore_index)
    # Check backprop
    loss = loss_fn(x, target, ignore_index=0)
    loss.backward()
    # Test reduction
    assert torch.allclose(
        loss_fn(x, target, reduction='sum'),
        loss_fn(x, target, reduction='none').sum(),
        atol=1e-6
    )
    assert torch.allclose(
        loss_fn(x, target, reduction='mean'),
        loss_fn(x, target, reduction='sum') / target.shape[0],
        atol=1e-6
    )
def test_focal_loss():
    """Focal loss: gamma=0 reduces to cross-entropy; uniform logits scale
    by (1 - 1/C) per unit of gamma."""
    # Common verification
    _test_loss_function(F.focal_loss)
    num_batches = 2
    num_classes = 4
    x = torch.rand(num_batches, num_classes, 20, 20)
    target = (num_classes * torch.rand(num_batches, 20, 20)).to(torch.long)
    # Value check
    assert torch.allclose(F.focal_loss(x, target, gamma=0), cross_entropy(x, target), atol=1e-5)
    # Equal probabilities
    x = torch.ones(num_batches, num_classes, 20, 20)
    assert torch.allclose(
        (1 - 1 / num_classes) * F.focal_loss(x, target, gamma=0),
        F.focal_loss(x, target, gamma=1),
        atol=1e-5
    )
    assert repr(nn.FocalLoss()) == "FocalLoss(gamma=2.0, reduction='mean')"
def test_ls_celoss():
    """Label smoothing: eps=0 reduces to cross-entropy; eps=1 averages the
    negative log-softmax uniformly over classes."""
    num_batches = 2
    num_classes = 4
    # Common verification
    _test_loss_function(F.ls_cross_entropy, 0.1 / num_classes * (num_classes - 1) * 9)
    x = torch.rand(num_batches, num_classes, 20, 20)
    target = (num_classes * torch.rand(num_batches, 20, 20)).to(torch.long)
    # Value check
    assert torch.allclose(F.ls_cross_entropy(x, target, eps=0), cross_entropy(x, target), atol=1e-5)
    assert torch.allclose(
        F.ls_cross_entropy(x, target, eps=1),
        -1 / num_classes * log_softmax(x, dim=1).sum(dim=1).mean(),
        atol=1e-5
    )
    assert repr(nn.LabelSmoothingCrossEntropy()) == "LabelSmoothingCrossEntropy(eps=0.1, reduction='mean')"
def test_multilabel_cross_entropy():
    """Multilabel CE on a one-hot target must match standard CE on the
    corresponding class indices."""
    num_batches = 2
    num_classes = 4
    # Common verification
    _test_loss_function(F.multilabel_cross_entropy, multi_label=True)
    x = torch.rand(num_batches, num_classes, 20, 20)
    target = torch.zeros_like(x)
    target[:, 0] = 1.
    # Value check
    assert torch.allclose(F.multilabel_cross_entropy(x, target), cross_entropy(x, target.argmax(dim=1)), atol=1e-5)
    assert repr(nn.MultiLabelCrossEntropy()) == "MultiLabelCrossEntropy(reduction='mean')"
def test_complement_cross_entropy():
    """Complement CE: smoke-test the forward/backward pass and the repr."""
    num_batches = 2
    num_classes = 4
    x = torch.rand((num_batches, num_classes, 20, 20), requires_grad=True)
    target = (num_classes * torch.rand(num_batches, 20, 20)).to(torch.long)
    # Backprop
    out = F.complement_cross_entropy(x, target, ignore_index=0)
    out.backward()
    assert repr(nn.ComplementCrossEntropy()) == "ComplementCrossEntropy(gamma=-1, reduction='mean')"
def test_mc_loss():
    """Mutual channel loss: check backprop for every reduction mode and the
    type casting of class weights in the module wrapper."""
    num_batches = 2
    num_classes = 4
    chi = 2  # channels per class
    # 4 classes
    x = torch.ones(num_batches, chi * num_classes)
    x[:, 0, ...] = 10
    target = torch.zeros(num_batches, dtype=torch.long)
    mod = Linear(chi * num_classes, chi * num_classes)
    # Check backprop
    for reduction in ['mean', 'sum', 'none']:
        for p in mod.parameters():
            p.grad = None
        train_loss = F.mutual_channel_loss(mod(x), target, ignore_index=0, reduction=reduction)
        if reduction == 'none':
            assert train_loss.shape == (num_batches,)
            train_loss = train_loss.sum()
        train_loss.backward()
        assert isinstance(mod.weight.grad, torch.Tensor)
    # Check type casting of weights
    for p in mod.parameters():
        p.grad = None
    class_weights = torch.ones(num_classes, dtype=torch.float16)
    ignore_index = 0
    criterion = nn.MutualChannelLoss(weight=class_weights, ignore_index=ignore_index, chi=chi)
    train_loss = criterion(mod(x), target)
    train_loss.backward()
    assert isinstance(mod.weight.grad, torch.Tensor)
    assert repr(criterion) == f"MutualChannelLoss(reduction='mean', chi={chi}, alpha=1)"
def test_mixuploss():
    """Mixup loss: output must be the lam-weighted blend of the wrapped
    criterion over the two targets (pure endpoints at lam=0/1)."""
    num_batches = 8
    num_classes = 10
    # Generate inputs
    x = torch.rand((num_batches, num_classes, 20, 20))
    target_a = torch.rand((num_batches, num_classes, 20, 20))
    target_b = torch.rand((num_batches, num_classes, 20, 20))
    lam = 0.9
    # Take a criterion compatible with one-hot encoded targets
    criterion = nn.MultiLabelCrossEntropy()
    mixup_criterion = nn.MixupLoss(criterion)
    # Check the repr
    assert repr(mixup_criterion) == f"Mixup_{repr(criterion)}"
    # Check the forward
    out = mixup_criterion(x, target_a, target_b, lam)
    assert out.item() == (lam * criterion(x, target_a) + (1 - lam) * criterion(x, target_b))
    assert mixup_criterion(x, target_a, target_b, 1).item() == criterion(x, target_a)
    assert mixup_criterion(x, target_a, target_b, 0).item() == criterion(x, target_b)
def test_cb_loss():
    """Class-balanced wrapper: with equal per-class counts the loss scales by
    the effective-number factor (1 - beta) / (1 - beta ** n)."""
    num_batches = 2
    num_classes = 4
    x = torch.rand(num_batches, num_classes, 20, 20)
    beta = 0.99
    num_samples = 10 * torch.ones(num_classes, dtype=torch.long)
    # Identical target
    target = (num_classes * torch.rand(num_batches, 20, 20)).to(torch.long)
    base_criterion = nn.LabelSmoothingCrossEntropy()
    base_loss = base_criterion(x, target).item()
    criterion = nn.ClassBalancedWrapper(base_criterion, num_samples, beta=beta)
    assert isinstance(criterion.criterion, nn.LabelSmoothingCrossEntropy)
    assert criterion.criterion.weight is not None
    # Value tests
    assert torch.allclose(criterion(x, target), (1 - beta) / (1 - beta ** num_samples[0]) * base_loss, atol=1e-5)
    # With pre-existing weights: the wrapper must rescale, not keep, them
    base_criterion = nn.LabelSmoothingCrossEntropy(weight=torch.ones(num_classes, dtype=torch.float32))
    base_weights = base_criterion.weight.clone()
    criterion = nn.ClassBalancedWrapper(base_criterion, num_samples, beta=beta)
    assert not torch.equal(base_weights, criterion.criterion.weight)
    assert torch.allclose(criterion(x, target), (1 - beta) / (1 - beta ** num_samples[0]) * base_loss, atol=1e-5)
    assert repr(criterion) == "ClassBalancedWrapper(LabelSmoothingCrossEntropy(eps=0.1, reduction='mean'), beta=0.99)"
|
"""Eager mode TF policy built using build_tf_policy().
It supports both traced and non-traced eager execution modes."""
import functools
import logging
import numpy as np
from ray.util.debug import log_once
from ray.rllib.evaluation.episode import _flatten_action
from ray.rllib.models.catalog import ModelCatalog
from ray.rllib.policy.policy import Policy, LEARNER_STATS_KEY, ACTION_PROB, \
ACTION_LOGP
from ray.rllib.policy.sample_batch import SampleBatch
from ray.rllib.utils import add_mixins
from ray.rllib.utils.annotations import override
from ray.rllib.utils.framework import try_import_tf
tf = try_import_tf()  # lazy TF import: may be None when TensorFlow is absent
logger = logging.getLogger(__name__)
def _convert_to_tf(x):
    """Recursively convert `x` into TF tensors.

    SampleBatch inputs drop the INFOS column first (presumably because infos
    hold arbitrary Python objects that are not tensor-convertible — TODO
    confirm); Policy objects pass through unchanged; None leaves are kept.
    """
    if isinstance(x, SampleBatch):
        x = {k: v for k, v in x.items() if k != SampleBatch.INFOS}
        return tf.nest.map_structure(_convert_to_tf, x)
    if isinstance(x, Policy):
        return x

    if x is not None:
        # Convert every non-None leaf of the (possibly nested) structure.
        x = tf.nest.map_structure(
            lambda f: tf.convert_to_tensor(f) if f is not None else None, x)
    return x
def _convert_to_numpy(x):
    """Convert a (possibly nested) structure of eager tensors to numpy.

    None passes through; leaves without a .numpy() method raise TypeError.
    """
    if x is None:
        return None
    try:
        return tf.nest.map_structure(lambda leaf: leaf.numpy(), x)
    except AttributeError:
        raise TypeError(
            ("Object of type {} has no method to convert to numpy.").format(
                type(x)))
def convert_eager_inputs(func):
    """Decorator: convert args/kwargs to TF tensors when executing eagerly.

    The "info_batch" and "episodes" kwargs are skipped — they hold arbitrary
    Python objects, presumably not tensor-convertible (TODO confirm).
    """
    @functools.wraps(func)
    def _func(*args, **kwargs):
        if tf.executing_eagerly():
            args = [_convert_to_tf(x) for x in args]
            # TODO(gehring): find a way to remove specific hacks
            kwargs = {
                k: _convert_to_tf(v)
                for k, v in kwargs.items()
                if k not in {"info_batch", "episodes"}
            }
        return func(*args, **kwargs)

    return _func
def convert_eager_outputs(func):
    """Decorator: convert eager tensor outputs back to numpy structures."""
    @functools.wraps(func)
    def _func(*args, **kwargs):
        result = func(*args, **kwargs)
        if not tf.executing_eagerly():
            return result
        return tf.nest.map_structure(_convert_to_numpy, result)

    return _func
def _disallow_var_creation(next_creator, **kw):
v = next_creator(**kw)
raise ValueError("Detected a variable being created during an eager "
"forward pass. Variables should only be created during "
"model initialization: {}".format(v.name))
def traced_eager_policy(eager_policy_cls):
    """Wrapper that enables tracing for all eager policy methods.
    This is enabled by the --trace / "eager_tracing" config."""

    class TracedEagerPolicy(eager_policy_cls):
        # Each method is compiled lazily with tf.function on its first call;
        # subsequent calls reuse the cached traced function.

        def __init__(self, *args, **kwargs):
            # Trace caches start empty and are filled on first use.
            self._traced_learn_on_batch = None
            self._traced_compute_actions = None
            self._traced_compute_gradients = None
            self._traced_apply_gradients = None
            super(TracedEagerPolicy, self).__init__(*args, **kwargs)

        @override(Policy)
        @convert_eager_inputs
        @convert_eager_outputs
        def learn_on_batch(self, samples):
            """Traced version of the base class's learn_on_batch."""
            if self._traced_learn_on_batch is None:
                self._traced_learn_on_batch = tf.function(
                    super(TracedEagerPolicy, self).learn_on_batch,
                    autograph=False)

            return self._traced_learn_on_batch(samples)

        @override(Policy)
        @convert_eager_inputs
        @convert_eager_outputs
        def compute_actions(self,
                            obs_batch,
                            state_batches,
                            prev_action_batch=None,
                            prev_reward_batch=None,
                            info_batch=None,
                            episodes=None,
                            explore=None,
                            timestep=None,
                            **kwargs):
            """Traced version of the base class's compute_actions."""
            # Inputs are converted to tensors up-front so the trace sees a
            # consistent signature.
            obs_batch = tf.convert_to_tensor(obs_batch)
            state_batches = _convert_to_tf(state_batches)
            prev_action_batch = _convert_to_tf(prev_action_batch)
            prev_reward_batch = _convert_to_tf(prev_reward_batch)

            if self._traced_compute_actions is None:
                self._traced_compute_actions = tf.function(
                    super(TracedEagerPolicy, self).compute_actions,
                    autograph=False)

            return self._traced_compute_actions(
                obs_batch, state_batches, prev_action_batch, prev_reward_batch,
                info_batch, episodes, explore, timestep, **kwargs)

        @override(Policy)
        @convert_eager_inputs
        @convert_eager_outputs
        def compute_gradients(self, samples):
            """Traced version of the base class's compute_gradients."""
            if self._traced_compute_gradients is None:
                self._traced_compute_gradients = tf.function(
                    super(TracedEagerPolicy, self).compute_gradients,
                    autograph=False)

            return self._traced_compute_gradients(samples)

        @override(Policy)
        @convert_eager_inputs
        @convert_eager_outputs
        def apply_gradients(self, grads):
            """Traced version of the base class's apply_gradients."""
            if self._traced_apply_gradients is None:
                self._traced_apply_gradients = tf.function(
                    super(TracedEagerPolicy, self).apply_gradients,
                    autograph=False)

            return self._traced_apply_gradients(grads)

    # Preserve the wrapped class's identity for logging/debugging.
    TracedEagerPolicy.__name__ = eager_policy_cls.__name__
    TracedEagerPolicy.__qualname__ = eager_policy_cls.__qualname__
    return TracedEagerPolicy
def build_eager_tf_policy(name,
                          loss_fn,
                          get_default_config=None,
                          postprocess_fn=None,
                          stats_fn=None,
                          optimizer_fn=None,
                          gradients_fn=None,
                          apply_gradients_fn=None,
                          grad_stats_fn=None,
                          extra_learn_fetches_fn=None,
                          extra_action_fetches_fn=None,
                          before_init=None,
                          before_loss_init=None,
                          after_init=None,
                          make_model=None,
                          action_sampler_fn=None,
                          log_likelihood_fn=None,
                          mixins=None,
                          obs_include_prev_action_reward=True,
                          get_batch_divisibility_req=None):
    """Build an eager TF policy.

    An eager policy runs all operations in eager mode, which makes debugging
    much simpler, but has lower performance.

    You shouldn't need to call this directly. Rather, prefer to build a TF
    graph policy and use set {"eager": true} in the trainer config to have
    it automatically be converted to an eager policy.

    This has the same signature as build_tf_policy()."""
    # Compose the base class from Policy plus any user-supplied mixins.
    base = add_mixins(Policy, mixins)
    class eager_policy_cls(base):
        def __init__(self, observation_space, action_space, config):
            """Build model/dist-class/optimizer and run the dummy-batch
            loss initialization; all callbacks are invoked in order."""
            assert tf.executing_eagerly()
            self.framework = "tf"
            Policy.__init__(self, observation_space, action_space, config)
            self._is_training = False
            self._loss_initialized = False
            self._sess = None
            if get_default_config:
                config = dict(get_default_config(), **config)
            if before_init:
                before_init(self, observation_space, action_space, config)
            self.config = config
            self.dist_class = None
            if action_sampler_fn:
                # A custom sampler supplies its own action distribution, but
                # then the model must also be custom-built.
                if not make_model:
                    raise ValueError("`make_model` is required if "
                                     "`action_sampler_fn` is given")
            else:
                self.dist_class, logit_dim = ModelCatalog.get_action_dist(
                    action_space, self.config["model"])
            if make_model:
                self.model = make_model(self, observation_space, action_space,
                                        config)
            else:
                self.model = ModelCatalog.get_model_v2(
                    observation_space,
                    action_space,
                    logit_dim,
                    config["model"],
                    framework="tf",
                )
            # Initial RNN state, with a leading batch dimension of 1.
            self._state_in = [
                tf.convert_to_tensor(np.array([s]))
                for s in self.model.get_initial_state()
            ]
            # One dummy forward pass to create the model's variables.
            input_dict = {
                SampleBatch.CUR_OBS: tf.convert_to_tensor(
                    np.array([observation_space.sample()])),
                SampleBatch.PREV_ACTIONS: tf.convert_to_tensor(
                    [_flatten_action(action_space.sample())]),
                SampleBatch.PREV_REWARDS: tf.convert_to_tensor([0.]),
            }
            self.model(input_dict, self._state_in, tf.convert_to_tensor([1]))
            if before_loss_init:
                before_loss_init(self, observation_space, action_space, config)
            self._initialize_loss_with_dummy_batch()
            self._loss_initialized = True
            if optimizer_fn:
                self._optimizer = optimizer_fn(self, config)
            else:
                self._optimizer = tf.train.AdamOptimizer(config["lr"])
            if after_init:
                after_init(self, observation_space, action_space, config)
        @override(Policy)
        def postprocess_trajectory(self,
                                   sample_batch,
                                   other_agent_batches=None,
                                   episode=None):
            """Run the user's postprocess_fn (if any) on a trajectory."""
            assert tf.executing_eagerly()
            # Call super's postprocess_trajectory first.
            sample_batch = Policy.postprocess_trajectory(self, sample_batch)
            if postprocess_fn:
                return postprocess_fn(self, sample_batch, other_agent_batches,
                                      episode)
            return sample_batch
        @override(Policy)
        @convert_eager_inputs
        @convert_eager_outputs
        def learn_on_batch(self, samples):
            """Compute gradients on `samples`, apply them, return stats."""
            # Forbid accidental variable creation during the update pass.
            with tf.variable_creator_scope(_disallow_var_creation):
                grads_and_vars, stats = self._compute_gradients(samples)
                self._apply_gradients(grads_and_vars)
                return stats
        @override(Policy)
        @convert_eager_inputs
        @convert_eager_outputs
        def compute_gradients(self, samples):
            """Return (grads, stats) for `samples` without applying them."""
            with tf.variable_creator_scope(_disallow_var_creation):
                grads_and_vars, stats = self._compute_gradients(samples)
                grads = [g for g, v in grads_and_vars]
                return grads, stats
        @override(Policy)
        @convert_eager_inputs
        @convert_eager_outputs
        def compute_actions(self,
                            obs_batch,
                            state_batches,
                            prev_action_batch=None,
                            prev_reward_batch=None,
                            info_batch=None,
                            episodes=None,
                            explore=None,
                            timestep=None,
                            **kwargs):
            """Sample actions for a batch of observations.

            Returns (actions, state_out, extra_fetches); extra_fetches
            carries action prob/logp plus any extra_action_fetches_fn output.
            """
            explore = explore if explore is not None else \
                self.config["explore"]
            timestep = timestep if timestep is not None else \
                self.global_timestep
            # TODO: remove python side effect to cull sources of bugs.
            self._is_training = False
            self._state_in = state_batches
            if tf.executing_eagerly():
                n = len(obs_batch)
            else:
                n = obs_batch.shape[0]
            seq_lens = tf.ones(n, dtype=tf.int32)
            input_dict = {
                SampleBatch.CUR_OBS: tf.convert_to_tensor(obs_batch),
                "is_training": tf.constant(False),
            }
            if obs_include_prev_action_reward:
                input_dict.update({
                    SampleBatch.PREV_ACTIONS: tf.convert_to_tensor(
                        prev_action_batch),
                    SampleBatch.PREV_REWARDS: tf.convert_to_tensor(
                        prev_reward_batch),
                })
            # Custom sampler fn given (which may handle self.exploration).
            if action_sampler_fn is not None:
                state_out = []
                action, logp = action_sampler_fn(
                    self,
                    self.model,
                    input_dict,
                    self.observation_space,
                    self.action_space,
                    explore,
                    self.config,
                    timestep=timestep)
            # Use Exploration object.
            else:
                with tf.variable_creator_scope(_disallow_var_creation):
                    # Call the exploration before_compute_actions hook.
                    self.exploration.before_compute_actions(timestep=timestep)
                    model_out, state_out = self.model(input_dict,
                                                      state_batches, seq_lens)
                    action, logp = self.exploration.get_exploration_action(
                        model_out,
                        self.dist_class,
                        self.model,
                        timestep=timestep,
                        explore=explore)
            extra_fetches = {}
            if logp is not None:
                extra_fetches.update({
                    ACTION_PROB: tf.exp(logp),
                    ACTION_LOGP: logp,
                })
            if extra_action_fetches_fn:
                extra_fetches.update(extra_action_fetches_fn(self))
            # Increase our global sampling timestep counter by 1.
            self.global_timestep += 1
            return action, state_out, extra_fetches
        @override(Policy)
        def compute_log_likelihoods(self,
                                    actions,
                                    obs_batch,
                                    state_batches=None,
                                    prev_action_batch=None,
                                    prev_reward_batch=None):
            """Return the log-likelihood of `actions` under the current
            action distribution for `obs_batch`."""
            seq_lens = tf.ones(len(obs_batch), dtype=tf.int32)
            input_dict = {
                SampleBatch.CUR_OBS: tf.convert_to_tensor(obs_batch),
                "is_training": tf.constant(False),
            }
            if obs_include_prev_action_reward:
                input_dict.update({
                    SampleBatch.PREV_ACTIONS: tf.convert_to_tensor(
                        prev_action_batch),
                    SampleBatch.PREV_REWARDS: tf.convert_to_tensor(
                        prev_reward_batch),
                })
            # Custom log_likelihood function given.
            if log_likelihood_fn:
                log_likelihoods = log_likelihood_fn(
                    self, self.model, actions, input_dict,
                    self.observation_space, self.action_space, self.config)
            # Default log-likelihood calculation.
            else:
                dist_inputs, _ = self.model(input_dict, state_batches,
                                            seq_lens)
                action_dist = self.dist_class(dist_inputs, self.model)
                log_likelihoods = action_dist.logp(actions)
            return log_likelihoods
        @override(Policy)
        def apply_gradients(self, gradients):
            # Gradients arrive positionally aligned with the model's
            # trainable variables; tensorize each non-None grad.
            self._apply_gradients(
                zip([(tf.convert_to_tensor(g) if g is not None else None)
                     for g in gradients], self.model.trainable_variables()))
        @override(Policy)
        def get_exploration_info(self):
            return _convert_to_numpy(self.exploration.get_info())
        @override(Policy)
        def get_weights(self):
            """Return all savable variable values as numpy arrays."""
            variables = self.variables()
            return [v.numpy() for v in variables]
        @override(Policy)
        def set_weights(self, weights):
            """Assign `weights` (same order as get_weights) to variables."""
            variables = self.variables()
            assert len(weights) == len(variables), (len(weights),
                                                    len(variables))
            for v, w in zip(variables, weights):
                v.assign(w)
        def variables(self):
            """Return the list of all savable variables for this policy."""
            return self.model.variables()
        @override(Policy)
        def is_recurrent(self):
            return len(self._state_in) > 0
        @override(Policy)
        def num_state_tensors(self):
            return len(self._state_in)
        @override(Policy)
        def get_initial_state(self):
            return self.model.get_initial_state()
        def get_session(self):
            return None  # None implies eager
        def get_placeholder(self, ph):
            raise ValueError(
                "get_placeholder() is not allowed in eager mode. Try using "
                "rllib.utils.tf_ops.make_tf_callable() to write "
                "functions that work in both graph and eager mode.")
        def loss_initialized(self):
            return self._loss_initialized
        @override(Policy)
        def export_model(self, export_dir):
            # Not supported for eager policies.
            pass
        @override(Policy)
        def export_checkpoint(self, export_dir):
            # Not supported for eager policies.
            pass
        def _get_is_training_placeholder(self):
            return tf.convert_to_tensor(self._is_training)
        def _apply_gradients(self, grads_and_vars):
            """Apply grads via the user's apply_gradients_fn or optimizer."""
            if apply_gradients_fn:
                apply_gradients_fn(self, self._optimizer, grads_and_vars)
            else:
                self._optimizer.apply_gradients(grads_and_vars)
        def _compute_gradients(self, samples):
            """Computes and returns grads as eager tensors."""
            self._is_training = True
            # The tape must stay alive (persistent) when a custom
            # gradients_fn may call tape.gradient more than once.
            with tf.GradientTape(persistent=gradients_fn is not None) as tape:
                # TODO: set seq len and state-in properly
                state_in = []
                for i in range(self.num_state_tensors()):
                    state_in.append(samples["state_in_{}".format(i)])
                self._state_in = state_in
                self._seq_lens = None
                if len(state_in) > 0:
                    self._seq_lens = tf.ones(
                        samples[SampleBatch.CUR_OBS].shape[0], dtype=tf.int32)
                    samples["seq_lens"] = self._seq_lens
                model_out, _ = self.model(samples, self._state_in,
                                          self._seq_lens)
                loss = loss_fn(self, self.model, self.dist_class, samples)
            variables = self.model.trainable_variables()
            if gradients_fn:
                # Present the tape behind an optimizer-like interface so
                # graph-mode gradients_fn implementations work unchanged.
                class OptimizerWrapper:
                    def __init__(self, tape):
                        self.tape = tape
                    def compute_gradients(self, loss, var_list):
                        return list(
                            zip(self.tape.gradient(loss, var_list), var_list))
                grads_and_vars = gradients_fn(self, OptimizerWrapper(tape),
                                              loss)
            else:
                grads_and_vars = list(
                    zip(tape.gradient(loss, variables), variables))
            if log_once("grad_vars"):
                for _, v in grads_and_vars:
                    logger.info("Optimizing variable {}".format(v.name))
            grads = [g for g, v in grads_and_vars]
            stats = self._stats(self, samples, grads)
            return grads_and_vars, stats
        def _stats(self, outputs, samples, grads):
            """Collect learner stats plus extra-learn and grad-stat fetches."""
            fetches = {}
            if stats_fn:
                fetches[LEARNER_STATS_KEY] = {
                    k: v
                    for k, v in stats_fn(outputs, samples).items()
                }
            else:
                fetches[LEARNER_STATS_KEY] = {}
            if extra_learn_fetches_fn:
                fetches.update(
                    {k: v
                     for k, v in extra_learn_fetches_fn(self).items()})
            if grad_stats_fn:
                fetches.update({
                    k: v
                    for k, v in grad_stats_fn(self, samples, grads).items()
                })
            return fetches
        def _initialize_loss_with_dummy_batch(self):
            # Dummy forward pass to initialize any policy attributes, etc.
            action_dtype, action_shape = ModelCatalog.get_action_shape(
                self.action_space)
            dummy_batch = {
                SampleBatch.CUR_OBS: np.array(
                    [self.observation_space.sample()]),
                SampleBatch.NEXT_OBS: np.array(
                    [self.observation_space.sample()]),
                # NOTE(review): np.bool is deprecated in newer NumPy
                # releases (np.bool_ is the replacement); left as-is here.
                SampleBatch.DONES: np.array([False], dtype=np.bool),
                SampleBatch.ACTIONS: tf.nest.map_structure(
                    lambda c: np.array([c]), self.action_space.sample()),
                SampleBatch.REWARDS: np.array([0], dtype=np.float32),
            }
            if obs_include_prev_action_reward:
                dummy_batch.update({
                    SampleBatch.PREV_ACTIONS: dummy_batch[SampleBatch.ACTIONS],
                    SampleBatch.PREV_REWARDS: dummy_batch[SampleBatch.REWARDS],
                })
            for i, h in enumerate(self._state_in):
                dummy_batch["state_in_{}".format(i)] = h
                dummy_batch["state_out_{}".format(i)] = h
            if self._state_in:
                dummy_batch["seq_lens"] = np.array([1], dtype=np.int32)
            # Convert everything to tensors.
            dummy_batch = tf.nest.map_structure(tf.convert_to_tensor,
                                                dummy_batch)
            # for IMPALA which expects a certain sample batch size.
            def tile_to(tensor, n):
                return tf.tile(tensor,
                               [n] + [1 for _ in tensor.shape.as_list()[1:]])
            if get_batch_divisibility_req:
                dummy_batch = tf.nest.map_structure(
                    lambda c: tile_to(c, get_batch_divisibility_req(self)),
                    dummy_batch)
            # Execute a forward pass to get self.action_dist etc initialized,
            # and also obtain the extra action fetches
            _, _, fetches = self.compute_actions(
                dummy_batch[SampleBatch.CUR_OBS], self._state_in,
                dummy_batch.get(SampleBatch.PREV_ACTIONS),
                dummy_batch.get(SampleBatch.PREV_REWARDS))
            dummy_batch.update(fetches)
            postprocessed_batch = self.postprocess_trajectory(
                SampleBatch(dummy_batch))
            # model forward pass for the loss (needed after postprocess to
            # overwrite any tensor state from that call)
            self.model.from_batch(dummy_batch)
            postprocessed_batch = tf.nest.map_structure(
                lambda c: tf.convert_to_tensor(c), postprocessed_batch.data)
            loss_fn(self, self.model, self.dist_class, postprocessed_batch)
            if stats_fn:
                stats_fn(self, postprocessed_batch)
        @classmethod
        def with_tracing(cls):
            """Return a tf.function-traced variant of this policy class."""
            return traced_eager_policy(cls)
    eager_policy_cls.__name__ = name + "_eager"
    eager_policy_cls.__qualname__ = name + "_eager"
    return eager_policy_cls
|
#!/usr/bin/env python3
############################################################################################
# #
# Program purpose: Checks if a tuple exists in a list of tuples. #
# Program Author : Happi Yvan <ivensteinpoker@gmail.com> #
# Creation Date : December 19, 2019 #
# #
############################################################################################
import random
def random_tuples_list(low: int = 0, high: int = 10, size: int = 10) -> list:
    """Return `size` random (a, b) tuples with both values in [low, high].

    Raises:
        ValueError: if `size` is negative.
    """
    if size < 0:
        # Fix: the old message embedded a stray 'f' ("'f{size}'") from a
        # misplaced f-string prefix, and claimed "> 0" although 0 is valid.
        raise ValueError(f"Invalid size '{size}'. Must be >= 0")
    return [(random.randint(low, high), random.randint(low, high)) for _ in range(size)]
def do_checking(tuple_list: list) -> None:
    """Interactive loop: read "a b" pairs and report whether the tuple
    (a, b) occurs in `tuple_list`; entering '.' stops the loop."""
    while True:
        try:
            raw = input("Enter tuple (as -> a b) to check existence [type '.' to stop]: ").strip()
            if raw == '.':
                break
            parts = raw.split(' ')
            if len(parts) != 2:
                raise ValueError('Invalid input. Must be as -> a b')
            a, b = int(parts[0].strip()), int(parts[1].strip())
            verdict = "[FOUND]" if tuple_list.count((a, b)) > 0 else "[NOT FOUND]"
            print(f'Checking if ({a}, {b}) is found in tuples above ... {verdict}')
        except ValueError as ve:
            print(f'[WARNING]: {ve}')
        except TypeError as te:
            print(f'[ERROR]: {te}')
if __name__ == "__main__":
    # Build a demo list of random pairs, show it, then run the interactive
    # membership checker against it.
    new_tuples = random_tuples_list(low=0, high=10, size=10)
    print(f'New random tuples: {new_tuples}')
    # let's now do the checking
    do_checking(tuple_list=new_tuples)
# -*- coding: utf-8 -*-
"""
# @file name : RMB_data_augmentation.py
# @author : Jianhua Ma
# @date : 20210330
# @brief : Based on lec06/train_lenet.py, add the data augmentation.
"""
import os
import numpy as np
import torch
import torch.nn as nn
from torch.utils.data import DataLoader
import torchvision.transforms as transforms
import torch.optim as optim
from matplotlib import pyplot as plt
import sys
from model.lenet import LeNet
from tools.my_dataset import RMBDataset
from tools.common_tools import set_seed, transform_invert
# Resolve repo-relative paths and fail fast if the required project modules
# are not in place.
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
path_lenet = os.path.join(BASE_DIR, "..", "model", "lenet.py")
path_tools = os.path.join(BASE_DIR, "..", "tools", "common_tools.py")
assert os.path.exists(path_lenet), f"{path_lenet} not exist, please place lenet.py in the {os.path.dirname(path_lenet)}"
assert os.path.exists(path_tools), f"{path_tools} not exist, please place common_tools.py in the {os.path.dirname(path_tools)}"
hello_pytorch_DIR = os.path.abspath(os.path.dirname(__file__)+os.path.sep+"..")
sys.path.append(hello_pytorch_DIR)
set_seed()  # fixed seed for reproducible runs
# Class labels: "1" yuan -> 0, "100" yuan -> 1.
rmb_label = {"1": 0, "100": 1}
# hyperparameter settings
MAX_EPOCH = 10
BATCH_SIZE = 16
LR = 0.01
log_interval = 10
val_interval = 1
MOMENTUM = 0.9
# step 1/5: data preparation
split_dir = os.path.join(BASE_DIR, "..", "data", "rmb_split")
if not os.path.exists(split_dir):
    raise Exception(r"data {} not exist, go back to split_dataset.py to generate data".format(split_dir))
train_dir = os.path.join(split_dir, "train")
valid_dir = os.path.join(split_dir, "valid")
# ImageNet channel statistics used to normalize the inputs.
norm_mean = [0.485, 0.456, 0.406]
norm_std = [0.229, 0.224, 0.225]
# add the data augmentation: transforms.RandomGrayscale(p=0.9)
train_transform = transforms.Compose([
    transforms.Resize((32, 32)),
    transforms.RandomCrop(32, padding=4),
    transforms.RandomGrayscale(p=0.9),
    transforms.ToTensor(),
    transforms.Normalize(norm_mean, norm_std),
])
# Validation uses no augmentation -- only resize + normalize.
valid_transform = transforms.Compose([
    transforms.Resize((32, 32)),
    transforms.ToTensor(),
    transforms.Normalize(norm_mean, norm_std),
])
# build MyDataset instance
train_data = RMBDataset(data_dir=train_dir, transform=train_transform)
valid_data = RMBDataset(data_dir=valid_dir, transform=valid_transform)
# build DataLoader
train_loader = DataLoader(dataset=train_data, batch_size=BATCH_SIZE, shuffle=True)
valid_loader = DataLoader(dataset=valid_data, batch_size=BATCH_SIZE)
# step 2/5: model
net = LeNet(classes=2)
net.initialize_weights()
# step 3/5: loss function
criterion = nn.CrossEntropyLoss()
# step 4/5: optimizer
# select optimizer
optimizer = optim.SGD(net.parameters(), lr=LR, momentum=MOMENTUM)
# set up the learning rate reducing strategy
scheduler = torch.optim.lr_scheduler.StepLR(optimizer, step_size=10, gamma=0.1)
# step 5/5: training
train_curve, valid_curve = [], []
for epoch in range(MAX_EPOCH):
    loss_mean = 0.
    correct = 0.
    total = 0.
    net.train()
    for i, data in enumerate(train_loader):
        # forward
        inputs, labels = data
        outputs = net(inputs)
        # backward
        optimizer.zero_grad()
        loss = criterion(outputs, labels)
        loss.backward()
        # update weights
        optimizer.step()
        # count classification prediction
        _, predicted = torch.max(outputs.data, 1)
        total += labels.size(0)
        correct += (predicted == labels).squeeze().sum().numpy()
        # append every batch's mean loss, loss.item()
        loss_mean += loss.item()
        train_curve.append(loss.item())
        # print training information in every 10 batch steps, loss_mean is the mean loss in 10 batches
        if (i+1) % log_interval == 0:
            loss_mean = loss_mean / log_interval
            print("Training:Epoch[{:0>3}/{:0>3}] Iteration[{:0>3}/{:0>3}] Loss: {:.4f} Acc:{:.2%}".format(
                epoch, MAX_EPOCH, i+1, len(train_loader), loss_mean, correct / total))
            loss_mean = 0.
    scheduler.step()  # update the learning rate
    # validate the model in every epoch
    if (epoch+1) % val_interval == 0:
        correct_val = 0.
        total_val = 0.
        loss_val = 0.
        net.eval()
        with torch.no_grad():
            for j, data in enumerate(valid_loader):
                inputs, labels = data
                outputs = net(inputs)
                loss = criterion(outputs, labels)
                _, predicted = torch.max(outputs.data, 1)
                total_val += labels.size(0)
                correct_val += (predicted == labels).squeeze().sum().numpy()
                # every batch's valid mean loss
                loss_val += loss.item()
            # every epoch's valid mean loss
            loss_val_epoch = loss_val / len(valid_loader)
            valid_curve.append(loss_val_epoch)
            # valid_curve.append(loss.item())  # changed 2019-10-22: record the mean loss over the whole epoch, not just the last batch
            print("Valid:\t Epoch[{:0>3}/{:0>3}] Iteration[{:0>3}/{:0>3}] Loss: {:.4f} Acc:{:.2%}".format(
                epoch, MAX_EPOCH, j+1, len(valid_loader), loss_val_epoch, correct_val / total_val))
train_x = range(len(train_curve))
train_y = train_curve
train_iters = len(train_loader)
# train_curve records per-batch loss while valid_curve records per-epoch loss,
# so scale the validation x-axis to iteration units for a comparable plot.
valid_x = np.arange(1, len(valid_curve)+1) * train_iters * val_interval
valid_y = valid_curve
plt.plot(train_x, train_y, label='Train')
plt.plot(valid_x, valid_y, label='Valid')
plt.legend(loc='upper right')
plt.ylabel('loss value')
plt.xlabel('Iteration')
plt.show()
# ============================ inference ============================
# BASE_DIR = os.path.dirname(os.path.abspath(__file__))
# test_dir = os.path.join(BASE_DIR, "test_data")
#
# test_data = RMBDataset(data_dir=test_dir, transform=valid_transform)
# valid_loader = DataLoader(dataset=test_data, batch_size=1)
#
# for i, data in enumerate(valid_loader):
# # forward
# inputs, labels = data
# outputs = net(inputs)
# _, predicted = torch.max(outputs.data, 1)
#
# rmb = 1 if predicted.numpy()[0] == 0 else 100
# print("model predict {} yuan".format(rmb))
# img_tensor = inputs[0, ...] # C H W
# img = transform_invert(img_tensor, train_transform)
# plt.imshow(img)
# plt.title("LeNet got {} Yuan".format(rmb))
# plt.show()
# plt.pause(0.5)
# plt.close()
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# __coconut_hash__ = 0xc33625b4
# Compiled with Coconut version 1.2.3-post_dev1 [Colonel]
# Coconut Header: --------------------------------------------------------
from __future__ import print_function, absolute_import, unicode_literals, division
import sys as _coconut_sys, os.path as _coconut_os_path
_coconut_file_path = _coconut_os_path.dirname(_coconut_os_path.abspath(__file__))
_coconut_sys.path.insert(0, _coconut_file_path)
from __coconut__ import _coconut, _coconut_MatchError, _coconut_tail_call, _coconut_tco, _coconut_igetitem, _coconut_compose, _coconut_pipe, _coconut_starpipe, _coconut_backpipe, _coconut_backstarpipe, _coconut_bool_and, _coconut_bool_or, _coconut_minus, _coconut_map, _coconut_partial
from __coconut__ import *
_coconut_sys.path.remove(_coconut_file_path)
# Compiled Coconut: ------------------------------------------------------
import urllib
import zipfile
import os
import shutil
from dataget.utils import get_file
from dataget.dataset import ImageDataSetWithMetadata
from dataget.api import register_dataset
from multiprocessing import Pool
from dataget.utils import OS_SPLITTER
# Official GTSRB (German Traffic Sign Recognition Benchmark) archives.
TRAINING_SET_URL = "http://benchmark.ini.rub.de/Dataset/GTSRB_Final_Training_Images.zip"
TEST_SET_URL = "http://benchmark.ini.rub.de/Dataset/GTSRB_Final_Test_Images.zip"
TEST_CSV_URL = "http://benchmark.ini.rub.de/Dataset/GTSRB_Final_Test_GT.zip"
@register_dataset
class GermanTrafficSigns(ImageDataSetWithMetadata):
    """GTSRB (German Traffic Sign) dataset: download, extraction and
    per-class folder restructuring of the official archives.

    NOTE: this module is Coconut-compiled output, which explains the
    `_coconut.*` combinator-style call syntax throughout.
    """
    def __init__(self, *args, **kwargs):
        super(GermanTrafficSigns, self).__init__(*args, **kwargs)
        # Folder layout inside the extracted official archives.
        self._training_images_path = os.path.join(self.training_set.path, "GTSRB/Final_Training/Images")
        self._test_images_path = os.path.join(self.test_set.path, "GTSRB/Final_Test/Images")
    @property
    def _raw_extension(self):
        # GTSRB images ship in PPM format.
        return "ppm"
    @property
    def help(self):
        return "TODO"
    def reqs(self, **kwargs):
        # NOTE(review): appending "" assumes the base reqs() returns a
        # string and is then a no-op -- confirm the intended requirements.
        return super(GermanTrafficSigns, self).reqs() + ""
    def _download(self, **kwargs):
        # Fetch all three official archives into the dataset root.
        get_file(TRAINING_SET_URL, self.path, "training-set.zip")
        get_file(TEST_CSV_URL, self.path, "test-set.csv.zip")
        get_file(TEST_SET_URL, self.path, "test-set.zip")
    def _extract_training_set(self, **kwargs):
        """Extract training images/CSVs into one folder per class id."""
        import pandas as pd
        print("extracting training-set.zip")
        with zipfile.ZipFile(os.path.join(self.path, "training-set.zip"), 'r') as zip_ref:
            for file in zip_ref.namelist():
                # skip directories
                if os.path.basename(file):
                    if file.endswith(".csv") or file.endswith(self.raw_extension):
                        # print(file)
                        # print(self.path)
                        # os.path.join(self.path, file) |> print
                        # Archive layout: .../<class_id>/<filename>
                        structure = (_coconut.operator.methodcaller("split", "/"))(file)
                        filename = structure[-1]
                        class_id = (str)((int)(structure[-2]))
                        if not (os.path.exists)(os.path.join(self.training_set.path, class_id)):
                            (os.makedirs)(os.path.join(self.training_set.path, class_id))
                        if file.endswith(".csv"):
                            filename = "{}.csv".format(class_id)
                        # copy file (taken from zipfile's extract)
                        path = os.path.join(self.training_set.path, class_id, filename)
                        source = zip_ref.open(file)
                        target = open(path, "wb")
                        with source, target:
                            shutil.copyfileobj(source, target)
                        if file.endswith(".csv"):
                            # Normalize metadata CSV: lower-case headers and
                            # rename classid -> class_id; rewrite as plain CSV.
                            df = pd.read_csv(path, sep=";")
                            df.columns = (list)((_coconut.functools.partial(map, _coconut.operator.methodcaller("lower")))(df.columns))
                            df.rename(columns={'classid': 'class_id'}, inplace=True)
                            df.to_csv(path, index=False)
    def _extract_test_set(self, **kwargs):
        """Extract test images plus ground-truth CSV, then restructure the
        test folder by class using that CSV."""
        print("extracting test-set.zip")
        with zipfile.ZipFile(os.path.join(self.path, "test-set.zip"), 'r') as zip_ref:
            for file in zip_ref.namelist():
                # skip directories
                if os.path.basename(file):
                    if file.endswith(self.raw_extension):
                        structure = (_coconut.operator.methodcaller("split", "/"))(file)
                        filename = structure[-1]
                        path = os.path.join(self.test_set.path, filename)
                        # copy file (taken from zipfile's extract)
                        source = zip_ref.open(file)
                        target = open(path, "wb")
                        with source, target:
                            shutil.copyfileobj(source, target)
        print("extracting test-set.csv.zip")
        with (_coconut_partial(zipfile.ZipFile, {1: 'r'}, 2))(os.path.join(self.path, "test-set.csv.zip")) as zip_ref:
            path = os.path.join(self.test_set.path, "test-set.csv")
            # copy file (taken from zipfile's extract)
            source = zip_ref.open("GT-final_test.csv")
            target = open(path, "wb")
            with source, target:
                shutil.copyfileobj(source, target)
        # Convert the semicolon-separated ground truth to comma-separated.
        with (_coconut_partial(open, {1: "r"}, 2))(os.path.join(self.test_set.path, "test-set.csv")) as f:
            txt = f.read().replace(";", ",")
        with (_coconut_partial(open, {1: "w"}, 2))(os.path.join(self.test_set.path, "test-set.csv")) as f:
            f.write(txt)
        self._structure_folder_from_csv(self.test_set.path)
        #remove old csv
        (os.remove)(os.path.join(self.test_set.path, "test-set.csv"))
    def _structure_folder_from_csv(self, dir_path):
        """Move images in `dir_path` into per-class subfolders based on the
        class_id column of the CSV files found there."""
        import pandas as pd
        print("organizing test-set")
        csv_files = (_coconut.functools.partial(map, _coconut.functools.partial(os.path.join, dir_path)))((_coconut.functools.partial(filter, _coconut.operator.methodcaller("endswith", ".csv")))(os.listdir(dir_path)))
        df = (pd.concat)((_coconut.functools.partial(map, pd.read_csv))(csv_files))
        df.columns = (list)((_coconut.functools.partial(map, _coconut.operator.methodcaller("lower")))(df.columns))
        df.rename(columns={'classid': 'class_id'}, inplace=True)
        groups = df.groupby(["class_id"])
        for class_id, group in groups:
            group = group.copy()
            class_path = os.path.join(dir_path, str(class_id))
            group_csv_path = os.path.join(class_path, str(class_id)) + ".csv"
            for i, row in group.iterrows():
                file_path = os.path.join(class_path, row.filename)
                current_file_path = os.path.join(dir_path, row.filename)
                if not os.path.exists(class_path):
                    os.makedirs(class_path)
                # move files
                os.rename(current_file_path, file_path)
            #create group csv
            group.to_csv(group_csv_path, index=False)
    def _extract(self, **kwargs):
        # Extraction entry point used by the dataget framework.
        self._extract_training_set(**kwargs)
        self._extract_test_set(**kwargs)
    def process_dataframe(self, dataframe, **kwargs):
        # print(dataframe.iloc[0].class_id)
        # No per-dataframe post-processing needed for this dataset.
        pass
|
from django import forms
class DownloadForm(forms.Form):
    """Minimal form carrying the text of the post to download."""
    # Required free-text field (CharField defaults to required=True).
    post = forms.CharField()
|
# -*- coding: utf-8 -*-
import os
import re
from datetime import datetime
import scrapy
from pymongo import MongoClient
from sinastock.items import NewsItem
class SinastockSpiderSpider(scrapy.Spider):
    """Crawl Sina Finance news for every stock stored in MongoDB.

    For each stock in the `stocks` collection, the first 10 listing pages
    are fetched; every linked article becomes a NewsItem with its text,
    timestamp and associated ts_code.
    """
    name = 'sinastock_spider'
    allowed_domains = ['finance.sina.com.cn']
    # Mongo connection is configured from the environment with local
    # defaults so the spider also runs outside the Crawlab platform.
    mongo = MongoClient(
        host=os.environ.get('MONGO_HOST') or 'localhost',
        port=int(os.environ.get('MONGO_PORT') or 27017)
    )
    db = mongo[os.environ.get('MONGO_DB') or 'crawlab_test']
    col = db.get_collection(os.environ.get('CRAWLAB_COLLECTION') or 'stock_news')

    def start_requests(self):
        """Yield listing-page requests (10 pages) per stored ts_code."""
        col = self.db['stocks']
        for s in col.find({}):
            # ts_code looks like "600000.SH" -> exchange prefix + code.
            code, ex = s['ts_code'].split('.')
            for i in range(10):
                url = f'http://vip.stock.finance.sina.com.cn/corp/view/vCB_AllNewsStock.php?symbol={ex.lower()}{code}&Page={i + 1}'
                yield scrapy.Request(
                    url=url,
                    callback=self.parse,
                    meta={'ts_code': s['ts_code']}
                )

    def parse(self, response):
        """Turn each article link on a listing page into a detail request."""
        for a in response.css('.datelist > ul > a'):
            url = a.css('a::attr("href")').extract_first()
            item = NewsItem(
                title=a.css('a::text').extract_first(),
                url=url,
                source='sina',
                stocks=[response.meta['ts_code']]
            )
            yield scrapy.Request(
                url=url,
                callback=self.parse_detail,
                meta={'item': item}
            )

    def parse_detail(self, response):
        """Extract article text and timestamp; drop items missing either."""
        item = response.meta['item']
        html = response.css('#artibody').extract_first()
        # Fix: extract_first() returns None when the article-body selector
        # misses; the old code crashed in re.findall(None) before its own
        # None-check could run. Guard first, then strip tags.
        if html is None:
            item['text'] = None
        else:
            # Keep only the text between tags, then drop full-width spaces.
            pre = re.compile('>(.*?)<')
            item['text'] = ''.join(pre.findall(html)).replace('\u3000', '')
        item['ts_str'] = response.css('.date::text').extract_first()
        if item['text'] is None or item['ts_str'] is None:
            pass
        else:
            item['ts'] = datetime.strptime(item['ts_str'], '%Y年%m月%d日 %H:%M')
            yield item
|
# Authors: Michael Hawes, Tony Tran
# Date: 12 Novemebr 2016
# Hack RPI
import jinja2
import html2text
import requests
import queue
import re
import nltk
import json
from collections import Counter
from bs4 import BeautifulSoup
from google import search
from stop_words import get_stop_words
from stemming.porter2 import stem
from nltk.corpus import stopwords
from nltk.stem import WordNetLemmatizer
import requests
from multiprocessing import Pool
from flask import Flask, render_template, request, redirect, url_for, abort, session
# Tokens treated as punctuation/noise and excluded from word counts.
non_word_list = ['[', ']', '*', '(', ')', '\\', '/', '&']
# NOTE(review): module-level accumulator shared by get_count/get_key_words;
# counts leak between requests that use it.
word_count = []
lemma = WordNetLemmatizer()
stop = stopwords.words('english')
# env = jinja2.Environment()
# env.globals.update(zip=zip)
app = Flask(__name__)
# Fix: dropped the stray trailing semicolon (un-idiomatic in Python).
app.config['SECRET_KEY'] = 'F34TF$($e34D'
@app.route('/', methods=['GET', 'POST'])
def index():
    """Landing page; on POST, stash the search text and go to /gallery."""
    if request.method != "POST":
        return render_template('index.html')
    session['message'] = request.form['message']  # search text from the form
    return redirect(url_for('gallery'))
@app.route('/gallery')
def gallery():
    """Distill the user's phrase into key words, search with them, and
    render the resulting URLs with API-generated summaries."""
    phrase = session['message']  # input text saved by index()
    top_words = get_key_words(phrase, gen_sums=None)
    # Re-search with the dominant words joined into a new phrase.
    new_phrase = ''.join(word_pair[0] + ' ' for word_pair in top_words)
    urls = get_key_words(new_phrase, gen_sums=1)
    summaries = Pool().map(get_summary, urls)
    return render_template('gallery.html', results=summaries, urls=urls, zip=zip)
def soupify(url):
    """Return the visible text of an HTTP response, one chunk per line."""
    soup = BeautifulSoup(url.text, "html.parser")
    # Remove script/style subtrees so only human-readable text remains.
    for tag in soup(["script", "style"]):
        tag.extract()
    raw = soup.get_text()
    stripped = (line.strip() for line in raw.splitlines())
    pieces = (part.strip() for line in stripped for part in line.split(" "))
    return '\n'.join(piece for piece in pieces if piece)
def get_count(texts):
    """Count lemmatized, lowercased words across `texts` and return the six
    most common (word, count) pairs.

    Stopwords, punctuation tokens from `non_word_list` and pure digits are
    ignored.
    """
    # Fix: count into a local list instead of the module-level `word_count`
    # accumulator, so results no longer leak between calls; also removed
    # the dead `queue2` / `not_done` locals.
    words = []
    for text in texts:
        for word in text.split():
            word = word.lower()
            if word in stop or word in non_word_list or word.isdigit():
                continue
            words.append(lemma.lemmatize(word))
    return Counter(words).most_common(6)
def get_key_words(phrase, gen_sums=None):
    """Google-search `phrase` and either return the six most common content
    words found on the result pages (gen_sums=None), or the reachable
    result URLs themselves (gen_sums set).

    Fixes applied:
    - catch requests.exceptions.ConnectionError: the builtin ConnectionError
      does not cover requests' DNS/connect failures, so the old handler
      never fired;
    - count words into a local list instead of the shared module-level
      `word_count` accumulator (results leaked between calls);
    - removed the dead `queue2` / `not_done` locals.
    """
    if gen_sums is None:
        responses = []
        for result_url in search(phrase, stop=1):
            try:
                responses.append(requests.get(result_url))
            except requests.exceptions.ConnectionError:
                print("Skipping over the url...")
        texts = Pool().map(soupify, responses)
        words = []
        for text in texts:
            for word in text.split():
                word = word.lower()
                if word in stop or word in non_word_list or word.isdigit():
                    continue
                words.append(lemma.lemmatize(word))
        return Counter(words).most_common(6)
    else:  # done filtering: return the reachable result URLs
        url_list = []
        for result_url in search(phrase, stop=1):
            try:
                # Probe the URL; unreachable ones are skipped.
                requests.get(result_url)
                url_list.append(result_url)
            except requests.exceptions.ConnectionError:
                print('Skipping')
        return url_list
def get_summary(url):
    """Ask the Haven OnDemand concept-extraction API about `url` and return
    the concepts as one space-separated string ('' when the call fails)."""
    # Fix: read the API key via a context manager (the bare open() leaked
    # the file handle) and return '' instead of an implicit None on non-200
    # responses so callers can always treat the result as a string.
    with open('api_key.txt', 'r') as key_file:
        api_key = key_file.readline().strip()
    params = {"url": url, "apikey": api_key}
    r = requests.get(
        "https://api.havenondemand.com/1/api/sync/extractconcepts/v1",
        params=params)
    if r.status_code != 200:
        return ''
    return ''.join(dic['concept'] + ' ' for dic in r.json()['concepts'])
@app.route('/result', methods=['GET', 'POST'])
def result():
    """Re-run the keyword pipeline seeded from a user-selected result URL."""
    if request.method == "POST":
        session['radios'] = request.form['radios'] # get search text
        result = session['radios']
        if result == 'true':
            session['url'] = request.form['url'] # got chosen url
            #print(session['url'])
            r = requests.get(session['url'])
            string = ''
            if r.status_code == 200:
                # Seed the next search with the chosen page's top words.
                text = soupify(r)
                list2 = get_count([text])
                for key in list2:
                    string += key[0] + ' '
            phrase = string
            top_5 = get_key_words(phrase, gen_sums = None)
            resulting_words = []
            for i in top_5:
                resulting_words += [i[0] + ' ']
            new_phrase = ''.join(resulting_words)
            #print("new phrase to search with ",new_phrase)
            urls = get_key_words(new_phrase, gen_sums = 1)
            p = Pool ()
            listy = p.map(get_summary,urls)
            return render_template('result.html', results=listy, urls=urls, zip=zip)
    # NOTE(review): GET requests (and POSTs with radios != 'true') fall
    # through and return None, which Flask turns into a 500 error --
    # confirm whether a redirect or template response is intended here.
if __name__ == '__main__':
    # Run the Flask development server (debug mode: auto-reload + tracebacks).
    app.run(debug=True)
|
from django.shortcuts import render
from django.shortcuts import HttpResponse
from .forms import FoodFitnessForm
from django.contrib.auth.models import User
# function to test with
def index(request):
    """Smoke-test view: confirms URL routing works by returning a plain response."""
    return HttpResponse("You made it.")
# function to create new user
def createUser(request):
    """Render the user-creation form; on POST, create a Django User.

    NOTE(review): ``create_user(username, email, password)`` is called with
    ``calories`` in the email slot and ``date`` in the password slot —
    confirm this field mapping is intentional.
    """
    form = FoodFitnessForm(request.POST or None)
    context = {
        "form": form
    }
    if request.method == "POST":
        print(request.POST)
        User.objects.create_user(request.POST["username"], request.POST["calories"], request.POST["date"])
        return render(request, "authenticationCwApp/confirmUser.html")
    return render(request, 'authenticationCwApp/createUser.html', context)
# function to confirm new user
def confirmUser(request):
    """On GET, create the user and persist the form; otherwise re-render.

    NOTE(review): ``create_user`` receives four positional arguments
    (username, '', calories, date); Django's ``UserManager.create_user``
    signature is (username, email=None, password=None, **extra) — verify
    this call does not raise TypeError at runtime.
    """
    form = FoodFitnessForm(request.GET or None)
    context = {
        "form": form
    }
    if request.method == 'GET':
        User.objects.create_user(request.GET["username"], "", request.GET["calories"], request.GET["date"])
        form.save()
        return HttpResponse("New Food Calorie Tracker Created!!!!!")
    return render(request, "authenticationCwApp/confirmUser.html", context)
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import django.db.models.deletion
import modelcluster.fields
class Migration(migrations.Migration):
    """Auto-generated migration: drops the ``watchlist`` model."""
    dependencies = [
        ('news', '0005_informationpage'),
        ('wagtailimages', '0006_add_verbose_names'),
        ('wagtailcore', '0001_squashed_0016_change_page_url_path_to_text_field'),
        ('event', '0004_auto_20151201_1011'),
    ]
    operations = [
        migrations.DeleteModel(
            name='watchlist',
        ),
    ]
|
#
# @lc app=leetcode.cn id=120 lang=python3
#
# [120] 三角形最小路径和
#
from typing import List
class Solution:
    """LeetCode 120: minimum path sum from top to bottom of a triangle."""

    def minimumTotal(self, triangle: List[List[int]]) -> int:
        """Return the minimal top-to-bottom path sum; 0 for an empty triangle.

        Row-by-row dynamic programming: ``best[c]`` is the cheapest path sum
        ending at column ``c`` of the current row.  Unlike the original this
        does not mutate the caller's ``triangle`` and does not hide real
        errors behind a bare ``except`` (which silently returned 0).
        """
        if not triangle:
            return 0
        best = list(triangle[0])
        for row in triangle[1:]:
            acc = list(row)
            # Edge columns are reachable only from the matching edge above.
            acc[0] += best[0]
            acc[-1] += best[-1]
            for c in range(1, len(row) - 1):
                acc[c] += min(best[c - 1], best[c])
            best = acc
        return min(best)
|
from datetime import datetime
def get_departures():
    """Read departure-data.txt and return the list of parsed departure tuples."""
    with open('../data/departure-data.txt') as data_file:
        raw_lines = data_file.read().splitlines()
    # get_departure returns None for comment lines; keep only real records.
    parsed = (get_departure(raw) for raw in raw_lines)
    return [dep for dep in parsed if dep]
def get_departure(line):
    """Return a tuple containing two datetime objects.

    Parses one ``planned<TAB>actual`` data line into
    ``(date_planned, date_actual)``.  Lines beginning with an asterisk (*)
    are comments and yield None; an empty *actual* field (no recorded
    departure) yields ``date_actual = None``.

    NOTE(review): timestamps are assumed to be ISO formatted
    (``datetime.fromisoformat``) — confirm against the data file.
    """
    # Comment/header lines start with an asterisk.
    if line.startswith('*'):
        return None
    # Split on the first tab; partition tolerates a missing actual field.
    planned, _, actual = line.partition('\t')
    date_planned = datetime.fromisoformat(planned)
    date_actual = datetime.fromisoformat(actual) if actual else None
    return (date_planned, date_actual)
def left_ontime(departure):
    """True when the trip ran and departed exactly at the planned time."""
    planned, actual = departure
    return bool(actual) and actual == planned
def left_early(departure):
    """True when the trip ran and departed before its planned time."""
    planned, actual = departure
    return actual is not None and actual < planned
def left_late(departure):
    """True when the trip departed after plan but still on the same calendar day.

    Next-day departures are excluded so the punctuality categories used by
    main() stay disjoint.
    """
    planned, actual = departure
    if actual is None:
        return False
    return planned < actual and actual.date() == planned.date()
def left_next_day(departure):
    """True when the actual departure fell on a later calendar day than planned."""
    planned, actual = departure
    return actual is not None and actual.date() > planned.date()
def did_not_run(departure):
    """True when there is no actual departure time (a cancelled trip)."""
    return departure[1] is None
def main():
    """Summarize all departures by punctuality category and print the counts."""
    departures = get_departures()
    ontime_departures = list(filter(left_ontime, departures))
    early_departures = list(filter(left_early, departures))
    late_departures = list(filter(left_late, departures))
    next_day_departures = list(filter(left_next_day, departures))
    cancelled_trips = list(filter(did_not_run, departures))
    print(f"""Total Departures: {len(departures)}
Ontime Departures: {len(ontime_departures)}
Early Departures: {len(early_departures)}
Late Departures: {len(late_departures)}
Next Day Departures: {len(next_day_departures)}
Cancelled Trips: {len(cancelled_trips)}""")
main() |
import prepare_settings
from unittest import TestCase, main
from django_documents.documents import Model
from django_documents import related
from django_documents import fields
from django_documents.serializer import JsonSerializer, JsonUnSerializer
"""
class Category(Model):
name = fields.CharField()
subcategories = related.ListOf('Category')
"""
from app.models import MyDjangomodel
class TestRecursiveDefinitionDjOneOf(TestCase):
    """Checks a Django model can hold a reference to another instance of itself."""
    def test(self):
        # Only construction and attribute assignment are exercised here.
        mymodel = MyDjangomodel()
        mysecond_model = MyDjangomodel()
        mymodel.mymodel = mysecond_model
"""
class TestRecursiveDefinitionListOf(TestCase):
def test(self):
slapen = Category(name="slapen")
hotel = Category(name="hotel")
slapen.subcategories = [hotel]
json = JsonSerializer().serialize(slapen)
retrieved_slapen = JsonUnSerializer().unserialize(json)
self.assertEqual(slapen.name, retrieved_slapen.name)
self.assertEqual(slapen.subcategories[0].name, retrieved_slapen.subcategories[0].name)
"""
class Mymodel(Model):
    """Document model used to test (de)serialization of recursive relations."""
    # mymodel = related.OneOf('Mymodel')
    id = fields.CharField(blank = False, null = False, max_length = 36, auto_created = True)
    site = fields.CharField(max_length = 40, blank = True, null = True)
    name = fields.CharField(max_length = 40)
    # Recursive relation: children are themselves Mymodel documents.
    rootCategories = related.ListOf('Mymodel')
class TestRecursiveDefinitionOneOf(TestCase):
    """Round-trips a recursive Mymodel through the JSON (un)serializer."""
    def test(self):
        mymodel = Mymodel()
        mysecond_model = Mymodel()
        mymodel.mymodel = mysecond_model
        json = JsonSerializer().serialize(mymodel)
        retrieved_mymodel = JsonUnSerializer().unserialize(json)
    def test_describe(self):
        import simplejson as json
        # print(...) works on both Python 2 and 3; the original bare
        # py2-style `print expr` statement is a SyntaxError under Python 3.
        print(json.dumps(Mymodel._meta.describe(recursive=True)))
if __name__ == '__main__':
main() |
import pywikibot
from pywikibot.data import api
import wikiSQL as SQL
import re
'''
Items
'''
def getItem(site, wdItem, token):
    """Fetch the full entity data for *wdItem* via wbgetentities.

    ``token`` is accepted for interface compatibility but not used here.
    """
    params = {
        'action': 'wbgetentities',
        'format': 'json',
        'ids': wdItem,
    }
    return api.Request(site=site, **params).submit()
def getItems(site, itemtitle, language):
    """Search for items whose label matches *itemtitle* in *language* (en/de/fr)."""
    request = api.Request(site=site,
                          action='wbsearchentities',
                          format='json',
                          language=language,
                          type='item',
                          search=itemtitle)
    return request.submit()
def CreateItem(repo, ENDPOINT, row):
    """Create a new item with fr/de/en labels and optional descriptions.

    row (mapping) -- spreadsheet row; the project-specific column
    '<LANG> MiMoText' takes precedence over the plain '<LANG>' column.
    Description columns 'description-<lang>' are optional.

    Returns the newly created ItemPage.
    """
    def pick_label(lang):
        # Prefer the MiMoText column, fall back to the plain one, else ''.
        for column in (lang + ' MiMoText', lang):
            if column in row and row[column] != '':
                return row[column]
        return ''

    new_item = pywikibot.ItemPage(repo)
    label_dict = {'fr': pick_label('FR'), 'de': pick_label('DE'), 'en': pick_label('EN')}
    new_item.editLabels(labels=label_dict, summary="Setting labels")
    if 'description-fr' in row or 'description-de' in row or 'description-en' in row:
        # .get() avoids the KeyError the original raised when only some
        # of the three description columns were present.
        desc_dict = {'fr': row.get('description-fr', ''),
                     'de': row.get('description-de', ''),
                     'en': row.get('description-en', '')}
        new_item.editDescriptions(descriptions=desc_dict, summary="Setting descriptions")
    return new_item
def CreateAuthor(name, repo):
    """Create an author item labelled *name* in fr/de/en.

    Adds: P9 (name as monolingual fr text), P2 instance-of Q10 (human),
    P30 occupation Q15 (author).  Returns the new item's QID.
    """
    # Identical labels for all three languages.
    new_item = pywikibot.ItemPage(repo)
    label_dict = {'fr': name, 'de': name, 'en': name}
    new_item.editLabels(labels=label_dict, summary="Setting labels")
    # Name as a monolingual (fr) statement.
    # NOTE(review): property id is lowercase 'p9' here but uppercase
    # everywhere else — confirm which form this wikibase expects.
    new_claim = pywikibot.Claim(repo, 'p9')
    target = pywikibot.WbMonolingualText(name, "fr")
    new_claim.setTarget(target)
    new_item.addClaim(new_claim, summary="Adding claim P9")
    # (P2) instance of (Q10) human.
    new_claim = pywikibot.Claim(repo, 'P2')
    target = pywikibot.ItemPage(repo, 'Q10')
    new_claim.setTarget(target)
    new_item.addClaim(new_claim, summary="Adding claim P2")
    # (P30) occupation (Q15) author.
    new_claim = pywikibot.Claim(repo, 'P30')
    target = pywikibot.ItemPage(repo, 'Q15')
    new_claim.setTarget(target)
    # Fixed copy-paste: the original summary said "Adding claim P9" here.
    new_item.addClaim(new_claim, summary="Adding claim P30")
    return new_item.getID()
def CreateWerk(title, repo):
    """Create a novel item labelled *title* in fr/de/en.

    Adds P2 instance-of Q2 (literary work) and P39 genre Q12 (fictional
    prose), each referenced with P14 stated-in Q1.  Returns the ItemPage.
    """
    def add_sourced_claim(prop, target_qid):
        # One claim `prop -> target_qid`, referenced by (P14) stated in (Q1).
        ref = pywikibot.Claim(repo, 'P14', True)
        ref.setTarget(pywikibot.ItemPage(repo, 'Q1'))
        claim = pywikibot.Claim(repo, prop)
        claim.setTarget(pywikibot.ItemPage(repo, target_qid))
        claim.addSources([ref], summary="Adding reference Q1")
        new_item.addClaim(claim, summary="Adding claim " + prop)

    new_item = pywikibot.ItemPage(repo)
    new_item.editLabels(labels={'fr': title, 'de': title, 'en': title},
                        summary="Setting labels")
    add_sourced_claim('P2', 'Q2')    # instance of: literary work
    add_sourced_claim('P39', 'Q12')  # genre: fictional prose
    return new_item
'''
Properties
'''
def getProperties(site, itemtitle):
    """Search for properties whose English label matches *itemtitle*."""
    request = api.Request(site=site,
                          action='wbsearchentities',
                          format='json',
                          language='en',
                          type='property',
                          search=itemtitle)
    return request.submit()
def GetProperty(site, item):
    """Return the PID whose label equals *item* exactly.

    Returns 0 when the search returned hits but none matched exactly, and
    None when the search returned nothing at all (same contract as before).
    """
    hits = getProperties(site, item)['search']
    if not hits:
        return None
    # The original kept the *last* exact match; preserve that.
    matches = [hit['id'] for hit in hits if hit['label'] == item]
    return matches[-1] if matches else 0
def CreateProperty(repo, title, datatypeProp):
    """Create a property named *title* of wikibase datatype *datatypeProp*.

    Only creates when CheckForProperty reports no property with that label
    exists yet.  Returns the new PID, or None when one already existed.
    """
    # TODO: Exchange CheckForProperty with SparQL search?
    if not CheckForProperty(repo, title):
        return None
    new_prop = pywikibot.PropertyPage(repo, datatype=datatypeProp)
    new_prop.editLabels(labels={'fr': title, 'de': title, 'en': title},
                        summary='Setting labels')
    return new_prop.getID()
def CreateReferenz(repo, ENDPOINT, claim, ref_prop, ref_target):
    '''
    Add a reference (ref_prop -> ref_target) to the given claim, unless an
    identical reference is already attached.

    claim (Claim) -- claim to add the reference to
    ref_prop (string) -- PID of the reference property
    ref_target (string) -- QID, or a label resolved to a QID via SPARQL
    '''
    # Resolve non-QID targets (labels) through the SPARQL endpoint first.
    if ref_target[:1] != 'Q':
        ref_target = SQL.GetEntryOverSPARQL(ENDPOINT,ref_target, SQL.SparQL_Mode.QID)
    # Scan the claim's existing sources to avoid adding a duplicate.
    targetFound = False
    for claimSources in claim.getSources():
        for referenceProperty, referenceClaims in claimSources.items():
            if referenceProperty == ref_prop:
                for referenceClaim in referenceClaims:
                    if ref_target == referenceClaim.getTarget().id:
                        targetFound = True
    if not targetFound:
        target = pywikibot.ItemPage(repo, ref_target)
        ref = pywikibot.Claim(repo, ref_prop, True)
        ref.setTarget(target)
        sources = []
        sources.append(ref)
        claim.addSources(sources, summary="Adding reference " + ref_target)
def CreateClaim(repo, ENDPOINT, item, prop, target, references = None):
    #TODO change description after
    #TODO check if claim is already set
    '''
    Add a new Claim to given Item
    Keyword arguments:
    item (ItemPage) -- given item
    prop (str) -- PID of statement property
    target (str) -- QID of linked target
    references (list of tuples) -- (reference_property, reference_target) default None
    '''
    #print(item, prop, target, references)
    # First pass: look for an existing claim with this property/target; if
    # found, only (possibly) attach the requested references to it.
    targetFound = False
    itemClaims = item.get()['claims']
    for claimProperty, claims in itemClaims.items():
        if claimProperty == prop:
            for claim in claims:
                # NOTE(review): "99.11" appears to be a sentinel target that
                # must never match an existing claim id — confirm intent.
                if target == "99.11": #TODO: different werk
                    claimTargetId = None
                else:
                    claimTargetId = claim.getTarget().id
                if claimTargetId == target:
                    targetFound = True
                    if references:
                        for ref in references:
                            CreateReferenz(repo, ENDPOINT, claim, ref[0], ref[1])
    # Second pass: no matching claim exists, so create one.
    if not targetFound:
        print(repo)
        print(prop)
        propPage= pywikibot.PropertyPage(repo, prop)
        propPage.get()
        print(propPage._type)
        claim = ''
        claim = pywikibot.Claim(repo, prop)
        print(prop)
        # SetTarget converts *target* into the value type the property needs.
        sTarget = SetTarget(repo,ENDPOINT, prop, target)
        if sTarget:
            claim.setTarget(sTarget)
        if references:
            for ref in references:
                if target and ref[0] and ref[1]:
                    CreateReferenz(repo,ENDPOINT,claim, ref[0], ref[1])
        item.addClaim(claim, summary='Adding claim ' + prop)
def getClaim(item, pid, ENDPOINT=None, REPO=None, target=None):
    """Return a claim of *item* for property *pid*, or None.

    Without REPO/ENDPOINT the first claim for *pid* is returned as-is.
    With them, *target* is required and is matched against each claim's
    target; non-QID labels are resolved to QIDs via SPARQL (default
    language, then fr, then de) when the property links wikibase items.
    """
    #TODO: items with more then one matching property
    itemClaims = item.get()['claims']
    for claimProperty, claims in itemClaims.items():
        if claimProperty != pid:
            continue
        for claim in claims:
            if not REPO or not ENDPOINT:
                return claim
            if not target:
                return None
            # Default to comparing the raw target; this fixes the original's
            # unbound `qid` when the property type is not wikibase-item.
            qid = target
            if not re.match(r"^Q\d+$", target):
                propertyPage = pywikibot.PropertyPage(REPO, pid)
                propertyPage.get(True)
                if propertyPage._type == "wikibase-item":
                    qid = SQL.GetEntryOverSPARQL(ENDPOINT, target)
                    if not qid:
                        qid = SQL.GetEntryOverSPARQL(ENDPOINT, target, lang="fr")
                    if not qid:
                        qid = SQL.GetEntryOverSPARQL(ENDPOINT, target, lang="de")
            if claim.target_equals(qid):
                return claim
    return None
def SetTarget(repo,ENDPOINT, prop, target):
    '''
    Build the typed target value for *prop* from the string *target*.

    prop (string) -- property to be linked
    target (string) -- target of the link (QID, label, literal, ...)

    Returns a pywikibot value object matching the property's datatype, the
    raw string for literal types, or None when an item label cannot be
    resolved to a QID.
    '''
    propertyPage = pywikibot.PropertyPage(repo, prop)
    propertyPage.get()
    if propertyPage._type == 'wikibase-item':
        print(target, "TARGET")
        # Labels (non-QID strings) are resolved via SPARQL: default
        # language first, falling back to the French lookup.
        if not re.match(r"Q\d+$", target):
            t1 = SQL.GetEntryOverSPARQL(ENDPOINT, target, lang='fr')
            target = SQL.GetEntryOverSPARQL(ENDPOINT,target)
            if not target:
                target = t1
            if not target:
                return
        print(target)
        return pywikibot.ItemPage(repo, target)
    elif propertyPage._type == 'wikibase-property':
        return pywikibot.PropertyPage(repo, target)
    elif propertyPage._type == 'monolingualtext':
        # Monolingual values are always stored as French text here.
        return pywikibot.WbMonolingualText(target, "fr")
    elif propertyPage._type == 'time':
        # Target is interpreted as a year (proleptic Gregorian calendar).
        return pywikibot.WbTime(int(target), calendarmodel = 'http://www.wikidata.org/entity/Q1985727')
    elif propertyPage._type == 'external_id':
        return target
    elif propertyPage._type == 'url':
        return target
    elif propertyPage._type == 'commonsMedia':
        return target
    elif propertyPage._type == 'quantity':
        return pywikibot.WbQuantity(target)
    elif propertyPage._type == 'globe-coordinate':
        print("Coordinates")
        # Expected format: "lat,lon" with a fixed precision.
        coordinates = target.split(',')
        lat = coordinates[0]
        lon = coordinates[1]
        precision = "0.000001"
        print(lat, lon, precision)
        coord = pywikibot.Coordinate(lat, lon, precision=precision)
        print(str(coord))
        return coord
    else:
        # Unknown datatype: pass the raw string through unchanged.
        return target
def CheckForEntry(site, item):
    """Return True when no item with the exact English label *item* exists yet."""
    hits = getItems(site, item, 'en')['search']
    # all() over an empty result list is True, matching the original's
    # early `return True` for empty searches.
    return all(entry['label'] != item for entry in hits)
def CheckForProperty(site, elem):
    """Return True when no property with the exact label *elem* exists yet."""
    hits = getProperties(site, elem)['search']
    # all() over an empty result list is True, matching the original's
    # early `return True` for empty searches.
    return all(entry['label'] != elem for entry in hits)
def GetEntry(site, item, lang):
    """Return the QID whose search-match text equals *item*.

    lang -- en, de, fr
    Returns 0 when the search had hits but none matched exactly, and None
    when the search returned nothing at all (same contract as before).
    TODO: change to SparQL?
    """
    hits = getItems(site, item, lang)['search']
    if not hits:
        return None
    # The original kept the *last* exact match; preserve that.
    matches = [entry['id'] for entry in hits if entry['match']['text'] == item]
    return matches[-1] if matches else 0
|
from django import forms
from django.contrib import admin, messages
from django.utils import timezone
from tinymce.widgets import TinyMCE
from .models import Article
# Register your models here.
class ArticleForm(forms.ModelForm):
    """Article form that edits `content` through a TinyMCE rich-text widget."""
    content = forms.CharField(widget=TinyMCE())
class ArticleAdmin(admin.ModelAdmin):
    """Admin for Article: TinyMCE editing plus creator/creation bookkeeping."""
    form = ArticleForm
    readonly_fields = ('created', 'created_by')
    list_display = ('title', 'created', 'created_by', 'article_type')
    list_filter = ['article_type']

    def save_model(self, request, obj, form, change):
        """Stamp new articles with author/time; restrict edits to the
        creator or privileged members."""
        if not change:
            # First save: record who created the article and when.
            obj.created = timezone.now()
            obj.created_by = request.user
        elif not (request.user == obj.created_by or request.user.member.is_privileged()):
            messages.add_message(request, messages.ERROR,
                                 'To change an article, you have to be its creator or president/webmaster.')
            return
        super(ArticleAdmin, self).save_model(request, obj, form, change)


admin.site.register(Article, ArticleAdmin)
|
#!/usr/bin/env python3
from argparse import ArgumentParser
import logging
import bitcoin
import json
import codecs
import urllib3
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
http = urllib3.PoolManager()
def get_hash_by_height(height):
    """Return the hash of the block at *height* from the blockchain.info API."""
    url = 'https://blockchain.info/block-height/%s?format=json' % height
    logger.info('url %s' % url)
    payload = json.loads(http.request('GET', url).data)
    return payload['blocks'][0]['hash']
def serialize_header(height):
    """Return the full JSON block object at *height* from blockchain.info."""
    url = 'https://blockchain.info/block-height/%s?format=json' % height
    logger.info('url %s' % url)
    payload = json.loads(http.request('GET', url).data)
    return payload['blocks'][0]
def blockHashHex(number):
    """Return *number* as a 64-character zero-padded lowercase hex string.

    The original sliced ``hex(number)[2:-1]`` to strip Python 2's trailing
    'L'; under Python 3 (this script's shebang) that dropped the last hex
    digit instead.  format() produces no prefix or suffix to strip.
    """
    return format(number, '064x')
def main():
    """Fetch one block header by height and log its hash and JSON form."""
    parser = ArgumentParser()
    parser.add_argument('--startBlock', required=True, default=625332, type=int, help='block number to start fetching from')
    args = parser.parse_args()
    logger.info('startBlock: %s' % args.startBlock)
    height = args.startBlock
    # Two separate API calls: one for the hash, one for the full block JSON.
    realHead = get_hash_by_height(height)
    bhStr = serialize_header(height)
    logger.info("@@@ {0}: {1}".format(height, bhStr))
    logger.info("Block header: %s" % bhStr)
if __name__ == '__main__':
main()
|
# Copyright © 2019 Province of British Columbia
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Base Test Class to be used by test suites."""
def get_token_header():
    """Get the token header json."""
    return dict(alg='RS256', typ='JWT', kid='sbc-auth-cron-job')
def get_claims(app_request=None, role: str = 'account_holder', username: str = 'CP0001234',
               login_source: str = 'PASSCODE'):
    """Return a JWT claims dict carrying *role*, *username* and *login_source*.

    The issuer comes from the app's JWT_OIDC_ISSUER config when
    *app_request* is provided, otherwise a local demo realm is used.
    """
    if app_request:
        issuer = app_request.config['JWT_OIDC_ISSUER']
    else:
        issuer = 'http://localhost:8081/auth/realms/demo'
    return {
        'jti': 'a50fafa4-c4d6-4a9b-9e51-1e5e0d102878',
        'exp': 31531718745,
        'iat': 1531718745,
        'iss': issuer,
        'aud': 'sbc-auth-web',
        'sub': '15099883-3c3f-4b4c-a124-a1824d6cba84',
        'typ': 'Bearer',
        'realm_access': {'roles': [str(role)]},
        'preferred_username': username,
        'username': username,
        'loginSource': login_source,
    }
|
# trainToyOTflow.py
# training driver for the two-dimensional toy problems
import argparse
import os
import time
import datetime
import torch.optim as optim
import numpy as np
import math
import lib.toy_data as toy_data
import lib.utils as utils
from lib.utils import count_parameters
from src.plotter import plot4
from src.OTFlowProblem import *
import config
# Platform-dependent defaults: bigger batches / more iterations when a GPU
# is available; smaller ones for local CPU debugging.
cf = config.getconfig()
if cf.gpu: # if gpu on platform
    def_viz_freq = 100
    def_batch = 4096
    def_niter = 1500
else: # if no gpu on platform, assume debugging on a local cpu
    def_viz_freq = 100
    def_batch = 2048
    def_niter = 1000

# Command-line interface for the 2-D toy-problem trainer.
parser = argparse.ArgumentParser('OT-Flow')
parser.add_argument(
    '--data', choices=['swissroll', '8gaussians', 'pinwheel', 'circles', 'moons', '2spirals', 'checkerboard', 'rings'],
    type=str, default='8gaussians'
)
parser.add_argument("--nt" , type=int, default=8, help="number of time steps")
parser.add_argument("--nt_val", type=int, default=8, help="number of time steps for validation")
# alph: comma-separated weights for the three cost terms (L, C, R).
parser.add_argument('--alph' , type=str, default='1.0,100.0,5.0')
parser.add_argument('--m' , type=int, default=32)
parser.add_argument('--nTh' , type=int, default=2)
parser.add_argument('--niters' , type=int , default=def_niter)
parser.add_argument('--batch_size' , type=int , default=def_batch)
parser.add_argument('--val_batch_size', type=int , default=def_batch)
parser.add_argument('--lr' , type=float, default=0.1)
parser.add_argument("--drop_freq" , type=int , default=100, help="how often to decrease learning rate")
parser.add_argument('--weight_decay', type=float, default=0.0)
parser.add_argument('--lr_drop' , type=float, default=2.0)
parser.add_argument('--optim' , type=str , default='adam', choices=['adam'])
parser.add_argument('--prec' , type=str , default='single', choices=['single','double'], help="single or double precision")
parser.add_argument('--save' , type=str, default='experiments/cnf/toy')
parser.add_argument('--viz_freq', type=int, default=def_viz_freq)
parser.add_argument('--val_freq', type=int, default=1)
parser.add_argument('--gpu' , type=int, default=0)
parser.add_argument('--sample_freq', type=int, default=25)
args = parser.parse_args()
# Parse '1.0,100.0,5.0' into [1.0, 100.0, 5.0].
args.alph = [float(item) for item in args.alph.split(',')]

# get precision type
if args.prec =='double':
    prec = torch.float64
else:
    prec = torch.float32

# get timestamp for saving models
start_time = datetime.datetime.now().strftime("%Y_%m_%d_%H_%M_%S")

# logger
utils.makedirs(args.save)
logger = utils.get_logger(logpath=os.path.join(args.save, 'logs'), filepath=os.path.abspath(__file__))
logger.info("start time: " + start_time)
logger.info(args)

# Select the CUDA device when available, otherwise fall back to CPU.
device = torch.device('cuda:' + str(args.gpu) if torch.cuda.is_available() else 'cpu')
def compute_loss(net, x, nt):
    """Evaluate the OT-Flow objective on batch *x* using *nt* RK4 time steps.

    Returns (Jc, cs): the scalar training objective and its cost components
    (logged as L, C, R), weighted by net.alph.
    """
    Jc , cs = OTFlowProblem(x, net, [0,1], nt=nt, stepper="rk4", alph=net.alph)
    return Jc, cs
if __name__ == '__main__':
    torch.set_default_dtype(prec)
    # Move a tensor to the chosen precision and device.
    cvt = lambda x: x.type(prec).to(device, non_blocking=True)

    # neural network for the potential function Phi
    d = 2
    alph = args.alph
    nt = args.nt
    nt_val = args.nt_val
    nTh = args.nTh
    m = args.m
    net = Phi(nTh=nTh, m=args.m, d=d, alph=alph)
    net = net.to(prec).to(device)

    optim = torch.optim.Adam(net.parameters(), lr=args.lr, weight_decay=args.weight_decay ) # lr=0.04 good

    logger.info(net)
    logger.info("-------------------------")
    logger.info("DIMENSION={:} m={:} nTh={:} alpha={:}".format(d,m,nTh,alph))
    logger.info("nt={:} nt_val={:}".format(nt,nt_val))
    logger.info("Number of trainable parameters: {}".format(count_parameters(net)))
    logger.info("-------------------------")
    logger.info(str(optim)) # optimizer info
    logger.info("data={:} batch_size={:} gpu={:}".format(args.data, args.batch_size, args.gpu))
    logger.info("maxIters={:} val_freq={:} viz_freq={:}".format(args.niters, args.val_freq, args.viz_freq))
    logger.info("saveLocation = {:}".format(args.save))
    logger.info("-------------------------\n")

    end = time.time()
    best_loss = float('inf')
    bestParams = None

    # setup data [nSamples, d]
    # use one batch as the entire data set
    x0 = toy_data.inf_train_gen(args.data, batch_size=args.batch_size)
    x0 = cvt(torch.from_numpy(x0))
    x0val = toy_data.inf_train_gen(args.data, batch_size=args.val_batch_size)
    x0val = cvt(torch.from_numpy(x0val))

    # Column header for the per-iteration log lines below.
    log_msg = (
        '{:5s} {:6s} {:9s} {:9s} {:9s} {:9s} {:9s} {:9s} {:9s} {:9s} '.format(
            'iter', ' time','loss', 'L (L_2)', 'C (loss)', 'R (HJB)', 'valLoss', 'valL', 'valC', 'valR'
        )
    )
    logger.info(log_msg)

    time_meter = utils.AverageMeter()

    net.train()
    for itr in range(1, args.niters + 1):
        # train
        optim.zero_grad()
        loss, costs = compute_loss(net, x0, nt=nt)
        loss.backward()
        optim.step()
        time_meter.update(time.time() - end)
        log_message = (
            '{:05d} {:6.3f} {:9.3e} {:9.3e} {:9.3e} {:9.3e} '.format(
                itr, time_meter.val , loss, costs[0], costs[1], costs[2]
            )
        )
        # validate (val_freq defaults to 1, so best_params is set before
        # the first visualization below uses it)
        if itr % args.val_freq == 0 or itr == args.niters:
            with torch.no_grad():
                net.eval()
                test_loss, test_costs = compute_loss(net, x0val, nt=nt_val)
                # add to print message
                log_message += '   {:9.3e}  {:9.3e}  {:9.3e}  {:9.3e} '.format(
                    test_loss, test_costs[0], test_costs[1], test_costs[2]
                )
                # save best set of parameters
                if test_loss.item() < best_loss:
                    best_loss = test_loss.item()
                    best_costs = test_costs
                    utils.makedirs(args.save)
                    best_params = net.state_dict()
                    torch.save({
                        'args': args,
                        'state_dict': best_params,
                    }, os.path.join(args.save, start_time + '_{:}_alph{:}_{:}_m{:}_checkpt.pth'.format(args.data,int(alph[1]),int(alph[2]),m)))
                net.train()
        logger.info(log_message) # print iteration
        # create plots
        if itr % args.viz_freq == 0:
            with torch.no_grad():
                net.eval()
                # Temporarily swap in the best parameters for plotting,
                # then restore the current training state.
                curr_state = net.state_dict()
                net.load_state_dict(best_params)
                nSamples = 20000
                p_samples = cvt(torch.Tensor( toy_data.inf_train_gen(args.data, batch_size=nSamples) ))
                y = cvt(torch.randn(nSamples,d)) # sampling from the standard normal (rho_1)
                sPath = os.path.join(args.save, 'figs', start_time + '_{:04d}.png'.format(itr))
                plot4(net, p_samples, y, nt_val, sPath, doPaths=True, sTitle='{:s}  -  loss {:.2f} , C {:.2f} , alph {:.1f} {:.1f}  '
                    ' nt {:d}   m {:d}  nTh {:d}  '.format(args.data, best_loss, best_costs[1], alph[1], alph[2], nt, m, nTh))
                net.load_state_dict(curr_state)
                net.train()
        # shrink step size
        if itr % args.drop_freq == 0:
            for p in optim.param_groups:
                p['lr'] /= args.lr_drop
            print("lr: ", p['lr'])
        # resample data
        if itr % args.sample_freq == 0:
            # resample data [nSamples, d+1]
            logger.info("resampling")
            x0 = toy_data.inf_train_gen(args.data, batch_size=args.batch_size) # load data batch
            x0 = cvt(torch.from_numpy(x0)) # convert to torch, type and gpu
        end = time.time()
    logger.info("Training Time: {:} seconds".format(time_meter.sum))
    logger.info('Training has finished.  ' + start_time + '_{:}_alph{:}_{:}_m{:}_checkpt.pth'.format(args.data,int(alph[1]),int(alph[2]),m))
|
# Read how much money the user has in their wallet and show how many US
# dollars they can buy, at an exchange rate of 3.27 BRL per USD.
name = str(input("Qual é o seu nome? "))
carteira = float(input("Quanto dinheiro na carteira você tem, {}? ".format(name)))
DOLAR = 3.27  # exchange rate: BRL per USD
carteira_convertida = carteira / DOLAR
print("Olá {}, você tem {}R$ em sua carteira.".format(name, carteira))
# {:.2f} gives two decimal places; the original {:.2} meant two
# *significant* digits and truncated any amount of 10 dollars or more.
print("Você pode comprar até: {:.2f}$".format(carteira_convertida))
import time
import json
from elasticsearch import Elasticsearch, helpers
from controllers.logging import log_info, log_error
class EsClient(Elasticsearch):
    """Elasticsearch client with project-specific index/document helpers.

    Inherits from Elasticsearch (used for index management and bulk loads)
    and also holds a class-level ``connection`` used for document-level
    operations, mirroring the original design.
    """
    # Class attributes — evaluated once, at import time.  The settings file
    # is opened with a context manager (the original leaked the handle).
    with open('config/settings.json') as _settings_file:
        default_settings = json.load(_settings_file)
    es_host = default_settings.get('services')['elasticsearch']['host']
    connection = Elasticsearch(es_host)

    # Constructor
    def __init__(self, host=es_host):
        """Connect this client instance to *host* (defaults to configured host)."""
        super().__init__(hosts=[host])

    # Methods
    def is_alive(self):
        """Return True when the shared connection answers a ping."""
        return self.connection.ping()

    def es_load_defaults(self, discovery, roots, collections):
        """Create missing discovery/root indices and bulk-load default docs.

        Returns {"result": True} on success, {"result": False} on failure.
        NOTE(review): assumes ``roots`` has at least two entries.
        """
        try:
            if not self.indices.exists(discovery.get('_index')):
                log_info(f"Creating {discovery.get('_index')} index...")
                self.indices.create(index=discovery.get('_index'))
            for root in roots:
                if not self.indices.exists(root.get('_index')):
                    log_info(f"Creating {root.get('_index')} index...")
                    self.indices.create(index=root.get('_index'))
            # Attach the default collections to the first root document.
            root_to_update = roots[0]
            root_to_update.update({
                "_source": {
                    'collections': collections
                }
            })
            log_info(f"Loading data in discovery and root indices...")
            bulk_data = [discovery, root_to_update, roots[1]]
            helpers.bulk(self, bulk_data)
            return {
                "result": True
            }
        except Exception as e:
            log_error(e)
            return {
                "result": False
            }

    def get_docs(self, index: str):
        """Return up to 10 docs from *index* (sorted by _id) plus the total count."""
        try:
            res = self.connection.search(index=index, size=10, sort='_id')
            results = []
            for result in res['hits']['hits']:
                # Flatten _source and expose the document id alongside it.
                response = {}
                response.update(result['_source'])
                response.update({
                    'id': result['_id']
                })
                results.append(response)
            return {
                "data": results,
                "total": res['hits']['total']['value'],
            }
        except Exception as e:
            log_error(e)
            raise

    def get_doc(self, index: str, doc_id: str):
        """Return {'data': _source} for the document *doc_id* in *index*."""
        try:
            res = self.connection.get(index=index, id=doc_id)
            return {
                "data": res.get('_source'),
            }
        except Exception as e:
            log_error(e)
            raise

    def store_doc(self, index: str, data: object, doc_id=None):
        """Index *data* under *doc_id* (default: current epoch milliseconds).

        The original computed the default id at function-definition time,
        so every call without an explicit id reused the same timestamp and
        overwrote a single document; the None sentinel computes it per call.
        """
        try:
            if doc_id is None:
                doc_id = int(round(time.time() * 1000))
            res = self.connection.index(
                index=index,
                id=doc_id,
                body=data,
                refresh='wait_for'
            )
            return {
                "index": res['_index'],
                "id": res['_id'],
                "result": res['result']
            }
        except Exception as e:
            log_error(e)
            raise

    def store_docs(self, index: str, data: list):
        """Bulk-index *data* (each doc must carry an 'id' key) into *index*."""
        try:
            def yield_bulk_data(bulk_data):
                # Lazily adapt each document to the helpers.bulk action shape.
                for doc in bulk_data:
                    yield {
                        "_index": index,
                        "_id": doc['id'],
                        "_source": doc
                    }
            res = helpers.bulk(
                self,
                yield_bulk_data(data)
            )
            return {
                "result": res
            }
        except Exception as e:
            log_error(e)
            raise

    def delete_doc(self, index: str, doc_id: str):
        """Delete document *doc_id* from *index*; return the ES result fields."""
        try:
            res = self.connection.delete(index=index, id=doc_id)
            return {
                "index": res['_index'],
                "id": res['_id'],
                "result": res['result']
            }
        except Exception as e:
            log_error(e)
            raise

    def delete_doc_by_query(self, index: str, query: dict):
        """Delete all documents in *index* matching *query*."""
        try:
            res = self.connection.delete_by_query(index=index, body=query)
            return {
                "index": index,
                "result": res
            }
        except Exception as e:
            log_error(e)
            raise

    def update_doc(self, index: str, data: object, doc_id: str):
        """Partially update document *doc_id* in *index* with *data*."""
        try:
            res = self.connection.update(
                index=index,
                id=doc_id,
                body={
                    "doc": data
                },
                refresh='wait_for'
            )
            return {
                "index": res['_index'],
                "id": res['_id'],
                "result": res
            }
        except Exception as e:
            log_error(e)
            raise
|
import os
import json
import copy
import shutil
import numpy as np
from collections import defaultdict
dir_save = "./demo/combined_rec_vector_segments"
preds_root = "./demo/all_rec_vector_segments"

# Start from a clean output directory.
if os.path.isdir(dir_save):
    shutil.rmtree(dir_save)
os.makedirs(dir_save)

# Group prediction files by the base name before the first underscore.
fname_base_to_fnames_map = defaultdict(list)
for root, _, fnames in os.walk(preds_root):
    for fname in fnames:
        fname_base_to_fnames_map[fname.split("_")[0]].append(os.path.join(root, fname))

# Merge every group's per-segment results into one combined JSON file.
RESULT_KEYS = ("boxes", "vector_masks", "scores", "labels", "keypoints", "kp_boxes")
for fname_base, fnames in fname_base_to_fnames_map.items():
    res = {key: [] for key in RESULT_KEYS}
    for fname in fnames:
        with open(fname) as f:
            data = json.load(f)
        for key in RESULT_KEYS:
            res[key].extend(data[key])
    with open(os.path.join(dir_save, "%s_vector_results.json" % fname_base), "w") as f:
        json.dump(res, f)
|
import pytest_spec.basic as basic
from pytest_mock import mocker
def test_loop_define(mocker):
    """loop_sample_define should call loop_define three times and hello twice.

    side_effect [2, 3, 0] presumably drives the loop until loop_define
    returns 0 — confirm against pytest_spec.basic.loop_sample_define.
    """
    mocker.patch.object(basic, "loop_define", side_effect=[2, 3, 0])
    mocker.patch.object(basic, "hello")
    basic.loop_sample_define()
    basic.loop_define.assert_any_call(1)
    assert len(basic.loop_define.mock_calls) == 3
    assert len(basic.hello.mock_calls) == 2
    # Every invocation passes the constant argument 1.
    assert basic.loop_define.mock_calls == [
        mocker.call(1),
        mocker.call(1),
        mocker.call(1),
    ]
|
from django.test import TestCase
from tools.testing import TestData
from logistics.models import StockWish
from internalise.models import InternaliseDocument, InternaliseLine, DataValidityError
from stock.models import Stock
from stock.stocklabel import OrderLabel
from order.models import OrderLine
class InternaliseTests(TestCase, TestData):
    """Tests for taking articles out of stock via InternaliseDocument.

    Covers plain (unlabeled) stock, order-labeled stock, mixed documents,
    exact depletion of stock, and the DataValidityError paths when more is
    internalised than is available.
    """
    def setUp(self):
        # TestData supplies users, article types and the create_* factory helpers.
        self.setup_base_data()
    def test_creation_function_stock_single_article(self):
        """Internalising part of plain stock decrements it and records one line."""
        StockWish.create_stock_wish(user_modified=self.user_1, articles_ordered=[[self.articletype_1, 20],
                                                                                 [self.articletype_2, 20]])
        # Supplier orders + packing documents actually bring articles into stock.
        self.create_suporders(article_1=5)
        self.create_packingdocuments(article_1=4)
        st = Stock.objects.get(article=self.articletype_1, labeltype__isnull=True)
        IN_STOCK = 4
        self.assertEqual(st.count, IN_STOCK)
        cost = st.book_value
        INTERNALISE_ART_1 = 2
        InternaliseDocument.create_internal_products_document(user=self.user_1,
                                                              articles_with_information=[(self.articletype_1,
                                                                                          INTERNALISE_ART_1,
                                                                                          None, None)],
                                                              memo="Foo")
        st = Stock.objects.get(article=self.articletype_1, labeltype__isnull=True)
        self.assertEqual(st.count, IN_STOCK-INTERNALISE_ART_1)
        doc = InternaliseDocument.objects.get()
        self.assertEqual(doc.memo, "Foo")
        line = InternaliseLine.objects.get()
        self.assertEqual(line.internalise_document, doc)
        # Cost is copied from the stock's book value at internalisation time.
        self.assertEqual(line.cost, cost)
        self.assertEqual(line.article_type, self.articletype_1)
        self.assertFalse(line.label_type)
        self.assertFalse(line.identifier)
        self.assertEqual(line.count, INTERNALISE_ART_1)
    def test_creation_function_stock_two_articles(self):
        """One document over two article types yields two independent lines."""
        IN_STOCK_ART_1 = 6
        IN_STOCK_ART_2 = 5
        self.create_stockwish(article_1=IN_STOCK_ART_1, article_2=IN_STOCK_ART_2)
        self.create_suporders(article_1=IN_STOCK_ART_1, article_2=IN_STOCK_ART_2)
        self.create_packingdocuments(article_1=IN_STOCK_ART_1, article_2=IN_STOCK_ART_2)
        st_1 = Stock.objects.get(article=self.articletype_1)
        st_2 = Stock.objects.get(article=self.articletype_2)
        self.assertEqual(st_1.count, IN_STOCK_ART_1)
        self.assertEqual(st_2.count, IN_STOCK_ART_2)
        INTERN_ART_1 = 3
        INTERN_ART_2 = 4
        cost_1 = st_1.book_value
        cost_2 = st_2.book_value
        InternaliseDocument.create_internal_products_document(user=self.user_1,
                                                              articles_with_information=[
                                                                  (self.articletype_1, INTERN_ART_1, None, None),
                                                                  (self.articletype_2, INTERN_ART_2, None, None)],
                                                              memo="Foo2")
        doc = InternaliseDocument.objects.get()
        self.assertEqual(InternaliseLine.objects.count(), 2)
        il_1 = InternaliseLine.objects.get(article_type=self.articletype_1)
        il_2 = InternaliseLine.objects.get(article_type=self.articletype_2)
        self.assertEqual(il_1.cost, cost_1)
        self.assertEqual(il_2.cost, cost_2)
        self.assertEqual(il_1.internalise_document, doc)
        self.assertEqual(il_2.internalise_document, doc)
        self.assertEqual(il_1.count, INTERN_ART_1)
        self.assertEqual(il_2.count, INTERN_ART_2)
        self.assertFalse(il_1.label_type)
        self.assertFalse(il_1.identifier)
        self.assertFalse(il_2.label_type)
        self.assertFalse(il_2.identifier)
    def test_creation_function_mixed(self):
        """Mixing unlabeled stock with order-labeled stock in one document."""
        IN_STOCK_ART_1 = 6
        CUST_ORDER_1 = 5
        self.create_stockwish(article_1=IN_STOCK_ART_1, article_2=0)
        order = self.create_custorders(article_1=CUST_ORDER_1, article_2=0)
        self.create_suporders(article_1=IN_STOCK_ART_1+CUST_ORDER_1, article_2=0)
        self.create_packingdocuments(article_1=IN_STOCK_ART_1+CUST_ORDER_1, article_2=0)
        st_1 = Stock.objects.get(labeltype__isnull=True)
        st_2 = Stock.objects.get(labeltype="Order")
        cost_1 = st_1.book_value
        cost_2 = st_2.book_value
        self.assertEqual(st_1.count, IN_STOCK_ART_1)
        self.assertEqual(st_2.count, CUST_ORDER_1)
        INTERN_ART_1 = 3
        INTERN_ART_2 = 4
        # All customer-ordered units start in state 'A' (active/awaiting).
        ols = OrderLine.objects.filter(state='A', wishable__sellabletype__articletype=self.articletype_1)
        self.assertEqual(len(ols), CUST_ORDER_1)
        InternaliseDocument.create_internal_products_document(user=self.user_1,
                                                              articles_with_information=[
                                                                  (self.articletype_1, INTERN_ART_1, None, None),
                                                                  (self.articletype_1, INTERN_ART_2, OrderLabel, order.id)],
                                                              memo="Foo2")
        st_1 = Stock.objects.get(labeltype__isnull=True)
        self.assertEqual(st_1.count, IN_STOCK_ART_1-INTERN_ART_1)
        st_2 = Stock.objects.get(labeltype="Order")
        self.assertEqual(st_2.count, CUST_ORDER_1-INTERN_ART_2)
        doc = InternaliseDocument.objects.get()
        self.assertEqual(InternaliseLine.objects.count(), 2)
        il_1 = InternaliseLine.objects.get(article_type=self.articletype_1, label_type__isnull=True)
        il_2 = InternaliseLine.objects.get(article_type=self.articletype_1, label_type__isnull=False)
        self.assertEqual(il_1.cost, cost_1)
        self.assertEqual(il_2.cost, cost_2)
        self.assertEqual(il_1.internalise_document, doc)
        self.assertEqual(il_2.internalise_document, doc)
        self.assertEqual(il_1.count, INTERN_ART_1)
        self.assertEqual(il_2.count, INTERN_ART_2)
        self.assertFalse(il_1.label_type)
        self.assertFalse(il_1.identifier)
        self.assertEqual(il_2.label_type, "Order")
        self.assertEqual(il_2.identifier, order.id)
        # Internalising labeled stock consumes the matching order lines.
        ols = OrderLine.objects.filter(state='A', wishable__sellabletype__articletype=self.articletype_1)
        self.assertEqual(len(ols), CUST_ORDER_1-INTERN_ART_2)
    def test_just_enough_articles(self):
        """Internalising exactly the stocked amount deletes the stock row."""
        IN_STOCK_ART_1 = 6
        self.create_stockwish(article_1=IN_STOCK_ART_1, article_2=0)
        self.create_suporders(article_1=IN_STOCK_ART_1, article_2=0)
        self.create_packingdocuments(article_1=IN_STOCK_ART_1, article_2=0)
        st = Stock.objects.get()
        self.assertEqual(st.count, IN_STOCK_ART_1)
        InternaliseDocument.create_internal_products_document(user=self.user_1,
                                                              articles_with_information=[
                                                                  [self.articletype_1, IN_STOCK_ART_1, None, None]
                                                              ],
                                                              memo="Foo3")
        stock = Stock.objects.all()
        self.assertEqual(len(stock), 0)
    def test_just_enough_articles_labeled(self):
        """Fully internalising labeled stock flips all order lines to state 'I'."""
        CUST_ORDERED_ART_1 = 6
        order = self.create_custorders(article_1=CUST_ORDERED_ART_1, article_2=0, othercost_1=0, othercost_2=0)
        self.create_suporders(article_1=CUST_ORDERED_ART_1, article_2=0)
        self.create_packingdocuments(article_1=CUST_ORDERED_ART_1, article_2=0)
        st = Stock.objects.get()
        self.assertEqual(st.count, CUST_ORDERED_ART_1)
        ols = OrderLine.objects.filter(state='A')
        self.assertEqual(len(ols), CUST_ORDERED_ART_1)
        InternaliseDocument.create_internal_products_document(user=self.user_1,
                                                              articles_with_information=[
                                                                  [self.articletype_1, CUST_ORDERED_ART_1,
                                                                   OrderLabel, order.id]],
                                                              memo="Foo3")
        stock = Stock.objects.all()
        self.assertEqual(len(stock), 0)
        ols = OrderLine.objects.filter(state='I')
        self.assertEqual(len(ols), CUST_ORDERED_ART_1)
    def test_too_many_articles_labeled(self):
        """Requesting one more labeled unit than stocked must raise."""
        CUST_ORDERED_ART_1 = 6
        order = self.create_custorders(article_1=CUST_ORDERED_ART_1, article_2=0)
        self.create_suporders(article_1=CUST_ORDERED_ART_1, article_2=0)
        self.create_packingdocuments(article_1=CUST_ORDERED_ART_1, article_2=0)
        st = Stock.objects.get()
        self.assertEqual(st.count, CUST_ORDERED_ART_1)
        with self.assertRaises(DataValidityError):
            InternaliseDocument.create_internal_products_document(user=self.user_1,
                                                                  articles_with_information=[
                                                                      [self.articletype_1, CUST_ORDERED_ART_1+1,
                                                                       OrderLabel, order.id]],
                                                                  memo="Foo3")
    def test_too_many_articles_labeled_loose(self):
        """Two labeled lines that only jointly exceed stock must also raise."""
        CUST_ORDERED_ART_1 = 6
        order = self.create_custorders(article_1=CUST_ORDERED_ART_1, article_2=0)
        self.create_suporders(article_1=CUST_ORDERED_ART_1, article_2=0)
        self.create_packingdocuments(article_1=CUST_ORDERED_ART_1, article_2=0)
        st = Stock.objects.get()
        self.assertEqual(st.count, CUST_ORDERED_ART_1)
        with self.assertRaises(DataValidityError):
            InternaliseDocument.create_internal_products_document(user=self.user_1,
                                                                  articles_with_information=[
                                                                      [self.articletype_1, CUST_ORDERED_ART_1-2,
                                                                       OrderLabel, order.id],
                                                                      [self.articletype_1, 3, OrderLabel, order.id]],
                                                                  memo="Foo3")
    def test_too_many_articles_stock(self):
        """Requesting one more unlabeled unit than stocked must raise."""
        CUST_ORDERED_ART_1 = 6
        self.create_stockwish(article_1=CUST_ORDERED_ART_1, article_2=0)
        self.create_suporders(article_1=CUST_ORDERED_ART_1, article_2=0)
        self.create_packingdocuments(article_1=CUST_ORDERED_ART_1, article_2=0)
        st = Stock.objects.get()
        self.assertEqual(st.count, CUST_ORDERED_ART_1)
        with self.assertRaises(DataValidityError):
            InternaliseDocument.create_internal_products_document(user=self.user_1,
                                                                  articles_with_information=[
                                                                      [self.articletype_1, CUST_ORDERED_ART_1+1,
                                                                       None, None]],
                                                                  memo="Foo3")
    def test_too_many_articles_stock_loose(self):
        """Two unlabeled lines that only jointly exceed stock must also raise."""
        CUST_ORDERED_ART_1 = 6
        self.create_stockwish(article_1=CUST_ORDERED_ART_1, article_2=0)
        self.create_suporders(article_1=CUST_ORDERED_ART_1, article_2=0)
        self.create_packingdocuments(article_1=CUST_ORDERED_ART_1, article_2=0)
        st = Stock.objects.get()
        self.assertEqual(st.count, CUST_ORDERED_ART_1)
        with self.assertRaises(DataValidityError):
            InternaliseDocument.create_internal_products_document(user=self.user_1,
                                                                  articles_with_information=[
                                                                      [self.articletype_1, CUST_ORDERED_ART_1-2,
                                                                       None, None],
                                                                      [self.articletype_1, 3, None, None]],
                                                                  memo="Foo3")
|
# 3.uzdevums
sentence = input('Enter sentence ')
# Reverse the letters of every word, keep the word order, then capitalize the
# whole sentence (first letter upper-case, the rest lower-case).
flipped_words = [word[::-1] for word in sentence.split()]
print(" ".join(flipped_words).capitalize())
# Same transformation expressed as a single one-liner.
print(" ".join([w[::-1] for w in sentence.split()]).capitalize())
from .optimize_ar import Optimize_Acquisition_Rate
|
from setuptools import find_packages, setup
setup(
name='tmc_summarizer',
packages=find_packages(),
version='0.1.1',
description='Process raw TMC data into more functional formats',
author='Aaron Fraint, AICP',
entry_points="""
[console_scripts]
tmc=tmc_summarizer.cli:main
""",
)
|
from time import sleep
#https://www.youtube.com/watch?v=hWrFEU_605g&ab_channel=LinusTechTips
from PyQt5 import QtCore, QtGui, QtWidgets
from PyQt5.QtCore import QThread, QObject, pyqtSignal, QRunnable, QThreadPool
from pytube import YouTube
class Runnable(QRunnable):
    """Thread-pool worker that downloads the chosen YouTube stream.

    Communicates with the UI exclusively through the module-level globals
    that Ui_MainWindow.setupUi installs (console, URL, PATH, progress,
    radio1..radio3).

    NOTE(review): the worker touches Qt widgets (console.append, progress)
    from a non-GUI thread, which Qt does not guarantee to be safe — a
    signal/slot bridge would be the robust design; left as-is here.
    """
    def __init__(self, n):
        super().__init__()
        # Index assigned by the spawning loop; not used by run() itself.
        self.n = n
    def run(self):
        global console, URL, PATH, progress, radio1, radio2, radio3
        try:
            if radio1.isChecked():
                print('downloading 1080p')
                yt = YouTube(URL)
                itag = 137
                console.append('<fetching_media>')
                console.append('<.mp4 1080p>')
                ys = yt.streams.get_by_itag(itag)
            elif radio2.isChecked():
                print('downloading 720p')
                yt = YouTube(URL)
                itag = 136
                console.append('<fetching_media>')
                console.append('<.mp4 720p>')
                ys = yt.streams.get_by_itag(itag)
            elif radio3.isChecked():
                print('downloading .webm 50kbps')
                yt = YouTube(URL)
                itag = 249
                console.append('<fetching_media>')
                console.append('<.webm 50kbps>')
                ys = yt.streams.get_by_itag(itag)
            else:
                print('error')
                console.append('<ERROR>')
                console.append('<Please_select_file_type>')
                # BUG FIX: previously control fell through to the download
                # path with `ys` unbound, raising NameError in the worker.
                return
        except Exception:
            # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
            # are not swallowed; pytube/network failures land here.
            print('error')
            console.append('<ERROR>')
            console.append('<Please_enter_valid_url>')
        else:
            Ui_MainWindow.fifty(Ui_MainWindow())
            console.append('<connected_to_server>')
            ys.download(PATH)
            Ui_MainWindow.hundred(Ui_MainWindow())
            # BUG FIX: removed `self.finished.emit()` — QRunnable is not a
            # QObject and defines no `finished` signal, so that call raised
            # AttributeError right after every successful download.
class Ui_MainWindow(object):
    """Qt-Designer-style UI container for the downloader window.

    setupUi builds the widgets and deliberately publishes several of them as
    module-level globals so the Runnable worker thread can reach them.
    """
    def setupUi(self, MainWindow):
        """Build all widgets, export them as globals, and wire the button."""
        MainWindow.setObjectName("MainWindow")
        MainWindow.resize(472, 305)
        font = QtGui.QFont()
        font.setFamily("Small Fonts")
        font.setBold(False)
        font.setWeight(50)
        MainWindow.setFont(font)
        MainWindow.setStyleSheet("background-color: qlineargradient(spread:pad, x1:0, y1:0, x2:1, y2:1, stop:0 rgba(160, 245, 190, 255), stop:1 rgba(83, 44, 255, 255))")
        self.centralwidget = QtWidgets.QWidget(MainWindow)
        self.centralwidget.setObjectName("centralwidget")
        self.label = QtWidgets.QLabel(self.centralwidget)
        self.label.setGeometry(QtCore.QRect(100, 10, 271, 51))
        font = QtGui.QFont()
        font.setFamily("Trebuchet MS")
        font.setPointSize(36)
        self.label.setFont(font)
        self.label.setAutoFillBackground(False)
        self.label.setObjectName("label")
        self.pushButton = QtWidgets.QPushButton(self.centralwidget)
        self.pushButton.setGeometry(QtCore.QRect(20, 190, 431, 31))
        font = QtGui.QFont()
        font.setFamily("Trebuchet MS")
        font.setPointSize(12)
        font.setBold(False)
        font.setWeight(50)
        self.pushButton.setFont(font)
        self.pushButton.setObjectName("pushButton")
        # lineEdit: URL input; lineEdit_2: download destination path.
        self.lineEdit = QtWidgets.QLineEdit(self.centralwidget)
        self.lineEdit.setGeometry(QtCore.QRect(20, 70, 431, 21))
        self.lineEdit.setStyleSheet("background-color:qlineargradient(spread:pad, x1:0, y1:0, x2:1, y2:0, stop:0 rgba(171, 160, 245, 255), stop:1 rgba(255, 255, 255, 255))")
        self.lineEdit.setObjectName("lineEdit")
        self.lineEdit_2 = QtWidgets.QLineEdit(self.centralwidget)
        self.lineEdit_2.setGeometry(QtCore.QRect(20, 90, 431, 21))
        self.lineEdit_2.setStyleSheet("background-color:qlineargradient(spread:pad, x1:0, y1:0, x2:1, y2:0, stop:0 rgba(171, 160, 245, 255), stop:1 rgba(255, 255, 255, 255))")
        self.lineEdit_2.setObjectName("lineEdit_2")
        self.label_4 = QtWidgets.QLabel(self.centralwidget)
        self.label_4.setGeometry(QtCore.QRect(280, 120, 171, 16))
        self.label_4.setStyleSheet("background-color:qlineargradient(spread:pad, x1:0, y1:0, x2:1, y2:0, stop:0 rgba(171, 160, 245, 255), stop:1 rgba(255, 255, 255, 255))")
        self.label_4.setObjectName("label_4")
        self.label_5 = QtWidgets.QLabel(self.centralwidget)
        self.label_5.setGeometry(QtCore.QRect(280, 160, 171, 16))
        self.label_5.setStyleSheet("background-color:qlineargradient(spread:pad, x1:0, y1:0, x2:1, y2:0, stop:0 rgba(171, 160, 245, 255), stop:1 rgba(255, 255, 255, 255))")
        self.label_5.setObjectName("label_5")
        self.label_6 = QtWidgets.QLabel(self.centralwidget)
        self.label_6.setGeometry(QtCore.QRect(280, 140, 171, 16))
        self.label_6.setStyleSheet("background-color:qlineargradient(spread:pad, x1:0, y1:0, x2:1, y2:0, stop:0 rgba(171, 160, 245, 255), stop:1 rgba(255, 255, 255, 255))")
        self.label_6.setObjectName("label_6")
        self.textBrowser = QtWidgets.QTextBrowser(self.centralwidget)
        self.textBrowser.setGeometry(QtCore.QRect(120, 120, 151, 58))
        self.textBrowser.setStyleSheet("background-color: rgb(255, 255, 255)")
        self.textBrowser.setObjectName("textBrowser")
        self.progressBar = QtWidgets.QProgressBar(self.centralwidget)
        self.progressBar.setGeometry(QtCore.QRect(20, 230, 431, 16))
        self.progressBar.setStyleSheet("background-color:qlineargradient(spread:pad, x1:0, y1:0, x2:1, y2:0, stop:0 rgba(171, 160, 245, 255), stop:1 rgba(255, 255, 255, 255))")
        self.progressBar.setProperty("value", 0)
        self.progressBar.setObjectName("progressBar")
        # Three mutually-exclusive quality choices; 1080p checked by default.
        self.radioButton = QtWidgets.QRadioButton(self.centralwidget)
        self.radioButton.setChecked(True)
        self.radioButton.setGeometry(QtCore.QRect(20, 160, 91, 16))
        self.radioButton.setStyleSheet("background-color:qlineargradient(spread:pad, x1:0, y1:0, x2:1, y2:0, stop:0 rgba(171, 160, 245, 255), stop:1 rgba(255, 255, 255, 255))")
        self.radioButton.setObjectName("radioButton")
        self.radioButton_2 = QtWidgets.QRadioButton(self.centralwidget)
        self.radioButton_2.setGeometry(QtCore.QRect(20, 140, 91, 16))
        self.radioButton_2.setStyleSheet("background-color:qlineargradient(spread:pad, x1:0, y1:0, x2:1, y2:0, stop:0 rgba(171, 160, 245, 255), stop:1 rgba(255, 255, 255, 255))")
        self.radioButton_2.setObjectName("radioButton_2")
        self.radioButton_3 = QtWidgets.QRadioButton(self.centralwidget)
        self.radioButton_3.setGeometry(QtCore.QRect(20, 120, 91, 16))
        self.radioButton_3.setStyleSheet("background-color:qlineargradient(spread:pad, x1:0, y1:0, x2:1, y2:0, stop:0 rgba(171, 160, 245, 255), stop:1 rgba(255, 255, 255, 255))")
        self.radioButton_3.setObjectName("radioButton_3")
        MainWindow.setCentralWidget(self.centralwidget)
        self.menubar = QtWidgets.QMenuBar(MainWindow)
        self.menubar.setGeometry(QtCore.QRect(0, 0, 472, 21))
        self.menubar.setObjectName("menubar")
        MainWindow.setMenuBar(self.menubar)
        self.statusbar = QtWidgets.QStatusBar(MainWindow)
        self.statusbar.setObjectName("statusbar")
        MainWindow.setStatusBar(self.statusbar)
        self.retranslateUi(MainWindow)
        QtCore.QMetaObject.connectSlotsByName(MainWindow)
        # Expose widgets as module globals for the Runnable worker thread.
        global title, console, progress, radio1, radio2, radio3, button, length, views
        views = self.label_5
        length = self.label_6
        button = self.pushButton
        radio1 = self.radioButton
        radio2 = self.radioButton_2
        radio3 = self.radioButton_3
        progress = self.progressBar
        title = self.label_4
        console = self.textBrowser
        button.clicked.connect(self.start_threading)
    def fifty(self):
        """Set the progress bar to 50% (called when the download begins)."""
        global progress, title, length, views
        print('test')
        progress.setProperty("value", 50)
    def hundred(self):
        """Set the progress bar to 100% and relabel the button when done."""
        global progress, button
        button.setText('<DONE>')
        print('test2')
        progress.setProperty("value", 100)
    def start_threading(self):
        """Read URL/path from the line edits and launch the download worker."""
        global console, URL, PATH, radio1
        PATH = self.lineEdit_2.text()
        URL = self.lineEdit.text()
        console.clear()
        pool = QThreadPool.globalInstance()
        # Single worker; the loop shape allows spawning more if ever needed.
        for i in range(1):
            runnable = Runnable(i)
            pool.start(runnable)
            print('1')
    def retranslateUi(self, MainWindow):
        """Apply all user-visible strings (standard Qt translation hook)."""
        _translate = QtCore.QCoreApplication.translate
        MainWindow.setWindowTitle(_translate("MainWindow", "MainWindow"))
        self.label.setText(_translate("MainWindow", "downy v0.01"))
        self.pushButton.setText(_translate("MainWindow", "download"))
        self.lineEdit.setText(
            _translate("MainWindow", "Enter URL"))
        self.lineEdit_2.setText(
            _translate("MainWindow", "c:/videos"))
        self.label_4.setText(_translate("MainWindow", " TITLE:"))
        self.label_5.setText(_translate("MainWindow", " VIEWS:"))
        self.label_6.setText(_translate("MainWindow", " LENGTH:"))
        self.textBrowser.setHtml(_translate("MainWindow",
                                            "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n"
                                            "<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\n"
                                            "p, li { white-space: pre-wrap; }\n"
                                            "</style></head><body style=\" font-family:\'MS Shell Dlg 2\'; font-size:8.25pt; font-weight:400; font-style:normal;\">\n"
                                            "<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><console></p></body></html>"))
        # NOTE(review): radioButton_3 is labelled "mp3 50kbps" but the worker
        # downloads itag 249 (.webm audio) for it — label/behavior mismatch
        # to confirm with the author.
        self.radioButton.setText(_translate("MainWindow", "1080p mp4"))
        self.radioButton_2.setText(_translate("MainWindow", "720p mp4"))
        self.radioButton_3.setText(_translate("MainWindow", "mp3 50kbps"))
if __name__ == "__main__":
    import sys
    # Standard Qt bootstrap: build the window, show it, enter the event loop.
    app = QtWidgets.QApplication(sys.argv)
    MainWindow = QtWidgets.QMainWindow()
    ui = Ui_MainWindow()
    ui.setupUi(MainWindow)
    MainWindow.show()
    sys.exit(app.exec_())
|
from cloudant import Cloudant
from flask import Flask, render_template, request, jsonify
import atexit
import cf_deployment_tracker
import os
import json
# Emit Bluemix deployment event
cf_deployment_tracker.track()
app = Flask(__name__)
# Name of the Cloudant database this app would use.
db_name = 'mydb'
# Lazily-initialized Cloudant handles; remain None unless a connection is made
# elsewhere (shutdown() only disconnects if client was set).
client = None
db = None
# On Bluemix, get the port number from the environment variable PORT
# When running this app on the local machine, default the port to 8000
port = int(os.getenv('PORT', 8000))
@app.route('/')
@app.route("/index")
def index():
    """Serve the landing page for both / and /index."""
    # BUG FIX: the file imports `render_template` by name and never imports
    # the `flask` module itself, so `flask.render_template` raised NameError
    # on every request; call the imported name directly.
    return render_template('index.html')
@app.route('/predict', methods=['POST'])
def make_prediction():
    """Segment handwritten symbols from an uploaded image, classify each one,
    assemble the arithmetic expression, evaluate it, and render the result.

    NOTE(review): this function uses cv2, np, pd and the global `model`, none
    of which are imported/defined in this file's visible imports (`model` is
    only bound under the __main__ guard) — confirm they exist in the real
    deployment or requests will fail with NameError.
    """
    if(request.method == 'POST'):
        file = request.files['image'].read()
        if not file:
            return render_template('index.html', label="No file")
        image = cv2.imdecode(np.fromstring(file, np.uint8), cv2.IMREAD_UNCHANGED)
        gray=cv2.cvtColor(image,cv2.COLOR_BGR2GRAY)
        edged = cv2.Canny(image, 10, 250)
        # 3-tuple unpacking is the OpenCV 3.x findContours API — presumably
        # the deployment pins OpenCV 3; OpenCV 2/4 return 2 values. Verify.
        (_,cnts, _) = cv2.findContours(edged.copy(), cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
        idx = 0
        def sort_contours(cnts, method="left-to-right"):
            # initialize the reverse flag and sort index
            reverse = False
            i = 0
            # handle if we need to sort in reverse
            if method == "right-to-left" or method == "bottom-to-top":
                reverse = True
            # handle if we are sorting against the y-coordinate rather than
            # the x-coordinate of the bounding box
            if method == "top-to-bottom" or method == "bottom-to-top":
                i = 1
            # construct the list of bounding boxes and sort them from top to
            # bottom
            boundingBoxes = [cv2.boundingRect(c) for c in cnts]
            (cnts, boundingBoxes) = zip(*sorted(zip(cnts, boundingBoxes),
                                                key=lambda b:b[1][i], reverse=reverse))
            # return the list of sorted contours and bounding boxes
            return cnts
        # Left-to-right order so the symbols read as a valid expression.
        cnts = sort_contours(cnts)
        # Crop every sufficiently large contour to a numbered .jpg on disk
        # (working-directory side effect; files are reread just below).
        for c in cnts:
            x,y,w,h = cv2.boundingRect(c)
            if w>50 and h>50:
                idx+=1
                new_img=image[y:y+h,x:x+w]
                cv2.imwrite(str(idx) + '.jpg', new_img)
        #cv2.imshow("im",image)
        cv2.waitKey(0)
        # Re-load each crop, resize to the model's 45x45 input, flatten.
        features = []
        for i in range(1, idx+1):
            image = cv2.imread(str(i)+'.jpg', 0)
            resized = cv2.resize(image, (45, 45), interpolation = cv2.INTER_AREA)
            feature = np.array(resized).flatten()
            features.append(feature)
        features = pd.DataFrame(features)
        predictions = model.predict(features)
        myListInitial = predictions.tolist()
        # Class labels '11'..'14' encode the four operators.
        myList = []
        for var in myListInitial:
            if var == '11':
                myList.append('+')
            elif var == '12':
                myList.append('-')
            elif var == '13':
                myList.append('*')
            elif var == '14':
                myList.append('/')
            else:
                myList.append(var)
        str1 = ''.join(myList)
        # SECURITY: eval() on a string assembled from model output of an
        # uploaded image — effectively untrusted input. Flagged for review;
        # a restricted arithmetic parser (e.g. ast.literal_eval-based) is safer.
        ans = eval(str1)
        return render_template('index.html', label=ans)
@atexit.register
def shutdown():
    """Close the Cloudant connection at interpreter exit, if one was opened."""
    if not client:
        return
    client.disconnect()
if __name__ == '__main__':
    # NOTE(review): `joblib` is never imported anywhere in this file, so this
    # line raises NameError at startup unless joblib is injected elsewhere —
    # confirm and add the missing import in the real project.
    model = joblib.load('modelKaggle1.pkl')
    # NOTE(review): debug=True exposes the Werkzeug debugger; disable for
    # any non-local deployment.
    app.run(host='0.0.0.0', port=port, debug=True)
|
import logging
import curses
from core.config import theme
logger = logging.getLogger('colors')
# Populated by init(): theme colour name -> curses color-pair attribute.
colors = {}
def init():
    """Register every theme colour pair with curses and cache the resulting
    color-pair attributes in the module-level `colors` dict.

    Must be called after curses has been initialised (curses.initscr /
    start_color), since it calls curses.init_pair.
    """
    # Pair 0 is reserved by curses, so numbering starts at 1.
    for pair_id, (name, (fg, bg)) in enumerate(theme['colors'].items(), start=1):
        curses.init_pair(pair_id, fg, bg)
        colors[name] = curses.color_pair(pair_id)
|
#
# BitBake Graphical GTK User Interface
#
# Copyright (C) 2011-2012 Intel Corporation
#
# Authored by Joshua Lock <josh@linux.intel.com>
# Authored by Dongxiao Xu <dongxiao.xu@intel.com>
# Authored by Shane Wang <shane.wang@intel.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
import gtk
import os
from bb.ui.crumbs.hobwidget import HobInfoButton, HobButton, HobAltButton
"""
The following are convenience classes for implementing GNOME HIG compliant
BitBake GUI's
In summary: spacing = 12px, border-width = 6px
"""
class SettingsUIHelper():
    """Mixin of factory helpers for building GNOME-HIG-compliant settings
    widgets (labels, spinners, combos, entries), each paired with an optional
    HobInfoButton tooltip. Host classes pass themselves as the info button's
    parent via `self`.
    """
    def gen_label_widget(self, content):
        """Return a shown, left/top-aligned gtk.Label rendering *content* as markup."""
        label = gtk.Label()
        label.set_alignment(0, 0)
        label.set_markup(content)
        label.show()
        return label
    def gen_label_info_widget(self, content, tooltip):
        """Return a table holding a markup label plus an info (tooltip) button."""
        table = gtk.Table(1, 10, False)
        label = self.gen_label_widget(content)
        info = HobInfoButton(tooltip, self)
        table.attach(label, 0, 1, 0, 1, xoptions=gtk.FILL)
        table.attach(info, 1, 2, 0, 1, xoptions=gtk.FILL, xpadding=10)
        return table
    def gen_spinner_widget(self, content, lower, upper, tooltip=""):
        """Return (hbox, spinner) for an integer value clamped to [lower, upper],
        initialised to *content*, with an info button alongside."""
        hbox = gtk.HBox(False, 12)
        adjust = gtk.Adjustment(value=content, lower=lower, upper=upper, step_incr=1)
        spinner = gtk.SpinButton(adjustment=adjust, climb_rate=1, digits=0)
        spinner.set_value(content)
        hbox.pack_start(spinner, expand=False, fill=False)
        info = HobInfoButton(tooltip, self)
        hbox.pack_start(info, expand=False, fill=False)
        hbox.show_all()
        return hbox, spinner
    def gen_combo_widget(self, curr_item, all_item, tooltip=""):
        """Return (hbox, combo) listing *all_item*, pre-selecting *curr_item*."""
        hbox = gtk.HBox(False, 12)
        combo = gtk.combo_box_new_text()
        hbox.pack_start(combo, expand=False, fill=False)
        index = 0
        for item in all_item or []:
            combo.append_text(item)
            if item == curr_item:
                combo.set_active(index)
            index += 1
        info = HobInfoButton(tooltip, self)
        hbox.pack_start(info, expand=False, fill=False)
        hbox.show_all()
        return hbox, combo
    def entry_widget_select_path_cb(self, action, parent, entry):
        """Open a folder-chooser dialog and copy the chosen path into *entry*."""
        dialog = gtk.FileChooserDialog("", parent,
                                       gtk.FILE_CHOOSER_ACTION_SELECT_FOLDER)
        text = entry.get_text()
        # Start browsing from the entry's current path, or cwd if it is empty.
        dialog.set_current_folder(text if len(text) > 0 else os.getcwd())
        button = dialog.add_button("Cancel", gtk.RESPONSE_NO)
        HobAltButton.style_button(button)
        button = dialog.add_button("Open", gtk.RESPONSE_YES)
        HobButton.style_button(button)
        response = dialog.run()
        if response == gtk.RESPONSE_YES:
            path = dialog.get_filename()
            entry.set_text(path)
        dialog.destroy()
    def gen_entry_widget(self, content, parent, tooltip="", need_button=True):
        """Return (hbox, entry) pre-filled with *content*; when *need_button*
        is True, include a browse button wired to the folder-chooser callback."""
        hbox = gtk.HBox(False, 12)
        entry = gtk.Entry()
        entry.set_text(content)
        entry.set_size_request(350,30)
        if need_button:
            table = gtk.Table(1, 10, False)
            hbox.pack_start(table, expand=True, fill=True)
            table.attach(entry, 0, 9, 0, 1, xoptions=gtk.SHRINK)
            image = gtk.Image()
            image.set_from_stock(gtk.STOCK_OPEN,gtk.ICON_SIZE_BUTTON)
            open_button = gtk.Button()
            open_button.set_image(image)
            open_button.connect("clicked", self.entry_widget_select_path_cb, parent, entry)
            table.attach(open_button, 9, 10, 0, 1, xoptions=gtk.SHRINK)
        else:
            hbox.pack_start(entry, expand=True, fill=True)
        if tooltip != "":
            info = HobInfoButton(tooltip, self)
            hbox.pack_start(info, expand=False, fill=False)
        hbox.show_all()
        return hbox, entry
|
import os
import unittest
import sqlalchemy as sa
conn = None
creds = None
engine = None
def parse_connection_string(connstr):
    """Parse a database connection string into a credentials dict.

    Supported forms:
      * ``sqlite:///<path>`` -> sqlite credentials
      * ``postgresql://[user[:password]@]host[:port]/dbname``
      * anything else is treated as a bare sqlite file path

    Returns a dict with ``use_sqlite`` plus, for postgres, ``host``,
    ``port`` (a string, default '5432'), ``dbname``, ``username``
    (defaults to $USER) and ``password``.

    Raises:
        ValueError: if a postgresql:// string is missing its host or
            database name, or is otherwise malformed.
    """
    creds = {}
    s = None
    if connstr.startswith('postgresql://'):
        creds['use_sqlite'] = False
        s = connstr[len('postgresql://'):]
    elif connstr.startswith('sqlite:///'):
        creds['use_sqlite'] = True
        creds['dbname'] = connstr[len('sqlite:///'):]
        return creds
    else:
        # No recognised scheme: treat the whole string as a sqlite file path.
        creds['use_sqlite'] = True
        creds['dbname'] = connstr
        return creds
    creds['dbname'] = None
    if s:
        t = s.split('/', 2)
        if len(t) == 2:
            creds['dbname'] = t[1]
            s = t[0]
    if not creds['dbname']:
        raise ValueError("Unrecognized connection string format: %s" % connstr)
    host_port = None
    user_pass = None
    if s:
        t = s.split('@', 2)
        if len(t) == 2:
            user_pass = t[0].split(':')
            host_port = t[1].split(':')
        elif len(t) == 1:
            user_pass = ['', '']
            host_port = t[0].split(':')
        # BUG FIX: these length checks previously ran even when neither split
        # branch matched (e.g. two '@' signs), crashing with TypeError on
        # len(None) instead of reporting the bad format via ValueError below.
        if host_port is not None and len(host_port) == 1:
            host_port.append('5432')
        if user_pass is not None and len(user_pass) == 1:
            user_pass.append('')
    if not host_port:
        raise ValueError("Unrecognized connection string format: %s" % connstr)
    if host_port[0]:
        creds['host'] = host_port[0]
    else:
        raise ValueError("Unrecognized connection string format: %s" % connstr)
    creds['port'] = host_port[1]
    if user_pass[0]:
        creds['username'] = user_pass[0]
    else:
        # No user in the URL: fall back to the current shell user.
        creds['username'] = os.environ.get('USER')
    creds['password'] = user_pass[1]
    return creds
def connect():
    """Open a database connection described by the module-level `creds` dict.

    Any missing key falls back to a local-postgres default. The SQLAlchemy
    engine and connection are stored in module globals; the connection is
    also returned.
    """
    global creds, conn, engine
    assert isinstance(creds, dict)
    defaults = {'host': 'localhost', 'port': 5432, 'dbname': 'postgres',
                'username': 'postgres', 'password': ''}
    cfg = {key: creds.get(key, fallback) for key, fallback in defaults.items()}
    if creds.get('use_sqlite'):
        url = 'sqlite:///' + cfg['dbname']
    else:
        url = 'postgresql://%(username)s:%(password)s@%(host)s:%(port)s/%(dbname)s' % cfg
    engine = sa.create_engine(url)
    conn = engine.connect()
    return conn
def version():
    """Return the server's version string (first row of SELECT version()),
    or None if the query yields no rows."""
    global conn
    for row in conn.execute("SELECT version()"):
        return row[0]
def fmt_datetime(ts):
    """Render a timestamp literal for the active backend: plain quoting on
    PostgreSQL, wrapped in datetime() for SQLite."""
    on_postgres = str(engine).startswith('Engine(postgresql://')
    return ("'%s'" % ts) if on_postgres else ("datetime('%s')" % ts)
def fmt_nullable(v):
    """Render *v* as a quoted SQL literal, or the bare keyword null for None."""
    if v is None:
        return 'null'
    return "'%s'" % v
def quote_string(s):
    """Escape single quotes for embedding *s* in a SQL string literal."""
    return "''".join(s.split("'"))
def dump_table(table):
    """
    Utility function for testing purpose.
    """
    # NOTE(review): the table name is interpolated straight into SQL — fine
    # for trusted test fixtures, unsafe for untrusted input.
    rows = conn.execute('SELECT * FROM %s' % table).fetchall()
    if not rows:
        print('[%s] No record.' % table)
        return
    for row in rows:
        # assumes every column value is a string — join would fail otherwise
        print('[%s] ' % table + ','.join(row).replace('\n', '\\n'))
class TestDb(unittest.TestCase):
    """Unit tests for parse_connection_string plus connect/version smoke tests.

    The connect/version tests require a live local PostgreSQL accepting
    passwordless connections for the `postgres` user.

    BUG FIX: all assertions used `assertEquals`, a deprecated alias that was
    removed in Python 3.12; replaced throughout with `assertEqual`.
    """
    def setUp(self):
        # Expected default username when the URL carries no user part.
        self.default_user = os.environ['USER']
    def test_parse_connection_string_001(self):
        """sqlite:/// URLs map straight to a sqlite dbname."""
        self.assertEqual({'use_sqlite': True,
                          'dbname': 'foo.db'},
                         parse_connection_string('sqlite:///foo.db'))
    def test_parse_connection_string_002(self):
        """A bare path is treated as a sqlite file."""
        self.assertEqual({'use_sqlite': True,
                          'dbname': 'bar.db'},
                         parse_connection_string('bar.db'))
    def test_parse_connection_string_011(self):
        """postgresql:// with host only: default port and $USER username."""
        self.assertEqual({'use_sqlite': False,
                          'host': 'localhost',
                          'port': '5432',
                          'dbname': 'testdb',
                          'username': self.default_user,
                          'password': ''},
                         parse_connection_string('postgresql://localhost/testdb'))
    def test_parse_connection_string_012(self):
        """Explicit port is preserved as a string."""
        self.assertEqual({'use_sqlite': False,
                          'host': 'localhost',
                          'port': '1234',
                          'dbname': 'testdb',
                          'username': self.default_user,
                          'password': ''},
                         parse_connection_string('postgresql://localhost:1234/testdb'))
    def test_parse_connection_string_013(self):
        """user@host form: username taken from URL, empty password."""
        self.assertEqual({'use_sqlite': False,
                          'host': 'localhost',
                          'port': '5432',
                          'dbname': 'testdb',
                          'username': 'foo',
                          'password': ''},
                         parse_connection_string('postgresql://foo@localhost/testdb'))
    def test_parse_connection_string_014(self):
        """user@host:port form."""
        self.assertEqual({'use_sqlite': False,
                          'host': 'localhost',
                          'port': '1234',
                          'dbname': 'testdb',
                          'username': 'foo',
                          'password': ''},
                         parse_connection_string('postgresql://foo@localhost:1234/testdb'))
    def test_parse_connection_string_015(self):
        """user:password@host form."""
        self.assertEqual({'use_sqlite': False,
                          'host': 'localhost',
                          'port': '5432',
                          'dbname': 'testdb',
                          'username': 'foo',
                          'password': 'bar'},
                         parse_connection_string('postgresql://foo:bar@localhost/testdb'))
    def test_parse_connection_string_016(self):
        """user:password@host:port form."""
        self.assertEqual({'use_sqlite': False,
                          'host': 'localhost',
                          'port': '1234',
                          'dbname': 'testdb',
                          'username': 'foo',
                          'password': 'bar'},
                         parse_connection_string('postgresql://foo:bar@localhost:1234/testdb'))
    def test_parse_connection_string_021(self):
        """Malformed postgresql:// strings raise ValueError with a clear message."""
        with self.assertRaises(ValueError) as cm:
            parse_connection_string('postgresql://localhost')
        self.assertEqual('Unrecognized connection string format: postgresql://localhost',
                         str(cm.exception))
        with self.assertRaises(ValueError) as cm:
            parse_connection_string('postgresql://localhost/')
        self.assertEqual('Unrecognized connection string format: postgresql://localhost/',
                         str(cm.exception))
        with self.assertRaises(ValueError) as cm:
            parse_connection_string('postgresql:///testdb')
        self.assertEqual('Unrecognized connection string format: postgresql:///testdb',
                         str(cm.exception))
    def test_connect_001(self):
        """Smoke test: connect to a live local PostgreSQL."""
        global creds
        creds = parse_connection_string('postgresql://postgres@127.0.0.1/postgres')
        self.assertIsNotNone(connect())
    def test_connect_002(self):
        """use_sqlite forces the sqlite path even with postgres-style creds."""
        global creds
        creds = parse_connection_string('postgresql://postgres@127.0.0.1/postgres')
        creds['use_sqlite'] = True
        self.assertIsNotNone(connect())
    def test_version_001(self):
        """version() reports the PostgreSQL banner on a postgres connection."""
        global creds
        creds = parse_connection_string('postgresql://postgres@127.0.0.1/postgres')
        connect()
        self.assertTrue(version().startswith('PostgreSQL '))
if __name__ == '__main__':
    # Run this module's test suite when executed directly.
    unittest.main()
|
#!/usr/bin/env python
from matplotlib import cm
import pandas as pd
import matplotlib.pyplot as plt
import matplotlib.style
import matplotlib as mpl
import sys
#seaborn-colorblind, ggplot, fivethirtyeight, bmh seaborn-pastel are decent
mpl.style.use('bmh')
print('plotting...')
# Usage: pairs of (legend-name, csv-path) followed by a final x-axis label.
if(len(sys.argv) < 2):
    print("Usage: ./plotting-passes.py name1 csv1 name2 csv2 ... x-label-name")
    print("Name is used for legend and csv is used to pull passes from")
    print("Expects csv to containt lower_bound and passes columns")
    #for example to compare passes of flat and schoolbus (lower bound is added by default)
    print("Example python plotting-passes.py flat flat-results.csv schoolbus schoolbus-results.csv essential-stitch-collection")
    #for example to compare optimal passes and lower bound
    print("Example python plotting-passes.py optimal all-laces-6.csv enum-6")
    exit(0)
frames = list()
legends = []
n = len(sys.argv)-2
x_label = sys.argv[-1]
big_frame = pd.DataFrame()
cols = []
for i in range(1, n, 2):
    # BUG FIX: pd.DataFrame.from_csv was deprecated in pandas 0.21 and removed
    # in 1.0; read_csv with index_col=0 and no date parsing is the documented
    # equivalent of from_csv(..., parse_dates=False).
    f = pd.read_csv(sys.argv[i+1], index_col=0)
    frames.append(f.sort_values('lower_bound'))
    # The lower bound is shared, so plot it once (taken from the first csv).
    if i == 1:
        big_frame['lb']=frames[-1].lower_bound
        legends.append('lb')
        cols.append(frames[-1].lower_bound)
    big_frame[sys.argv[i]]=frames[-1].passes
    legends.append(sys.argv[i])
    cols.append(frames[-1].passes)
    # Clamp pass counts into [0, 10] so everything above lands in the ">10" bin.
    cols[-1] = [max(0, min(x, 10)) for x in cols[-1]]
# One stacked histogram row per data series, sharing both axes.
fig, ax = plt.subplots(len(cols),1, sharex=True, sharey=True)
plt.xticks([ 2, 4, 6, 8, 10], ['2', '4','6', '8', '>10'])
# Reverse so the first series ends up in the bottom subplot.
cols.reverse()
legends.reverse()
print(legends)
for i in range(0,len(cols)):
    # Alternate the two purples so adjacent rows are distinguishable.
    ax[i].hist(cols[i], bins=[2,3,4,5,6,7,8,9,10, 11], color='#AE8BD9' if i%2 else '#36087F')
    ax[i].set_yticks([])
    ax[i].set_ylabel(legends[i])
    ax[i].set_xlabel(x_label)
    ax[i].grid(False)
    ax[i].tick_params(axis=u'both', which=u'both',length=0)
# Butt the rows together and keep tick labels only on the outer edges.
fig.subplots_adjust(hspace=0)
for a in ax:
    a.label_outer()
plt.show()
fig.savefig("plot.pdf", bbox_inches='tight' )
|
import tensorflow as tf
from data_types.training_result import TrainingResult
from data_types.training_set import TrainingSet
from timeseries.build import compile_and_fit
from timeseries.multi_output.residual_wrapper import ResidualWrapper
from timeseries.window_generator import WindowGenerator
def evaluate_residual_lstm_multi_output(
    training_set: TrainingSet
) -> TrainingResult:
    """Train a residual LSTM (predicting per-step deltas) on a 24-step
    window and report its mean-absolute-error on validation and test."""
    backbone = tf.keras.Sequential([
        tf.keras.layers.LSTM(32, return_sequences=True),
        tf.keras.layers.Dense(
            training_set.num_features,
            # The predicted deltas should start small, so the output layer
            # is zero-initialized and the wrapper adds inputs back in.
            kernel_initializer=tf.initializers.zeros()),
    ])
    model = ResidualWrapper(backbone)
    window = WindowGenerator(
        input_width=24,
        label_width=24,
        shift=1,
        label_columns=['T (degC)'],
        training_set=training_set
    )
    compile_and_fit(model, window)
    mae_index = model.metrics_names.index('mean_absolute_error')
    val_mae = model.evaluate(window.val)[mae_index]
    test_mae = model.evaluate(window.test, verbose=0)[mae_index]
    return TrainingResult(
        validation_performance=val_mae,
        performance=test_mae
    )
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.