content
stringlengths 5
1.05M
|
|---|
from rest_framework import serializers
class IndexSerializer(serializers.Serializer):
    """Serializer exposing a single read-only string 'content' field."""
    # NOTE(review): StringRelatedField is normally used on model relations,
    # and plain serializers.Serializer (unlike ModelSerializer) ignores the
    # inner Meta entirely — confirm this serializer behaves as intended.
    content = serializers.StringRelatedField()

    class Meta:
        # Only honoured by ModelSerializer subclasses; documentary here.
        fields = ['content']
|
# Generated by Django 3.2.7 on 2021-10-23 13:38
from django.db import migrations, models


class Migration(migrations.Migration):
    """Alter the recipe time-unit choice fields (choices/default/max_length)."""

    dependencies = [
        ('social', '0004_alter_recipe_prep_time_units'),
    ]

    operations = [
        migrations.AlterField(
            model_name='recipe',
            name='cooking_time_units',
            field=models.CharField(choices=[('minutes', 'minutes'), ('hours', 'hours')], default='minutes', max_length=25),
        ),
        migrations.AlterField(
            model_name='recipe',
            name='prep_time_units',
            # NOTE: max_length differs from cooking_time_units (20 vs 25) —
            # generated code, left as-is.
            field=models.CharField(choices=[('minutes', 'minutes'), ('hours', 'hours')], default='minutes', max_length=20),
        ),
    ]
|
def print_rel_notes(name, org, repo, version, outs=None, setup_file="",
                    deps_method="", toolchains_method=""):
    """Starlark macro: declares a genrule that writes release notes for a release tarball.

    Args:
      name: unused by the body (the genrule name is hard-coded to "relnotes");
        presumably kept for macro-call convention — TODO confirm.
      org: GitHub organisation name.
      repo: repository name; also used to derive the tarball label.
      version: release version embedded in the tarball name.
      outs: optional output list; defaults to ["relnotes.txt"].
      setup_file: optional --setup_file flag for the tool.
      deps_method: optional --deps_method flag for the tool.
      toolchains_method: optional --toolchains_method flag for the tool.
    """
    tarball_name = ":%s-%s.tar.gz" % (repo, version)
    # Assemble the shell command that runs the release-notes tool.
    cmd = [
        "$(location @rules_pkg//releasing:print_rel_notes)",
        "--org=%s" % org,
        "--repo=%s" % repo,
        "--version=%s" % version,
        "--tarball=$(location %s)" % tarball_name,
    ]
    if setup_file:
        cmd.append("--setup_file=%s" % setup_file)
    if deps_method:
        cmd.append("--deps_method=%s" % deps_method)
    if toolchains_method:
        cmd.append("--toolchains_method=%s" % toolchains_method)
    # Redirect the tool's stdout into the genrule output file.
    cmd.append(">$@")
    native.genrule(
        name = "relnotes",
        srcs = [
            tarball_name,
        ],
        outs = outs or ["relnotes.txt"],
        cmd = " ".join(cmd),
        tools = [
            "@rules_pkg//releasing:print_rel_notes",
        ],
    )
|
#-*- coding: ISO-8859-1 -*-
# Bootstrap for IronPython WPF support: when running on the .NET platform
# (sys.platform == 'cli'), load the IronPython.Wpf assembly before
# re-exporting everything from the native _wpf module.
def _():
    import sys
    if sys.platform == 'cli':
        import clr
        clr.AddReference('IronPython.Wpf')
_()
del _  # keep the module namespace clean after the one-shot bootstrap
from _wpf import *
|
# !/Users/xxpang/anaconda3/bin/python3
# -*- coding: utf-8 -*-
import logging
logging.basicConfig(level=logging.INFO)
import asyncio
import os
import json
import time
from datetime import datetime
from aiohttp import web
def index(request):
    """Serve a minimal static HTML page for GET /.

    Fix: the original body ended with a second opening ``<html>`` tag;
    it is now the closing ``</html>`` tag.
    """
    return web.Response(
        body=b'<html><title>127.0.0.1</title><h1>Awesome</h1></html>',
        content_type='text/html',
    )
async def init(loop):
    """Build the aiohttp application and start serving on 127.0.0.1:9000."""
    # NOTE(review): the loop kwarg and make_handler() are deprecated in
    # newer aiohttp releases — confirm the pinned aiohttp version.
    app = web.Application(loop=loop)
    app.router.add_route('GET', '/', index)
    srv = await loop.create_server(app.make_handler(), '127.0.0.1', 9000)
    logging.info('server started at http://127.0.0.1:9000...')
    return srv
# Start the server and block forever servicing requests.
loop = asyncio.get_event_loop()
loop.run_until_complete(init(loop))
loop.run_forever()
|
#!/usr/bin/env python3
"""
Advent of Code 2017: Day 12
"""
import os
from shared.readdayinput import readdayinput
def first_half(dayinput):
    """first half solver: size of the connected group containing program 0."""
    graph = {}
    for row in dayinput.split('\n'):
        node, neighbours = row.split(' <-> ')
        graph[int(node)] = [int(item) for item in neighbours.split(', ')]
    # Depth-first flood fill from program 0.
    seen = {0}
    frontier = [0]
    while frontier:
        for neighbour in graph[frontier.pop()]:
            if neighbour not in seen:
                seen.add(neighbour)
                frontier.append(neighbour)
    return len(seen)
def second_half(dayinput):
    """second half solver: count the distinct connected groups.

    Fixes: removed the dead ``connections = [0]`` seed and the buggy, unused
    ``max_p = max([programs.keys()])`` (a max over a one-element list holding
    the dict-keys view — not over the keys). Groups are now keyed by a tuple
    of member ids instead of a concatenated digit string, which could collide
    (e.g. {1, 12} and {11, 2} both join to "112").
    """
    programs = {}
    for line in dayinput.split('\n'):
        key, val = line.split(' <-> ')
        programs[int(key)] = [int(x) for x in val.split(', ')]
    groups = set()
    for prog in programs:
        # Flood-fill from every program; identical components collapse in the set.
        connections = [prog]
        index = 0
        while index < len(connections):
            for p in programs[connections[index]]:
                if p not in connections:
                    connections.append(p)
            index += 1
        groups.add(tuple(sorted(set(connections))))
    return len(groups)
def app():
    """
    runs day application: read the puzzle input, solve both halves, print both.
    """
    dayinput = readdayinput()
    half_one = first_half(dayinput)
    half_two = second_half(dayinput)
    print(half_one, half_two)
# Script entry point.
if __name__ == "__main__":
    """
    MAIN APP
    """
    app()
|
import pygame
import lib
import lib.constants as const
import lib.common as common
import ai.neat.neat as neat
from ai.neatinterface.smartcar import SmartCar
pygame.init()
# game specific neat interface
# this straps on to the original Core class
# by inheriting it and overriding necessary methods
# and adding extensions
class NeatCore(lib.Core):
    """Game-specific NEAT interface.

    Straps onto the original Core class by inheriting it, overriding the
    necessary methods and adding extensions.
    """

    # Network topology: 6 sensor inputs, 4 action outputs.
    _num_input = 6
    _num_output = 4

    # overriden methods
    def __init__(self):
        """Create the core plus a NEAT population sized from the settings."""
        super().__init__()
        self.population = neat.Population(
            self._num_input,
            self._num_output,
            pop_size=common.settings.num_cars
        )
        self.best_score = 0
        self.walls = None
        return

    def new_game(self):
        """Reset the environment and the best-score tracker for a new game."""
        super().new_game()
        self.best_score = 0
        # preprocessing data for later use for optimization
        # for tile in self.env.track.track_tiles:
        #     tile.scaled_neighbor_walls = sensor.get_scaled_neighbor_walls(tile)
        return

    def new_cars(self):
        """One SmartCar per genome, all starting on the track's start tile."""
        return [SmartCar(self.env.track.start_tile, genome) for genome in self.population.genomes]

    def update(self):
        """Advance one frame: poll events/settings, let each car think, step the env."""
        common.events.update()
        common.settings.update()
        # only cycle through cars alive in the environment for optimization
        for car in self.env.cars:
            car.think(self.get_x(car))
        self.env.update()
        self.best_score = max(self.best_score, self.env.score)

    def game_over(self):
        """On environment game-over, score and evolve the population.

        Returns True when the round ended (population evolved), else False.
        """
        if self.env.game_over():
            # added incentives
            # NOTE(review): iterates self.cars while every other method uses
            # self.env.cars — confirm lib.Core actually defines self.cars.
            scores = [
                car.score
                # negate crossing the start line bonus
                - const.LAP_BONUS
                # strong time incentive once a lap is finished
                + car.time_bonus * car.laps * 10
                # if the direction of the car is closer to the direction of
                # the tile grid, give reward
                + (180 - abs(car.get_sensor_data()["degrees"])) * 10
                for car in self.cars
            ]
            self.population.score_genomes(scores)
            self.population.evolve_population()
            return True
        else:
            return False

    def get_info_surface(self):
        """Render per-genome-type alive counts and score info as a surface."""
        num_survived = sum([
            car.genome.genome_type == "survived" and car.alive
            for car in self.env.cars
        ])
        num_mutated = sum([
            car.genome.genome_type == "mutated" and car.alive
            for car in self.env.cars
        ])
        num_bred = sum([
            car.genome.genome_type == "bred" and car.alive
            for car in self.env.cars
        ])
        texts = [
            f"   Game: {self.game_count}",
            f"   Best Score: {self.best_score}",
            f"   Alive: {self.env.num_alive}",
            f"   (Blue)   Survived: {num_survived}",
            f"   (Green)  Mutated: {num_mutated}",
            f"   (Yellow) Bred: {num_bred}",
        ]
        return common.display.texts_to_surface(texts)

    def get_debug_surface(self):
        """Render top speed and FPS as a debug overlay surface."""
        texts = [
            "   Top Speed: {0: .1f}".format(
                max([car.speed for car in self.env.cars])
            ),
            "   FPS: {}".format(common.clock.get_FPS()),
        ]
        return common.display.texts_to_surface(texts)

    # extended methods
    def get_x(self, car):
        """Build the 6-element network input vector for one car."""
        if car.alive:
            sensor_data = car.get_sensor_data()
            return [
                car.speed,
                sensor_data["degrees"] / 180,
                sensor_data["front"] / const.TILE_SIZE,
                sensor_data["back"] / const.TILE_SIZE,
                sensor_data["left"] / const.TILE_SIZE,
                sensor_data["right"] / const.TILE_SIZE,
            ]
        # this part shouldn't really happen since
        # only living cars are called to think
        else:
            return [0] * self._num_input
|
from django.contrib import admin
from .models import Container
from .actions import start_containers, stop_containers, restart_containers
class ContainerAdmin(admin.ModelAdmin):
    """Admin list view for Container with bulk start/stop/restart actions."""
    list_display = ['container_id', 'name', '_image','_networks', '_ip', 'status', 'active']
    actions = [start_containers, stop_containers, restart_containers]
    list_filter = ['active', 'app']

    def _image(self, obj):
        # Column: the image's tag, or '-' when no image is attached.
        if obj.image:
            return obj.image.image_tag
        return '-'

    def _networks(self, obj):
        # Column: comma-joined names of all attached networks.
        return ','.join([net.name for net in obj.networks.all()])

    def _ip(self, obj):
        # Column: "ip:port" when an IP is assigned, else empty string.
        if obj.ip:
            return f'{obj.ip}:{obj.port}'
        return ''
    _ip.short_description = "Container IP (Local)"

admin.site.register(Container, ContainerAdmin)
|
from uliweb.orm import *
import datetime
from uliweb.i18n import ugettext_lazy as _
from uliweb import functions
from . import encrypt_password, check_password
from uliweb.utils.common import get_var
class User(Model):
    """uliweb auth user model: credentials, profile, status flags and admin forms."""
    username = Field(str, verbose_name=_('Username'), max_length=30, unique=True, index=True, nullable=False)
    nickname = Field(str, verbose_name=_('Nick Name'), max_length=30)
    email = Field(str, verbose_name=_('Email'), max_length=40)
    password = Field(str, verbose_name=_('Password'), max_length=128)
    is_superuser = Field(bool, verbose_name=_('Is Superuser'))
    last_login = Field(datetime.datetime, verbose_name=_('Last Login'), nullable=True)
    date_join = Field(datetime.datetime, verbose_name=_('Joined Date'), auto_now_add=True)
    image = Field(FILE, verbose_name=_('Portrait'), max_length=256)
    active = Field(bool, verbose_name=_('Active Status'))
    locked = Field(bool, verbose_name=_('Lock Status'))
    deleted = Field(bool, verbose_name=_('Deleted'))
    auth_type = Field(str, max_length=20, default='default', verbose_name=_('Auth type'))

    def set_password(self, raw_password):
        # Stores only the encrypted form; caller is responsible for saving.
        self.password = encrypt_password(raw_password)
        # self.save()

    def check_password(self, raw_password):
        """
        Returns a boolean of whether the raw_password was correct. Handles
        encryption formats behind the scenes.
        """
        return check_password(raw_password, self.password)

    def get_image_url(self):
        """URL of the user's portrait, or the 50x50 default placeholder."""
        if self.image:
            return functions.get_href(self.image)
        else:
            return functions.url_for_static('images/user%dx%d.jpg' % (50, 50))

    @classmethod
    def get_default_image_url(cls, size=50):
        """URL of the square default portrait at the given pixel size."""
        return functions.url_for_static('images/user%dx%d.jpg' % (size, size))

    def __str__(self):
        return (self.nickname or self.username) + (_('(Deleted)') if self.deleted else '')

    class Meta:
        display_field = 'username'

    # Field sets consumed by uliweb's generic add/edit/detail/table views.
    class AddForm:
        fields = ['username', 'nickname', 'email', 'is_superuser']

    class EditForm:
        fields = ['email']

    class AdminEditForm:
        fields = ['email', 'is_superuser']

    class DetailView:
        fields = ['username', 'nickname', 'email', 'is_superuser', 'date_join', 'last_login']

    class Table:
        fields = [
            {'name':'username'},
            {'name':'nickname'},
            {'name':'email'},
            {'name':'is_superuser'},
            {'name':'date_join'},
            {'name':'last_login'},
            {'name':'deleted'},
        ]
class UserGroup(Model):
    """Hierarchical user group (self-referencing parent) with many-to-many users."""
    name = Field(str, max_length=128, verbose_name=_('Name'), index=True, nullable=True)
    parent = SelfReference(verbose_name=_('Parent Group'), collection_name='children', nullable=True, default=0)
    users = ManyToMany('user', verbose_name=_('Users'), collection_name='groups')
    deleted = Field(bool, verbose_name=_('Deleted'))
    created_time = Field(datetime.datetime, verbose_name=_('Created Datetime'), auto_now_add=True)
    number_of_children = Field(int, verbose_name=_('Number of Children'))
    number_of_people = Field(int, verbose_name=_('Number of People'))
    order = Field(int, verbose_name=_('Order'), default=9999)
    type = Field(CHAR, max_length=1, verbose_name=_('Group Type'), choices=get_var('AUTH/USER_GROUP_TYPE'))
    auth_type = Field(str, max_length=20, default='default', verbose_name=_('Auth type'))

    def __str__(self):
        return self.name

    @classmethod
    def OnInit(cls):
        # Composite index to speed lookups of a group by (parent, name).
        Index('usergroup_idx', cls.c.parent, cls.c.name)
|
import random
class DiffieHellman:
    """One party of a Diffie-Hellman key exchange over a prime modulus.

    Fix: the ValueError message read "must be gross than 2"; corrected to
    "greater than 2". Logic is unchanged.
    """

    def __init__(self, generator, prime_divider):
        """Pick a random private exponent and derive the public key.

        Raises:
            ValueError: if prime_divider is not greater than 2.
        """
        if prime_divider <= 2:
            raise ValueError('prime_divider must be greater than 2')
        self.__prime_divider = prime_divider
        # Private exponent drawn uniformly from [2, prime_divider - 1].
        self.__private_key = random.randint(2, prime_divider - 1)
        self.__public_key = pow(
            generator, self.__private_key, prime_divider
        )

    @property
    def public_key(self):
        """The value g^private mod p to send to the peer."""
        return self.__public_key

    def make_secret(self, B):
        """Derive the shared secret from the peer's public key ``B``."""
        return pow(B, self.__private_key, self.__prime_divider)
|
import sys
import bisect
def fib(n):
    """Yield the Fibonacci numbers strictly below n (1, 1, 2, 3, 5, ...)."""
    prev, cur = 0, 1
    while cur < n:
        yield cur
        prev, cur = cur, prev + cur
# Precompute all Fibonacci numbers below the problem bound.
fibArray = list(fib(100000005))
sys.stdin = open('input.txt')
numTest = int(input())
for itertest in range(numTest):
    N = int(input())
    # Fix: the original used Python 2 print statements (a SyntaxError on
    # Python 3); converted to the print() function. N is consumed by the
    # greedy loop, so its original value is captured for the output line.
    original_n = N
    idx = bisect.bisect_right(fibArray, N)
    # Greedy (Zeckendorf-style) representation of N in the Fibonacci base.
    result = []
    for i in reversed(range(1, idx)):
        if N >= fibArray[i]:
            result.append('1')
            N -= fibArray[i]
        else:
            result.append('0')
    print('%d = %s (fib)' % (original_n, ''.join(result)))
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
This code is from https://github.com/DaikiShimada/masalachai
"""
import numpy as np
import six
from six.moves.urllib import request
import tarfile
# CIFAR-10 archive name and layout: 5 training batches of 10000 images each.
fname = 'cifar-10-python.tar.gz'
batches = 5
batchsize = 10000
# Class index -> human-readable label.
category_names = ['airplane', 'automobile', 'bird', 'cat', 'deer', 'dog', 'frog', 'horse', 'ship', 'truck']
def download():
    """Fetch the CIFAR-10 python archive into the working directory."""
    url = 'https://www.cs.toronto.edu/~kriz'
    request.urlretrieve(url+'/'+fname, fname)
def convert_train_image():
    """Read all 5 training batches from the tarball.

    Returns (data, labels): uint8 arrays of shape (50000, 3, 32, 32) and (50000,).
    """
    data = np.zeros((batches*batchsize, 3, 32, 32), dtype=np.uint8)
    labels = np.zeros((batches*batchsize), dtype=np.uint8)
    with tarfile.open(fname, 'r:gz') as f:
        dir_name = 'cifar-10-batches-py'
        for i in six.moves.range(batches):
            batch_name = dir_name + '/data_batch_' + str(i+1)
            r_data = f.extractfile(batch_name)
            # NOTE(review): on Python 3, unpickling CIFAR batches typically
            # needs encoding='latin1' — confirm the target interpreter.
            batch = six.moves.cPickle.load(r_data)
            data[i*batchsize:(i+1)*batchsize] = batch['data'].reshape(batchsize, 3, 32, 32)
            labels[i*batchsize:(i+1)*batchsize] = batch['labels']
    return data, labels
def convert_test_image():
    """Read the test batch from the tarball.

    Returns (data, labels): uint8 arrays of shape (10000, 3, 32, 32) and (10000,).
    """
    with tarfile.open(fname, 'r:gz') as f:
        dir_name = 'cifar-10-batches-py'
        batch_name = dir_name + '/test_batch'
        r_data = f.extractfile(batch_name)
        # NOTE(review): may need encoding='latin1' under Python 3 — confirm.
        batch = six.moves.cPickle.load(r_data)
        data = batch['data'].reshape(batchsize, 3, 32, 32)
        labels = np.asarray(batch['labels']).astype(np.uint8)
    return data, labels
def load(name='cifar10.pkl'):
    """Unpickle and return the dataset dict written by the __main__ block."""
    with open(name, 'rb') as data:
        cifar10 = six.moves.cPickle.load(data)
    return cifar10
if __name__ == '__main__':
    # Download CIFAR-10, convert train and test splits, and pickle both
    # (with sizes and category names) into a single cifar10.pkl file.
    download()
    train_data, train_labels = convert_train_image()
    train = {'data': train_data,
             'target': train_labels,
             'size': len(train_labels),
             'categories': len(category_names),
             'category_names': category_names}
    test_data, test_labels = convert_test_image()
    test = {'data': test_data,
            'target': test_labels,
            'size': len(test_labels),
            'categories': len(category_names),
            'category_names': category_names}
    data = {'train': train, 'test': test}
    out_name = 'cifar10.pkl'
    with open(out_name, 'wb') as out_data:
        # Protocol -1 = highest available pickle protocol.
        six.moves.cPickle.dump(data, out_data, -1)
|
from PyQt6.QtWidgets import QApplication, QMainWindow, QSpinBox
from PyQt6.QtCore import Qt, QSize
class MainWindow(QMainWindow):
    """Main window hosting a single QSpinBox (-20..20, suffix "C", step 2)."""

    def __init__(self):
        super().__init__()
        self.setWindowTitle("Moja aplikacja")
        self.widget = QSpinBox()
        self.widget.setMinimum(-20)
        self.widget.setMaximum(20)
        self.widget.setSuffix("C")
        self.widget.setSingleStep(2)
        # Echo every value change to the console.
        self.widget.valueChanged.connect(self.zmiana_wartosci)
        self.setCentralWidget(self.widget)

    def zmiana_wartosci(self, i):
        # "zmiana_wartosci" is Polish for "value changed"; i is the new value.
        # print(self.widget.text)
        print(i)
# Standard Qt bootstrap: create the application, show the window, run the loop.
app = QApplication([])
window = MainWindow()
window.show()
app.exec()
|
from django.contrib.auth.base_user import BaseUserManager
from django.contrib.auth.models import AbstractUser
from django.contrib.auth.validators import UnicodeUsernameValidator
from django.db import models
class CustomUserManager(BaseUserManager):
    """Manager that creates users identified primarily by email."""

    def create_user(self, email, username, first_name,
                    last_name, password):
        """Create and persist a regular user; email is mandatory.

        Raises:
            ValueError: if email is empty.
        """
        if not email:
            raise ValueError('Нужно указать email')
        user = self.model(
            email=self.normalize_email(email),
            username=username,
            first_name=first_name,
            last_name=last_name,
        )
        user.set_password(password)
        user.save(using=self._db)
        return user

    def create_superuser(self, email, username, first_name,
                         last_name, password):
        """Create a superuser account.

        Fix: the original set is_admin/is_staff but never ``is_superuser``,
        so the account created here lacked Django superuser rights.
        """
        user = self.create_user(
            email,
            password=password,
            username=username,
            first_name=first_name,
            last_name=last_name,
        )
        user.is_active = True
        # NOTE(review): is_admin is not a stock AbstractUser field — confirm
        # it exists on CustomUser or drop this assignment.
        user.is_admin = True
        user.is_staff = True
        user.is_superuser = True
        user.save(using=self._db)
        return user
class CustomUser(AbstractUser):
    """User model that logs in with email; username/names are still required."""
    username_validator = UnicodeUsernameValidator()

    username = models.CharField(
        'username',
        max_length=150,
        unique=True,
        help_text=('Required. 150 characters or fewer. '
                   'Letters, digits and @/./+/-/_ only.'),
        validators=[username_validator],
        error_messages={
            'unique': "A user with that username already exists.",
        },
    )
    first_name = models.CharField('first name', max_length=150)
    last_name = models.CharField('last name', max_length=150)
    email = models.EmailField('email address', max_length=254, unique=True)

    # Authentication is by email; the remaining fields are still prompted for
    # by createsuperuser.
    USERNAME_FIELD = 'email'
    REQUIRED_FIELDS = ('username', 'first_name', 'last_name')
    objects = CustomUserManager()

    def has_perm(self, perm, obj=None):
        # Grants every object permission unconditionally.
        return True

    def has_module_perms(self, app_label):
        # Grants every app/module permission unconditionally.
        return True
|
from bs4 import BeautifulSoup
class GoogleExtractionModule(object):
    """Extracts SQL example queries from Google Cloud documentation HTML."""

    @staticmethod
    def find_queries(html):
        """ Finds queries and extracts them from Google SQL documentation on
        cloud.google.com.

        Code blocks are in <code> tags with parent <pre> tags.

        Fix: the method was declared without ``self`` and without
        ``@staticmethod``, so calling it on an instance passed the instance
        itself as ``html``; it is now a proper static method.

        Args:
            html: HTML response which contains HTML text
        Returns
            A list of queries in the form of strings.
        """
        soup = BeautifulSoup(html.text, "html.parser")
        queries = []
        for block in soup.find_all("code"):
            if block.parent.name == "pre":
                queries.append(block.contents[0])
        return queries
|
class BufferedGraphicsManager(object):
    """Access point for the application domain's main buffered-graphics context."""

    # Populated elsewhere with the active context object; starts out unset.
    Current = None
|
#!/usr/bin/env python3
"""
USAGE:
yb_sysprocs_bulk_xfer.py [options]
PURPOSE:
Transformed subset active bulk transfers (ybload & ybunload) from sys.load and sys.unload.
OPTIONS:
See the command line help message for all options.
(yb_sysprocs_bulk_xfer.py --help)
Output:
The report as a formatted table, pipe separated value rows, or inserted into a database table.
"""
from yb_sp_report_util import SPReportUtil
class report_bulk_xfer(SPReportUtil):
    """Issue the ybsql commands used to create the user objects report."""
    # Report configuration consumed by the SPReportUtil base class.
    config = {
        'description': 'Transformed subset active bulk transfers (ybload & ybunload) from sys.load and sys.unload.'
        , 'report_sp_location': 'sysviews'
        , 'report_default_order': 'start_time' }

    def execute(self):
        # Delegate entirely to the shared report builder.
        return self.build()
def main():
    """CLI entry point: print the report and exit with status 0."""
    print(report_bulk_xfer().execute())
    exit(0)

if __name__ == "__main__":
    main()
|
#!/usr/bin/env python
# coding: utf-8
#
# Author: Kazuto Nakashima
# URL: http://kazuto1011.github.io
# Created: 2017-10-11
from __future__ import print_function
import argparse
import torch
import torchvision
from smooth_grad import SmoothGrad
from torchvision import transforms
def main(args):
    """Run SmoothGrad on one image and save saliency maps for the top-3 classes."""
    # Load the synset words (class index -> human-readable label).
    idx2cls = list()
    with open('samples/synset_words.txt') as lines:
        for line in lines:
            line = line.strip().split(' ', 1)[1]
            line = line.split(', ', 1)[0].replace(' ', '_')
            idx2cls.append(line)

    # Setup a classification model
    print('Loading a model...', end='')
    model = torchvision.models.resnet152(pretrained=True)
    transform = transforms.Compose([
        transforms.ToTensor(),
        # Standard ImageNet channel statistics.
        transforms.Normalize(mean=[0.485, 0.456, 0.406],
                             std=[0.229, 0.224, 0.225])
    ])
    print('finished')

    # Setup the SmoothGrad
    smooth_grad = SmoothGrad(model=model, cuda=args.cuda, sigma=args.sigma,
                             n_samples=args.n_samples, guided=args.guided)

    # Predict without adding noises
    smooth_grad.load_image(filename=args.image, transform=transform)
    prob, idx = smooth_grad.forward()

    # Generate the saliency images of top 3
    for i in range(0, 3):
        print('{:.5f}\t{}'.format(prob[i], idx2cls[idx[i]]))
        smooth_grad.generate(
            filename='results/{}'.format(idx2cls[idx[i]]), idx=idx[i])
if __name__ == '__main__':
    # Parse CLI options and enable CUDA only when requested and available.
    parser = argparse.ArgumentParser(description='SmoothGrad visualization')
    parser.add_argument('--image', type=str, required=True)
    parser.add_argument('--sigma', type=float, default=0.20)
    parser.add_argument('--n_samples', type=int, default=100)
    parser.add_argument('--no-cuda', action='store_true', default=False)
    parser.add_argument('--guided', action='store_true', default=False)
    args = parser.parse_args()
    args.cuda = not args.no_cuda and torch.cuda.is_available()
    main(args)
|
from PuzzleLib.Models.Nets.Inception import loadInceptionBN, loadInceptionV3
from PuzzleLib.Models.Nets.LeNet import loadLeNet
from PuzzleLib.Models.Nets.MiniYolo import loadMiniYolo
from PuzzleLib.Models.Nets.NiN import loadNiNImageNet
from PuzzleLib.Models.Nets.OpenPoseCOCO import loadCOCO
from PuzzleLib.Models.Nets.OpenPoseMPI import loadMPI
from PuzzleLib.Models.Nets.ResNet import loadResNet
from PuzzleLib.Models.Nets.UNet import loadUNet
from PuzzleLib.Models.Nets.VGG import loadVGG
from PuzzleLib.Models.Nets.WaveToLetter import loadW2L
|
i=20
j=2
# NOTE(review): the result of this division is discarded — presumably
# ``i = i / j`` (or ``i //= j``) was intended; as written, print shows 20.
i/j
print(i)
|
from pathlib import Path
import fire
from composo import ioc
from appdirs import user_config_dir
def main():
    """Load user config from the platform config dir and hand the app to Fire."""
    ioc.App.config.from_yaml(Path(user_config_dir("composo")) / "config.yaml")
    app = ioc.App.app()
    fire.Fire(app)
def run():
    """Ad-hoc debug entry: create a 'test' project using the shell language."""
    app = ioc.App.app()
    app.new("test", lang="shell")

if __name__ == "__main__":
    # NOTE(review): invokes run(), not main(), so the YAML config is never
    # loaded on direct execution — confirm this is intentional.
    run()
|
from .Visualizer import visualize_binary_tree
|
# from app import db
import pymongo
from pprint import pprint
import dns
import pprint
from bson.json_util import dumps, _json_convert
from bson.objectid import ObjectId
# NOTE(security): database credentials are hard-coded in the connection URI —
# move them to environment variables / a secrets store.
con = pymongo.MongoClient('mongodb+srv://root-condor:root-condor@condormarket-kmpgf.mongodb.net/CondorMarket?retryWrites=true')
db = con.CondorMarket
from flask import Response
from flask_restful import Resource
# Products collection and a placeholder image used by sample inserts.
prods = db.products
img = "https://upload.wikimedia.org/wikipedia/commons/thumb/7/76/Simple_cardboard_box.svg/1280px-Simple_cardboard_box.svg.png"
# Sample insert kept for manual testing:
# p = {
#     "productName": "pythontest",
#     "productPrice": 50,
#     "productCategory": "testing python",
#     "productSeller": "Me",
#     "productDescription": "None",
#     "productQuantity": 20,
#     "productImg": img
# }
# print(prods.insert_one(p).inserted_id)
class getAllProducts(Resource):
    """GET resource: all products sorted by name, JSON-serialised."""

    def get(self):
        """Return every product with its ObjectId flattened to a string.

        Fix: when the collection was empty the original fell through and
        returned None (an HTTP 500); an explicit empty result is returned now.
        """
        prod = list(prods.find().sort("productName", pymongo.ASCENDING))
        # Replace each BSON ObjectId with its plain "$oid" string form.
        for i in range(len(prod)):
            auxP = prod[i]
            auxP["_id"] = _json_convert(auxP["_id"])["$oid"]
            prod[i] = auxP
        data = dumps({"products": prod})
        return Response(data, status=200, mimetype='application/json')
class getOneProduct(Resource):
    """GET resource: fetch a single product by its ObjectId string."""

    def get(self, id):
        # NOTE(review): find_one returns None for an unknown id, making the
        # subscript below raise TypeError (HTTP 500) — confirm desired.
        prod = prods.find_one({'_id': ObjectId(id)})
        print(prod)
        auxP = prod
        # Replace the BSON ObjectId with its "$oid" string form for JSON output.
        auxOi = _json_convert(auxP["_id"])
        auxP["_id"] = auxOi["$oid"]
        prod = auxP
        data = {}
        data["product"] = prod
        return data
class getCategories(Resource):
    """GET resource: list category names found across all products."""

    def get(self):
        found = []
        for item in list(prods.find().sort("productName", pymongo.ASCENDING)):
            category = item["productCategory"]
            if category in found:
                continue
            # An empty category is reported as "No Category". Note that ''
            # itself is never recorded, so the placeholder repeats once per
            # uncategorised product — preserved from the original behaviour.
            if category != "":
                found.append(category)
            else:
                found.append("No Category")
        return {"categories": found}
class getProductsByCategory(Resource):
    """GET resource: products filtered by category ('No Category' maps to '')."""

    def get(self, productCategory):
        if productCategory != "No Category":
            prod = list(prods.find({"productCategory": productCategory}))
        else:
            # "No Category" is the UI alias for an empty category string.
            prod = list(prods.find({"productCategory": ""}))
        # Replace each BSON ObjectId with its "$oid" string form.
        for i in range(len(prod)):
            auxP = prod[i]
            auxOi = _json_convert(auxP["_id"])
            auxP["_id"] = auxOi["$oid"]
            prod[i] = auxP
        print(prod)
        data = {}
        data["product"] = prod
        data = _json_convert(data)
        return data
|
def setup_package():
    """Package-level test setup hook (intentionally a no-op)."""
    return None
def teardown_package():
    """Package-level test teardown hook (intentionally a no-op)."""
    return None
|
from django.conf.urls import url
from . import views
urlpatterns = [
    # NOTE(review): patterns are unanchored at the end, so '^victim' also
    # matches 'victims', 'victim/x', etc. — confirm intended.
    url(r'^victim', views.index, name='index'),
    url(r'^form', views.form, name='form'),
]
|
def ready_operations(bot):
    """Log the signed-in bot's identity to stdout, one item per line."""
    for item in ("Logged in as", bot.user.name, bot.user.id, '------'):
        print(item)
# Canned help text pointing users at the project repository.
github = "See my GitHub for help and other information. " \
         "https://github.com/ElectricNinja315/harvest-bot"
# Default search string for the meme-music command.
meme_music = "billy bragg the internationale"
# League of Legends champion pool for random picks.
# NOTE(review): "Ammumu" and "Xaya" look like misspellings of "Amumu" and
# "Xayah" — left unchanged because consumers may match these exact strings.
champions = [
    "Aatrox",
    "Ahri",
    "Akali",
    "Alistar",
    "Ammumu",
    "Anivia",
    "Annie",
    "Ashe",
    "Aurelion Sol",
    "Azir",
    "Bard",
    "Blitzcrank",
    "Brand",
    "Braum",
    "Caitlyn",
    "Camille",
    "Cassiopeia",
    "Cho'gath",
    "Corki",
    "Darius",
    "Diana",
    "Dr. Mundo",
    "Draven",
    "Ekko",
    "Elise",
    "Evelynn",
    "Ezreal",
    "Fiddlesticks",
    "Fiora",
    "Fizz",
    "Galio",
    "Gangplank",
    "Garen",
    "Gnar",
    "Gragas",
    "Graves",
    "Hecarim",
    "Heimerdinger",
    "Illaoi",
    "Irelia",
    "Ivern",
    "Janna",
    "Jarvan IV",
    "Jax",
    "Jayce",
    "Jhin",
    "Jinx",
    "Kalista",
    "Karma",
    "Karthus",
    "Kassadin",
    "Katarina",
    "Kayle",
    "Kayn",
    "Kennen",
    "Kha'Zix",
    "Kindred",
    "Kled",
    "Kog'Maw",
    "LeBlanc",
    "Lee Sin",
    "Leona",
    "Lissandra",
    "Lucian",
    "Lulu",
    "Lux",
    "Malphite",
    "Malzahar",
    "Maokai",
    "Master Yi",
    "Miss Fortune",
    "Mordekaiser",
    "Morgana",
    "Nami",
    "Nasus",
    "Nautilus",
    "Nidalee",
    "Nocturne",
    "Nunu",
    "Olaf",
    "Orianna",
    "Ornn",
    "Pantheon",
    "Poppy",
    "Quinn",
    "Rakan",
    "Rammus",
    "Rek'Sai",
    "Renekton",
    "Rengar",
    "Riven",
    "Rumble",
    "Ryze",
    "Sejuani",
    "Shaco",
    "Shen",
    "Shyvana",
    "Singed",
    "Sion",
    "Sivir",
    "Skarner",
    "Sona",
    "Soraka",
    "Swain",
    "Syndra",
    "Tahm Kench",
    "Taliyah",
    "Talon",
    "Taric",
    "Teemo",
    "Thresh",
    "Tristana",
    "Trundle",
    "Tryndamere",
    "Twisted Fate",
    "Twitch",
    "Udyr",
    "Urgot",
    "Varus",
    "Vayne",
    "Veigar",
    "Vel'Koz",
    "Vi",
    "Viktor",
    "Vladimir",
    "Volibear",
    "Warwick",
    "Wukong",
    "Xaya",
    "Xerath",
    "Xin Zhao",
    "Yasuo",
    "Yorick",
    "Zac",
    "Zed",
    "Ziggs",
    "Zilean",
    "Zoe",
    "Zyra"
]
# Item-build archetypes for random picks.
builds = [
    "Full AP",
    "Full AD",
    "Full lethality",
    "Full tank",
    "Full attack speed",
    "Full movement speed",
    "On-hit",
    "Crit",
    "Full CDR",
    "Hybrid AP + AD",
    "Bruiser",
    "Full lifesteal"
]
# Map positions for random picks.
roles = [
    "ADC",
    "mid",
    "top",
    "supp",
    "jungle"
]
|
import gpbasics.global_parameters as global_param
global_param.ensure_init()
from sklearn.cluster import DBSCAN
from typing import List
import numpy as np
import gpbasics.KernelBasics.PartitioningModel as pm
import gpbasics.DataHandling.DataInput as di
import gpbasics.Metrics.Metrics as met
import time
import logging
class ClusterCenterCriterion(pm.PartitionCriterion):
    """Partition criterion that scores points by Euclidean distance to a fixed center."""

    def __init__(self, cluster_center: np.ndarray):
        # A center must be a flat [d,] vector, one value per input dimension.
        assert len(cluster_center.shape) == 1, "Invalid cluster center given: shape=%s. " \
                                              "Shape of cluster center has to be [d,]." % str(cluster_center.shape)
        super().__init__(pm.PartitioningClass.SMALLEST_DISTANCE)
        self.cluster_center: np.ndarray = cluster_center

    def get_score(self, x_vector: np.ndarray) -> np.ndarray:
        """Per-row Euclidean distance between x_vector and the center."""
        diff = x_vector - self.cluster_center
        return np.sqrt(np.sum(diff * diff, axis=1))

    def deepcopy(self):
        """Independent copy holding its own copy of the center vector."""
        return ClusterCenterCriterion(self.cluster_center.copy())

    def get_json(self) -> dict:
        """JSON-serialisable description of this criterion."""
        return {"type": "cluster_center", "center": self.cluster_center.tolist()}
class DbscanModel(pm.PartitioningModel):
    """Partitioning model that derives cluster-center criteria from a DBSCAN run."""

    def __init__(self, ignored_dimensions: List[int]):
        super(DbscanModel, self).__init__(pm.PartitioningClass.SMALLEST_DISTANCE, ignored_dimensions)
        # Intended upper bound on the number of points per partition.
        self.max_window_size = 500

    def automatic_init_criteria(self, data_input: di.DataInput, optimize_metric: met.MetricType,
                                model_selection_metric: met.MetricType):
        """Cluster the training inputs with DBSCAN and install one
        ClusterCenterCriterion per discovered cluster.

        Fixes: sklearn's DBSCAN exposes no ``cluster_centers_`` attribute
        (that belongs to KMeans), so the original raised AttributeError;
        centers are now computed as the mean of each cluster's member points
        (noise, label -1, is excluded). ``min_samples`` is also passed as an
        int, as scikit-learn expects.
        """
        logging.info("Setting up Automatically determining Partitioning Criteria by KMeans")
        clusters_k = int(data_input.n_train / self.max_window_size)
        filtered_x_train = self.filter_data_by_ignored_dimensions(data_input.data_x_train)
        logging.info("Starting determining Partitioning Criteria by KMeans. k=%i, shape=%s"
                     % (clusters_k, str(filtered_x_train.shape)))
        eps_factor = float(np.abs(clusters_k * 2))
        min_samples = int(np.abs(data_input.n_train / self.max_window_size))
        start_time = time.time()
        # Heuristic eps: total data range divided by twice the target cluster count.
        eps = (np.sum(np.max(data_input.data_x_train) - np.min(data_input.data_x_train))) / eps_factor
        dbscan = DBSCAN(eps=eps, min_samples=min_samples).fit(filtered_x_train)
        logging.info("Finished partitioning in %f s" % (time.time() - start_time))
        # Label -1 is DBSCAN noise; report its size alongside the real clusters.
        partition_sizes = [len(dbscan.labels_[dbscan.labels_ == -1])]
        for i in range(np.max(dbscan.labels_) + 1):
            partition_sizes.append(len(dbscan.labels_[dbscan.labels_ == i]))
        logging.info("Partition (#%i) sizes statistics: min %f, max %f, mean %f, median %f"
                     % (len(partition_sizes), np.min(partition_sizes), np.max(partition_sizes),
                        float(np.mean(partition_sizes)), float(np.median(partition_sizes))))
        partitioning: List[ClusterCenterCriterion] = []
        logging.info("Initializing partition criteria by means of previously determined cluster centers.")
        for label in range(np.max(dbscan.labels_) + 1):
            members = filtered_x_train[dbscan.labels_ == label]
            partitioning.append(ClusterCenterCriterion(np.mean(members, axis=0)))
        self.init_partitioning(partitioning)

    def deepcopy(self):
        """Deep copy including per-criterion copies of the partitioning."""
        partitioning: List[ClusterCenterCriterion] = [pc.deepcopy() for pc in self.partitioning]
        copied_self = DbscanModel(self.ignored_dimensions.copy())
        copied_self.init_partitioning(partitioning)
        return copied_self
|
import re

my_str = '''Write a regular expression that
can find all amounts of money in a text. Your
expression should be able to deal with different formats
and currencies, for example £50,000 and £117.83m as well as 300p,
500m euro, 338bn euros, $150bn and $92.88. Make sure that
you can at least detect amounts in Pounds, Dollars and Euros, and $100m with that 100p'''

# Fix: the previous pattern used an unescaped '.' (matches any character) and
# required at least one "decimal" group, so plain amounts such as "300p" or
# "£50,000" were not matched correctly. The pattern below matches an optional
# currency symbol, digits with optional thousands separators and decimals,
# and optional magnitude (m/bn) / pence / euro suffixes.
patt = re.compile(r'[£$€]?\d+(?:,\d{3})*(?:\.\d+)?(?:\s?(?:bn|m|p)\b)?(?:\s?euros?)?')
for match in patt.finditer(my_str):
    print(match)
|
import os
import yaml
import pprint
import traceback
import src.libs.gen_utils as gen_utils
from src.classes.software import Software
from src.definitions import BACKUP_FOLDER, TEMPLATES_FOLDER
from getpass import getuser
from jinja2 import Template
class Themer():
    """Apply a YAML theme configuration to a set of software targets."""

    def __init__(self, config_file, **kwargs):
        self.softwares = []
        self.config_file = config_file
        # Parsed theme dict (wildcards expanded, YAML loaded).
        self.configuration = self.parse_theme()

    def instanciate_softwares(self):
        # NOTE: "instanciate" spelling kept — it is part of the public API.
        """Build one Software object per configured entry."""
        for software_config in self.configuration["softwares"]:
            software_config["templates"] = self.expand_template(software_config["templates"])
            software = Software(
                software_config["name"],
                software_config["templates"],
                software_config.get("refresh",[]),
                BACKUP_FOLDER
            )
            self.softwares.append(software)

    def expand_template(self, templates):
        """Prefix every template's src path with the templates folder."""
        expanded_templates = []
        for template in templates:
            template["src"] = os.path.join(TEMPLATES_FOLDER, template["src"])
            expanded_templates.append(template)
        return expanded_templates

    def execute(self):
        """Instantiate targets, back up existing configs, then configure each."""
        self.instanciate_softwares()
        gen_utils.create_backup_dir(BACKUP_FOLDER)
        for software in self.softwares:
            print("Configuring {} ...".format(software.name))
            software.configure()

    def parse_theme(self):
        """Expand wildcards in the config file path/content and load the YAML."""
        parsed_theme = gen_utils.expand_wilcards(self.config_file)
        return gen_utils.load_yaml(parsed_theme)

    def __str__(self):
        pp = pprint.PrettyPrinter()
        return pp.pformat(self.configuration)
|
import pathlib
from setuptools import setup, find_packages
from fcrawler import __version__
here = pathlib.Path(__file__).parent.resolve()
# The long description is taken verbatim from the README.
long_description = (here / 'README.md').read_text(encoding='utf-8')

setup(
    name="fcrawler",
    version=__version__,
    description="Python application that can be used to copy files of a given file type from a folder directory.",
    long_description=long_description,
    long_description_content_type="text/markdown",
    author="tharindu.dev",
    author_email="tharindu.nm@yahoo.com",
    url="https://github.com/truethari/fcrawler",
    keywords="files copy directory crawl",
    license='MIT',
    project_urls={
        "Bug Tracker": "https://github.com/truethari/fcrawler/issues",
    },
    classifiers=[
        "Programming Language :: Python :: 3",
        "License :: OSI Approved :: MIT License",
    ],
    packages=['fcrawler'],
    include_package_data=True,
    install_requires=["pyfiglet"],
    entry_points={
        # Installs the `fcrawler` console command.
        "console_scripts": [
            "fcrawler=fcrawler.__main__:main",
        ]
    },
)
|
# coding: utf-8
import tensorflow as tf
import numpy as np
import os
from settings import DIR2SAVE_CARD_NO_RECORDS, \
CARDNO_IMG_HEIGHT, CARDNO_IMG_CHANNELS, DIR2SAVE_CARDNO_EVAL_RECORDS, \
encode_map, decode_map
import threading
# Output directories for training/eval TFRecords, taken from project settings.
dir2save_train_records = DIR2SAVE_CARD_NO_RECORDS
dir2save_eval_records = DIR2SAVE_CARDNO_EVAL_RECORDS
# Shard filename template, e.g. "records-0001-of-0005.tfrecords".
_record_format = 'records-{:>04}-of-{:>04}.tfrecords'
def int64_feature(data):
    """Wrap a single int in a tf.train.Feature (Int64List)."""
    return tf.train.Feature(int64_list=tf.train.Int64List(value=[data]))
def int64_list_feature(data):
    """Wrap an iterable of ints in a tf.train.Feature (Int64List)."""
    return tf.train.Feature(int64_list=tf.train.Int64List(value=data))
def bytes_feature(data):
    """Wrap a single bytes value in a tf.train.Feature (BytesList)."""
    return tf.train.Feature(bytes_list=tf.train.BytesList(value=[data]))
def encode_label2list(label):
    """Map each character of *label* to its integer code, skipping '_' padding."""
    return [encode_map[ch] for ch in label if ch != '_']
def decode2label(codes):
    """Inverse of encode_label2list: map integer codes back to characters."""
    return [decode_map[code] for code in codes]
def create_tf_example(encoded_image, label, width, height=CARDNO_IMG_HEIGHT, channels=CARDNO_IMG_CHANNELS):
    """Build a tf.train.Example holding one encoded image, its text label and geometry."""
    return tf.train.Example(features=tf.train.Features(feature={
        'image_raw': bytes_feature(encoded_image),
        # bytes(...) around encode() is redundant (encode already yields bytes)
        # but harmless; kept as-is.
        'label_raw': bytes_feature(bytes(label.encode('utf-8'))),
        'width': int64_feature(width),
        'height': int64_feature(height),
        'channels': int64_feature(channels)
    }))
class CreateTFRecordsThread(threading.Thread):
    """
    Worker thread for writing one TFRecord file (enables concurrent sharding).
    """
    def __init__(self, images, labels, widths, filename, *args, **kwargs):
        super(CreateTFRecordsThread, self).__init__(*args, **kwargs)
        self.images = images
        self.labels = labels
        self.filename = filename
        self.widths = widths

    def run(self):
        """Serialise every (image, label, width) triple into self.filename."""
        with tf.python_io.TFRecordWriter(self.filename) as writer:
            examples = [create_tf_example(image, label, width) for image, label, width in
                        zip(self.images, self.labels, self.widths)]
            for example in examples:
                writer.write(example.SerializeToString())
        print("{} has been done.".format(self.filename))
def create_tf_records(images, labels, widths, shard_num=5):
    """
    Convert images/labels/widths into sharded TFRecord files.

    The last 5% of the data becomes a single evaluation file; the first 95%
    is split into `shard_num` training shards, each written by its own thread.
    :param images: encoded image bytes, one per example
    :param labels: label strings, one per example
    :param widths: image widths, one per example
    :param shard_num: number of training shard files to produce
    :return: None
    """
    eval_data_divider = int(len(images) * 0.95)
    eval_images, eval_labels, eval_widths = [data[eval_data_divider:] for data in (images, labels, widths)]
    eval_filename = os.path.join(dir2save_eval_records, 'eval.tfrecords')
    train_images, train_labels, train_widths = [data[:eval_data_divider] for data in (images, labels, widths)]
    train_shard_size = len(train_images) // shard_num
    threads = []
    print('Writing {} tf records for evaluation.'.format(len(eval_images)))
    threads.append(CreateTFRecordsThread(eval_images, eval_labels, eval_widths, eval_filename))
    print('Writing {} tf records for training.'.format(len(train_images)))
    for start_i in range(shard_num):
        shard_start_i = start_i * train_shard_size
        # The last shard absorbs the division remainder.
        shard_end_i = (start_i + 1) * train_shard_size if start_i < shard_num - 1 else len(train_images)
        shard_filename = os.path.join(dir2save_train_records,
                                      _record_format.format(start_i, shard_num))
        threads.append(
            CreateTFRecordsThread(train_images[shard_start_i: shard_end_i],
                                  train_labels[shard_start_i: shard_end_i],
                                  train_widths[shard_start_i: shard_end_i], shard_filename))
    # Start all writers in parallel, then wait for every shard to finish.
    for thread in threads:
        thread.start()
    for thread in threads:
        thread.join()
    print('done')
def parse_labels_to_sparse_tuple(labels):
    """
    Convert a batch of label code sequences into the (indices, values, shape)
    triple expected when building a tf.SparseTensor for tf.nn.ctc_loss.
    """
    indices = []
    values = []
    for row, label_seq in enumerate(labels):
        for col, code in enumerate(label_seq):
            indices.append((row, col))
            values.append(code)
    indices = np.asarray(indices, np.int64)
    # Dense shape: batch size by the longest sequence length seen.
    max_seq_len = indices.max(0)[1] + 1
    shape = [len(labels), max_seq_len]
    return indices, values, shape
def parse_label_bytes_to_sparse_tuple(label_bytes):
    """
    Decode raw utf-8 label bytes, encode the characters to integer codes,
    then build the sparse-tensor tuple for ctc_loss.
    """
    decoded_labels = (label.decode('utf-8') for label in label_bytes)
    encoded_labels = [encode_label2list(label) for label in decoded_labels]
    return parse_labels_to_sparse_tuple(encoded_labels)
def get_weights(shape, regularizer=None):
    """Create (or fetch) a truncated-normal 'weight' variable; when a
    regularizer is given, register its loss in the 'losses' collection."""
    initializer = tf.truncated_normal_initializer(stddev=0.1)
    weight = tf.get_variable('weight', shape, initializer=initializer, dtype=tf.float32)
    if regularizer is not None:
        tf.add_to_collection('losses', regularizer(weight))
    return weight
def get_bias(shape):
    """Create (or fetch) a zero-initialized 'bias' variable."""
    initializer = tf.constant_initializer(value=.0)
    return tf.get_variable('bias', shape, initializer=initializer, dtype=tf.float32)
|
from unittest import TestCase, TestSuite, TextTestRunner, main
from sample_problem import sample_problem
class SampleBeginnerTestCase(TestCase):
    """Smoke test for sample_problem (expected to echo its argument)."""
    def test_sample_problem(self):
        # sample_problem should return its input unchanged.
        self.assertEqual(sample_problem('test1'), 'test1')
        self.assertEqual(sample_problem('test2'), 'test2')
        print("\n.Passed sample_problem with no errors!")
def test_one(test_name):
    """Run a single named test method from SampleBeginnerTestCase."""
    single_test = SampleBeginnerTestCase(test_name)
    suite = TestSuite()
    suite.addTest(single_test)
    TextTestRunner().run(suite)
# Run the full test case when this file is executed directly.
if __name__ == "__main__":
    main()
|
import os
import pathlib
from setuptools import setup
import ast

HERE = pathlib.Path(os.path.abspath(os.path.dirname(__file__)))
# Extract __version__ from snails.py without importing the module
# (importing could trigger side effects during packaging).
VERSION = "VERSION-NOT-FOUND"
for line in (HERE / "snails.py").read_text().split("\n"):
    if line.startswith("__version__"):
        # BUG FIX: the original used eval(), which executes arbitrary code
        # from the scanned file; ast.literal_eval only parses a literal.
        VERSION = ast.literal_eval(line.split("=")[-1].strip())
README = (HERE / "README.rst").read_text()
# Runtime dependencies.
REQUIREMENTS = [
    "aiosmtpd"
]
# Standard setuptools packaging, guarded so importing this file is side-effect free.
if __name__ == "__main__":
    setup(
        name="snails",
        version=VERSION,
        description="minimal smtpd handler",
        long_description=README,
        long_description_content_type="text/x-rst",
        author="Joe Cross",
        author_email="joe.mcross@gmail.com",
        url="https://github.com/numberoverzero/snails",
        license="MIT",
        platforms="any",
        py_modules=["snails"],
        install_requires=REQUIREMENTS,
    )
|
#
#
#
# DEPRECATED - put POST handler functions in handler classes
#
#
#
#
#
#
#!/usr/bin/env python
#
# Copyright 2012 Andy Gimma
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# System libraries.
from wtforms import Form, BooleanField, TextField, validators, PasswordField, ValidationError, RadioField, SelectField
import cgi
import jinja2
import logging
import os
import json
import urllib2
import wtforms.validators
# Local libraries.
import base
import event_db
import site_db
import site_util
import cache
from datetime import datetime
import settings
from google.appengine.ext import db
import organization
import primary_contact_db
import random_password
# Jinja2 environment rooted at this file's directory; all admin pages render
# from the admin.html template.
jinja_environment = jinja2.Environment(
    loader=jinja2.FileSystemLoader(os.path.dirname(__file__)))
template = jinja_environment.get_template('admin.html')
#CASE_LABELS = settings.CASE_LABELS
#COUNT = 26
# Organization name that identifies the single global administrator account.
GLOBAL_ADMIN_NAME = "Admin"
# Cache lifetime (seconds) for contact/organization entities.
ten_minutes = 600
class AdminHandler(base.AuthenticatedHandler):
    """Admin console handler.

    POST dispatches on which form button was submitted (create_contact,
    create_admin, delete_org_id, delete_contact_id, verify_organization,
    save_org_id); GET renders the admin dashboard.  Only the global admin
    organization (named GLOBAL_ADMIN_NAME) or an event-local admin
    organization may use this handler.
    """

    def AuthenticatedPost(self, org, event):
        """Handle all admin POST actions submitted by *org* for *event*."""
        # Work out the caller's privilege level; anyone else is bounced home.
        global_admin = False
        local_admin = False
        if org.name == GLOBAL_ADMIN_NAME:
            global_admin = True
        if org.is_admin == True and global_admin == False:
            local_admin = True
        if global_admin == False and local_admin == False:
            self.redirect("/")
            return
        if self.request.get("create_contact"):
            # Create a new contact attached to a chosen organization.
            data = primary_contact_db.ContactFormFull(self.request.POST)
            if data.validate():
                organization_id = self.request.get("choose_organization")
                try:
                    id = int(organization_id)
                except:
                    return
                this_organization = organization.Organization.get_by_id(id)
                if not org.may_administer(this_organization):
                    self.abort(403)
                contact = primary_contact_db.Contact(
                    first_name=data.first_name.data,
                    last_name=data.last_name.data,
                    title=data.title.data,
                    phone=data.phone.data,
                    email=data.email.data,
                    is_primary=bool(data.is_primary.data),
                    organization=this_organization.key(),
                )
                primary_contact_db.PutAndCache(contact, ten_minutes)
                self.redirect("/admin-create-contact?selected_org=%s&message=Contact Created" % this_organization.key().id())
                return
            else:
                # Validation failed: re-render the form with its errors and
                # the organization list for the chooser.
                #query_string = "SELECT * FROM Event"
                #events_list = db.GqlQuery(query_string)
                suggested_password = random_password.generate_password()
                query_string = "SELECT * FROM Organization"
                organization_list = db.GqlQuery(query_string)
                self.response.out.write(template.render(
                    {
                        "form": data,
                        "errors": data.errors,
                        "create_contact": True,
                        "organization_list": organization_list,
                    }))
                return
        if self.request.get("create_admin"):
            # Create a new event-admin organization plus its primary contact.
            data = organization.OrganizationAdminForm(self.request.POST)
            event_id = self.request.get("choose_event")
            try:
                id = int(event_id)
            except:
                return
            this_event = event_db.Event.get_by_id(id)
            # Local admins may only create admins for their own event.
            if local_admin:
                if not this_event.key() == event.key():
                    self.redirect("/")
                    return
            if data.validate():
                new_org = organization.Organization(name = data.name.data,
                        email = data.email.data,
                        phone = data.phone.data,
                        address = data.address.data,
                        city = data.city.data,
                        state = data.state.data,
                        zip_code = data.zip_code.data,
                        physical_presence = True,
                        number_volunteers = "0",
                        voad_member = False,
                        org_verified=True,
                        twitter = data.twitter.data,
                        url = data.url.data,
                        facebook = data.facebook.data,
                        incidents = [this_event.key()],
                        password = self.request.get("password"),
                        is_active = True,
                        is_admin = True,
                        )
                # set all phase fields true for admin
                for phase_name in new_org.get_phase_boolean_names():
                    setattr(new_org, phase_name, True)
                new_contact = primary_contact_db.Contact(
                    first_name=data.contact_first_name.data,
                    last_name=data.contact_last_name.data,
                    title=data.contact_title.data,
                    email=data.contact_email.data,
                    phone=data.contact_phone.data,
                    is_primary=True
                )
                organization.PutAndCacheOrganizationAndContact(organization = new_org,
                                                               contact = new_contact,
                                                               )
                self.redirect("/admin?message=Admin Created")
                return
            else:
                # needs events lists, password, errors
                query_string = "SELECT * FROM Event"
                suggested_password = random_password.generate_password()
                self.response.out.write(template.render(
                    {
                        "form": data,
                        "errors": data.errors,
                        "create_admin": True,
                        #"events_list": events_list,
                        "auto_password": suggested_password,
                    }))
                return
        if self.request.get("delete_org_id"):
            # delete organization (and detach it from its contacts first)
            try:
                id = int(self.request.get("delete_org_id"))
                org_by_id = organization.Organization.get_by_id(id)
            except:
                self.abort(400)
            if not org.may_administer(org_by_id):
                self.abort(403)
            primary_contact_db.RemoveOrgFromContacts(org_by_id)
            db.delete(org_by_id)
            self.redirect("/admin")
            return
        if self.request.get("delete_contact_id"):
            # delete contact
            try:
                id = int(self.request.get("delete_contact_id"))
                contact_by_id = primary_contact_db.Contact.get_by_id(id)
            except:
                self.abort(400)
            # BUG FIX: this branch previously checked org.may_administer(org_by_id),
            # but org_by_id is never assigned here (NameError at runtime).
            # Authorize against the organization the contact belongs to instead.
            if not org.may_administer(contact_by_id.organization):
                self.abort(403)
            db.delete(contact_by_id)
            self.redirect("/admin")
            return
        if self.request.get("verify_organization"):
            # verify organization
            try:
                id = int(self.request.get("verify_organization"))
                org_by_id = organization.Organization.get_by_id(id)
            except:
                self.abort(400)
            # check we are allowed
            if not org.may_administer(org_by_id):
                self.abort(403)
            # perform verification
            org_by_id.verify()
            # cache
            organization.PutAndCache(org_by_id, 600)
            self.redirect("/admin")
            return
        if self.request.get("save_org_id"):
            # mark organization as verified and re-cache it
            try:
                id = int(self.request.get("save_org_id"))
                org_by_id = organization.Organization.get_by_id(id)
            except:
                self.abort(400)
            if not org.may_administer(org_by_id):
                self.abort(403)
            org_by_id.org_verified = True
            organization.PutAndCache(org_by_id, 600)
            self.redirect("/admin")
            return

    def AuthenticatedGet(self, org, event):
        """Render the admin dashboard for global or local admins."""
        # get version dictionary params (best effort; page works without it)
        try:
            with open('version.json') as version_json_fd:
                version_d = json.load(version_json_fd)
        except:
            version_d = None
        # render response
        if org.name == GLOBAL_ADMIN_NAME:
            self.response.out.write(
                template.render({
                    "org": org,
                    "global_admin": True,
                    "message": self.request.get('message'),
                    "version_d": version_d,
                })
            )
            return
        elif org.is_admin == True:
            self.response.out.write(
                template.render({
                    "org": org,
                    "message": self.request.get('message'),
                    "version_d": version_d,
                })
            )
            return
        else:
            self.redirect("/")
|
import asyncio, config, discord, random, aiohttp
import logging
from .utils import instance_tools
log = logging.getLogger()
# Presence strings randomly rotated into the bot's "streaming" status.
statuses = ["OwO whats n!help", "🤔🤔🤔", "👀", "(╯°□°)╯︵ ┻━┻",
            "¯\_(ツ)_/¯", "┬─┬ノ(ಠ_ಠノ)", "><(((('>", "_/\__/\__0>", "ô¿ô", "°º¤ø,¸¸,ø¤º°`°º¤ø,", "=^..^=",
            "龴ↀ◡ↀ龴", "^⨀ᴥ⨀^", "^⨀ᴥ⨀^", "⨌⨀_⨀⨌", "•|龴◡龴|•", "ˁ˚ᴥ˚ˀ", "⦿⽘⦿", " (╯︵╰,)",
            " (╯_╰)", "㋡", "ˁ˚ᴥ˚ˀ", "\(^-^)/", "uwu", ":lurk:", "b-baka >_<",
            "I-it's not like I like you or anything!", "ばか >_<", "(//・.・//)", "....T-Thanks.....",
            "Hmph"]
class DiscordBotsOrgAPI:
    """Cog that periodically refreshes the bot's presence and publishes
    guild/shard counts to the bot-list sites (discordbots.org,
    discord.bots.gg, discord.services)."""
    def __init__(self, bot):
        self.bot = bot
        # Guard so postloop's infinite loop is only started once.
        self.has_started = 0
        # NOTE(review): stored but never read below — the POST uses
        # config.dbots_key directly.
        self.token = config.dbots_key
    async def postloop(self):
        """Every 30 minutes: update presence and, on instance 0 only,
        POST server counts to the three bot-list APIs."""
        if not self.has_started == 1:
            self.has_started = 1
            while True:
                log.info("Getting all servers.")
                log.info("Attempting to update server count.")
                # Aggregate guild count across all bot instances via redis.
                i = instance_tools.InstanceTools(self.bot.instances, self.bot.redis)
                guilds = await i.get_all_guilds()
                game = discord.Streaming(name=random.choice(statuses), url="https://www.twitch.tv/nekoboat")
                await self.bot.change_presence(activity=game)
                log.info("Servers: %s" % guilds)
                if self.bot.instance == 0:
                    async with aiohttp.ClientSession() as cs:
                        await cs.post(
                            "https://discordbots.org/api/bots/310039170792030211/stats",
                            json={
                                "server_count": int(guilds),
                                "shard_count": self.bot.shard_count
                            },
                            headers={
                                "Authorization": config.dbots_key
                            }
                        )
                        await cs.post(
                            "https://discord.bots.gg/api/v1/bots/310039170792030211/stats",
                            json={
                                "guildCount": int(guilds),
                                "shardCount": self.bot.shard_count
                            },
                            headers={
                                "Authorization": config.dpw_key
                            }
                        )
                        await cs.post(
                            "https://discord.services/api/bots/310039170792030211",
                            json={
                                "guild_count": int(guilds)
                            },
                            headers={
                                "Authorization": config.ds_key
                            }
                        )
                await asyncio.sleep(1800)
    async def on_ready(self):
        # Kick off the posting loop once the bot is connected.
        await self.postloop()
def setup(bot):
    """Standard discord.py extension entry point: register the cog."""
    cog = DiscordBotsOrgAPI(bot)
    bot.add_cog(cog)
|
# Name of the dataset configuration to use.
env_dataset = "mnist_full"
|
# Python Exercise 026: read a phrase from the keyboard and report how many
# times the letter "A" appears, and the (1-based) positions of its first
# and last occurrence.
phrase = str(input('Enter a phrase: ')).upper().strip()
total_a = phrase.count('A')
first_a = phrase.find('A') + 1
last_a = phrase.rfind('A') + 1
print(total_a, first_a, last_a)
|
"""
lec3
"""
my_list = [1,2,3,4,5]
print(my_list)
my_nested_list = [1,2,3,[1,2,3,4,5]]
print(my_nested_list)
my_list[0]=6
print(my_list)
print(my_list[0])
print(my_list[1])
print(my_list[-1])
print(my_list)
print(my_list[1:3])
print(my_list[:])
|
#!/usr/bin/python
# Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
# for details. All rights reserved. Use of this source code is governed by a
# BSD-style license that can be found in the LICENSE file.
"""This module provides maps and sets that report unused elements."""
_monitored_values = []
def FinishMonitoring(includeDart2jsOnly, logger):
    """Report unused entries for every registered monitored collection."""
    for collection in _monitored_values:
        # Skip dart2js-only collections unless the caller asked for them.
        if collection._dart2jsOnly and not includeDart2jsOnly:
            continue
        collection.CheckUsage(logger)
class MonitoredCollection(object):
    """Base for monitored wrappers; remembers which keys were looked up."""
    def __init__(self, name, dart2jsOnly):
        self._dart2jsOnly = dart2jsOnly
        self.name = name
        self._used_keys = set()
        # Register globally so FinishMonitoring can audit this collection.
        _monitored_values.append(self)
class Dict(MonitoredCollection):
    """Wrapper for a dict that reports unused keys."""

    def __init__(self, name, map, dart2jsOnly=False):
        super(Dict, self).__init__(name, dart2jsOnly)
        self._map = map

    def __getitem__(self, key):
        self._used_keys.add(key)
        return self._map[key]

    def __setitem__(self, key, value):
        self._map[key] = value

    def __contains__(self, key):
        self._used_keys.add(key)
        return key in self._map

    def __iter__(self):
        return iter(self._map)

    def get(self, key, default=None):
        self._used_keys.add(key)
        return self._map.get(key, default)

    def keys(self):
        return self._map.keys()

    def CheckUsage(self, logger):
        """Warn about every key that was never read via this wrapper."""
        unused = [key for key in sorted(self._map.keys())
                  if key not in self._used_keys]
        for key in unused:
            logger.warn('dict \'%s\' has unused key \'%s\'' % (self.name, key))
class Set(MonitoredCollection):
    """Wrapper for a set that reports unused keys."""

    def __init__(self, name, a_set, dart2jsOnly=False):
        super(Set, self).__init__(name, dart2jsOnly)
        self._set = a_set

    def __contains__(self, key):
        self._used_keys.add(key)
        return key in self._set

    def __iter__(self):
        return self._set.__iter__()

    def add(self, key):
        # BUG FIX: the original did `self._set += [key]`, which raises
        # TypeError when the backing collection is an actual set (set does
        # not support += with a list).  Use the set's own add().
        self._set.add(key)

    def CheckUsage(self, logger):
        """Warn about every element that was never tested via this wrapper."""
        for v in sorted(self._set):
            if v not in self._used_keys:
                logger.warn('set \'%s\' has unused key \'%s\'' % (self.name, v))
|
import keen
import subprocess
import copy
from collections import defaultdict
# Keen.io project credentials and the local git state used to tag events.
# NOTE(review): API keys are hard-coded in source; prefer environment variables.
keen.project_id = "594bd6a50935ce9ceaaaaf63"
keen.read_key = '7856E5E161A29A43701E1F65BCCB6FDD1C0DF3B3624A5212C9946D8419C98A6E8F32D0BEA5616B60E92C4567571E23B32EAF7438458814654F7DE37F885E59D15F434D19C386D975F907B890F9F546D1CE2D3DA2F400C437557150639A37AF4B'
keen.write_key = "C32909E224E40349F0EDF73F18D1ED18EDCF28B2831DA2C45A5EBF07FEA9BC61243413CC58120C65D58CC488E711B351D1CA43C97035565E6BA854C536E6AACC6E3BB1FA6034BEB8DF905628463AF380BB90A497C87ACDBB21D18F7F5A41B93B"
# Current commit hash and branch name of the working tree.
local_head = subprocess.check_output(['git', 'log', '-n', '1', '--pretty=format:%H']).decode("utf-8")
current_branch = subprocess.check_output(['git', 'rev-parse', '--abbrev-ref', 'HEAD']).decode("utf-8").strip()
def get_master_errors(stream):
    """Fetch the most recent error-count event recorded for the master
    (HEAD) branch; raise ValueError when none exists."""
    branch_filter = [{"property_name": "branch", "operator": "eq",
                      "property_value": 'HEAD'}]
    events = keen.extraction(stream, 'this_3_years', filters=branch_filter)
    if not events:
        raise ValueError('No data for master branch found')
    return events[-1]
def report_errors(stream, error_counts):
    """Send the current error counts, tagged with commit and branch, to keen."""
    payload = copy.copy(error_counts)
    payload['commit'] = local_head
    payload['branch'] = current_branch
    keen.add_event(stream, payload)
def find_new_errors(script, errors):
    """Return the error types whose counts exceed the master-branch baseline."""
    baseline = defaultdict(int)
    baseline.update(get_master_errors(script))
    regressions = []
    for err_type, count in errors.items():
        delta = count - baseline[err_type]
        # Flag any error type that got worse relative to master.
        if delta > 0:
            print("%s new %s errors detected" % (delta, err_type))
            regressions.append(err_type)
    return regressions
|
#!/usr/bin/env python3
"""Play a MIDI file.
This uses the "mido" module for handling MIDI: https://mido.readthedocs.io/
Pass the MIDI file name as first command line argument.
If a MIDI port name is passed as second argument, a connection is made.
"""
import sys
import threading
import jack
from mido import MidiFile
# Parse the command line: argv[1] = MIDI file, optional argv[2] = port to connect.
argv = iter(sys.argv)
next(argv)  # skip the program name
filename = next(argv, '')
connect_to = next(argv, '')
if not filename:
    sys.exit('Please specify a MIDI file')
try:
    # Iterating a MidiFile yields messages with delta times in seconds.
    mid = iter(MidiFile(filename))
except Exception as e:
    sys.exit(type(e).__name__ + ' while loading MIDI: ' + str(e))
client = jack.Client('MIDI-File-Player')
port = client.midi_outports.register('output')
# Set when playback finishes or the JACK server shuts down.
event = threading.Event()
msg = next(mid)  # first message, emitted at frame offset 0
fs = None  # sampling rate, filled in by the samplerate callback
offset = 0  # frame offset of the pending message within the current period
@client.set_process_callback
def process(frames):
    """JACK process callback: emit every MIDI message due in this period.

    `offset` carries the frame position of the pending message; when it
    falls beyond this period it is carried into the next one.
    """
    global offset
    global msg
    port.clear_buffer()
    while True:
        if offset >= frames:
            offset -= frames
            return  # We'll take care of this in the next block ...
        # Note: This may raise an exception:
        port.write_midi_event(offset, msg.bytes())
        try:
            msg = next(mid)
        except StopIteration:
            # End of file: wake the main thread and stop this callback.
            event.set()
            raise jack.CallbackExit
        # Convert the next message's delta time (seconds) into frames.
        offset += round(msg.time * fs)
@client.set_samplerate_callback
def samplerate(samplerate):
    """Record the JACK sample rate; needed to convert seconds to frames."""
    global fs
    fs = samplerate
@client.set_shutdown_callback
def shutdown(status, reason):
    """Unblock the main thread if the JACK server goes away."""
    print('JACK shutdown:', reason, status)
    event.set()
# Activate the client, optionally connect the output port, then block until
# playback ends (event set by the callbacks) or the user interrupts.
with client:
    if connect_to:
        port.connect(connect_to)
    print('Playing', repr(filename), '... press Ctrl+C to stop')
    try:
        event.wait()
    except KeyboardInterrupt:
        print('\nInterrupted by user')
|
import sys
import os
import logging
import sentry_sdk
from sentry_sdk.integrations.logging import LoggingIntegration
__author__ = "Noah Hummel"
loggers = dict()
_sentry_dsn = os.environ.get("SENTRY_DSN")
if _sentry_dsn:
sentry_logging = LoggingIntegration(
level=logging.DEBUG,
event_level=logging.WARNING
)
sentry_sdk.init(
dsn=_sentry_dsn,
integrations=[sentry_logging]
)
def get(name):
    """Return a cached DEBUG-level logger for *name* that writes to stdout."""
    if not loggers.get(name):
        # First request for this name: configure and cache the logger.
        logger = logging.getLogger(name)
        stream_handler = logging.StreamHandler(sys.stdout)
        stream_handler.setLevel(logging.DEBUG)
        stream_handler.setFormatter(logging.Formatter(
            '%(asctime)s - %(name)s - %(levelname)s - %(message)s'))
        logger.addHandler(stream_handler)
        logger.setLevel(logging.DEBUG)
        loggers[name] = logger
    return loggers.get(name)
|
"""Tests for CMS app API functionality"""
import pytest
from django.contrib.contenttypes.models import ContentType
from wagtail.core.models import Page
from cms.api import ensure_home_page_and_site
from cms.models import HomePage
@pytest.mark.django_db
def test_ensure_home_page_and_site():
    """
    ensure_home_page_and_site should make sure that a home page is created if one doesn't exist, it is set to be
    a child of the root, and the default Wagtail page is deleted.
    """
    home_page_qset = Page.objects.filter(
        content_type=ContentType.objects.get_for_model(HomePage)
    )
    # Wagtail's bootstrap "Welcome" page lives at depth 2 under the root.
    wagtail_default_page_qset = Page.objects.filter(
        depth=2, content_type=ContentType.objects.get_for_model(Page)
    )
    # Preconditions: a fresh DB has the default page and no custom home page.
    assert home_page_qset.exists() is False
    assert wagtail_default_page_qset.exists() is True
    ensure_home_page_and_site()
    # The default page is gone and a HomePage now sits directly under root.
    assert wagtail_default_page_qset.exists() is False
    home_page = home_page_qset.first()
    assert home_page is not None
    home_page_parents = home_page.get_ancestors()
    assert home_page_parents.count() == 1
    assert home_page_parents.first().is_root() is True
|
# 1.a
# Exercise 1.a: read a table, sort it, then binary-search for a value.
class ejr1a:
    def __init__(self):
        # Read space-separated elements and coerce the whole table to int,
        # float, or str — the first type that fits every element wins.
        while True:
            tabla = input("Escriba los elementos de su tabla separados por espacios: ")
            tabla = tabla.split()
            try:
                for i in range(len(tabla)):
                    tabla[i] = int(tabla[i])
                print("Los ordenaremos como tipo int")
            except:
                try:
                    for i in range(len(tabla)):
                        tabla[i] = float(tabla[i])
                    print("Los ordenaremos como tipo float")
                except:
                    print("Los ordenaremos como tipo string")
            break
        tabla.sort()
        self.tabla = tabla
    def buscar(self, c, m, m0):
        # Recursive search for c around index m (m0 = previous midpoint).
        # NOTE(review): the midpoints int((m0+m)/2) / int((m0-m)/2) do not
        # follow the standard lo/hi bisection scheme; membership is instead
        # guaranteed by the linear `c in self.tabla` check — confirm the
        # recursion terminates for all inputs.
        if type(c) == type(self.tabla[m]):
            if c in self.tabla:
                if c == self.tabla[m]:
                    print(c, "se encuentra en la posicion", m)
                elif c > self.tabla[m]:
                    self.buscar(c, int((m0+m)/2), m)
                elif c < self.tabla[m]:
                    self.buscar(c, int((m0-m)/2), m)
            else:
                print(c, "no esta en la tabla")
        else:
            print(c, "no esta en la tabla")
    def ejecutar_1a(self):
        # Read the target value, coercing to int/float when possible,
        # then start the search at the middle of the table.
        n = input("Escriba el valor de c: ")
        try:
            n = int(n)
        except:
            try:
                n = float(n)
            except:
                pass
        self.buscar(n, int(len(self.tabla)/2), int(len(self.tabla)))
# 1.b
# Exercise 1.b: build a sorted copy of a table by binary insertion.
class ejr1b:
    def __init__(self):
        # Read space-separated elements and coerce the whole table to int,
        # float, or str — the first type that fits every element wins.
        while True:
            t = input("Escriba los elementos de su tabla separados por espacios: ")
            t = t.split()
            try:
                for i in range(len(t)):
                    t[i] = int(t[i])
                print("Los ordenaremos como tipo int")
            except:
                try:
                    for i in range(len(t)):
                        t[i] = float(t[i])
                    print("Los ordenaremos como tipo float")
                except:
                    print("Los ordenaremos como tipo string")
            break
        r = []
        self.t = t   # original input table
        self.r = r   # sorted result, built incrementally
    def crear(self, n, m, m0):
        # Insert element t[n] into the sorted list r by recursive bisection
        # around index m (m0 = previous midpoint), then recurse on t[n+1].
        if n == 0:
            self.r.append(self.t[n])
        else:
            if len(self.r) == 1:
                # Single-element result: place before/at/after it directly.
                if self.r[m] == self.t[n]:
                    self.r.insert(m, self.t[n])
                elif self.r[m] > self.t[n]:
                    self.r.insert(0, self.t[n])
                elif self.r[m] < self.t[n]:
                    self.r.append(self.t[n])
            else:
                if self.t[n] == self.r[m]:
                    self.r.insert(m, self.t[n])
                elif self.t[n] < self.r[m]:
                    if m == 0:
                        self.r.insert(0, self.t[n])
                    else:
                        # Keep bisecting left until the midpoint stops moving.
                        if m0 == m or m0 > m:
                            self.crear(n, int(m/2), m)
                        else:
                            self.r.insert(m, self.t[n])
                elif self.t[n] > self.r[m]:
                    if m == len(self.r) - 1:
                        self.r.append(self.t[n])
                    else:
                        # Keep bisecting right until the midpoint stops moving.
                        if m0 == m or m0 < m:
                            self.crear(n, int((len(self.r)+m)/2), m)
                        else:
                            self.r.insert(m+1, self.t[n])
        # Continue with the next input element until all are placed.
        if len(self.r) < len(self.t):
            self.crear(n+1, int(len(self.r)/2), int(len(self.r)/2))
    def resultado(self):
        # Print the sorted result list.
        print(self.r)
|
# coding: utf-8
from src.unit import PAWN
PASSANT_RIGHT = "pr"
PASSANT_LEFT = "pl"
def calculatePawnMoves(unit, player, game):
    """Compute and record all pawn moves for *unit* owned by *player*."""
    position = game.getPositionOfUnit(unit)
    x, y = position[0], position[1]
    if player == game.white:
        calculateWhitePawnMoves(unit, game, x, y)
    elif player == game.black:
        calculateBlackPawnMoves(unit, game, x, y)
    # En passant applies to both colors.
    calculateEnPassantMoves(unit, game, x, y)
def calculateWhitePawnMoves(unit, game, x, y):
    """Add forward, double-step, and diagonal-capture moves for a white
    pawn at (x, y); moves onto the last rank become promotions."""
    if y + 1 < game.upperLimit:
        # Single step forward onto an empty square.
        if game.fieldIsEmpty(x, y + 1):
            if y + 2 == game.upperLimit:
                game.addPromotionMovesToUnit(unit, x, y + 1)
            else:
                unit.addMove(str(x) + str(y + 1))
            # Double step from the start rank; only reachable when the
            # square in front was empty, so the pawn cannot jump a piece.
            if game.fieldIsEmpty(x, y + 2) and y == 1:
                unit.addMove(str(x) + str(y + 2))
        # Capture diagonally to the left against a black piece.
        if x - 1 >= game.lowerLimit and not game.fieldIsEmpty(x - 1, y + 1):
            if game.board[x - 1][y + 1].owner == game.black:
                if y + 2 == game.upperLimit:
                    game.addPromotionMovesToUnit(unit, x - 1, y + 1)
                else:
                    unit.addMove(str(x - 1) + str(y + 1))
        # Capture diagonally to the right against a black piece.
        if x + 1 < game.upperLimit and not game.fieldIsEmpty(x + 1, y + 1):
            if game.board[x + 1][y + 1].owner == game.black:
                if y + 2 == game.upperLimit:
                    game.addPromotionMovesToUnit(unit, x + 1, y + 1)
                else:
                    unit.addMove(str(x + 1) + str(y + 1))
def calculateBlackPawnMoves(unit, game, x, y):
    """Add forward, double-step, and diagonal-capture moves for a black
    pawn at (x, y); moves onto the first rank become promotions."""
    if y - 1 >= game.lowerLimit:
        # Single step forward (downward) onto an empty square.
        if game.fieldIsEmpty(x, y - 1):
            if y - 1 == game.lowerLimit:
                game.addPromotionMovesToUnit(unit, x, y - 1)
            else:
                unit.addMove(str(x) + str(y - 1))
            # Double step from the start rank.
            # NOTE(review): fieldIsEmpty(x, y - 2) is evaluated before the
            # `y == 6` rank check, so y - 2 can be negative here; this relies
            # on fieldIsEmpty tolerating such indices — confirm.
            if game.fieldIsEmpty(x, y - 2) and y == 6:
                unit.addMove(str(x) + str(y - 2))
        # Capture diagonally to the left against a white piece.
        if x - 1 >= game.lowerLimit and not game.fieldIsEmpty(x - 1, y - 1):
            if game.board[x - 1][y - 1].owner == game.white:
                if y - 1 == game.lowerLimit:
                    game.addPromotionMovesToUnit(unit, x - 1, y - 1)
                else:
                    unit.addMove(str(x - 1) + str(y - 1))
        # Capture diagonally to the right against a white piece.
        if x + 1 < game.upperLimit and not game.fieldIsEmpty(x + 1, y - 1):
            if game.board[x + 1][y - 1].owner == game.white:
                if y - 1 == game.lowerLimit:
                    game.addPromotionMovesToUnit(unit, x + 1, y - 1)
                else:
                    unit.addMove(str(x + 1) + str(y - 1))
def calculateEnPassantMoves(unit, game, x, y):
    """Add en passant captures for the pawn at (x, y) when an adjacent
    enemy pawn just made a two-square advance (marked via isPassantUnit)."""
    owner = unit.owner
    # En passant is only possible from rank 4 (white) or rank 3 (black).
    if (y == 4 and owner == game.white) or (y == 3 and owner == game.black):
        if x < game.upperLimit - 1:
            opponentUnit = game.board[x + 1][y]
            # Idiom fix: compare against None with `is not`, not `!=`.
            if opponentUnit is not None and opponentUnit.isPassantUnit:
                unit.addMove(PASSANT_RIGHT)
        if x > game.lowerLimit:
            opponentUnit = game.board[x - 1][y]
            if opponentUnit is not None and opponentUnit.isPassantUnit:
                unit.addMove(PASSANT_LEFT)
|
from dearpygui.dearpygui import *
from tkinter import Tk, filedialog
import requests
import socket
import json
# Widget ids for the modal button editor and all of its dynamic children.
ButtonEditor_id = generate_uuid()
category_combo = generate_uuid()
system_combo = generate_uuid()
obs_combo = generate_uuid()
system_selectapp_button = generate_uuid()
system_websiteurl_input = generate_uuid()
system_hotkey_input = generate_uuid()
system_runcmd_input = generate_uuid()
system_selectfolder_button = generate_uuid()
system_selectfile_button = generate_uuid()
system_selectfolder_text = generate_uuid()
system_selectfile_text = generate_uuid()
obs_switchscene_combo = generate_uuid()
obs_togglemute_combo = generate_uuid()
# Available actions per category.
system = ['Run a app', 'Open website', 'Hotkey', 'Run command', 'Open Folder', 'Open File']
obs = ['Switch scene', 'Toggle mute', 'Stop recording', 'Start recording', 'Pause recording', 'Resume recording',
       'Start streaming', 'Stop streaming']
category = ['System', 'OBS']
# Base URL of the companion API server on this machine's LAN address.
# BUG FIX: requests requires an explicit scheme — the bare "ip:port" form
# raised requests.exceptions.MissingSchema on every API call.
localip = 'http://' + [(s.connect(('8.8.8.8', 53)), s.getsockname()[0], s.close()) for s in
                       [socket.socket(socket.AF_INET, socket.SOCK_DGRAM)]][0][1] + ':5280'
def show(sender, app_data, user_data):
    """Open the modal 'Edit Button' window for the button key in *user_data*."""
    key = user_data
    with window(label='Edit Button ' + str(key), id=ButtonEditor_id, width=400, height=250, no_move=True, no_close=False,
                no_scrollbar=False, no_resize=True, modal=True, on_close=close_cb):
        # The category combo drives which follow-up widgets get added.
        add_combo(
            items=category, label='Category', callback=category_cb, default_value='Choose a category to continue',
            no_arrow_button=False, id=category_combo
        )
        add_separator()
def category_cb():
    """Add the action combo matching the chosen category to the editor window."""
    chosen = get_value(category_combo)
    if chosen == category[0]:  # System
        add_combo(
            items=system, label='System action', callback=system_cb, default_value='Choose an action',
            no_arrow_button=False, id=system_combo, parent=ButtonEditor_id
        )
    elif chosen == category[1]:  # OBS
        add_combo(
            items=obs, label='OBS', callback=obs_cb, default_value='Choose an action', no_arrow_button=False,
            id=obs_combo, parent=ButtonEditor_id
        )
def system_cb():
    """Add the input widgets appropriate for the chosen System action."""
    if get_value(system_combo) == system[0]:
        # Run a app
        add_button(label='Select app', id=system_selectapp_button, callback=system_selectapp_cb,
                   parent=ButtonEditor_id)
    elif get_value(system_combo) == system[1]:
        # Open website
        add_input_text(id=system_websiteurl_input, label='URL', hint='https://google.com', uppercase=False,
                       no_spaces=True, parent=ButtonEditor_id)
    elif get_value(system_combo) == system[2]:
        # HotKey
        add_input_text(id=system_hotkey_input, label='HotKey', hint='Ctrl, Alt, Del', parent=ButtonEditor_id)
    elif get_value(system_combo) == system[3]:
        # Run command
        add_input_text(id=system_runcmd_input, label='Command', hint='wsl -d Ubuntu-18.04', parent=ButtonEditor_id)
    elif get_value(system_combo) == system[4]:
        # Open folder: button plus a label echoing the chosen directory.
        add_button(id=system_selectfolder_button, label='Select folder', callback=system_selectfolder_cb,
                   parent=ButtonEditor_id)
        add_text(default_value="Chosen Directory:", parent=ButtonEditor_id)
        add_same_line(parent=ButtonEditor_id)
        add_text(id=system_selectfolder_text, default_value="None", parent=ButtonEditor_id)
    elif get_value(system_combo) == system[5]:
        # Open File: button plus a label echoing the chosen file.
        add_button(id=system_selectfile_button, label='Select File', callback=system_selectfile_cb,
                   parent=ButtonEditor_id)
        add_text(default_value="Chosen File:", parent=ButtonEditor_id)
        add_same_line(parent=ButtonEditor_id)
        add_text(id=system_selectfile_text, default_value="None", parent=ButtonEditor_id)
def obs_cb():
    """Add the follow-up widgets for the chosen OBS action."""
    print(get_value(obs_combo))
    if get_value(obs_combo) == obs[0]:
        # Switch scene: fetch the scene list from the local API server.
        scenes_items = []
        get_scenes = requests.get(url=localip + '/obs/get_scenes')
        # BUG FIX: iterating a dict directly yields keys only, so the
        # original `for key, value in get_scenes.json()` raised ValueError.
        # Iterate key/value pairs explicitly.  (Assumes the endpoint returns
        # a JSON object mapping ids to {"name": ...} — TODO confirm shape.)
        for key, value in get_scenes.json().items():
            print(key, value)
            scenes_items.append(value['name'])
        add_combo(
            items=scenes_items, label='Scene', id=obs_switchscene_combo, default_value='Choose scene',
            no_arrow_button=False
        )
    elif get_value(obs_combo) == obs[1]:
        # Toggle mute (source list population not implemented yet).
        sources_items = []
        add_combo(
            items=sources_items, label='Source', id=obs_togglemute_combo, default_value='Choose source',
            no_arrow_button=False
        )
def system_selectfolder_cb():
    """Ask the user for a directory and echo the chosen path in the UI."""
    Tk().withdraw()  # hide the root Tk window; only the dialog should show
    chosen_dir = filedialog.askdirectory()
    print(chosen_dir)
    set_value(system_selectfolder_text, chosen_dir)
def system_selectfile_cb():
    """Ask the user for a file and echo the chosen path in the UI."""
    Tk().withdraw()  # hide the root Tk window; only the dialog should show
    chosen_file = filedialog.askopenfilename()
    print(chosen_file)
    set_value(system_selectfile_text, chosen_file)
def system_selectapp_cb():
    """Ask the user for an application file to run."""
    Tk().withdraw()
    file_path = filedialog.askopenfilename()
    print(file_path)
    DirString = file_path
    # NOTE(review): this writes to system_selectfile_text (the "Open File"
    # label) — looks like a copy-paste leftover, since no dedicated
    # "selected app" text widget exists.  Confirm the intended target.
    set_value(system_selectfile_text, DirString)
def close_cb():
|
"""
Recreate Figures 7 and 8 from the paper
"""
import matplotlib as mpl
import matplotlib.pyplot as plt
from pathlib import Path
from bad_seeds.plot.gen_figs import plot_all_ideal, plot_all_timelimit
def main():
    """Render Figures 7 and 8 from the published results into the CWD."""
    fig_size = (8.5 / 2.54, (8.5 / 2.54) / 1.6)
    batch_sizes = (1, 8, 16, 32, 64, 128, 256, 512)
    timelimits = (10, 20, 30, 40, 50, 70, 100)
    data_path = Path().absolute() / "published_results"
    out_path = Path().absolute()
    with mpl.rc_context({"font.size": 7}):
        # Figure 7: ideal-environment results across batch sizes.
        ideal_fig, ideal_ax = plt.subplots(figsize=fig_size, constrained_layout=True)
        plot_all_ideal(ax=ideal_ax, data_path=data_path, batch_sizes=batch_sizes)
        ideal_fig.savefig(out_path / "figure_7.png", dpi=300)
        print(f"wrote {out_path / 'figure_7.png'}")
        # Figure 8: time-limited results across time limits.
        tl_fig, tl_ax = plt.subplots(figsize=fig_size, constrained_layout=True)
        plot_all_timelimit(ax=tl_ax, data_path=data_path, timelimits=timelimits)
        tl_fig.savefig(out_path / "figure_8.png", dpi=300)
        print(f"wrote {out_path / 'figure_8.png'}")
if __name__ == "__main__":
    import argparse
    parser = argparse.ArgumentParser(description="Recreate figures from paper.")
    parser.add_argument(
        "--show", action="store_true", help="Show the figures for interactive viewing"
    )
    args = parser.parse_args()
    # Without --show, use the non-interactive Agg backend (must be set
    # before any figure is created).
    if not args.show:
        mpl.use("agg")
    main()
    if args.show:
        plt.show()
|
# -*- coding: utf-8 -*-
"""
Defines a synthetic seismogram.
:copyright: 2016 Agile Geoscience
:license: Apache 2.0
"""
import numpy as np
import matplotlib.pyplot as plt
from .curve import Curve
class Synthetic(np.ndarray):
    """
    Synthetic seismograms.

    A thin ndarray subclass carrying timing metadata (``start``, ``dt``)
    plus a display ``name``.
    """
    def __new__(cls, data, basis=None, params=None):
        """
        Args:
            data (array-like): The amplitude samples.
            basis (array-like): Optional time basis; the first two entries
                set the start time and sample step.
            params (dict): Optional extra attributes to attach to the array.
        """
        obj = np.asarray(data).view(cls).copy()
        params = params or {}
        for k, v in params.items():
            setattr(obj, k, v)
        if basis is not None:
            setattr(obj, 'start', basis[0])
            # NOTE(review): this sets `step`, but stop/basis compute from
            # `dt` (set in __array_finalize__) — the two look inconsistent;
            # confirm which attribute downstream code relies on.
            setattr(obj, 'step', basis[1]-basis[0])
        return obj

    def __array_finalize__(self, obj):
        # Propagate (or default) metadata when numpy creates derived arrays.
        if obj is None:
            return
        # NOTE(review): numpy ignores the return value of
        # __array_finalize__, so this early exit only skips attribute
        # initialization for size-1 arrays; it does not yield a float.
        if obj.size == 1:
            return float(obj)
        self.start = getattr(obj, 'start', 0)
        self.dt = getattr(obj, 'dt', 0.001)
        self.name = getattr(obj, 'name', 'Synthetic')

    @property
    def stop(self):
        """End time: start plus trace duration (computed, not stored)."""
        return self.start + self.shape[0] * self.dt

    @property
    def basis(self):
        """Sample times (computed, not stored)."""
        # Shrink the endpoint slightly so float jitter cannot add a sample.
        precision_adj = self.dt / 100
        return np.arange(self.start, self.stop - precision_adj, self.dt)

    def as_curve(self, start=None, stop=None):
        """
        Get the synthetic as a Curve, in depth. Facilitates plotting along-
        side other curve data.
        """
        params = {'start': start or getattr(self, 'z start', None),
                  'mnemonic': 'SYN',
                  'step': 0.1524
                  }
        # BUG FIX: the original passed an undefined name `data` (NameError);
        # the curve's data is this synthetic's own samples.
        # TODO(review): the `stop` argument is accepted but never used.
        return Curve(self, params=params)

    def plot(self, ax=None, return_fig=False, **kwargs):
        """
        Plot a synthetic.
        Args:
            ax (ax): A matplotlib axis.
            legend (Legend): For now, only here to match API for other plot
                methods.
            return_fig (bool): whether to return the matplotlib figure.
                Default False.
        Returns:
            ax. If you passed in an ax, otherwise None.
        """
        if ax is None:
            fig = plt.figure(figsize=(2, 10))
            ax = fig.add_subplot(111)
            return_ax = False
        else:
            return_ax = True
        # Upsample 10x for a smooth variable-area wiggle display.
        hypertime = np.linspace(self.start, self.stop, (10 * self.size - 1) + 1)
        hyperamp = np.interp(hypertime, self.basis, self)
        ax.plot(hyperamp, hypertime, 'k')
        ax.fill_betweenx(hypertime, hyperamp, 0, hyperamp > 0.0, facecolor='k', lw=0)
        ax.invert_yaxis()
        ax.set_title(self.name)
        if return_ax:
            return ax
        elif return_fig:
            return fig
        else:
            return None
|
"""
Tests for day 1 of 2020's Advent of Code
"""
import pytest
from aoc_cqkh42.year_2020 import day_01
@pytest.fixture
def data() -> str:
    """
    Test data for day_01.
    Returns
    -------
    data: str
        Newline-separated expense entries from the puzzle's worked example.
    """
    return '1721\n979\n366\n299\n675\n1456'
def test__find_subset_with_sum(data) -> None:
    """_find_subset_with_sum returns the pair of entries summing to 2020."""
    values = [1721, 979, 366, 299, 675, 1456]
    result = day_01._find_subset_with_sum(values, 2020, 2)
    assert result == (1721, 299)
def test_part_a(data) -> None:
    """part_a yields the product of the 2020-summing pair (1721 * 299)."""
    expected = 514579
    assert day_01.part_a(data) == expected
def test_part_b(data) -> None:
    """part_b yields the product of the 2020-summing triple (979 * 366 * 675)."""
    expected = 241861950
    assert day_01.part_b(data) == expected
|
from __future__ import absolute_import, division, print_function, unicode_literals
from . import BaseSignal
class Signal(BaseSignal):
    """ Ranking signal based on the domain presence in DMOZ
    """

    def get_value(self, document, url_metadata):
        # 1.0 when the URL's domain has a DMOZ title, 0.0 otherwise.
        has_dmoz_title = bool(url_metadata["domain"].dmoz_title)
        return float(has_dmoz_title)
|
import random
import csv
indian_first_names = [
'Vaishnavi',
'Sunthari',
'Shruti',
'Sangem',
'Ramalingam',
'Amarnath',
'Vallath',
'Suranjan',
'Shukla',
'Sanjna',
'Ramamuthe',
'Arasaratnam',
'Vamsi',
'Surendar',
'Sidda',
'Sankuratri',
'Ramanuja',
'Balakrishnan',
'Vaninadha',
'Surnilla',
'Sira',
'Sanu',
'Ramaswami',
'Varganti',
'Surupa',
'Sivaram',
'Sapra',
'Bhaskar',
'Varuni',
'Suryanarayanan',
'Smita',
'Sarasvan',
'Ramnarine',
'Bisht',
'Vasudha',
'Suvrata',
'Sohal',
'Sardesai',
'Bux',
'Vattyam',
'Swani',
'Somasundara',
'Sarmistha',
'Rangaraj',
'Chande',
'Vedula',
'Swetha',
'Sompalli',
'Sashi',
'Ranhotra',
'Chandramouleeswaran',
'Veerasamy',
'Tamhane',
'Sophia',
'Sathaye',
'Rantidev',
'Chandrasekhar',
'Vemireddy',
'Tanu',
'Soumyabrata',
'Satinder',
'Ravandur',
'Charan',
'Venkatasubramaniam',
'Tapi',
'Sowrirajan',
'Satyavati',
'Raviraj',
'Chaudhry',
'Venkateswarn',
'Tasha',
'Sreedharan',
'Saurin',
'Rebani',
'Chellaiah',
'Thadigiri',
'Sreenivasan',
'Renuka',
'Cherukuri',
'Vidvan',
'Thangaraj',
'Srijata',
'Sekariapuram',
'Rewari',
'Chetlapalli',
'Vijaya',
'Thirunarayan',
'Srila',
'Senagala',
'Rishiyur',
'Chidamber',
'Vijayarangan',
'Thukral',
'Sritharan',
'Seshaanath',
'Roshni',
'Chirag',
'Vikul',
'Thyagarajan',
'Srivatsan',
'Sethuraman',
'Rudrani',
'Chittor',
'Vinita',
'Tikoo',
'Subbanna',
'Shadilya',
'Rupesh',
'Choudhury',
'Viraf',
'Trikha',
'Subhaga',
'Shaila',
'Sachin',
'Dalmiya',
'Virendra',
'Trusha',
'Subram',
'Shalabh',
'Sadhwani',
'Dasari',
'Visalakshi',
'Tushar',
'Subramanya',
'Shamsher',
'Sahadev',
'Dawar',
'Vishwa',
'Uday',
'Suchin',
'Sharma',
'Saibal',
'Dhaliwal',
'Viswanath',
'Udutha',
'Sudesh',
'Shastri',
'Saini',
'Dhruba',
'Vivek',
'Ulla',
'Sudhakar',
'Shikha',
'Sajja',
'Dinkar',
'Vyapari',
'Unnikrishnan',
'Sugriva',
'Shinu',
'Salil',
'Dristi',
'Yaksha',
'Urimindi',
'Sukanya',
'Shivakumar',
'Samderiya',
'Durmada',
'Yamura',
'Vadakke',
'Sulagna',
'Shourov',
'Sampath',
'Elango',
'Yateen',
'Vaidya',
'Sumila',
'Shreeyash',
'Sandeep',
'Eswarapu',
'Yelsangikar',
'Vaithu',
'Suneina',
'Shripati',
'Sangam',
'Gajaren',
'Yogesh',
'Valli',
'Suppiah',
'Shubha',
'Sangha',
'Ganapathy',
'Advani',
'Vanchinathan',
'Surapaneni',
'Shukta',
'Sanjukta',
'Gargeya',
'Amra',
'Vanita',
'Surendran',
'Siddhi',
'Sanmugasunderam',
'Ghani',
'Aron',
'Varghese',
'Surpur',
'Sita',
'Sanyogita',
'Vasava',
'Suryadevara',
'Sivaramakrishnan',
'Sapthotharan',
'Giridhar',
'Behari',
'Vasudhara',
'Susarla',
'Smitha',
'Sarasvati',
'Goel',
'Bhatt',
'Vavveti',
'Swagato',
'Sohoni',
'Sarika',
'Gopinath',
'Biswas',
'Veena',
'Swarnkar',
'Somasundaram',
'Saru',
'Govindasvamy',
'Cansai',
'Veerender',
'Tagore',
'Somu',
'Sashti',
'Chander',
'Vemuganti',
'Tamragouri',
'Sorabhjee',
'Sathiamoorthy',
'Guneratne',
'Chandramouli',
'Tanuja',
'Soundar',
'Satrujit',
'Guramurthy',
'Chandrasekharan',
'Venkatraman',
'Tarang',
'Sraddha',
'Satyavolu',
'Gutta',
'Charu',
'Vibha',
'Tatat',
'Sreehari',
'Savarna',
'Harku',
'Chaudhury',
'Vidwans',
'Thakur',
'Sreeram',
'Scindia',
'Himanshu',
'Chellappa',
'Vijayabhas',
'Thiagarajan',
'Srijoy',
'Sekhar',
'Honnenahalli',
'Cherupara',
'Vijayashree',
'Thiruvengadathan',
'Srimal',
'Senajit',
'Imani',
'Chhachhi',
'Vilok',
'Thundayal',
'Srivas',
'Seshadri',
'Jafferbhoy',
'Chikodi',
'Vinuta',
'Tickoo',
'Sruthi',
'Setna',
'Jaishree',
'Chirimar',
'Viraj',
'Tina',
'Subbarao',
'Shafiqul',
'Jasmit',
'Chitturu',
'Viresh',
'Trisanu',
'Subhangi',
'Shailaja',
'Jayaram',
'Choughoy',
'Vish',
'Tuhina',
'Subramani',
'Shalaby',
'Jeyaseelan',
'Dama',
'Vishwamber',
'Tuteja',
'Subramanyan',
'Shan',
'Jitesh',
'Viswanathan',
'Udayan',
'Suchitra',
'Sharmistha',
'Junanker',
'Dehiya',
'Vivekanand',
'Ujjwal',
'Sudesha',
'Shaukat',
'Kachwaha',
'Dharuna',
'Vyshali',
'Umakanta',
'Sudhanshu',
'Shiladitya',
'Kaith',
'Dhupam',
'Yalamanchi',
'Upender',
'Suhas',
'Shirish',
'Kallichuran',
'Dinkerrai',
'Yanamandra',
'Utpal',
'Sukarman',
'Shivani',
'Kambhatla',
'Dua',
'Yauvani',
'Vadlamani',
'Sultana',
'Shraddha',
'Kanetkar',
'Duvvoori',
'Yeluri',
'Vairaja',
'Sumon',
'Shrestha',
'Kanwar',
'Elayavalli',
'Yogish',
'Vajpayee',
'Sunny',
'Shrirang',
'Karumuri',
'Gadde',
'Agarwal',
'Vallurupalli',
'Supriya',
'Shubhabrata',
'Kathiravan',
'Gajendra',
'Anand',
'Vandita',
'Surati',
'Shurpali',
'Kaushik',
'Ganesh',
'Badesha',
'Varadarajan',
'Surendranath',
'Sidhu',
'Keerthana',
'Garikapaty',
'Barendran',
'Varki',
'Surti',
'Sitha',
'Keshava',
'Ghazali',
'Bhagyamma',
'Vashisth',
'Suryanarayama',
'Sivaraman',
'Keyush',
'Bhatti',
'Vasuman',
'Susumna',
'Snehasis',
'Khodaiji',
'Giridhara',
'Boparai',
'Vedanga',
'Swami',
'Solaimathi',
'Kittur',
'Goenka',
'Chandak',
'Veera',
'Swathi',
'Somatra',
'Kodi',
'Gopivallabha',
'Chandna',
'Vellanki',
'Taksa',
'Kolala',
'Govindraj',
'Chandrark',
'Venkataraghavan',
'Tandekar',
'Sornam',
'Kondapaneni',
'Gujral',
'Chandrashekar',
'Venkatesann',
'Tapan',
'Soundrapandian',
'Koppala',
'Gungabissoon',
'Venu',
'Tarit',
'Sravan',
'Kosuri',
'Gurbux',
'Chawd',
'Vichur',
'Tatavarti',
'Sreekanth',
'Kripa',
'Halder',
'Chengelpet',
'Vidyarthi',
'Thamma',
'Sreerupa',
'Kulasekaran',
'Haryadi',
'Chetan',
'Vijayakumar',
'Thimanniya',
'Srikaran',
'Kumur',
'Hindocha',
'Chheda',
'Vijaykumar',
'Thogulva',
'Srinath',
'Kurapati',
'Huggahalli',
'Chinnakannan',
'Vineet',
'Thundyil',
'Srivastav',
'Kusagra',
'Indrani',
'Chitrangda',
'Vipin',
'Tikaram',
'Sruti',
'Lahan',
'Jaffrey',
'Chohan',
'Virani',
'Tirumalai',
'Subbarat',
'Lalitesh',
'Jaisimha',
'Chowdry',
'Virini',
'Trishna',
'Subhendu',
'Latesh',
'Jaspal',
'Darsha',
'Vishal',
'Tumkur',
'Subramaniam',
'Laxmanan',
'Jayasinghe',
'Datla',
'Visvakarman',
'Tyagi',
'Subrata',
'Loy',
'Jignesh',
'Deol',
'Visweswaramurthy',
'Uddin',
'Sudarsan',
'Madan',
'Joardar',
'Dhawan',
'Vootla',
'Ujjwala',
'Sudeshna',
'Madugula',
'Jyothsna',
'Dibyendu',
'Waman',
'Umesh',
'Sudhansu',
'Mahajan',
'Kadak',
'Diwan',
'Yalamanchilli',
'Upendra',
'Sujan',
'Mahatapa',
'Kakde',
'Durai',
'Yashodhar',
'Utpat',
'Suketu',
'Maitryi',
'Kalluri',
'Edulbehram',
'Yavatkar',
'Vaibhav',
'Suman',
'Makarand',
'Kambli',
'Emankumar',
'Yesh',
'Vaisakhi',
'Sunanda',
'Malipatlolla',
'Kanitkar',
'Gadepalli',
'Zahin',
'Vajpeyi',
'Sunondo',
'Mallya',
'Kapadia',
'Gala',
'Agrawal',
'Valsan',
'Suprotik',
'Manasa',
'Karuppia',
'Gangadharan',
'Ankola',
'Vani',
'Suravinda',
'Mandar',
'Kathrada',
'Gavarasana',
'Bahl',
'Varahabhotla',
'Suri',
'Manekshaw',
'Kawediya',
'Ghemawat',
'Barot',
'Varsha',
'Suruchi',
'Mangeshkar',
'Keerthi',
'Ghouse',
'Bhanghoo',
'Vasi',
'Suryanarayan',
'Manjanatha',
'Keshavan',
'Girish',
'Bhoola',
'Vasumati',
'Sutapa',
'Mankad',
'Khadri',
'Goli',
'Buchar',
'Vedati',
'Swaminathan',
'Mantri',
'Khot',
'Gorawala',
'Chandan',
'Veeramany',
'Swati',
'Marisa',
'Kitu',
'Gowda',
'Chandra',
'Vellore',
'Talip',
'Masrani',
'Kodumudi',
'Gundlapalli',
'Chandrasekar',
'Venkataraman',
'Tanmaya',
'Matu',
'Kolar',
'Guntur',
'Channarayapatra',
'Venkateshwara',
'Tapas',
'Mayur',
'Konduru',
'Gurinder',
'Chatterji',
'Venugopal',
'Tarpa',
'Meenakshi',
'Koppale',
'Hament',
'Cheenu',
'Vidi',
'Tendulkar',
'Meher',
'Kota',
'Hazare',
'Chennapragada',
'Vidyasagar',
'Thamry',
'Melliyal',
'Krishnamma',
'Hiranandani',
'Chetana',
'Vijayalakshmi',
'Thirumalai',
'Michandani',
'Kulkarni',
'Hynala',
'Chiba',
'Vijaysaradhi',
'Thommana',
'Mirchandani',
'Kunal',
'Irani',
'Chinnappan',
'Vineeta',
'Thuraisingham',
'Mittur',
'Kurian',
'Jagder',
'Chittibabu',
'Vipperla',
'Tikekar',
'Moidu',
'Kuttikkad',
'Jandhyala',
'Choudhari',
'Virasana',
'Tirumalesa',
'Monica',
'Lahiri',
'Jasthi',
'Chudasama',
'Virmani',
'Trishwant',
'Mounil',
'Lalith',
'Jeeri',
'Daruka',
'Vishnavi',
'Tummala',
'Lath',
'Jindal',
'Datta',
'Visvanathan',
'Tyagri',
'Mukund',
'Lecamwasam',
'Jonnalagadda',
'Vittal',
'Udipi',
'Munish',
'Luthra',
'Kaalki',
'Dhiri',
'Vraman',
'Ujwal',
'Murthy',
'Maddukuri',
'Kadowala',
'Diggavi',
'Yadavalli',
'Umrigar',
'Murugesan',
'Magesh',
'Kalirai',
'Dosanjh',
'Yamini',
'Uppalapati',
'Muthukrishn',
'Mahale',
'Kalpna',
'Duranjaya',
'Yashodhara',
'Uttanka',
'Naagesh',
'Mahatma',
'Kandadai',
'Ekachakra',
'Yegammai',
'Vaidheeswarran',
'Naeem',
'Majety',
'Kankipati',
'Engineer',
'Yeshonath',
'Vaish',
'Nagaraja',
'Makhija',
'Karapiet',
'Gaekwad',
'Zev',
'Vakil',
'Nageshwar',
'Malipeddi',
'Kasthurirangan',
'Gambhir',
'Ahsen',
'Vamshi',
'Nahid',
'Malti',
'Katragadda',
'Gangulee',
'Apte',
'Vaninadh',
'Naini',
'Manasi',
'Kedar',
'Gavaskar',
'Bai',
'Varati',
'Namrata',
'Mandava',
'Kenchammana',
'Ghorpade',
'Battacharjee',
'Varun',
'Nandita',
'Manesh',
'Kesiraju',
'Gidh',
'Bhanjee',
'Vasudev',
'Naran',
'Mangina',
'Kharbanda',
'Girsh',
'Bipen',
'Vattikota',
'Narayan',
'Manjari',
'Kirmani',
'Gopalakrishnan',
'Buhpathi',
'Vedavyasa',
'Naseer',
'Manmeet',
'Kodali',
'Gordha',
'Chandar',
'Veeraraju',
'Natterraja',
'Manushi',
'Koganti',
'Gowravaram',
'Chandrakala',
'Velusamy',
'Nayna',
'Marita',
'Konchady',
'Gundugollu',
'Chandrasekaran',
'Venkataramanan',
'Neelakantachar',
'Matanga',
'Konkar',
'Gunturu',
'Chapal',
'Venkateswaran',
'Neeru',
'Maudgalya',
'Koppula',
'Gurudutt',
'Chaudhari',
'Venugopalan',
'Nergis',
'Mayuri',
'Kothari',
'Harbir',
'Chella',
'Vidur',
'Nihar',
'Meenakshisundaram',
'Krithivas',
'Hemalatha',
'Cheran',
'Vidyashankar',
'Nikunj',
'Meherhomji',
'Kumawagra',
'Hiten',
'Chethan',
'Vijayanath',
'Nilu',
'Merchant',
'Kuntal',
'Ilango',
'Chidambaram',
'Vikriti',
'Niraj',
'Mihir',
'Kurtha',
'Iyer',
'Chippada',
'Vinit',
'Nirmala',
'Mista',
'Kutumbaka',
'Jahnavi',
'Chittoor',
'Vipul',
'Nishit',
'Mitul',
'Lalima',
'Janjua',
'Virat',
'Nitesha',
'Mokate',
'Lalji',
'Jayakar',
'Contractor',
'Visala',
'Niveda',
'Mooljee',
'Laul',
'Jeevan',
'Daryapurkar',
'Vishnuraman',
'Ogale',
'Mousumi',
'Lokhande',
'Jinen',
'Davuluri',
'Visvayu',
'Padmanabh',
'Mukhi',
'Macharla',
'Joshi',
'Dhadda',
'Vivatma',
'Pai',
'Mukunda',
'Madhabi',
'Kabir',
'Dhrtiman',
'Vuppula',
'Palia',
'Muniyappa',
'Mahabala',
'Kaikini',
'Dinath',
'Yadgiri',
'Palshikar',
'Murti',
'Mahankali',
'Kallakuri',
'Dravid',
'Yamni',
'Panick',
'Musunur',
'Mainak',
'Kambhampat',
'Durjaya',
'Yashovarman',
'Panth',
'Muthukrishnan',
'Maji',
'Kandathil',
'Eknath',
'Yellepeddy',
'Paramartha',
'Nabendu',
'Kanmani',
'Eswara',
'Yogendra',
'Parnita',
'Nagabhushana',
'Mallick',
'Karim',
'Gahlot',
'Agarkar',
'Parthiban',
'Nagarajan',
'Mamta',
'Kasturirangan',
'Ganapathiraman',
'Amroliwallah',
'Pasuma',
'Nageshwara',
'Manavi',
'Kaul',
'Baboor',
'Patterjee',
'Naidoo',
'Mandhatri',
'Kedarnath',
'Ghandi',
'Banker',
'Pavi',
'Nakul',
'Mangalvedhe',
'Keshab',
'Ghosal',
'Bhagwat',
'Pendyala',
'Nandakishore',
'Manikkalingam',
'Keskar',
'Gilab',
'Bhattacharya',
'Phadkar',
'Nandkeolyar',
'Manju',
'Khilnani',
'Godambe',
'Bonjani',
'Pillalamarri',
'Narang',
'Manohar',
'Kishen',
'Gopalan',
'Chakrabarti',
'Polamreddy',
'Narayanaswamy',
'Maqbool',
'Kodanda',
'Gorti',
'Chandiramani',
'Pooja',
'Nashier',
'Markendaya',
'Kola',
'Gridharan',
'Chandran',
'Prabhath',
'Naueshwara',
'Mathrubootham',
'Konda',
'Gunendran',
'Chandrashaker',
'Praharaj',
'Nayudu',
'Maya',
'Konkipudi',
'Gupte',
'Chaterju',
'Neelam',
'Medapati',
'Koritala',
'Gutala',
'Prasanth',
'Neha',
'Megana',
'Kotla',
'Harishandra',
'Chellappan',
'Prasoon',
'Nerurkar',
'Kriti',
'Himani',
'Chet',
'Praveenkumar',
'Niharika',
'Meyappan',
'Kumbla',
'Hitendra',
'Chhavvi',
'Premkumar',
'Nilani',
'Milind',
'Kunwarjit',
'Ilyas',
'Chinmay',
'Prithvi',
'Nilufar',
'Mitanu',
'Kurupath',
'Jadeja',
'Chitnis',
'Priyadarshi',
'Niral',
'Labhsha',
'Jai',
'Chivukula',
'Profulla',
'Nirupa',
'Mondem',
'Lalit',
'Jannavi',
'Chowdhury',
'Pulavarti',
'Nishita',
'More',
'Lanka',
'Jayantilal',
'Darisipudi',
'Punati',
'Nithin',
'Muddiah',
'Lavanis',
'Jeoomal',
'Datar',
'Punj',
'Nivedita',
'Muktheswara',
'Lolaksi',
'Jinturkar',
'Deivan',
'Purva',
'Omarjeet',
'Mukundan',
'Mackherdhuj',
'Joshipura',
'Dhatri',
'Pusti',
'Padmanabhan',
'Muppala',
'Madhana',
'Kabra',
'Dhurvasula',
'Rachna',
'Murty',
'Mahadeo',
'Kaisth',
'Divecha',
'Ragha',
'Pallavan',
'Muthiah',
'Mahanthapa',
'Kallianpur',
'Duleepsinhji',
'Raghuram',
'Pamela',
'Muthukumarasamy',
'Maiti',
'Kambhampati',
'Dwijen',
'Rajabhushan',
'Panjwani',
'Nachiketa',
'Majoo',
'Kandula',
'Emankum',
'Rajan',
'Panyala',
'Naganathan',
'Kansal',
'Gade',
'Rajashi',
'Parameswaran',
'Nagaswamy',
'Mallika',
'Karkada',
'Gajraj',
'Rakala',
'Partha',
'Nagi',
'Manandhar',
'Kateel',
'Ganeshwaran',
'Ramamani',
'Parul',
'Nailadi',
'Manchanda',
'Kaushal',
'Garlanka',
'Raman',
'Patachli',
'Nallamothu',
'Mandyam',
'Kedia',
'Ghei',
'Ramanujam',
'Pauravi',
'Nandedkar',
'Mangalwadi',
'Keshav',
'Ghoshdashtidar',
'Ramaswamy',
'Pavithran',
'Nanga',
'Maninder',
'Ketaki',
'Giridharan',
'Ramila',
'Pennathur',
'Narasimha',
'Manjunath',
'Khodabhai',
'Gokaraju',
'Ramprakash',
'Phadnis',
'Narmada',
'Manohari',
'Kishore',
'Gorantla',
'Ranadhir',
'Pillay',
'Natasha',
'Maran',
'Kodandarami',
'Gowd',
'Polavarapu',
'Navya',
'Maruthi',
'Kolagunta',
'Gundamaraju',
'Ranjan',
'Poola',
'Neeharika',
'Mati',
'Kondapalli',
'Guntupalli',
'Rasiah',
'Prachi',
'Neena',
'Mayappan',
'Koothrappally',
'Gurijala',
'Raven',
'Prajapati',
'Neil',
'Medha',
'Kosanam',
'Hamada',
'Raviram',
'Prasai',
'Nidhi',
'Meghana',
'Koushika',
'Hattangady',
'Reema',
'Prasata',
'Nikhil',
'Mehul',
'Kudesia',
'Hinduja',
'Renukunta',
'Prassana',
'Nilima',
'Mhambrey',
'Kumble',
'Huggahilli',
'Richa',
'Pravil',
'Nimesh',
'Minakshi',
'Kuram',
'Innuganti',
'Rishmal',
'Prerana',
'Nirav',
'Kuruvilla',
'Jagarlamudi',
'Roy',
'Prithviraj',
'Nisha',
'Mohaiemen',
'Laddha',
'Jana',
'Rudraraju',
'Priyadarshini',
'Nita',
'Mongia',
'Lalita',
'Jaspreet',
'Rustagi',
'Progyan',
'Nitin',
'Motala',
'Lata',
'Jayasurya',
'Sadalge',
'Puli',
'Nuguru',
'Mudhol',
'Lavanya',
'Jimuta',
'Saeed',
'Pundari',
'Omkar',
'Mukti',
'Lolla',
'Jobanputra',
'Sahar',
'Punnoose',
'Padmesh',
'Mulla',
'Macwan',
'Jyotiradha',
'Saidullah',
'Pusan',
'Palanirajan',
'Muqtedar',
'Madhani',
'Kadamuddi',
'Sajal',
'Pyara',
'Paloma',
'Murugan',
'Mahadevan',
'Kalanadhabhatla',
'Sakib',
'Rachoor',
'Pandian',
'Muthu',
'Mahapatra',
'Kalpak',
'Salim',
'Raghavanpillai',
'Pankajakshan',
'Muthupalaniappan',
'Maitreya',
'Kampan',
'Sameer',
'Raghuvir',
'Papa',
'Nadhamuni',
'Makam',
'Kanive',
'Samrat',
'Rajagopal',
'Paritosh',
'Nagappa',
'Malini',
'Sandip',
'Rajani',
'Parthasarathi',
'Nagedwaran',
'Mallikarjun',
'Kasavaraju',
'Sangameswar',
'Rajasimha',
'Pasapuleti',
'Nagin',
'Manas',
'Katka',
'Sanghi',
'Rakhi',
'Patankar',
'Naimesh',
'Manchapora',
'Kayeeda',
'Sankait',
'Ramamohan',
'Pavanaja',
'Namasri',
'Maneesh',
'Kelaka',
'Santanu',
'Ramana',
'Payal',
'Nandin',
'Mangesh',
'Keshaw',
'Sanyukta',
'Ramaprasad',
'Perumal',
'Naoomal',
'Manivanan',
'Khanderia',
'Sara',
'Ramchand',
'Phani',
'Narasimhan',
'Manjusha',
'Khursh',
'Saraswathi',
'Ramjee',
'Piyush',
'Narsi',
'Mansey',
'Kity',
'Ramprakesh',
'Ponnada',
'Nath',
'Margasahayam',
'Koduri',
'Sarup',
'Ranadive',
'Potla',
'Nayak',
'Maruti',
'Kommana',
'Sasthi',
'Rangarathnam',
'Pradip',
'Neel',
'Matta',
'Koneru',
'Sathianarayan',
'Ranjana',
'Pramath',
'Neeraj',
'Mayekar',
'Koppolu',
'Satsangi',
'Rathiea',
'Prasanna',
'Nelagadde',
'Medikonda',
'Kothandaraman',
'Satyavrat',
'Ravikanth',
'Prashant',
'Nidra',
'Mehendale',
'Krishnamurthy',
'Savdeep',
'Ravuri',
'Pratima',
'Niki',
'Meka',
'Seetamraju',
'Rege',
'Preeti',
'Nilini',
'Mhari',
'Kunderan',
'Selvam',
'Resham',
'Prisha',
'Nira',
'Mirajkar',
'Kurinji',
'Senapathy',
'Riddhi',
'Pritish',
'Nirguna',
'Mittal',
'Kutty',
'Seshadrinathan',
'Ritula',
'Priyavardhan',
'Nishar',
'Mohanty',
'Lakhani',
'Setra',
'Ruchi',
'Prudvi',
'Niten',
'Moni',
'Lalitha',
'Shahbaz',
'Rukmini',
'Pulkit',
'Nitu',
'Motiwala',
'Latha',
'Shailendra',
'Saandeep',
'Punita',
'Nukala',
'Mudigonda',
'Lokesh',
'Shally',
'Sadaram',
'Puranjay',
'Oruganti',
'Mukul',
'Luthria',
'Shanbhag',
'Sagar',
'Pushkar',
'Pahad',
'Multani',
'Madduri',
'Shashank',
'Sahgal',
'Rabinder',
'Palanisamy',
'Murli',
'Mageshkumar',
'Sheba',
'Saighiridhar',
'Radheshyam',
'Palomi',
'Murugappa',
'Mahalingam',
'Shilpa',
'Sajan',
'Raghavendran',
'Pandit',
'Muthukaruppan',
'Mahavira',
'Shirishkumar',
'Saklani',
'Ragunathan',
'Pankharia',
'Muthuswami',
'Majhi',
'Shivaprakash',
'Saloni',
'Rajah',
'Papatranal',
'Nadkarni',
'Malavika',
'Shradhdha',
'Samiksha',
'Rajarama',
'Parmar',
'Nagaraj',
'Malleshi',
'Shridhar',
'Samudra',
'Raje',
'Parthasarathy',
'Nagesh',
'Mamgain',
'Shrisha',
'Sandipa',
'Ramadin',
'Pashupathy',
'Nagpal',
'Manavendra',
'Shubhashish',
'Sangappa',
'Ramamurthy',
'Naimish',
'Mandayam',
'Shvetank',
'Sangita',
'Ramanand',
'Pavani',
'Namdev',
'Mangalampally',
'Simha',
'Sankaran',
'Ramasubraman',
'Pedapudi',
'Nandini',
'Manglorkar',
'Sitipala',
'Santhanakrishnan',
'Ramchandra',
'Perumbeti',
'Nara',
'Manjrekar',
'Sivasubramaniam',
'Sanzgiri',
'Ramkumar',
'Phutika',
'Narasinha',
'Mannem',
'Snigdha',
'Saraf',
'Ramsamooj',
'Poduri',
'Nartana',
'Manyam',
'Solanki',
'Sarat',
'Ranga',
'Ponnekanti',
'Nathan',
'Markandeya',
'Somayaji',
'Saripella',
'Rangnekar',
'Potluri',
'Matangi',
'Sonia',
'Sarwate',
'Ranjini',
'Prafull',
'Neela',
'Mavalvala',
'Soumen',
'Saswata',
'Rathore',
'Pramila',
'Neerja',
'Mecca',
'Sourabh',
'Sathyanarayana',
'Ravipati',
'Prasannakumar',
'Nema',
'Meenan',
'Sree',
'Sattar',
'Raza',
'Prashanth',
'Nigam',
'Sreekanthan',
'Satyen',
'Rema',
'Pratyush',
'Nikitha',
'Merchia',
'Sreevijayan',
'Sawalha',
'Revathi',
'Preetinder',
'Nilofer',
'Milan',
'Srikrishna',
'Seetharaman',
'Rima',
'Pritha',
'Niradhara',
'Mitali',
'Srini',
'Selvi',
'Robi',
'Privrata',
'Nirmal',
'Mitun',
'Srivastava',
'Senapati',
'Ruchir',
'Priyodarshi',
'Nisheeth',
'Mona',
'Sthanumurthy',
'Seshan',
'Rupa',
'Puja',
'Nitesh',
'Moorthy',
'Subbaratnam',
'Shaban',
'Sabeer',
'Pullela',
'Nitya-Sundara',
'Mrudaya',
'Subhuja',
'Shahid',
'Sadasivam',
'Punith',
'Nuregesan',
'Mukku',
'Subramanian',
'Shailesh',
'Saginala',
'Puri',
'Padmakant',
'Mukundagiri',
'Subudhi',
'Shameem',
'Sahni',
'Pushkarini',
'Pahwa',
'Munusamy',
'Sudarshan',
'Shantinath',
'Sailendra',
'Rabindra',
'Palathingal',
'Murtugudde',
'Sudeva',
'Shashidhar',
'Saji',
'Radhey',
'Pals',
'Muthanna',
'Sudheer',
'Shefali',
'Saldanha',
'Raghunandan',
'Pandya',
'Muthukumar',
'Sujeet',
'Shindi',
'Sambandam',
'Raguraman',
'Pant',
'Nachik',
'Sukhjinder',
'Shirvaikar',
'Samit',
'Rajal',
'Parag',
'Nagalingam',
'Sumanna',
'Shokrollahi',
'Sanaka',
'Rajaraman',
'Parnika',
'Nagarjuna',
'Sundaramoorthy',
'Shreekant',
'Sandy',
'Raji',
'Parthathy',
'Nageswar',
'Sunrita',
'Shriharsha',
'Sangeeta',
'Ramakan',
'Pasram',
'Naik',
'Surabhi',
'Shrivastava',
'Sanigepalli',
'Ramamurti',
'Pattabhiraman',
'Nalini',
'Surekha',
'Shubhendu',
'Sankrant',
'Ramanathan',
'Paveljit',
'Nandakumar',
'Suriyaprakash',
'Shyamsundar',
'Santharam',
'Ramasubramanian',
'Pendharkar',
'Nandy',
'Surujnarine',
'Sinduvalli',
'Sapna',
'Ramdas',
'Pewar',
'Narasimban',
'Suryanarayana',
'Sivakumar',
'Sarangarajan',
'Rammohan',
'Pichai',
'Narayanswami',
'Suthar',
'Sivasubramanian',
'Saravati',
'Ramsundar',
'Podury',
'Nasir',
'Swamy',
'Sobha',
'Sarmad',
'Ranganathan',
'Ponte',
'Navarathna',
'Sweta',
'Solkar',
'Sashekala',
'Rangwala',
'Prabhat',
'Nayyar',
'Talwar',
'Somendra',
'Sathasivam',
'Ranjitsinhji',
'Pragalsingh',
'Neelesh',
'Tantry',
'Sony',
'Satin',
'Ratnasabapathi',
'Pramsu',
'Nehru',
'Tapesh',
'Soumitra',
'Satyanarayana',
'Raviprakash',
'Prasanta',
'Nidheesh',
'Tarpana',
'Sourajyoti',
'Saunak',
'Reba',
'Prashun',
'Nikesh',
'Tetegni',
'Sreedevan',
'Sawardekar',
'Rengarajan',
'Praveen',
'Nilesh',
'Thandray',
'Sreenivas',
'Sekar',
'Revati',
'Preetish',
'Nimbalkar',
'Thirumalaiswamy',
'Sridevan',
'Rishi',
'Prithu',
'Niramitra',
'Thribhuvana',
'Srikrisna',
'Seri',
'Roopak',
'Priyabroto',
'Nirupama',
'Thuvaradran',
'Sripadam',
'Sethi',
'Ruchira',
'Probal',
'Nishtha',
'Tikku',
'Srivaths',
'Shachi',
'Rupali',
'Pujar',
'Niti',
'Toodi',
'Subas',
'Shail',
'Sachi',
'Pummy',
'Nizami',
'Trupti',
'Subbarayan',
'Shaje',
'Sadayappan',
'Punitha',
'Omesh',
'Tupil',
'Subodh',
'Shamshaard',
'Sagoo',
'Purujit',
'Padmasola',
'Ubriani',
'Subramanien',
'Shareeka',
'Sai',
'Puskara',
'Palam',
'Udit',
'Sucharita',
'Shashwat',
'Sailesh',
'Rabindran',
'Pallavi',
'Ulind',
'Sudarshana',
'Shibu',
'Sajid',
'Radia',
'Pancholi',
'Unmesh',
'Sudevi',
'Shinjinee',
'Saligrama',
'Raghunathan',
'Pankaj',
'Uppuluri',
'Sudhindranath',
'Shivaiah',
'Sambandan',
'Rai',
'Paola',
'Uttara',
'Sujeev',
'Shorey',
'Sammeta',
'Rajamani',
'Parantap',
'Vaidhyanathan',
'Suksma',
'Shreerang',
'Sanat',
'Rajarshi',
'Parthak',
'Sumedh',
'Shrinivas',
'Sangal',
'Parvin',
'Sundararajan'
]
indian_last_names = [
'Chatterjee',
'Singh',
'Shah',
'Nair',
'Nayar',
'Verma',
'Sen',
'Yadav',
'Mehta',
'Pillai',
'Gupta',
'Bose',
'Jhadav',
'Patel',
'Rao',
'Malhotra',
'Sengupta',
'Jaiteley',
'Patil',
'Jayaraman',
'Bhatnagar',
'Das',
'Chauhan',
'Pawar',
'Powar',
'Venkatesan',
'Saxena',
'Dasgupta',
'Mistry',
'Gavde',
'Balasubramanium',
'Kapoor',
'Kapur',
'Banerjee',
'Khan',
'Kadam',
'Subramanium',
'Singh',
'Chattopadhyay',
'Tambe',
'Rangan',
'Mehra',
'Mukopadhyay',
'Chavan',
'Rangarajan',
'Chopra',
'Dutta',
'D Souza',
'Lobo',
'Rodrigues',
'D Costa',
'Sarin',
'Malik',
'Rao',
'Kumar',
'Chowdary',
'Reddy',
'Naidu',
'Raju',
'Varma',
'Achari',
'Chari',
'Tenzin',
'Tashi',
'Dolma',
'Passang',
'Pema',
'Metok',
'Dhundup',
'Lhamo',
'Sangyal',
'Yangkey',
'Tsomo',
'Rabten',
'Phuntsok',
'Rabgyal',
'Rigzin',
'Jangchup',
'Tsundue',
'Jorden',
'Bhakto',
'Namgyal',
'Wangchuk',
'Khando',
'Rangdol',
'Basu',
'Bose',
'Dutta',
'Ghosh',
'Guha',
'Gain',
'Mitra',
'Sinha',
'Sen',
'Pal',
'Mukherjee',
'Banerjee',
'Chatterjee',
'Ganguly',
'Ghoshal',
'Goswami',
'Barua',
'Bhagawati',
'Bhattacharjee',
'Singh',
'Yadav',
'Jha',
'Prasad',
'Paswan',
'Khan',
'Gupta',
'Kumar',
'Choudhary',
'Manjhi',
'Ahir',
'Amin',
'Sisodiya',
'Chawda',
'Rana',
'Patel',
'Shroff',
'Bhakta',
'Soni',
'Mehta',
'Jani',
'Modi',
'Desai',
'Parekh',
'Doshi',
'Mistry',
'Bhanushali'
]
# Change the value of num to generate as many names as you want. The default is 10.
num = 10  # number of names to generate

# Open the output file once -- the original reopened it on every loop
# iteration and never closed it. newline='' is the csv-module convention
# and prevents blank rows between records on Windows.
with open('names.csv', 'a', newline='') as myfile:
    wr = csv.writer(myfile)  # add quoting=csv.QUOTE_ALL to surround the names by double quotes
    for i in range(num):
        name = random.choice(indian_first_names) + ' ' + random.choice(indian_last_names)
        wr.writerow([name])
|
# Generated by Django 3.2.7 on 2021-09-21 18:30
from django.db import migrations
class Migration(migrations.Migration):
    # Switches the Book model's default ordering to title, then author.
    dependencies = [
        ('catalog', '0002_alter_book_options'),
    ]
    operations = [
        migrations.AlterModelOptions(
            name='book',
            options={'ordering': ['title', 'author']},
        ),
    ]
|
from chocs import HttpStatus
def test_http_status_str() -> None:
    """HttpStatus.OK renders as '200 OK' and casts to the integer 200."""
    status = HttpStatus.OK
    assert str(status) == "200 OK"
    assert int(status) == 200
def test_http_status_from_int() -> None:
    """HttpStatus.from_int(200) reconstructs the OK status."""
    status = HttpStatus.from_int(200)
    assert str(status) == "200 OK"
    assert int(status) == 200
|
# Generated by Django 3.0.6 on 2020-05-29 15:05
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Initial migration for this app: creates the Report table, linking a
    # doctor and a patient (both defined in the Profile app) to a set of
    # free-text medical fields.
    # NOTE(review): 'discreption' looks like a typo of 'description';
    # renaming the column would need a follow-up migration.
    initial = True
    dependencies = [
        ('Profile', '0001_initial'),
    ]
    operations = [
        migrations.CreateModel(
            name='Report',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('Pre_medical_history', models.TextField()),
                ('init_observation', models.TextField()),
                ('discreption', models.TextField()),
                ('Tests', models.TextField()),
                ('medication', models.TextField()),
                ('comment', models.TextField()),
                ('doctor', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='Profile.Doctor_details')),
                ('patient', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='Profile.User_detail')),
            ],
        ),
    ]
|
def busca_binaria(lista, chave):
    """Binary search: return the index of `chave` in the sorted list
    `lista`, or None when it is absent."""
    lo, hi = 0, len(lista) - 1
    while lo <= hi:
        mid = (lo + hi) // 2
        if lista[mid] == chave:
            return mid
        elif lista[mid] > chave:
            hi = mid - 1
        else:
            lo = mid + 1
    return None
def exibe_lista(lista):
    """Print the whole list on one line (delegates to print)."""
    print(lista)
import random

random.seed()  # seed from OS entropy (no-argument form)


def gera_valores():
    """Return 20 distinct random integers drawn from 1..99."""
    return random.sample(range(1, 100), 20)
# Driver: generate, show, sort, then binary-search a user-supplied number.
lista = gera_valores()
print('Lista gerada pelo computador')
# Fixed: the original printed a literal backslash ('\Desordenada') because
# '\D' is not a recognized escape sequence.
print('Desordenada: ')
exibe_lista(lista)
print('')
print('Ordenada: ')
lista.sort()  # in the exam program the sorting function must be hand-written
exibe_lista(lista)
print('Encontrado na Posicao: ', busca_binaria(lista, int(input('Digite um número: '))))
|
import math
from typing import List, Tuple
class Rectangle:
"""
A class used to represent a Rectangle, using only five sine qua non parameters.
Attributes
----------
x_center : float
center of the rectangle on x axis
y_center : float
center of the rectangle on y axis
radius : float
distance from the center to any vertex
alpha_angle : float
orientation of the first diagonal
beta_angle : float
orientation of the second diagonal
p1 : Tuple[float, float]
first vertex of the rectangle, on first diagonal
p2 : Tuple[float, float]
second vertex of the rectangle, on second diagonal
p3 : Tuple[float, float]
third vertex of the rectangle, on first diagonal
p4 : Tuple[float, float]
fourth vertex of the rectangle, on second diagonal
area : float
area of the rectangle
"""
def __init__(self, x_center: float, y_center: float, radius: float, alpha_angle: float, beta_angle: float) -> None:
"""
Parameters
----------
x_center : float
center of the rectangle on x axis
y_center : float
center of the rectangle on y axis
radius : float
distance from the center to any vertex
alpha_angle : float
orientation of the first diagonal
beta_angle : float
orientation of the second diagonal
"""
self.x_center = x_center
self.y_center = y_center
self.radius = abs(radius)
self.alpha_angle = alpha_angle
self.beta_angle = beta_angle
self.p1 = self.vertex_calculation(self.alpha_angle)
self.p2 = self.vertex_calculation(self.beta_angle)
self.p3 = self.opposite_vertex(self.p1)
self.p4 = self.opposite_vertex(self.p2)
self.area = self.area_calculation()
def vertex_calculation(self, angle: float) -> Tuple[float, float]:
"""
Vertex calculation following a given angle orientation from rectangle center.
Parameters
----------
angle : float
angle orientation from center to vertex
Returns
----------
Tuple[float, float]
the resulting vertex point coordinates (x, y)
"""
return (self.radius * math.cos(angle) + self.x_center,
self.radius * math.sin(angle) + self.y_center)
def opposite_vertex(self, vertex: Tuple[float, float]) -> Tuple[float, float]:
"""
Opposite vertex calculation (same diagonal) of a given vertex.
Parameters
----------
vertex : Tuple[float, float]
the source vertex to consider
Returns
----------
Tuple[float, float]
the resulting vertex point coordinates (x, y)
"""
return (2 * self.x_center - vertex[0],
2 * self.y_center - vertex[1])
def distance_calculation(self, point_a: Tuple[float, float], point_b: Tuple[float, float]) -> float:
"""
Euclidean distance between two given points.
Parameters
----------
point_a : Tuple[float, float]
the first point to consider
point_b : Tuple[float, float]
the second point to consider
Returns
----------
float
the Euclidean distance between the two points
"""
return math.sqrt(pow(point_a[0] - point_b[0], 2) + pow(point_a[1] - point_b[1], 2))
def area_calculation(self) -> float:
"""
Area calculation of the rectangle.
Returns
----------
float
the area of the rectangle
"""
return self.distance_calculation(self.p1, self.p2) * self.distance_calculation(self.p2, self.p3)
def change_x_center(self, new_x_center: float) -> None:
"""
Change the center of the rectangle on x axis.
Parameters
----------
new_x_center : float
new center of the rectangle on x axis
"""
delta = new_x_center - self.x_center
self.x_center = new_x_center
self.p1 = (self.p1[0] + delta, self.p1[1])
self.p2 = (self.p2[0] + delta, self.p2[1])
self.p3 = (self.p3[0] + delta, self.p3[1])
self.p4 = (self.p4[0] + delta, self.p4[1])
def change_y_center(self, new_y_center: float) -> None:
"""
Change the center of the rectangle on y axis.
Parameters
----------
new_y_center : float
new center of the rectangle on y axis
"""
delta = new_y_center - self.y_center
self.y_center = new_y_center
self.p1 = (self.p1[0], self.p1[1] + delta)
self.p2 = (self.p2[0], self.p2[1] + delta)
self.p3 = (self.p3[0], self.p3[1] + delta)
self.p4 = self.p4[0], self.p4[1] + delta
def change_radius(self, new_radius: float) -> None:
"""
Change the radius of the rectangle.
Parameters
----------
new_radius : float
new distance from the center to any vertex
"""
self.radius = abs(new_radius)
self.p1 = self.vertex_calculation(self.alpha_angle)
self.p2 = self.vertex_calculation(self.beta_angle)
self.p3 = self.opposite_vertex(self.p1)
self.p4 = self.opposite_vertex(self.p2)
self.area = self.area_calculation()
def change_alpha_angle(self, new_alpha_angle: float) -> None:
"""
Change the alpha_angle of the rectangle.
Parameters
----------
new_alpha_angle : float
new orientation of the first diagonal
"""
self.alpha_angle = new_alpha_angle
self.p1 = self.vertex_calculation(self.alpha_angle)
self.p3 = self.opposite_vertex(self.p1)
self.area = self.area_calculation()
def change_beta_angle(self, new_beta_angle: float) -> None:
"""
Change the beta_angle of the rectangle.
Parameters
----------
new_beta_angle : float
new orientation of the second diagonal
"""
self.beta_angle = new_beta_angle
self.p2 = self.vertex_calculation(self.beta_angle)
self.p4 = self.opposite_vertex(self.p2)
self.area = self.area_calculation()
def get_vertices(self) -> List[Tuple[float, float]]:
"""
Gets in consecutive order the four vertices of the rectangle.
Returns
----------
List[Tuple[float, float]]
list of the four vertices of the rectangle
"""
return [self.p1, self.p2, self.p3, self.p4]
def get_center(self) -> Tuple[float, float]:
"""
Gets the center of the rectangle.
Returns
----------
Tuple[float, float]
coordinates of the center (x, y)
"""
return (self.x_center,
self.y_center)
def get_x_center(self) -> float:
"""
Gets the center of the rectangle on x axis.
Returns
----------
float
center of the rectangle on x axis
"""
return self.x_center
def get_y_center(self) -> float:
"""
Gets the center of the rectangle on y axis.
Returns
----------
float
center of the rectangle on y axis
"""
return self.y_center
def get_radius(self) -> float:
"""
Gets the radius of the rectangle.
Returns
----------
float
distance from the center to any vertex
"""
return self.radius
def get_alpha_angle(self) -> float:
"""
Gets the alpha_angle of the rectangle.
Returns
----------
float
orientation of the first diagonal
"""
return self.alpha_angle
def get_beta_angle(self) -> float:
"""
Gets the beta_angle of the rectangle.
Returns
----------
float
orientation of the second diagonal
"""
return self.beta_angle
def pretty_point(self, name: str, point: Tuple[float, float]) -> str:
"""
Gets a pretty version of a given point.
Parameters
----------
name : str
name of the point
point : Tuple[float, float]
coordinates of the point
Returns
----------
str
a pretty version of the given point
"""
return f'{name} ({round(point[0], 3)};{round(point[1], 3)})'
def __eq__(self, other: 'Rectangle') -> bool:
"""
Rectangle equality comparison.
Returns
----------
bool
the equality comparison result
"""
return self.get_center() == other.get_center() \
and self.radius == other.get_radius() \
and (self.alpha_angle == other.get_alpha_angle() and self.beta_angle == other.get_beta_angle()
or self.alpha_angle == other.get_beta_angle() and self.beta_angle == other.get_alpha_angle())
def __str__(self) -> str:
"""
Gets the readable version of the rectangle.
Returns
----------
str
the readable version of the rectangle
"""
return f'\n'.join([self.pretty_point(f'P{i+1}', point) for i, point in enumerate(self.get_vertices())])
def __repr__(self) -> str:
    """Return a constructor-like, unambiguous rendering of the rectangle.

    Returns
    ----------
    str
        the unambiguous version of the rectangle
    """
    fields = ('x_center', 'y_center', 'radius', 'alpha_angle', 'beta_angle')
    attrs = ', '.join(f'{field}={getattr(self, field)}' for field in fields)
    return f'Rectangle({attrs})\n'
|
import argparse
import os
import time
import matplotlib.pyplot as plt
import torch
from torch.optim import SGD
from torchvision import utils
from utils import create_dataloader, YOLOv2Loss, parse_cfg, build_model
# from torchviz import make_dot

# Command-line interface for the YOLOv2 training script.
# NOTE(review): the --cfg help text says "Yolov1" although this script trains
# YOLOv2 — looks like a copy-paste slip; confirm before changing user-facing text.
parser = argparse.ArgumentParser(description='YOLOv2-pytorch')
parser.add_argument("--cfg", "-c", default="cfg/yolov2.yaml", help="Yolov1 config file path", type=str)
parser.add_argument("--dataset_cfg", "-d", default="cfg/dataset.yaml", help="Dataset config file path", type=str)
parser.add_argument("--weights", "-w", default="", help="Pretrained model weights path", type=str)
parser.add_argument("--output", "-o", default="output", help="Output path", type=str)
parser.add_argument("--epochs", "-e", default=100, help="Training epochs", type=int)
parser.add_argument("--lr", "-lr", default=0.01, help="Training learning rate", type=float)
parser.add_argument("--batch_size", "-bs", default=32, help="Training batch size", type=int)
parser.add_argument("--save_freq", "-sf", default=10, help="Frequency of saving model checkpoint when training",
                    type=int)
# Parsed at import time: this module is intended to be run as a script only.
args = parser.parse_args()
def train(model, train_loader, optimizer, epoch, device, S, B, train_loss_lst):
    """Run one training epoch and append its mean loss to train_loss_lst.

    Parameters
    ----------
    model : the YOLOv2 network being trained
    train_loader : DataLoader yielding (inputs, labels) batches
    optimizer : optimizer updating the model parameters
    epoch : current epoch index (used for logging and the batch-0 snapshot)
    device : torch.device the batches are moved to
    S, B : grid size and anchor count forwarded to YOLOv2Loss
    train_loss_lst : list collecting one average-loss entry per epoch

    Returns
    -------
    list
        train_loss_lst with this epoch's average loss appended.
    """
    model.train()  # Set the module in training mode
    train_loss = 0
    # The loss module is stateless between batches, so build it once per epoch
    # instead of re-instantiating it on every iteration (as the old code did).
    criterion = YOLOv2Loss(S, B)
    for batch_idx, (inputs, labels) in enumerate(train_loader):
        t_start = time.time()
        inputs, labels = inputs.to(device), labels.to(device)
        outputs = model(inputs)
        # back prop
        loss = criterion(outputs, labels)
        optimizer.zero_grad()
        loss.backward()
        optimizer.step()
        train_loss += loss.item()
        t_batch = time.time() - t_start
        # Save a snapshot of the very first batch so the input pipeline can be
        # eyeballed; writes into this script's module-level output_path.
        if batch_idx == 0 and epoch == 0:
            fig = plt.figure()
            inputs = inputs.cpu()  # convert to cpu
            grid = utils.make_grid(inputs)
            plt.imshow(grid.numpy().transpose((1, 2, 0)))
            plt.savefig(os.path.join(output_path, 'batch0.png'))
            # plt.show()
            plt.close(fig)
        # print loss and timing every 10 batches
        if batch_idx % 10 == 0:
            print('Train Epoch: {} [{}/{} ({:.1f}%)] Time: {:.4f}s Loss: {:.6f}'
                  .format(epoch, batch_idx * len(inputs), len(train_loader.dataset),
                          100. * batch_idx / len(train_loader), t_batch, loss.item()))
    # record this epoch's mean training loss
    train_loss /= len(train_loader)
    train_loss_lst.append(train_loss)
    return train_loss_lst
def validate(model, val_loader, device, S, B, val_loss_lst):
    """Evaluate the model on the validation set and record the average loss.

    Parameters mirror train(); appends this epoch's mean validation loss to
    val_loss_lst and returns the list.
    """
    model.eval()  # Sets the module in evaluation mode
    val_loss = 0
    # Hoisted out of the loop: the criterion is identical for every batch.
    criterion = YOLOv2Loss(S, B)
    # no need to calculate gradients
    with torch.no_grad():
        for data, target in val_loader:
            data, target = data.to(device), target.to(device)
            output = model(data)
            # add one batch loss
            val_loss += criterion(output, target).item()
    val_loss /= len(val_loader)
    print('\nVal set: Average loss: {:.4f}'.format(val_loss))
    # record validating loss
    val_loss_lst.append(val_loss)
    return val_loss_lst
def test(model, test_loader, device, S, B):
    """Evaluate the model on the held-out test set and print the average loss.

    Unlike validate(), nothing is recorded; the loss is only printed.
    """
    model.eval()  # Sets the module in evaluation mode
    test_loss = 0
    # Hoisted out of the loop: the criterion is identical for every batch.
    criterion = YOLOv2Loss(S, B)
    # no need to calculate gradients
    with torch.no_grad():
        for data, target in test_loader:
            data, target = data.to(device), target.to(device)
            output = model(data)
            # add one batch loss
            test_loss += criterion(output, target).item()
    # record testing loss
    test_loss /= len(test_loader)
    print('Test set: Average loss: {:.4f}'.format(test_loss))
if __name__ == "__main__":
    # Resolve model and dataset configuration from the YAML files on the CLI.
    cfg = parse_cfg(args.cfg)
    dataset_cfg = parse_cfg(args.dataset_cfg)
    img_path, label_path = dataset_cfg['images'], dataset_cfg['labels']
    S, num_anchors, num_classes, input_size = cfg['S'], cfg['num_anchors'], cfg['num_classes'], cfg['input_size']
    # create output file folder named after the run's start timestamp
    start = time.strftime('%Y-%m-%d-%H-%M-%S', time.localtime(time.time()))
    output_path = os.path.join(args.output, start)
    os.makedirs(output_path)
    device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
    # build model (optionally initialised from pretrained weights)
    model = build_model(args.weights, S, num_anchors, num_classes).to(device)
    # plot model structure
    # graph = make_dot(model(torch.rand(1, 3, args.input_size, args.input_size).cuda()),
    #                  params=dict(model.named_parameters()))
    # graph.render('model_structure', './', cleanup=True, format='png')
    # get data loaders with an 80/10/10 train/val/test split
    train_loader, val_loader, test_loader = create_dataloader(img_path, label_path, 0.8, 0.1, 0.1, args.batch_size,
                                                              input_size, S, num_anchors, num_classes)
    optimizer = SGD(model.parameters(), lr=args.lr, momentum=0.9, weight_decay=0.0005)
    # optimizer = Adam(model.parameters(), lr=lr)
    train_loss_lst, val_loss_lst = [], []
    # train/validate for the requested number of epochs
    for epoch in range(args.epochs):
        # NOTE(review): num_anchors is passed in the parameter slot named B —
        # confirm YOLOv2Loss expects the anchor count there.
        train_loss_lst = train(model, train_loader, optimizer, epoch, device, S, num_anchors, train_loss_lst)
        val_loss_lst = validate(model, val_loader, device, S, num_anchors, val_loss_lst)
        # save model weight every save_freq epoch, once a third of training is done
        if epoch % args.save_freq == 0 and epoch >= args.epochs / 3:
            torch.save(model.state_dict(), os.path.join(output_path, 'epoch' + str(epoch) + '.pth'))
    test(model, test_loader, device, S, num_anchors)
    # save final model
    torch.save(model.state_dict(), os.path.join(output_path, 'last.pth'))
    # plot train/val loss curves and save them alongside the checkpoints
    fig = plt.figure()
    plt.plot(range(args.epochs), train_loss_lst, 'g', label='train loss')
    plt.plot(range(args.epochs), val_loss_lst, 'k', label='val loss')
    plt.grid(True)
    plt.xlabel('epoch')
    plt.ylabel('acc-loss')
    plt.legend(loc="upper right")
    plt.savefig(os.path.join(output_path, 'loss_curve.jpg'))
    plt.show()
    plt.close(fig)
|
import string
import pytest
from hypothesis import assume, given
from hypothesis import strategies as st
from pybitcoin.mnemonic_code_words import MNEMONIC_CODE_WORDS
from pybitcoin.tests.wallet.fixtures import BIP_32_TEST_VECTORS
from pybitcoin.wallet import KeyStore, hmac_sha512, validate_mnemonic
@st.composite
def mnemonic_code_words(draw, num_words):
    """Hypothesis strategy: a space-separated string of num_words BIP-39 words."""
    return ' '.join(draw(st.sampled_from(MNEMONIC_CODE_WORDS)) for _ in range(num_words))
@given(
    num_words=st.integers(max_value=100),
    data=st.data(),
)
def test_validate_mnemonic_wrong_number_of_words(num_words, data):
    """A word count outside the BIP-39 set (12/15/18/21/24) must be rejected."""
    assume(num_words not in (12, 15, 18, 21, 24))
    mnemonic = data.draw(mnemonic_code_words(num_words=num_words))
    with pytest.raises(ValueError, match='Invalid number of mnemonic keywords!'):
        validate_mnemonic(mnemonic)
@given(
    num_words=st.sampled_from([11, 14, 17, 20, 23]),
    data=st.data(),
    extra_word=st.text(
        alphabet=st.characters(blacklist_categories=('Cs',), blacklist_characters=string.whitespace),
        max_size=10,
    ),
)
def test_validate_mnemonic_wrong_words(num_words, data, extra_word):
    """Appending a word outside the BIP-39 wordlist must be rejected."""
    assume(extra_word not in MNEMONIC_CODE_WORDS)
    valid_part = data.draw(mnemonic_code_words(num_words=num_words))
    mnemonic = f'{valid_part} {extra_word}'
    with pytest.raises(ValueError, match='Invalid mnemonic keyword'):
        validate_mnemonic(mnemonic)
def test_validate_mnemonic_invalid_checksum():
    # TODO: placeholder — should assert that a mnemonic whose final (checksum)
    # word is corrupted is rejected by validate_mnemonic.
    pass
@pytest.mark.parametrize(
    'mnemonic',
    [
        'answer act aspect mansion report own orphan mixed leader gate siren there',
        'educate magnet hub kidney trophy invite amused rival dream jaguar finish mechanic',
        'thumb citizen system submit certain stairs diamond elephant remove butter edge also galaxy umbrella awesome state husband audit agent rotate pulp transfer path harbor',
        'harbor bind butter advance erode enhance rough album photo mandate orbit order teach frown already mistake candy quality nasty split hen fresh agent syrup',
    ],
)
def test_validate_menmonic_ok_mnemonics(mnemonic):
    # NOTE(review): "menmonic" in the test name is a typo for "mnemonic"; kept
    # unchanged so existing test-selection expressions keep matching.
    # Known-good 12- and 24-word mnemonics must validate without raising.
    validate_mnemonic(mnemonic)
@given(key=st.binary(), msg=st.binary())
def test_hmac_sha512(key, msg):
    """HMAC-SHA512 digests are always 64 bytes, whatever the key and message."""
    assert len(hmac_sha512(key, msg)) == 64
def test_hd_wallet_from_mnemonic():
    # TODO: placeholder — should rebuild a wallet from a known mnemonic and
    # assert on the derived master key.
    pass


def test_hd_wallet_new_invalid_size_bits():
    # TODO: placeholder — should assert that an unsupported entropy size raises.
    pass


def test_hd_wallet_new():
    # TODO: placeholder — should assert that a freshly generated wallet is valid.
    pass
@pytest.mark.parametrize('seed_hex,path,expected_pub, expected_priv', BIP_32_TEST_VECTORS)
def test_key_store_get_key(seed_hex, path, expected_pub, expected_priv):
    """Check BIP-32 key derivation against the bundled test vectors."""
    seed = bytes.fromhex(seed_hex)
    key_store = KeyStore(root_seed=seed)
    derived_key = key_store.get_key(path)
    derived_public_key = derived_key.generate_public_key()
    # WIF serialisations of both halves of the derived pair must match the vectors.
    assert derived_key.to_wif() == expected_priv
    assert derived_public_key.to_wif() == expected_pub
|
import os
import shutil
import unittest
import mdl
class TestDownloader(unittest.TestCase):
    """Registry sanity checks for the downloader backends."""

    def test_aria2(self):
        # The 'aria2' registry key must resolve to the aria2c backend object.
        self.assertIs(
            mdl.downloader.downloaders['aria2'],
            mdl.downloader.aria2.aria2c
        )
class TestBiliVideo(unittest.TestCase):
    """End-to-end bilibili download tests; these hit the live site."""

    def assertion(self, vid, videos=1, audios=1):
        # A completed download leaves exactly 'audio' and 'video' folders with
        # at least the expected stream counts; clean up the folder afterwards.
        self.assertEqual(set(os.listdir(vid)), {'audio', 'video'})
        self.assertGreaterEqual(len(os.listdir(f'{vid}/video')), videos)
        self.assertGreaterEqual(len(os.listdir(f'{vid}/audio')), audios)
        shutil.rmtree(vid)

    def test_av(self):
        # Download by bare av-id using the default downloader.
        mdl.download('av7')
        self.assertion('av7')

    def test_av_URLs(self):
        # Every common spelling of an av URL must be recognised by the extractor.
        for url in [
            'https://www.bilibili.com/video/av7',
            'https://www.bilibili.com/av7',
            'https://bilibili.com/video/av7',
            'http://www.bilibili.com/video/av7',
            'http://bilibili.com/video/av7'
        ]:
            self.assertTrue(mdl.extract(url))

    def test_BV(self):
        # Multi-part BV download: expects at least 4 video / 2 audio streams.
        mdl.download('BV1XW411M7Gu')
        self.assertion('BV1XW411M7Gu', 4, 2)

    def test_BV_URLs(self):
        # Every common spelling of a BV URL must be recognised by the extractor.
        for url in [
            'https://www.bilibili.com/video/BV1xx411c7m9',
            'https://www.bilibili.com/BV1xx411c7m9',
            'https://bilibili.com/video/BV1xx411c7m9',
            'http://www.bilibili.com/video/BV1xx411c7m9',
            'http://bilibili.com/video/BV1xx411c7m9'
        ]:
            self.assertTrue(mdl.extract(url))

    def test_aria2(self):
        # Same download, routed through the downloader selected by name.
        mdl.download('av7', 'aria2')
        self.assertion('av7')

    def test_aria2c(self):
        # ...and through the downloader passed directly as a callable.
        mdl.download('av7', mdl.downloader.aria2.aria2c)
        self.assertion('av7')
class TestKugouMusic(unittest.TestCase):
    """Kugou music download smoke test; hits the live site."""

    def test_aria2(self):
        # Download a single track identified by its hash fragment.
        mdl.download(
            'https://www.kugou.com/song/#hash='
            '5B40301A02B4A431F8CEEACBC58C50F0'
        )
# Allow running this module directly: discover and run the tests above.
if __name__ == '__main__':
    unittest.main()
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.29 on 2020-12-20 13:12
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated by Django (makemigrations): re-declares the
    # photoeffect.filters CharField with updated help text. Presumably a
    # metadata-only change with no schema impact — confirm on your backend.
    # Avoid hand-editing beyond comments.

    dependencies = [
        ('photologue', '0010_auto_20160105_1307'),
    ]
    operations = [
        migrations.AlterField(
            model_name='photoeffect',
            name='filters',
            field=models.CharField(blank=True, help_text='Chain multiple filters using the following pattern "FILTER_ONE->FILTER_TWO->FILTER_THREE". Image filters will be applied in order. The following filters are available: BLUR, CONTOUR, DETAIL, EDGE_ENHANCE, EDGE_ENHANCE_MORE, EMBOSS, FIND_EDGES, Kernel, SHARPEN, SMOOTH, SMOOTH_MORE.', max_length=200, verbose_name='filters'),
        ),
    ]
|
## This file is the celeryconfig for the Periodic Task Handler on scanmaster.
import sys
sys.path.append('.')  # make project-local modules importable when run in place
import djcelery
djcelery.setup_loader()
# Pull in the whole Django settings module, then override the Celery bits below.
from scaggr.settings import *
# Persist the beat schedule in the Django database instead of a local file.
CELERYBEAT_SCHEDULER = 'djcelery.schedulers.DatabaseScheduler'
CELERY_IMPORTS = ('virusscan.tasks',)
# The queue set is computed by the tasks module so beat and workers stay in sync.
from virusscan.tasks import get_periodic_queues
CELERY_QUEUES = get_periodic_queues()
|
"""Functional tests using the API with a fake Apple TV."""
import pyatv
import ipaddress
import asynctest
from pyatv.conf import AppleTV
from tests import zeroconf_stub
# Stubbed zeroconf services shared by the tests below: three home-sharing
# devices (with credentials), one plain DAAP device re-using Apple TV 3's
# identity, two MRP devices and one AirPlay-only device.
HOMESHARING_SERVICE_1 = zeroconf_stub.homesharing_service(
    'AAAA', b'Apple TV 1', '10.0.0.1', b'aaaa')
HOMESHARING_SERVICE_2 = zeroconf_stub.homesharing_service(
    'BBBB', b'Apple TV 2', '10.0.0.2', b'bbbb')
HOMESHARING_SERVICE_3 = zeroconf_stub.homesharing_service(
    'CCCC', b'Apple TV 3', '10.0.0.3', b'cccc')
DEVICE_SERVICE_1 = zeroconf_stub.device_service(
    'CCCC', b'Apple TV 3', '10.0.0.3')
MRP_SERVICE_1 = zeroconf_stub.mrp_service(
    'DDDD', b'Apple TV 4', '10.0.0.4')
MRP_SERVICE_2 = zeroconf_stub.mrp_service(
    'EEEE', b'Apple TV 5', '10.0.0.5')
AIRPLAY_SERVICE_1 = zeroconf_stub.airplay_service(
    'Apple TV 6', '10.0.0.6')
class FunctionalTest(asynctest.TestCase):
    """Scanning tests that run pyatv against stubbed zeroconf services."""

    async def test_scan_no_device_found(self):
        # No services registered -> the scan returns nothing.
        zeroconf_stub.stub(pyatv)
        atvs = await pyatv.scan_for_apple_tvs(self.loop, timeout=0)
        self.assertEqual(len(atvs), 0)

    async def test_scan_for_apple_tvs(self):
        # Four services published, but only three devices are reported —
        # presumably the AirPlay-only device is filtered as unusable (confirm).
        zeroconf_stub.stub(
            pyatv, HOMESHARING_SERVICE_1, HOMESHARING_SERVICE_2,
            MRP_SERVICE_1, AIRPLAY_SERVICE_1)
        atvs = await pyatv.scan_for_apple_tvs(self.loop, timeout=0)
        self.assertEqual(len(atvs), 3)
        # First device
        dev1 = AppleTV(ipaddress.ip_address('10.0.0.1'), 'Apple TV 1')
        self.assertIn(dev1, atvs)
        # Second device
        dev2 = AppleTV(ipaddress.ip_address('10.0.0.2'), 'Apple TV 2')
        self.assertIn(dev2, atvs)
        # Third device
        dev3 = AppleTV(ipaddress.ip_address('10.0.0.4'), 'Apple TV 4')
        self.assertIn(dev3, atvs)

    async def test_scan_abort_on_first_found(self):
        # With abort_on_found the scan stops after the first usable device.
        zeroconf_stub.stub(pyatv, HOMESHARING_SERVICE_1, HOMESHARING_SERVICE_2)
        atvs = await pyatv.scan_for_apple_tvs(
            self.loop, timeout=0, abort_on_found=True)
        self.assertEqual(len(atvs), 1)
        self.assertEqual(atvs[0].name, 'Apple TV 1')

    async def test_scan_all_devices(self):
        # only_usable=False also returns devices without home sharing;
        # the plain DAAP service is exposed on port 3689.
        zeroconf_stub.stub(pyatv, DEVICE_SERVICE_1)
        atvs = await pyatv.scan_for_apple_tvs(
            self.loop, timeout=0, only_usable=False)
        self.assertEqual(len(atvs), 1)
        self.assertEqual(atvs[0].name, 'Apple TV 3')
        self.assertEqual(atvs[0].address, ipaddress.ip_address('10.0.0.3'))
        services = atvs[0].services()
        self.assertEqual(len(services), 1)
        service = services[0]
        self.assertEqual(service.port, 3689)

    async def test_scan_home_sharing_overrules(self):
        # When both a plain and a home-sharing service exist for the same
        # device, the home-sharing one (with credentials) wins.
        zeroconf_stub.stub(pyatv, DEVICE_SERVICE_1, HOMESHARING_SERVICE_3)
        atvs = await pyatv.scan_for_apple_tvs(self.loop, timeout=0)
        self.assertEqual(len(atvs), 1)
        self.assertEqual(atvs[0].name, 'Apple TV 3')
        self.assertEqual(atvs[0].address, ipaddress.ip_address('10.0.0.3'))
        service = atvs[0].usable_service()
        self.assertEqual(service.device_credentials, 'cccc')
        self.assertEqual(service.port, 3689)

    async def test_scan_mrp(self):
        # MRP-only devices are discovered when usability is not required.
        zeroconf_stub.stub(pyatv, MRP_SERVICE_1, MRP_SERVICE_2)
        atvs = await pyatv.scan_for_apple_tvs(
            self.loop, only_usable=False, timeout=0)
        self.assertEqual(len(atvs), 2)
        dev1 = AppleTV(ipaddress.ip_address('10.0.0.4'), 'Apple TV 4')
        self.assertIn(dev1, atvs)
        dev2 = AppleTV(ipaddress.ip_address('10.0.0.5'), 'Apple TV 5')
        self.assertIn(dev2, atvs)

    async def test_scan_airplay_device(self):
        # An AirPlay-only device is found (port 7000) when only_usable=False.
        zeroconf_stub.stub(pyatv, AIRPLAY_SERVICE_1)
        atvs = await pyatv.scan_for_apple_tvs(
            self.loop, timeout=0, only_usable=False)
        self.assertEqual(len(atvs), 1)
        self.assertEqual(atvs[0].name, 'Apple TV 6')
        self.assertEqual(atvs[0].address, ipaddress.ip_address('10.0.0.6'))
        services = atvs[0].services()
        self.assertEqual(len(services), 1)
        service = services[0]
        self.assertEqual(service.port, 7000)

    async def test_scan_for_particular_device(self):
        # device_ip restricts the scan result to a single address.
        zeroconf_stub.stub(pyatv, HOMESHARING_SERVICE_1, HOMESHARING_SERVICE_2)
        atvs = await pyatv.scan_for_apple_tvs(
            self.loop, timeout=0, only_usable=False, device_ip='10.0.0.2')
        self.assertEqual(len(atvs), 1)
        self.assertEqual(atvs[0].name, 'Apple TV 2')
        self.assertEqual(atvs[0].address, ipaddress.ip_address('10.0.0.2'))
|
import os
import json
import requests
from base64 import b64encode, b64decode
from collections import OrderedDict
from Crypto.PublicKey import RSA
from Crypto.Util import number
from datetime import datetime
from jwt import JWT, jwk_from_dict
from OpenSSL import crypto
from suds.client import Client
from xml.dom import minidom
class SystemUserToken:
    """
    Class used to exchange a system user token for a system user Ticket.
    A system user Ticket is one of the accepted credential types when calling
    SuperOffice web services.
    Attributes
    ----------
    app_token : str
        the application secret, or token.
    private_key_file : str
        the private certificate key filename.
    environment : str
        the subdomain used to determine the deployment target, i.e. sod, stage
        or online.
    Methods
    -------
    get_system_user_ticket(sys_token, context_id)
        uses the system user token to get the system user ticket credential
        for the specified tenant (context_id).
    """

    def get_long_int(self, nodelist):
        """converts contents of element as long int"""
        # Concatenate all text children, base64-decode the result and
        # interpret the raw bytes as one big integer.
        rc = []
        for node in nodelist:
            if node.nodeType == node.TEXT_NODE:
                rc.append(node.data)
        string = ''.join(rc)
        return number.bytes_to_long(b64decode(string))

    #
    # gets the PEM key from a private RSA key XML file (.NET RSAKeyValue format).
    #
    def get_rsa_as_pem_content(self, file_name):
        '''returns a PEM built from a private RSA key XML file.'''
        with open(file_name, 'rb') as pkFile:
            xmlPrivateKey = pkFile.read()
            rsaKeyValue = minidom.parseString(xmlPrivateKey)
            # Pull each RSA component out of the .NET XML key format.
            modulus = self.get_long_int(rsaKeyValue.getElementsByTagName(
                'Modulus')[0].childNodes)
            exponent = self.get_long_int(rsaKeyValue.getElementsByTagName(
                'Exponent')[0].childNodes)
            d = self.get_long_int(
                rsaKeyValue.getElementsByTagName('D')[0].childNodes)
            p = self.get_long_int(
                rsaKeyValue.getElementsByTagName('P')[0].childNodes)
            q = self.get_long_int(
                rsaKeyValue.getElementsByTagName('Q')[0].childNodes)
            qInv = self.get_long_int(rsaKeyValue.getElementsByTagName(
                'InverseQ')[0].childNodes)
            # NOTE(review): .NET's <InverseQ> is q^-1 mod p, while the sixth
            # element of the RSA.construct tuple expects p^-1 mod q; the
            # consistency check is disabled here (False), so confirm this
            # ordering against the library documentation.
            privateKey = RSA.construct((modulus, exponent, d, p, q, qInv), False)
            pemKey = privateKey.exportKey()
            return pemKey.decode('utf-8')

    def get_pem_content(self, file_name):
        '''returns the raw contents of a PEM private-key file.'''
        with open(file_name, 'rb') as pkFile:
            rsaFileContent = pkFile.read()
            return rsaFileContent.decode('utf-8')

    def get_private_key(self, file_name):
        # Dispatch on the key file extension: .NET XML keys are converted to
        # PEM, .pem files are used verbatim. Other extensions yield None.
        if str(file_name).endswith('.xml'):
            return self.get_rsa_as_pem_content(file_name)
        elif str(file_name).endswith('.pem'):
            return self.get_pem_content(file_name)

    def __init__(self, app_token, private_key_file, environment='online'):
        self.application_token = app_token
        self.private_key = self.get_private_key(private_key_file)
        # NOTE(review): this prints private key material to stdout — looks
        # like debug leftovers; consider removing.
        print(self.private_key)
        self.environment = environment
        self.login_endpoint = 'login/Services/PartnerSystemUserService.svc'
        # The WSDL is expected to sit next to this module on disk.
        self.wsdl_path = os.path.join(os.path.dirname(
            os.path.realpath(__file__)), 'PartnerSystemUserService.wsdl')

    def get_system_user_ticket(self, sys_token, context_id):
        """
        returns Ticket credential string.
        Attributes
        ----------
        sys_token : str
            the tenant-specific system user token.
        context_id : str
            the customer identifier, i.e. Cust12345.
        """
        # Signed token format: <sysToken>.<UTC YYYYMMDDHHMM>.<b64 RSA-SHA256 sig>
        time_utc = datetime.utcnow()
        time_formatted = datetime.strftime(time_utc, "%Y%m%d%H%M")
        system_token = sys_token + '.' + time_formatted
        key = crypto.load_privatekey(crypto.FILETYPE_PEM, self.private_key)
        signature = crypto.sign(key, system_token, 'sha256')
        signed_system_token = system_token + "." + \
            b64encode(signature).decode('UTF-8')
        # SOAP headers identify the calling application and the target tenant.
        headers = OrderedDict([
            ('ApplicationToken', self.application_token),
            ('ContextIdentifier', context_id)
        ])
        client = Client('file:%s' % self.wsdl_path)
        client.set_options(soapheaders=headers)
        client.set_options(
            location='https://{env}.superoffice.com/{endpoint}'.format(
                env=self.environment, endpoint=self.login_endpoint))
        # Request the ticket wrapped in a JWT so the response can be verified.
        token_type = client.factory.create('TokenType')['Jwt']
        response = client.service.Authenticate(signed_system_token, token_type)
        if response.IsSuccessful == True:
            jwt_token = response.Token
            print('Reponse: ' + str(response))
            # Verify the JWT against the keys published on the JWKS endpoint
            # before trusting the ticket claim inside it.
            jwt = JWT()
            jwksResponse = requests.get(
                'https://{env}.superoffice.com/login/.well-known/jwks'.format(
                    env=self.environment))
            jwks = json.loads(jwksResponse.text)
            verifying_key = jwk_from_dict(jwks['keys'][0])
            message_received = jwt.decode(jwt_token, verifying_key)
            return str(message_received['http://schemes.superoffice.net/identity/ticket'])
        return 'Failed!'
|
from typing import Any, Literal, Sequence, Union, TypedDict
class BeginEvent(TypedDict):
    # Transaction-start message; xid is the transaction id.
    # NOTE(review): these shapes look like PostgreSQL wal2json output — confirm.
    event: Literal["begin"]
    xid: int
class ChangeEvent(TypedDict):
    # Common fields of every row-change message; subclasses add a 'kind'
    # discriminator plus the per-operation payload.
    event: Literal["change"]
    xid: int
    timestamp: str
    schema: str
    table: str
class InsertEvent(ChangeEvent):
    # Row insert: the three parallel sequences describe the new row's columns.
    kind: Literal["insert"]
    columnnames: Sequence[str]
    columntypes: Sequence[str]
    columnvalues: Sequence[Any]
class OldKeys(TypedDict):
    # Identifying key of the affected row before the change (parallel sequences).
    keynames: Sequence[str]
    keytypes: Sequence[str]
    keyvalues: Sequence[Any]
class UpdateEvent(ChangeEvent):
    # Row update: new column values plus the old identifying key.
    kind: Literal["update"]
    columnnames: Sequence[str]
    columntypes: Sequence[str]
    columnvalues: Sequence[Any]
    oldkeys: OldKeys
class DeleteEvent(ChangeEvent):
    # Row delete: only the old identifying key is reported.
    kind: Literal["delete"]
    oldkeys: OldKeys
class CommitEvent(TypedDict):
    # Transaction-end message.
    event: Literal["commit"]
# Union of every message kind a change stream can deliver.
Event = Union[
    BeginEvent, InsertEvent, UpdateEvent, DeleteEvent, CommitEvent,
]
|
from pywps import Service
from pywps.tests import assert_response_success
from .common import client_for
from c3s_magic_wps.processes import processes
def test_wps_caps():
    """GetCapabilities must advertise exactly the expected set of processes."""
    client = client_for(Service(processes=processes))
    resp = client.get(service='wps', request='getcapabilities', version='1.0.0')
    # Collect all advertised process identifiers from the capabilities XML.
    names = resp.xpath_text('/wps:Capabilities' '/wps:ProcessOfferings' '/wps:Process' '/ows:Identifier')
    # 'consecdrydays',
    # 'shape_select',
    # 'perfmetrics',
    expected_caps = sorted([
        'blocking',
        'capacity_factor',
        'combined_indices',
        'consecdrydays',
        'cvdp',
        'diurnal_temperature_index',
        'drought_indicator',
        'ensclus',
        'extreme_index',
        'heatwaves_coldwaves',
        'modes_of_variability',
        'multimodel_products',
        'preproc',
        'shapefile_selection',
        # 'sleep',
        'teleconnections',
        'weather_regimes',
        'zmnam',
        'toymodel',
        'rainfarm',
        'quantile_bias',
        'hyint',
        'smpi',
        'perfmetrics',
        'extreme_events',
        'meta',
    ])
    # Printed to ease diffing when the assertion below fails.
    print(sorted(names.split()))
    print(expected_caps)
    assert sorted(names.split()) == expected_caps
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import unicode_literals, division, print_function
import requests
import sys
import json
import os
import codecs
import logging
import logging.handlers
# Some constants
BASEURL = None
# Format the logger output
class CustomFormatter(logging.Formatter):
    """Log formatter that lets callers override ``%(funcName)s``.

    When a record carries a ``funcname`` attribute (typically supplied via the
    ``extra=`` dict of a logging call), its value replaces ``funcName`` before
    the record is rendered.
    """

    def format(self, record):
        try:
            record.funcName = record.funcname
        except AttributeError:
            pass  # no override supplied — keep the real function name
        return super(CustomFormatter, self).format(record)
# Editor testing logging: a daily-rotating UTF-8 log file. funcName can be
# overridden per record through a 'funcname' extra (see CustomFormatter).
LOGNAME = "DOWNLOADER"
LOGFNAME = "downloader.log"
LOGLEVEL = logging.DEBUG
try:
    fmt = "%(asctime)s [%(levelname)s] %(name)s in %(funcName)s(): %(message)s"
    LOG = logging.getLogger(LOGNAME)
    formatter = CustomFormatter(fmt)
    ofstream = logging.handlers.TimedRotatingFileHandler(LOGFNAME, when="D", interval=1, encoding="utf-8")
    ofstream.setFormatter(formatter)
    LOG.addHandler(ofstream)
    LOG.setLevel(LOGLEVEL)
except Exception as e:
    # Logging is treated as essential: abort the program if it cannot be set up.
    print("FATAL ERROR: Could not create logging instance: {}".format(e), file=sys.stderr)
    sys.exit(1)
# Filled in from config.json when run as a script (see the __main__ block).
USERS = None
class Downloader:
    """Pulls finished transcriptions from the speech-editing server.

    Logs in with a project-level account to enumerate projects, then with
    each editor account to fetch that editor's tasks and their text. All
    requests go to the module-level BASEURL, filled in from config.json.
    """

    def __init__(self, config):
        # One shared password is used for both project and editor accounts.
        self.user_password = config['user_password']
        self.user_token = None      # editor-session token (login()/logout())
        self.project_id = None
        self.project_user = config['project_user']
        self.project_token = None   # project-session token

    def login_project(self):
        """Log in with the project account; stores the token in self.project_token."""
        if self.project_token is None:
            LOG.info("{} logging in".format(self.project_user))
            headers = {"Content-Type" : "application/json"}
            data = {"username": self.project_user, "password": self.user_password, "role" : 'project'}
            res = requests.post(BASEURL + "projects/login", headers=headers, data=json.dumps(data))
            LOG.info('login(): SERVER SAYS:{}'.format(res.text))
            pkg = res.json()
            self.project_token = pkg['token']
        else:
            LOG.info("User logged in already!")

    def logout_project(self):
        """Log out the project session and clear self.project_token."""
        if self.project_token is not None:
            headers = {"Content-Type" : "application/json"}
            data = {"token": self.project_token}
            res = requests.post(BASEURL + "projects/logout", headers=headers, data=json.dumps(data))
            LOG.info('logout(): SERVER SAYS:{}'.format(res.text))
            self.project_token = None
        else:
            LOG.info("User not logged in!")

    def listprojects(self):
        """Fetch all projects visible to the project account into self.project_info."""
        if self.project_token is not None:
            LOG.info("Listing projects")
            headers = {"Content-Type" : "application/json"}
            data = {"token": self.project_token }
            res = requests.post(BASEURL + "projects/listprojects", headers=headers, data=json.dumps(data))
            LOG.info('loadproject(): SERVER SAYS:{}'.format(res.text))
            LOG.info(res.status_code)
            pkg = res.json()
            self.project_info = pkg['projects']
        else:
            LOG.info("User not logged in!")

    def login(self, user):
        """Log in as an editor; stores the session token in self.user_token."""
        if self.user_token is None:
            LOG.info("{} logging in".format(user))
            headers = {"Content-Type" : "application/json"}
            data = {"username": user, "password": self.user_password, "role" : 'editor'}
            res = requests.post(BASEURL + "editor/login", headers=headers, data=json.dumps(data))
            LOG.info('login(): SERVER SAYS:{}'.format(res.text))
            pkg = res.json()
            self.user_token = pkg['token']
            self.username = user
        else:
            LOG.info("User logged in already!")

    def logout(self):
        """Log out the editor session and clear self.user_token."""
        if self.user_token is not None:
            headers = {"Content-Type" : "application/json"}
            data = {"token": self.user_token}
            res = requests.post(BASEURL + "editor/logout", headers=headers, data=json.dumps(data))
            LOG.info('logout(): SERVER SAYS:{}'.format(res.text))
            self.user_token = None
        else:
            LOG.info("User not logged in!")

    def loadtasks(self):
        """Fetch all of the logged-in editor's tasks into self.all_tasks."""
        LOG.info("username={}: loadtasks(): Entering".format(self.username))
        if self.user_token is not None:
            headers = {"Content-Type" : "application/json"}
            data = {"token": self.user_token}
            res = requests.post(BASEURL + "editor/loadtasks", headers=headers, data=json.dumps(data))
            print('SERVER SAYS:', res.text)
            print(res.status_code)
            pkg = res.json()
            # The server wraps the task list in a 'collator' field.
            if len(pkg['collator']) > 0:
                self.all_tasks = pkg["collator"]
            else:
                print('No tasks to select')
                self.all_tasks = []
                LOG.info("username={}: loadtasks(): No tasks to select!".format(self.username))
        else:
            print("User not logged in!")
            LOG.error("username={}: loadtasks(): User not logged in!".format(self.username))
        print('')

    def gettext(self, projectid, taskid):
        """Return the transcription text of one task (None when not logged in)."""
        LOG.info("username={}: gettext(): Entering".format(self.username))
        if self.user_token is not None:
            headers = {"Content-Type" : "application/json"}
            data = {'token' : self.user_token, 'projectid' : projectid, 'taskid' : taskid}
            res = requests.post(BASEURL + "editor/gettext", headers=headers, data=json.dumps(data))
            LOG.info("username={}: gettext(): {}".format(self.username, res.text))
            print(res.status_code)
            pkg = res.json()
            return pkg['text']
        else:
            print("User not logged in!")
            LOG.error("username={}: gettext(): User not logged in!".format(self.username))
            print('')
if __name__ == "__main__":
    # Bail out early when the required configuration file is missing.
    if not os.path.exists('config.json'):
        print('ERROR: cannot load config.json file in current folder')
        sys.exit(1)
    with open('config.json') as config_file:
        config = json.load(config_file)
    USERS = config['downloader']['USERS']
    BASEURL = config['baseurl']

    # Build a projectid -> projectname map using the project-level account.
    downloader = Downloader(config)
    downloader.login_project()
    downloader.listprojects()
    downloader.logout_project()
    project_info = {}
    for project in downloader.project_info:
        project_info[project['projectid']] = project['projectname']

    # Bug fix: the output handle was previously bound to the name 'file'
    # (shadowing the builtin) and only closed on the success path; a context
    # manager closes it even if a download fails mid-way.
    with codecs.open("transcriptions.txt", "w", "utf-8") as out_file:
        for user in USERS:
            downloader.login(user)
            downloader.loadtasks()
            for task in downloader.all_tasks:
                text = downloader.gettext(task['projectid'], task['taskid'])
                projectname = project_info[task['projectid']]
                out_file.write("{};{}\n".format(projectname, text))
            downloader.logout()
|
import keccak_hash
from binascii import unhexlify, hexlify
import unittest
import sys
# smartcash block #1
# rc125@ubuntu:~/.smartcash$ smartcashd getblockhash 1
# 00000009c4e61bee0e8d6236f847bb1dd23f4c61ca5240b74852184c9bf98c30
# rc125@ubuntu:~/.smartcash$ smartcashd getblock 00000009c4e61bee0e8d6236f847bb1dd23f4c61ca5240b74852184c9bf98c30
# {
# "hash": "00000009c4e61bee0e8d6236f847bb1dd23f4c61ca5240b74852184c9bf98c30",
# "confirmations": 172736,
# "strippedsize": 356,
# "size": 356,
# "weight": 1424,
# "height": 1,
# "version": 2,
# "versionHex": "00000002",
# "merkleroot": "a68bf0e348915b09d7da1d8dae05fb04d9016d06d5d964c4cc85dab8f6b032e9",
# "tx": [
# "a68bf0e348915b09d7da1d8dae05fb04d9016d06d5d964c4cc85dab8f6b032e9"
# ],
# "time": 1499790268,
# "mediantime": 1499790268,
# "nonce": 146506294,
# "bits": "1e0ffff0",
# "difficulty": 0.000244140625,
# "chainwork": "0000000000000000000000000000000000000000000000000000000000200020",
# "previousblockhash": "000007acc6970b812948d14ea5a0a13db0fdd07d5047c7e69101fa8b361e05a4",
# "nextblockhash": "00000001d83bf07ff4faddf97a5e68e760f012d6526126b2668aea29bd23bd09"
# }
# Serialized 80-byte header of SmartCash block #1 (hex), assembled from the
# getblock output quoted above: 4-byte little-endian version (0x00000002),
# byte-reversed previous block hash, byte-reversed merkle root, then
# little-endian time (1499790268), bits (1e0ffff0) and nonce (146506294).
header_hex = ("02000000" +
              "a4051e368bfa0191e6c747507dd0fdb03da1a0a54ed14829810b97c6ac070000" +
              "e932b0f6b8da85ccc464d9d5066d01d904fb05ae8d1ddad7095b9148e3f08ba6"
              "bcfb6459" +
              "f0ff0f1e" +
              "3682bb08")
# Expected proof-of-work hash: block #1's hash, byte-reversed and hex-encoded.
best_hash = '308cf99b4c185248b74052ca614c3fd21dbb47f836628d0eee1be6c409000000'
class TestSequenceFunctions(unittest.TestCase):
    """Check keccak_hash.getPoWHash() against SmartCash block #1's known hash."""

    def setUp(self):
        # Raw 80-byte header and the expected PoW hash (hex string).
        self.block_header = unhexlify(header_hex)
        self.best_hash = best_hash

    def test_keccak_hash(self):
        self.pow_hash = keccak_hash.getPoWHash(self.block_header)
        self.pow_hash = hexlify(self.pow_hash)
        # Bug fix: hexlify() returns bytes on Python 3 while best_hash is a
        # str, so the old bytes-vs-str assertEqual could never pass; decode
        # the digest before comparing.
        self.assertEqual(self.pow_hash.decode('ascii'), self.best_hash)
# Run the unit tests when this module is executed directly.
if __name__ == '__main__':
    unittest.main()
|
"""
LINK: https://leetcode.com/problems/pascals-triangle-ii/
Given an integer rowIndex, return the rowIndexth row of the Pascal's triangle.
Notice that the row index starts from 0.
In Pascal's triangle, each number is the sum of the two numbers directly above it.
Follow up:
Could you optimize your algorithm to use only O(k) extra space?
Example 1:
Input: rowIndex = 3
Output: [1,3,3,1]
Example 2:
Input: rowIndex = 0
Output: [1]
Example 3:
Input: rowIndex = 1
Output: [1,1]
Constraints:
0 <= rowIndex <= 33
"""
def getRow(rowIndex):
    """Return row `rowIndex` (0-based) of Pascal's triangle as a list of ints.

    Each row is built from the previous one, so only O(k) extra space is used.
    """
    row = [1]
    for _ in range(rowIndex):
        # Interior entries are the sums of horizontally adjacent pairs above.
        row = [1] + [row[k] + row[k + 1] for k in range(len(row) - 1)] + [1]
    return row
|
#!/usr/bin/env python
import roslib; roslib.load_manifest('rosprolog')
import rospy
from rosprolog_client import PrologException, Prolog
if __name__ == '__main__':
    # Query the Prolog knowledge base for perishable products and where they are.
    rospy.init_node('example_query_prolog_kb')
    prolog = Prolog()
    query = prolog.query("perishable(X), location(X,Y,_)")
    for solution in query.solutions():
        # Bug fix: the bare `print` statement is a SyntaxError on Python 3;
        # the parenthesized single-argument form works on both Python 2 and 3.
        print('Found solution. Product: %s, Location: %s' % (solution['X'], solution['Y']))
    query.finish()
|
# We are given a three data sets represented as arrays: A, B and C. Find algorithm that determines
# if there is a triplet a,b and c respectively from A, B and C that a + b = c.
# 1st solution:
def partition(T, l, r):
    """Lomuto partition of T[l..r] around pivot T[r].

    Rearranges the slice in place so elements <= pivot precede it, and
    returns the pivot's final index.
    """
    pivot = T[r]
    boundary = l  # first index of the "greater than pivot" region
    for j in range(l, r):
        if T[j] <= pivot:
            T[boundary], T[j] = T[j], T[boundary]
            boundary += 1
    T[boundary], T[r] = T[r], T[boundary]
    return boundary
def quicksort(T, l, r):
    """In-place quicksort of T[l..r] (inclusive).

    Recurses into the smaller partition and loops on the larger one, keeping
    the recursion depth O(log n) even on adversarial inputs.
    """
    while l < r:
        q = partition(T, l, r)
        if q - l <= r - q:
            quicksort(T, l, q - 1)
            l = q + 1
        else:
            # Bug fix: the pivot T[q] is already in its final position, so the
            # recursive call must start at q + 1 — the old call re-included q
            # and re-partitioned it on every level.
            quicksort(T, q + 1, r)
            r = q - 1
def is_sum_equal_a_and_b_sort(A, B, C):
    """Return True iff some a in A and b in B satisfy a + b == c for a c in C.

    Sorts A and B in place, then runs a two-pointer scan per target value.
    """
    quicksort(A, 0, len(A) - 1)
    quicksort(B, 0, len(B) - 1)
    for target in C:
        lo, hi = 0, len(B) - 1
        while lo < len(A) and hi >= 0:
            total = A[lo] + B[hi]
            if total == target:
                return True
            if total > target:
                hi -= 1  # sum too large: shrink from the big end of B
            else:
                lo += 1  # sum too small: grow from the small end of A
    return False
# 2nd solution:
def binary_search(T, l, r, x):
    """Return True iff x occurs in the sorted slice T[l..r] (inclusive)."""
    # Iterative form of the classic halving search.
    while l <= r:
        mid = (l + r) // 2
        if T[mid] == x:
            return True
        if T[mid] > x:
            r = mid - 1
        else:
            l = mid + 1
    return False
def is_sum_equal_c_sort(A, B, C):
    """Return True iff a + b (a in A, b in B) occurs in C.

    Sorts C in place once, then binary-searches every pairwise sum.
    """
    quicksort(C, 0, len(C) - 1)
    return any(
        binary_search(C, 0, len(C) - 1, a + b)
        for a in A
        for b in B
    )
# Ad-hoc demo: run both variants on sample data and print whether a matching
# triplet (a + b == c) exists in each case.
A = [3, 2, 56, 3, 11, 78, 5, 9]
B = [34, 7, 3, 5, 7, 2, 56, 8]
C = [53, 3, 79, 19, 65, 89, 1]
print(is_sum_equal_a_and_b_sort(A, B, C))
A = [3, 16, 7, 8, 19, 2, 11, 26]
B = [14, 5, 22, 4, 8, 16, 3, 5, 2]
C = [3, 73, 26, 37, 49, 52]
print(is_sum_equal_c_sort(A, B, C))
|
import argparse
from pathlib import Path
# Indentation of the generated array body and number of byte literals per line.
INDENT = ' ' * 4
ELEMENTS_PER_LINE = 8
def main():
    """CLI entry point: emit a C header for each file named on the command line."""
    parser = argparse.ArgumentParser(
        description="""
        bla bla bla
        """.strip(),)
    parser.add_argument('filenames', type=str, help='filename help', nargs='+')
    args = parser.parse_args()
    for filename in (Path(x) for x in args.filenames):
        # The header is printed to stdout by create_header().
        # NOTE(review): the old code also computed a '<stem>.h' output path but
        # never used it; removed until file output is actually implemented.
        create_header(filename)
def create_header(path: Path):
    """Print a C header declaring the file's bytes as an unsigned char array.

    The array is named after the file stem and wrapped in an include guard;
    ELEMENTS_PER_LINE byte literals are emitted per line, indented by INDENT.
    """
    array_name = f"{path.stem}"
    guard_name = f"{path.stem.upper()}_H"
    body = []
    body.append(f"""
#ifndef {guard_name}
#define {guard_name}
const unsigned char {array_name}[] = {{
""".strip())
    # Bug fix: read the bytes of the file we were asked to convert. The old
    # code opened a hard-coded 'sprite.2bpp' and then discarded the data by
    # rebinding it to range(255).
    data = path.read_bytes()
    for start in range(0, len(data), ELEMENTS_PER_LINE):
        line = []
        for offset in range(start, min(start + ELEMENTS_PER_LINE, len(data))):
            line.append(f"0x{data[offset]:02X}")
            # Trailing comma after every element except the very last byte.
            if offset < len(data) - 1:
                line.append(', ')
        body.append(INDENT + ''.join(line))
    body.append(f"""
}};
#endif
""".strip() + "\n")
    print('\n'.join(body))
# Run the CLI only when executed as a script, not when imported.
if __name__ == '__main__':
    main()
|
#Tom Mirrigton
#Exercise 6 submission
#define the function factorial
#set y to 1 or for loop will just multiply through 0
def fact(x):
    """Return x! computed iteratively; any x below 2 (including 0) yields 1."""
    product = 1
    # Multiply the running product by every integer from 2 up to and
    # including x; an empty range leaves the product at 1.
    for factor in range(2, x + 1):
        product *= factor
    return product
#test function using specified values
m = 5
n = 7
p = 10
print(fact(m))  # 120
print(fact(n))  # 5040
print(fact(p))  # 3628800
#References
#GMIT 52167 Programming and Scripting course material https://learnonline.gmit.ie/course/view.php?id=3940
#The Python Tutorial https://docs.python.org/3/tutorial/
|
"""Class to perform under-sampling based on nearmiss methods."""
# Authors: Guillaume Lemaitre <g.lemaitre58@gmail.com>
# Christos Aridas
# License: MIT
import warnings
from collections import Counter
import numpy as np
from sklearn.utils import safe_indexing
from ..base import BaseUnderSampler
from ...utils import check_neighbors_object
from ...utils import Substitution
from ...utils.deprecation import deprecate_parameter
from ...utils._docstring import _random_state_docstring
@Substitution(
    sampling_strategy=BaseUnderSampler._sampling_strategy_docstring,
    random_state=_random_state_docstring)
class NearMiss(BaseUnderSampler):
    """Class to perform under-sampling based on NearMiss methods.
    Read more in the :ref:`User Guide <controlled_under_sampling>`.
    Parameters
    ----------
    {sampling_strategy}
    return_indices : bool, optional (default=False)
        Whether or not to return the indices of the samples randomly
        selected from the majority class.
        .. deprecated:: 0.4
           ``return_indices`` is deprecated. Use the attribute
           ``sample_indices_`` instead.
    {random_state}
        .. deprecated:: 0.4
           ``random_state`` is deprecated in 0.4 and will be removed in 0.6.
    version : int, optional (default=1)
        Version of the NearMiss to use. Possible values are 1, 2 or 3.
    n_neighbors : int or object, optional (default=3)
        If ``int``, size of the neighbourhood to consider to compute the
        average distance to the minority point samples. If object, an
        estimator that inherits from
        :class:`sklearn.neighbors.base.KNeighborsMixin` that will be used to
        find the k_neighbors.
    n_neighbors_ver3 : int or object, optional (default=3)
        If ``int``, NearMiss-3 algorithm start by a phase of re-sampling. This
        parameter correspond to the number of neighbours selected create the
        subset in which the selection will be performed. If object, an
        estimator that inherits from
        :class:`sklearn.neighbors.base.KNeighborsMixin` that will be used to
        find the k_neighbors.
    n_jobs : int, optional (default=1)
        The number of threads to open if possible.
    ratio : str, dict, or callable
        .. deprecated:: 0.4
           Use the parameter ``sampling_strategy`` instead. It will be removed
           in 0.6.
    Attributes
    ----------
    sample_indices_ : ndarray, shape (n_new_samples)
        Indices of the samples selected.
        .. versionadded:: 0.4
           ``sample_indices_`` used instead of ``return_indices=True``.
    Notes
    -----
    The methods are based on [1]_.
    Supports multi-class resampling.
    References
    ----------
    .. [1] I. Mani, I. Zhang. "kNN approach to unbalanced data distributions:
       a case study involving information extraction," In Proceedings of
       workshop on learning from imbalanced datasets, 2003.
    Examples
    --------
    >>> from collections import Counter
    >>> from sklearn.datasets import make_classification
    >>> from imblearn.under_sampling import \
    NearMiss # doctest: +NORMALIZE_WHITESPACE
    >>> X, y = make_classification(n_classes=2, class_sep=2,
    ... weights=[0.1, 0.9], n_informative=3, n_redundant=1, flip_y=0,
    ... n_features=20, n_clusters_per_class=1, n_samples=1000, random_state=10)
    >>> print('Original dataset shape %s' % Counter(y))
    Original dataset shape Counter({{1: 900, 0: 100}})
    >>> nm = NearMiss()
    >>> X_res, y_res = nm.fit_resample(X, y)
    >>> print('Resampled dataset shape %s' % Counter(y_res))
    Resampled dataset shape Counter({{0: 100, 1: 100}})
    """
    def __init__(self,
                 sampling_strategy='auto',
                 return_indices=False,
                 random_state=None,
                 version=1,
                 n_neighbors=3,
                 n_neighbors_ver3=3,
                 n_jobs=1,
                 ratio=None):
        # scikit-learn convention: __init__ only stores the parameters;
        # deprecation handling happens in _validate_estimator and
        # _fit_resample.
        super().__init__(
            sampling_strategy=sampling_strategy, ratio=ratio)
        self.random_state = random_state
        self.return_indices = return_indices
        self.version = version
        self.n_neighbors = n_neighbors
        self.n_neighbors_ver3 = n_neighbors_ver3
        self.n_jobs = n_jobs
    def _selection_dist_based(self,
                              X,
                              y,
                              dist_vec,
                              num_samples,
                              key,
                              sel_strategy='nearest'):
        """Select the appropriate samples depending of the strategy selected.
        Parameters
        ----------
        X : {array-like, sparse matrix}, shape (n_samples, n_features)
            Original samples.
        y : array-like, shape (n_samples,)
            Associated label to X.
        dist_vec : ndarray, shape (n_samples, )
            The distance matrix to the nearest neigbour.
        num_samples: int
            The desired number of samples to select.
        key : str or int,
            The target class.
        sel_strategy : str, optional (default='nearest')
            Strategy to select the samples. Either 'nearest' or 'farthest'
        Returns
        -------
        idx_sel : list of int, of length at most num_samples
            The indices of the selected samples (row positions within
            ``dist_vec``).
        """
        # Compute the distance considering the farthest neighbour
        # (sum over the last n_neighbors columns; ordering by the sum is
        # equivalent to ordering by the average distance).
        dist_avg_vec = np.sum(dist_vec[:, -self.nn_.n_neighbors:], axis=1)
        target_class_indices = np.flatnonzero(y == key)
        # Sanity check: dist_vec must have one row per sample of class `key`.
        if (dist_vec.shape[0] != safe_indexing(X,
                                               target_class_indices).shape[0]):
            raise RuntimeError('The samples to be selected do not correspond'
                               ' to the distance matrix given. Ensure that'
                               ' both `X[y == key]` and `dist_vec` are'
                               ' related.')
        # Sort the list of distance and get the index
        if sel_strategy == 'nearest':
            sort_way = False
        elif sel_strategy == 'farthest':
            sort_way = True
        else:
            raise NotImplementedError
        sorted_idx = sorted(
            range(len(dist_avg_vec)),
            key=dist_avg_vec.__getitem__,
            reverse=sort_way)
        # Throw a warning to tell the user that we did not have enough samples
        # to select and that we just select everything
        if len(sorted_idx) < num_samples:
            warnings.warn('The number of the samples to be selected is larger'
                          ' than the number of samples available. The'
                          ' balancing ratio cannot be ensure and all samples'
                          ' will be returned.')
        # Select the desired number of samples
        return sorted_idx[:num_samples]
    def _validate_estimator(self):
        """Private function to create the NN estimator"""
        # check for deprecated random_state
        if self.random_state is not None:
            deprecate_parameter(self, '0.4', 'random_state')
        self.nn_ = check_neighbors_object('n_neighbors', self.n_neighbors)
        self.nn_.set_params(**{'n_jobs': self.n_jobs})
        if self.version == 3:
            # NearMiss-3 needs a second estimator for its pre-selection phase.
            self.nn_ver3_ = check_neighbors_object('n_neighbors_ver3',
                                                   self.n_neighbors_ver3)
            self.nn_ver3_.set_params(**{'n_jobs': self.n_jobs})
        # NOTE: this check runs after the estimators are built, so an invalid
        # ``version`` still constructs ``nn_`` before raising.
        if self.version not in (1, 2, 3):
            raise ValueError('Parameter `version` must be 1, 2 or 3, got'
                             ' {}'.format(self.version))
    def _fit_resample(self, X, y):
        if self.return_indices:
            deprecate_parameter(self, '0.4', 'return_indices',
                                'sample_indices_')
        self._validate_estimator()
        idx_under = np.empty((0, ), dtype=int)
        target_stats = Counter(y)
        class_minority = min(target_stats, key=target_stats.get)
        minority_class_indices = np.flatnonzero(y == class_minority)
        # ``nn_`` is fitted on the minority class only, so every distance
        # computed below is a distance to minority samples.
        self.nn_.fit(safe_indexing(X, minority_class_indices))
        for target_class in np.unique(y):
            if target_class in self.sampling_strategy_.keys():
                n_samples = self.sampling_strategy_[target_class]
                target_class_indices = np.flatnonzero(y == target_class)
                X_class = safe_indexing(X, target_class_indices)
                y_class = safe_indexing(y, target_class_indices)
                if self.version == 1:
                    # NearMiss-1: keep the samples closest (on average) to
                    # their n_neighbors nearest minority samples.
                    dist_vec, idx_vec = self.nn_.kneighbors(
                        X_class, n_neighbors=self.nn_.n_neighbors)
                    index_target_class = self._selection_dist_based(
                        X,
                        y,
                        dist_vec,
                        n_samples,
                        target_class,
                        sel_strategy='nearest')
                elif self.version == 2:
                    # NearMiss-2: distances are computed against *all*
                    # minority samples (n_neighbors = minority class size).
                    dist_vec, idx_vec = self.nn_.kneighbors(
                        X_class, n_neighbors=target_stats[class_minority])
                    index_target_class = self._selection_dist_based(
                        X,
                        y,
                        dist_vec,
                        n_samples,
                        target_class,
                        sel_strategy='nearest')
                elif self.version == 3:
                    # NearMiss-3: first short-list the samples of this class
                    # that appear among the nearest neighbours of minority
                    # samples ...
                    self.nn_ver3_.fit(X_class)
                    dist_vec, idx_vec = self.nn_ver3_.kneighbors(
                        safe_indexing(X, minority_class_indices))
                    idx_vec_farthest = np.unique(idx_vec.reshape(-1))
                    X_class_selected = safe_indexing(X_class, idx_vec_farthest)
                    y_class_selected = safe_indexing(y_class, idx_vec_farthest)
                    # ... then keep, within that subset, the samples farthest
                    # from their nearest minority neighbours.
                    dist_vec, idx_vec = self.nn_.kneighbors(
                        X_class_selected, n_neighbors=self.nn_.n_neighbors)
                    index_target_class = self._selection_dist_based(
                        X_class_selected,
                        y_class_selected,
                        dist_vec,
                        n_samples,
                        target_class,
                        sel_strategy='farthest')
                    # idx_tmp is relative to the feature selected in the
                    # previous step and we need to find the indirection
                    index_target_class = idx_vec_farthest[index_target_class]
            else:
                # Classes absent from sampling_strategy_ are kept entirely.
                index_target_class = slice(None)
            idx_under = np.concatenate(
                (idx_under,
                 np.flatnonzero(y == target_class)[index_target_class]),
                axis=0)
        self.sample_indices_ = idx_under
        if self.return_indices:
            return (safe_indexing(X, idx_under), safe_indexing(y, idx_under),
                    idx_under)
        return safe_indexing(X, idx_under), safe_indexing(y, idx_under)
    def _more_tags(self):
        # Advertise to the common estimator checks that sample_indices_ is set.
        return {'sample_indices': True}
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import logging
from netaddr import *
def merge(dbag, data):
    """Merge one host entry described by *data* into the data bag *dbag*.

    When ``data['add']`` is explicitly ``False``, the entry keyed by
    ``data['ipv4_address']`` is removed from the bag (if present).
    Otherwise, the first existing entry with the same MAC address is
    dropped (the special ``'id'`` key is never treated as an entry) and
    the new entry is stored under its IPv4 address.  A duplicate ip
    address will clobber the old value, which seems desirable.

    Returns the (mutated) bag.
    """
    if "add" in data and data['add'] is False and "ipv4_address" in data:
        if data['ipv4_address'] in dbag:
            del(dbag[data['ipv4_address']])
    else:
        remove_keys = set()
        # .items() works on both Python 2 and 3; the original used the
        # Python-2-only .iteritems().
        for key, entry in dbag.items():
            if key != 'id' and entry['mac_address'] == data['mac_address']:
                remove_keys.add(key)
                break  # at most one stale entry per MAC is removed
        for remove_key in remove_keys:
            del(dbag[remove_key])
        dbag[data['ipv4_address']] = data
    return dbag
|
import html
import re
from typing import Any, Callable, Dict, List, Optional, Tuple
from urllib.parse import quote, unquote, urlencode
from mwparserfromhell.wikicode import Wikicode
from mwcomposerfromhell.magic_words import MAGIC_WORDS, MagicWord
# A parser function is a callable which takes two parameters (param and context)
# and returns a string to replace itself with.
Context = List[Tuple[str, str, bool]]
ParentContext = Dict[str, str]
ParserFunction = Callable[[str, Context, ParentContext], str]
# Pre-compiled pattern used to collapse runs of spaces during normalization.
MULTIPLE_SPACES = re.compile(r" +")
class ArticleNotFound(Exception):
    """The article was not found in any registered namespace."""
class MagicWordNotFound(Exception):
    """The magic word does not exist (raised by get_magic_word)."""
class ParserFunctionNotFound(Exception):
    """The parser function does not exist (raised by get_parser_function)."""
class CanonicalTitle:
    """A page title decomposed into namespace, title and interwiki parts."""
    def __init__(self, namespace: str, title: str, interwiki: str):
        self.namespace = namespace
        self.title = title
        self.interwiki = interwiki
    def __eq__(self, other: Any) -> bool:
        # Only another CanonicalTitle can compare equal.
        if not isinstance(other, CanonicalTitle):
            return False
        return (
            self.namespace == other.namespace
            and self.title == other.title
            and self.interwiki == other.interwiki
        )
    @property
    def full_title(self) -> str:
        """The title, prefixed with "namespace:" when a namespace is set."""
        if not self.namespace:
            return self.title
        return self.namespace + ":" + self.title
    @property
    def link(self) -> str:
        """Get a version of the canonical title appropriate for a URL."""
        return self.full_title.replace(" ", "_")
def _normalize_spaces(key: str) -> str:
"""Spaces are turned to underscores, multiple get combined, and stripped from the beginning and end."""
# Convert spaces to underscores.
key = key.replace("_", " ")
# Replace strings of underscores with a single underscore.
key = MULTIPLE_SPACES.sub(" ", key)
# Remove all underscores from the start and end.
return key.strip("_")
def _normalize_namespace(key: str) -> str:
    """MediaWiki treats the first character of namespaces as upper-case and the rest as lower-case."""
    if not key:
        # Empty namespace stays empty.
        return ""
    key = _normalize_spaces(key)
    # First character upper-case, remainder lower-case.
    return key[0].upper() + key[1:].lower()
def _normalize_title(key: str) -> str:
    """MediaWiki treats the first character of article names as upper-case."""
    if not key:  # Empty string
        return ""
    key = _normalize_spaces(key)
    # Only the first character is affected; the rest keeps its case.
    return key[0].upper() + key[1:]
class Namespace:
    """
    A Namespace maps article names (as strings) to
    ``mwparserfromhell.wikicode.Wikicode`` instances.
    Note that each article is expected to already have the namespace name removed.
    """
    def __init__(self, articles: Optional[Dict[str, Wikicode]] = None):
        # Every stored key is normalized so lookups behave consistently.
        self._articles = {}
        if articles is not None:
            for name, article in articles.items():
                self._articles[_normalize_title(name)] = article
    def __getitem__(self, key: str) -> Wikicode:
        return self._articles[_normalize_title(key)]
    def __setitem__(self, key: str, value: Wikicode) -> Wikicode:
        self._articles[_normalize_title(key)] = value
        return value
class ArticleResolver:
    """
    Holds the configuration of things that can be referenced from articles.
    """
    def __init__(self, base_url: str = "/wiki/", edit_url: str = "/index.php"):
        # The base URL should be the root that articles sit in.
        self._base_url = base_url.rstrip("/")
        self._edit_url = edit_url
        # A map of namespace names to Namespace objects. Used to find articles.
        self._namespaces = {} # type: Dict[str, Namespace]
        # A map of the "canonical" namespace to the "human" capitalization.
        self._canonical_namespaces = {} # type: Dict[str, str]
        # A map of magic words to callables.
        self._magic_words = MAGIC_WORDS.copy() # type: Dict[str, MagicWord]
        # A map of parser functions to callable.
        self._parser_functions = {} # type: Dict[str, ParserFunction]
    def add_namespace(self, name: str, namespace: Namespace) -> None:
        """Register *namespace* under the normalized *name*, remembering the
        original ("human") capitalization for canonicalization."""
        self._namespaces[_normalize_namespace(name)] = namespace
        self._canonical_namespaces[_normalize_namespace(name)] = name
    def get_article_url(self, canonical_title: CanonicalTitle) -> str:
        """Given a canonical title, return a URL suitable for linking."""
        # TODO Handle interwiki links.
        title = quote(canonical_title.link, safe="/:~")
        return f"{self._base_url}/{title}"
    def get_edit_url(self, canonical_title: CanonicalTitle) -> str:
        """Given a page title, return a URL suitable for editing that page."""
        params = (
            ("title", canonical_title.link),
            ("action", "edit"),
            ("redlink", "1"),
        )
        # MediaWiki generates an escaped URL.
        return "{}?{}".format(self._edit_url, html.escape(urlencode(params, safe=":")))
    def resolve_article(self, name: str, default_namespace: str) -> CanonicalTitle:
        """
        Get the canonical namespace and name for an article.
        :param name: The name of the article to find.
        :param default_namespace: The namespace to use, if one is not provided.
        """
        return self.canonicalize_title(name, default_namespace)
    def get_article(self, name: str, default_namespace: str = "") -> Wikicode:
        """
        Get an article's content (as ``mwparserfromhell.wikicode.Wikicode``) from a name.
        Raises ArticleNotFound when no registered namespace holds the article.
        :param name: The name of the article to find.
        :param default_namespace: The namespace to use, if one is not provided.
        """
        canonical_title = self.resolve_article(name, default_namespace)
        try:
            return self._namespaces[canonical_title.namespace][canonical_title.title]
        except KeyError:
            raise ArticleNotFound(canonical_title)
    def canonicalize_title(
        self, title: str, default_namespace: str = ""
    ) -> CanonicalTitle:
        """
        Generate the canonical form of a title.
        See https://en.wikipedia.org/wiki/Help:Link#Conversion_to_canonical_form
        TODO Handle anonymous user pages.
        """
        # HTML entities and percent encoded characters get converted to their raw
        # character.
        title = html.unescape(unquote(title))
        # Convert underscores to spaces (the original comment here had this
        # backwards).
        title = title.replace("_", " ")
        # Collapse runs of spaces into a single space.
        title = MULTIPLE_SPACES.sub(" ", title)
        # Strip surrounding whitespace (underscores were already converted
        # to spaces above).
        title = title.strip()
        # The parts are separate by colons.
        parts = title.split(":")
        # Truthy when the title begins with a colon; parts[0] is then "".
        has_interwiki = title and title[0] == ":"
        # Generally only 1 - 3 parts are expected (interwiki, namespace, and title).
        num_parts = len(parts)
        if num_parts >= 4:
            if has_interwiki:
                # Leading colon: parts[0] is empty, the interwiki follows it.
                interwiki = parts[1]
                parts = parts[2:]
            else:
                interwiki = ""
            namespace = parts[0]
            title = ":".join(parts[1:])
        elif num_parts == 3:
            if has_interwiki:
                _, interwiki, title = parts
                namespace = default_namespace
            else:
                interwiki = ""
                namespace = parts[0]
                title = ":".join(parts[1:])
        elif num_parts == 2:
            # In this case an interwiki link cannot be given.
            interwiki = ""
            namespace, title = parts
        else:
            # No colons, it is just a page title.
            interwiki = ""
            namespace = default_namespace
            title = parts[0]
        # Each of the pieces again has surrounding whitespace removed.
        interwiki = interwiki.strip()
        namespace = namespace.strip()
        title = title.strip()
        # According to the MediaWiki docs, the namespace gets canonicalized to
        # upper-case, then all lower-case. This doesn't seem accurate (see how
        # it treats the "MediaWiki" namespace).
        try:
            canonical_namespace = self._canonical_namespaces[
                _normalize_namespace(namespace)
            ]
        except KeyError:
            canonical_namespace = namespace
        return CanonicalTitle(canonical_namespace, _normalize_title(title), interwiki)
    def get_magic_word(self, magic_word: str) -> MagicWord:
        """Given a magic word, return the callable for it."""
        try:
            return self._magic_words[magic_word]
        except KeyError:
            raise MagicWordNotFound(magic_word)
    def add_magic_word(self, magic_word: str, function: MagicWord) -> None:
        """Add an additional magic word."""
        self._magic_words[magic_word] = function
    def get_parser_function(self, parser_function: str) -> ParserFunction:
        """Given a parser function name, return the callable for it."""
        try:
            return self._parser_functions[parser_function]
        except KeyError:
            raise ParserFunctionNotFound(parser_function)
    def add_parser_function(
        self, parser_function: str, function: ParserFunction
    ) -> None:
        """Add an additional parser function."""
        self._parser_functions[parser_function] = function
|
"""
File name: version.py
Author: rameshpr
Date: 11/5/18
"""
__version__ = '1.1'
|
#Maze is the class that creates a matrix like field
from Maze import Maze
from Player import *
from SaveLoad import *
from ConsoleColors import set_color
from Help import *
from INFO import BUILDING
from INFO import RESEARCH
from random import randint
from time import strftime
from GetFileDir import get_file_dir
from os import *
################################################################################
def replace(word, key1, key2):
    """Return *word* with every character equal to *key1* replaced by *key2*.

    Characters are compared one at a time, so a multi-character *key1*
    never matches (same as the original loop)."""
    return "".join(key2 if char == key1 else char for char in word)
################################################################################
def pos_dir(x_loc, y_loc):
    """Return the moves possible from grid cell (x_loc, y_loc).
    Each element is a statement string such as "x_loc += 1" (presumably
    exec'd by the caller - TODO confirm).  A neighbouring cell is accepted
    when it is non-empty (``field.cell(...) != ""``) or when both of the
    neighbour's coordinates are even.
    NOTE(review): depends on the module-level ``field`` Maze instance."""
    global field
    pos = []
    # Right neighbour (x + 1).
    if field.cell(x_loc + 1, y_loc) != "":
        pos.append( "x_loc += 1" )
    elif (x_loc + 1) % 2 == 0 and y_loc % 2 == 0:
        pos.append( "x_loc += 1" )
    # Left neighbour (x - 1).
    if field.cell(x_loc - 1, y_loc) != "":
        pos.append( "x_loc -= 1" )
    elif (x_loc - 1) % 2 == 0 and y_loc % 2 == 0:
        pos.append( "x_loc -= 1" )
    # Upper neighbour (y + 1).
    if field.cell(x_loc, y_loc + 1) != "":
        pos.append( "y_loc += 1" )
    elif x_loc % 2 == 0 and (y_loc + 1) % 2 == 0:
        pos.append( "y_loc += 1" )
    # Lower neighbour (y - 1).
    if field.cell(x_loc, y_loc - 1) != "":
        pos.append( "y_loc -= 1" )
    elif x_loc % 2 == 0 and (y_loc - 1) % 2 == 0:
        pos.append( "y_loc -= 1" )
    return pos
################################################################################
def clear(num = 15):
    """Scroll the console by printing num blank lines.
    NOTE(review): redefined (with a docstring) further down this file;
    this first definition is shadowed once the later one executes."""
    for i in range(num):
        print "\n"
################################################################################
game = False # should the gameplay start?
fail = False # original comment: "for synx typing" - presumably flags bad input syntax; TODO confirm
MAX_NAME_LEN = 10 # max possible name length for a player
MAX_COLOR_LEN = 10 # max possible name length for a color
COLOR = 15 # the standard Menu coloring
DMG_COLOR = (80, 64, 112) # colors for hp damage levels
RESET_OPTIONS = ["15", "15", "0", "4", [["Red", "012"], ["Blue", "009"], ["Green", "010"], ["Yellow", "014"], ["Cyan", "011"]], [["Player 1", "Red"], ["Player 2", "Blue"], ["Player 3", "Green"], ["Player 4", "Yellow"]]]
# what the options file will reset itself to
# saves it to the options file
# [0] field size, (x)
# [1] field size, (y)
# [2] starting gold
# [3] # of players
# [4] possible colors, each divided by " - " and in the form "COLOR NAME<>Code"
# [5] previously used players, each divided by " - " and in the form "PLAYER NAME<>COLOR NAME"
################################################################################
def clear(num = 15):
    """clear(...) --> None
    prints num blank lines in the console, effectively clearing the screen.
    num is an int"""
    # Each Python 2 print "\n" emits the newline character plus print's own
    # newline, i.e. two blank lines per iteration.
    for i in range(num):
        print "\n"
################################################################################
def color_num(color_name):
    """color_num(...) --> int
    Returns the value of the color with the name color_name, for windows console
    The num is taken from the colors list in options.
    if the color name is not in the options file it returns the standard COLOR
    color_name is a str
    """
    global options
    # options[4] is a list of [name, code] pairs (see RESET_OPTIONS).
    for color in range(len(options[4])):
        if options[4][color][0] == color_name:
            return int(options[4][color][1])
    return COLOR
################################################################################
def write_options(options):
    """write_options(...) --> list
    Writes the options file for this game, specific format needed, then
    returns the options re-parsed into their nested-list form.
    The nested color/player lists (indices 4 and 5) are flattened with
    "<>" between fields and " - " between records, all records joined by
    newlines for savegame; the same steps are then reversed.
    options is a list"""
    # --- flatten to the on-disk string format ---
    for line in range(len(options[4])):
        options[4][line] = "<>".join(options[4][line])
    options[4] = " - ".join(options[4])
    for line in range(len(options[5])):
        options[5][line] = "<>".join(options[5][line])
    options[5] = " - ".join(options[5])
    options = "\n".join(options)
    savegame("options", options, ext = ".osf")
    # --- re-parse back into the nested-list form ---
    options = options.split("\n")
    options[4] = options[4].split(" - ")
    for item in range(len(options[4])):
        options[4][item] = options[4][item].split("<>")
    options[5] = options[5].split(" - ")
    for item in range(len(options[5])):
        options[5][item] = options[5][item].split("<>")
    for i in range(options.count("")):
        options.remove("")
    return options
################################################################################
def options_menu():
    """Interactive options screen.
    Loads (or recreates) the "options.osf" file, lets the user edit field
    size, starting gold, player count, colors and the default players,
    and writes the result back.  Mutates the module-level ``options``."""
    choice = ""
    global options
    global RESET_OPTIONS
    while choice != "0":
        try:
            options = loadgame("options", ext = ".osf").split("\n")
            options[4] = options[4].split(" - ")
        # NOTE(review): bare except - any load failure (missing/corrupt
        # file, or anything else) silently rewrites the defaults file.
        except:RESET_OPTIONS = write_options(RESET_OPTIONS)
        options = loadgame("options", ext = ".osf").split("\n")
        options[4] = options[4].split(" - ")
        for item in range(len(options[4])):
            options[4][item] = options[4][item].split("<>")
        options[5] = options[5].split(" - ")
        for item in range(len(options[5])):
            options[5][item] = options[5][item].split("<>")
        for i in range(options.count("")):
            options.remove("")
        clear()
        set_color(COLOR)
        print "--> OPTIONS <--\n"
        print "1) Field Size: %s * %s" % (options[0], options[1])
        print "2) Starting Gold: %s" % (options[2])
        print "3) Players: %s" % (options[3])
        print "4) Add/Remove Colors"
        print "\nDefault Players:"
        # Each default player row: menu number, padded name, then the
        # color name rendered in its own console color plus a SAMPLE.
        for players in range(len(options[5])):
            print "%i) %s" % (5 + players, options[5][players][0]),
            print "%s: " % (" " * (MAX_NAME_LEN - len(options[5][players][0]))),
            color = color_num(options[5][players][1])
            print "%s" % (options[5][players][1].capitalize()),
            set_color(int(color))
            del color
            print "%s: " % (" " * (MAX_COLOR_LEN - len(options[5][players][1]))),
            print "SAMPLE"
            set_color(COLOR)
        print "\nS) Save Options File"
        print "L) Load Options File"
        print "RD) RESET TO DEFAULT"
        print "\n0) Quit"
        choice = raw_input("\n:").lower()
        ########################################################################
        if choice == "1":
            while choice != "3":
                clear()
                print "--> Field Size <--"
                print "1) Length: %s" % (options[0])
                print "2) Width: %s" % (options[1])
                print "\n3) Quit"
                choice = raw_input("\n:")
                if choice == "1":
                    choice = raw_input("Set New Length\n:")
                    if choice.isdigit():
                        options[0] = choice
                elif choice == "2":
                    choice = raw_input("Set New Width\n:")
                    if choice.isdigit():
                        options[1] = choice
        ########################################################################
        elif choice == "2":
            choice = raw_input("Set New Starting Gold\n:")
            if choice.isdigit():
                options[2] = choice
        ########################################################################
        elif choice == "3":
            print "\n\n(Max Number is 5)"
            choice = raw_input("Set New Number of Players\n:")
            if choice.isdigit() and int(choice) <= 5 and int(choice) > 1:
                options[3] = choice
                # Grow or shrink the default-player list to match the count.
                if len(options[5]) < int(choice):
                    for num in range(int(choice)):
                        if num >= len(options[5]):
                            options[5].append(["Player %i" % (num + 1), options[4][0][0]])
                if len(options[5]) > int(choice) and len(options[5]) > 2:
                    options[5] = options[5][:int(choice)]
        ########################################################################
        elif choice == "4":
            color_options()
        ########################################################################
        elif choice == "rd":
            # Rebuild nested lists element by element so later edits do not
            # mutate RESET_OPTIONS itself.
            options = RESET_OPTIONS[:4]
            options.append( [] )
            for item in RESET_OPTIONS[4][:]:
                options[4].append( item )
            options.append( [] )
            for item in RESET_OPTIONS[5][:]:
                options[5].append( item )
        ########################################################################
        elif choice == "s":
            # Flatten, save under a user-chosen name, then re-parse (same
            # round-trip write_options performs for "options").
            for line in range(len(options[4])):
                options[4][line] = "<>".join(options[4][line])
            options[4] = " - ".join(options[4])
            for line in range(len(options[5])):
                options[5][line] = "<>".join(options[5][line])
            options[5] = " - ".join(options[5])
            options = "\n".join(options)
            savegame(raw_input("Enter File Name: "), options, ext = ".osf")
            options = options.split("\n")
            options[4] = options[4].split(" - ")
            for item in range(len(options[4])):
                options[4][item] = options[4][item].split("<>")
            options[5] = options[5].split(" - ")
            for item in range(len(options[5])):
                options[5][item] = options[5][item].split("<>")
            for i in range(options.count("")):
                options.remove("")
        ########################################################################
        elif choice == "l":
            try:
                if raw_input("Are you sure you wish to discard current options settings? (Y/N)\n:").lower() == "n":
                    loadgame()
                options = loadgame(raw_input("(Must be in the same folder as Astral.)\nEnter File Name: "), ext = ".osf")
                options = options.split("\n")
                options[4] = options[4].split(" - ")
                for item in range(len(options[4])):
                    options[4][item] = options[4][item].split("<>")
                options[5] = options[5].split(" - ")
                for item in range(len(options[5])):
                    options[5][item] = options[5][item].split("<>")
                for i in range(options.count("")):
                    options.remove("")
            # NOTE(review): bare except hides the actual load error.
            except:raw_input("Load Failed\nPress Enter to Continue")
        ########################################################################
        elif choice != "0" and choice.isdigit() and int(choice) < 5 + int(options[3]):
            # Default Player Menu
            player = int(choice) - 5
            while choice != "3":
                clear()
                print "1) " + options[5][player][0]
                print "2) " + options[5][player][1]
                set_color(COLOR)
                print "\n3) Quit"
                choice = raw_input("\n:")
                if choice == "1":
                    name = raw_input("New Player Name\n:")
                    if len(name) > MAX_NAME_LEN:
                        print "Name too long, sorry for the inconvinience"
                    else:
                        options[5][player][0] = name
                        options = write_options(options)
                elif choice == "2":
                    current_colors()
                    color = raw_input("Choose New Player Color: ")
                    # FIXME: ``color.isdigit`` is missing parentheses - a bound
                    # method is always truthy, so non-numeric input reaches
                    # int(color) and raises ValueError.
                    if color.isdigit and int(color) >= 0 and int(color) < len(options[4]):
                        options[5][player][1] = options[4][int(color)][0]
                        options = write_options(options)
    options = write_options(options)
################################################################################
def current_colors():
    """Print every configured color (index, name and code), each rendered
    in its own console color; restores the menu color (COLOR) after."""
    print "--> Current Colors <--",
    for num in range(len(options[4])):
        set_color(int(options[4][num][1]))
        text = "\n%i) %s\nCode: %s" % (num, options[4][num][0].capitalize(), options[4][num][1])
        print text
    set_color(COLOR)
################################################################################
def color_options():
    """Interactive color sub-menu: add a color, remove a color, or show
    the full 0-255 console color chart.  Mutates the module-level
    ``options`` and ``choice``."""
    global options
    global choice
    choice = ""
    while choice != "4":
        clear()
        current_colors()
        print "\n\n--> Color Menu <--"
        print "1) Add Color"
        print "2) Remove Color"
        print "3) See Color List"
        print "\n4) Quit"
        #UPDATE: 3) Sort By
        choice = raw_input("\n:")
        ########################################################################
        if choice == "1":
            choice = "new"
            while choice != "3" and choice != "4":
                if choice == "new":
                    color = [raw_input("New Color Name\n:")]
                    color.append(raw_input("Value\n:"))
                    # Zero-pad the value to three characters.
                    if len(str(color[1])) < 3:
                        color[1] = "0" * (3 - len(str(color[1]))) + color[1]
                clear()
                print "--> YOUR NEW COLOR <--"
                print "\n\n1) " + color[0]
                # FIXME: comparing a str to an int - in Python 2 this is
                # always True (numbers order before other types); presumably
                # len(color[0]) > MAX_COLOR_LEN was intended.
                if color[0] > MAX_COLOR_LEN:
                    print "The color name is too long\n"
                set_color(int(color[1]))
                print "SAMPLE"
                set_color(COLOR)
                print "2) Value: %s" % (color[1])
                print "\n3) Add This Color"
                print "\n4) Quit & Discard"
                choice = raw_input("\n:")
                if choice == "1":
                    # NOTE(review): this re-reads the name but then stores the
                    # *name* as the value (color.append(color[0])), losing the
                    # previously entered code - looks like a bug.
                    color = [raw_input("New Color Name\n:")]
                    color.append(color[0])
                elif choice == "2":
                    color = [color[0]]
                    color.append(raw_input("Value\n:"))
                elif choice == "3":
                    # FIXME: same str-vs-int comparison as above, and the
                    # branch looks inverted - the color is appended when the
                    # "too long" condition holds.
                    if color[0] > MAX_COLOR_LEN:
                        options[4].append(color)
                        options = write_options(options)
                    else:
                        print "\nThe color name is too long"
                        raw_input("Press Enter to Continue")
        ########################################################################
        elif choice == "2":
            current_colors()
            choice = raw_input("\nWhich Color\n:").lower()
            y_nchoice = raw_input("Are you Sure? (Y/N)\n:").lower()
            if y_nchoice == "y" or y_nchoice == "yes":
                # Drop the selected entry by rebuilding the list around it.
                options[4] = options[4][:int(choice)] + options[4][int(choice) + 1:]
                options = write_options(options)
            choice = ""
        ########################################################################
        elif choice == "3":
            clear()
            # Show every console color code 0-255 as a zero-padded sample.
            for num in range(256):
                set_color(num)
                print "0" * (3 - len(str(num))) + str(num),
                set_color(COLOR)
                print " ",
                if num % 10 == 0:
                    print "\n"
            raw_input('\n\nPress Enter to Continue')
# BOOKMARKS:
# Search for it to find commands
# NEW GAME
# LOAD GAME
# GAMEPLAY
# SHOW FIELD
# PLAYER INFO
# FIX CONTINUE
# SELL CONTINUE
# BUILD CONTINUE
# BUILD
# BUILD PART 2
# STATS
# ALLY-MENU
# TRIBUTE
# RESEARCH
# FIX
# SELL
# INCOME
# CONSOLE
# END TURN
# SAVE
# LOAD
# Main Menu, loops until quit
################################################################################
choice = ""
set_color(COLOR)
while choice != "0" or choice.lower() == "quit":
clear()
print """
--> Astral <--
1) New Game
2) Load Game
3) Options
4) Instructions
0) Quit
"""
choice = raw_input(":")
#NEW GAME
################################################################################
if choice == "1":
#creates the battle field
# first opens the options file
options = loadgame("options", ext = ".osf")
options = options.split("\n")
options[4] = options[4].split(" - ")
for item in range(len(options[4])):
options[4][item] = options[4][item].split("<>")
options[5] = options[5].split(" - ")
for item in range(len(options[5])):
options[5][item] = options[5][item].split("<>")
for i in range(options.count("")):
options.remove("")
options[0] = int(options[0])
options[1] = int(options[1])
options[2] = int(options[2])
options[3] = int(options[3])
# makes an empty field
field = Maze("#")
field.clear(options[0], options[1], value = "")
if (field.sizey() / 2) % 2 != 0:
y_half = (field.sizey()) / 2 - 1
else:
y_half = field.sizey() - 1
if (field.sizex() / 2) % 2 != 0:
x_half = field.sizex() / 2 - 1
else:
x_half = field.sizex() - 2
effect_field = Maze("#")
effect_field.clear(options[0], options[1], value = "")
attack_field = Maze("#")
attack_field.clear(options[0], options[1], value = "")
x = field.sizex()
y = field.sizey()
usable = [(0,0)] # Bottom Left
usable.append((x - 1,y - 1)) # Top Right
usable.append((0, y - 1)) # Top Left
usable.append((x - 1, 0)) # Bottom Right
usable.append((x_half, y_half)) # Middle Middle
usable.append((0, y_half)) #Middle Left
usable.append((x_half, 0)) # Bottom Middle
usable.append((x_half, y - 1)) # Middle Right
usable.append((x - 1, y_half)) #Top Middle
ally_invite = {}
for player in range(options[3]):
myself = [player + 1]
exec "player%i = Player(%i, options[5][player][1], options[5][player][0], gold = options[2], allies = myself[:])" % (player + 1, player + 1)
exec "player%i.add_building(BUILDING['Wizard Cottage'].copy(), usable[player][0], usable[player][1])" % (player + 1)
field.setcell(usable[player][0], usable[player][1], player + 1)
ally_invite[player + 1] = []
player1.gold += 50
player = 1
player1.turn = 1
game = True
did_player_attack = False
clear()
#LOAD GAME
################################################################################
elif choice == "2":
    # if game is loaded
    # Restore a saved game: the .sav file is a Python script of assignments
    # produced by the "save" command below and replayed here with exec.
    directory = getcwd()
    name = get_file_dir(cur_dir = getcwd() + "\Save Games", ext = ".sav")
    if name != None:
        save_data = loadgame(name, loc = getcwd(), ext = ".sav")
        # NOTE(review): exec of a save file runs arbitrary code -- only
        # trusted save files should ever be loaded.
        exec save_data
        game = True
        chdir(directory)
################################################################################
elif choice == "3":
    options_menu()
################################################################################
elif choice == "4":
    #instructions for the game
    help_menu("help")
################################################################################
elif choice == "0" or choice.lower() == "quit":
    # answering "n" clears choice so the caller's menu loop shows again;
    # either way no game is started
    if raw_input("Are you sure you wish to quit?\n(Y/N)\n:").lower() == "n":
        choice = ""
    game = False
# GAMEPLAY
################################################################################
if game:
    #gameplay, options file is in this format:
    # [0] field size, (x)
    # [1] field size, (y)
    # [2] starting gold
    # [3] # of players
    # [4] possible colors, each divided by " - " and in the form "COLOR NAME<>Code"
    # [5] previously used players, each divided by " - " and in the form "PLAYER NAME<>COLOR NAME"
    choice = ""
    max_heal = 0
    # Loop for all players
    # continue_option carries a pending multi-step command ("attack", "build",
    # "fix", "sell", "done") across iterations of the main loop below.
    continue_option = False
    continue_building_till_stop = False
    x_loc = ""
    y_loc = ""
    direction = ""
    # main game loop: one iteration = redraw the board, then handle one command
    while choice != "quit" and choice != "q" and choice != "exit":
        # SHOW FIELD
        ####################################################################
        clear(num = 30)
        # Draw rows top-down so y=0 ends up at the bottom of the screen.
        for y_value in range(field.sizey() - 1, -1, - 1):
            #changes numbers to be with 0 at the bottom
            print "\n%i" % (y_value),
            print " " * ( 2 - len(str(y_value))),
            for x_value in range(field.sizex()):
                color_add = 0
                # if there is a building add its symbol
                if field.cell(x_value,y_value) != "":
                    play = int(field.cell(x_value,y_value))
                    exec "build_type = player%i.buildings[(%i,%i)]['TYPE'][:]" % (play, x_value, y_value)
                    sym = " "
                    # to show degree of damage
                    hp = 100
                    try:exec "hp = int(float(player%i.buildings[(%i,%i)]['HP']) / player%i.buildings[(%i,%i)]['MAXHP'] * 100)" % (play, x_value, y_value, play, x_value, y_value)
                    except:pass
                    # tint by damage band: healthy / light / medium / heavy
                    if hp >= 100:color_add = 0
                    elif hp <= 99 and hp >= 66:color_add = DMG_COLOR[0]
                    elif hp <= 65 and hp >= 33:color_add = DMG_COLOR[1]
                    elif hp <= 32:color_add = DMG_COLOR[2]
                    # for Astral Lines
                    # Astral Lines sit on mixed-parity squares; SYMB holds
                    # [0] the horizontal glyph and [1] the vertical glyph
                    if "AL" in build_type and ((x_value % 2 == 0 and y_value % 2 != 0) or (x_value % 2 != 0 and y_value % 2 == 0)):
                        exec "sym = player%i.buildings[(%i,%i)]['SYMB']" % (play, x_value, y_value)
                        exec "set_color(color_add + color_num(player%i.color))" % (play)
                        if (x_value % 2 == 0 and y_value % 2 != 0):
                            sym = sym[0]
                        elif (x_value % 2 != 0 and y_value % 2 == 0):
                            sym = sym[1]
                    # for buildings
                    elif "B" in build_type and x_value % 2 == 0 and y_value % 2 == 0:
                        if "AL" in build_type:
                            exec "sym = player%i.buildings[(%i,%i)]['SYMB'][-1]" % (play, x_value, y_value)
                            exec "set_color(color_add + color_num(player%i.color))" % (play)
                        else:
                            exec "sym = player%i.buildings[(%i,%i)]['SYMB']" % (play, x_value, y_value)
                            exec "set_color(color_add + color_num(player%i.color))" % (play)
                    print sym,
                    print "",
                    set_color(COLOR)
                #if there is no building add 'empty' symbols for each square
                elif x_value % 2 == 0 and y_value % 2 == 0:
                    # location for buildings
                    print ".",
                    print "",
                elif x_value % 2 != 0 and y_value % 2 != 0:
                    # location for nothing...
                    print " ",
                    print "",
                else:
                    # location for empty astral lines
                    print " ",
                    print "",
        # adds bottom numbers
        print "\n\n ",
        # NOTE(review): this iterates sizey() but labels the x axis -- it
        # looks like it should be sizex(); verify on non-square fields.
        for x_value in range(field.sizey()):
            print str(x_value) + " " * (2 - len(str(x_value))),
        # PLAYER INFO
        # player information, gold, color, whose turn it is
        print "\n"
        exec "player_color = player%i.color" % (player)
        exec "cur_gold = player%i.gold" % (player)
        exec "player_name = player%i.name" % (player)
        print " %s - MP: %i - " % (player_name, cur_gold),
        set_color(color_num(player_color))
        print "COLOR"
        set_color(COLOR)
        print " Turn:",
        exec 'print player%i.turn' % (player)
        # possible choice for player input
        ####################################################################
        if not continue_option:
            # if the person misspelled somethin, not a known combination
            if fail:
                # NOTE(review): 012 is an octal literal (decimal 10); confirm
                # that is the intended color code
                set_color(012)
                print ""
                print "< This Was not a Known Key Combination, Please Check Your Spelling >",
                set_color(COLOR)
                fail = False
            print "\n\n",
            print "Type 'help' for general help with the game"
            choice = raw_input(": ")
        #ATTACK CONTINUE
        ####################################################################
        elif continue_option == "attack":
            #choose location
            # Step 1 of an attack: pick the friendly square to attack from.
            # choice holds the attack name picked earlier; stash it in temp so
            # it can be restored after the coordinate prompt.
            temp = choice
            choice = ""
            if temp == "ATTACK: Basic Magics":
                # effective ally list = mutual alliances only (both sides must
                # list each other), always including self
                exec "allies = player%i.allies[:]" %(player)
                for ally in allies:
                    exec "other_ally_list = player%i.allies[:]" % (ally)
                    if player not in other_ally_list:
                        allies.remove(ally)
                if player not in allies:
                    allies.append(player)
                allies.sort()
                while 1 and x_loc == "" and y_loc == "":
                    choice = raw_input("(Type 'cancel' to quit)\n(format: 'x,y')\nSelect Place to Attack From: ").lower()
                    if choice == "cancel" or choice == "quit" or choice == "q" or choice == "exit":
                        continue_option = False
                        choice = ""
                        break
                    choice = choice.split(",")
                    can_use = False
                    if len(choice) == 2 and choice[0].isdigit() and choice[1].isdigit():
                        x_loc = int(choice[0])
                        y_loc = int(choice[1])
                        # own square that has not attacked yet -> accepted
                        if field.cell(x_loc,y_loc) == player and attack_field.cell(x_loc,y_loc) == "":pass
                        elif attack_field.cell(x_loc,y_loc) != "":
                            set_color(12)
                            print "You already attacked with this building..."
                            raw_input("Press Enter to Continue: ")
                            set_color(COLOR)
                            choice = temp
                            x_loc = ""
                            y_loc = ""
                            break
                        else:
                            set_color(12)
                            print "Square Not Possible"
                            raw_input("Press Enter to Continue: ")
                            set_color(COLOR)
                            choice = temp
                            x_loc = ""
                            y_loc = ""
                            break
                # NOTE(review): after a cancel, x_loc/y_loc are still "" here,
                # so this relies on Maze.cell tolerating non-int keys; verify.
                if field.cell(x_loc,y_loc) == player and attack_field.cell(x_loc,y_loc) == "":
                    choice = temp
                    continue_option = "attack2"
        elif continue_option == "attack2":
            #choose direction
            # Step 2 of an attack: pick a firing direction; pos_dir returns
            # the legal moves as exec-able statement strings
            print "LOC: %i, %i" % (x_loc, y_loc)
            possible = pos_dir(x_loc,y_loc)
            pos_input = {}
            if "x_loc += 1" in possible:
                print "Right"
                pos_input["right"] = "x_loc += 1"
            if "x_loc -= 1" in possible:
                print "Left"
                pos_input["left"] = "x_loc -= 1"
            if "y_loc += 1" in possible:
                print "Up"
                pos_input["up"] = "y_loc += 1"
            if "y_loc -= 1" in possible:
                print "Down"
                pos_input["down"] = "y_loc -= 1"
            print " \nQuit"
            if len(possible) > 1:
                dir_choice = raw_input("Which Direction: ").lower()
                if dir_choice in pos_input.keys():
                    direction = pos_input[dir_choice]
                    continue_option = False
                if dir_choice == "cancel":
                    continue_option = False
                    choice = ""
                else:
                    choice = temp
            elif len(possible) == 1:
                # only one legal direction: take it without asking
                direction = possible[0]
                continue_option = False
            else:
                # no legal direction from this square: back to step 1
                continue_option = "attack"
        # FIX CONTINUE
        #if one of the menu's need the map shown, all must end with "continue_option = False" and "choice = ''"
        ####################################################################
        elif continue_option == "fix":
            # Repair the building at choice[1]. max_heal arrives as 0 (prompt
            # for an amount), a preset integer, or "max" (heal as much as the
            # player can afford).
            exec "loc_building = player%i.buildings[ (choice[1][0], choice[1][1]) ].copy()" % (player)
            if max_heal == 0 and cur_gold / loc_building["FIX"] != 0:
                while max_heal != "cancel":
                    exec "loc_building = player%i.buildings[ (choice[1][0], choice[1][1]) ].copy()" % (player)
                    # NOTE(review): FIX is concatenated to a string here but
                    # used in arithmetic below (/, *); one of the two raises
                    # TypeError depending on its type -- verify.
                    print "COST Per HP: " + loc_building["FIX"] + " MP"
                    print "HP: ",
                    print loc_building["HP"],
                    print "/",
                    print loc_building["MAXHP"]
                    max_heal = raw_input("Type Cancel to continue\nHeal How Much HP?\n: ")
                    if max_heal.isdigit():
                        max_heal = int(max_heal)
                        break
            elif max_heal == "max":
                # heal the smaller of (affordable HP, missing HP)
                # NOTE(review): when max_hp == max_fix neither branch runs, so
                # max_heal stays the string "max" and no repair happens below
                # -- looks like a missing equality case.
                max_fix = (cur_gold / loc_building["FIX"])
                max_hp = loc_building["MAXHP"] - loc_building["HP"]
                if max_hp - max_fix < 0: max_heal = max_hp
                elif max_hp - max_fix > 0: max_heal = max_fix
            if max_heal * loc_building["FIX"] <= cur_gold:
                exec "player%i.buildings[ (choice[1][0], choice[1][1]) ]['HP'] += max_heal" % (player)
                exec "player%i.gold -= max_heal * loc_building['FIX']" % (player)
                print "You healed: %i HP" % (max_heal)
                print "Using %i MP" % (max_heal * loc_building["FIX"])
                print "LOC: (%i, %i)" % (choice[1][0], choice[1][1])
                print "NEW HP: ",
                exec "loc_building = player%i.buildings[ (choice[1][0], choice[1][1]) ].copy()" % (player)
                print loc_building["HP"],
                print "/",
                print loc_building["MAXHP"]
                raw_input("Press Enter to Continue: ")
            continue_option = False
            choice = ""
        # SELL CONTINUE
        ####################################################################
        elif continue_option == 'sell':
            # Refund = floor(current HP fraction * original cost) - 1, then
            # remove the building from the owner and clear its field square.
            exec "temp_cost = player%i.buildings[ (choice[1][0], choice[1][1]) ]['COST']" % (player)
            exec "temp_hp = player%i.buildings[ (choice[1][0], choice[1][1]) ]['HP']" % (player)
            exec "temp_maxhp = player%i.buildings[ (choice[1][0], choice[1][1]) ]['MAXHP']" % (player)
            mana_gain = int((float(temp_hp) / temp_maxhp) * temp_cost) - 1
            exec "player%i.gold += mana_gain" % (player)
            exec "player%i.dest_building( choice[1][0], choice[1][1] )" % (player)
            field.setcell(choice[1][0], choice[1][1], "")
            print "Sold For: " + str(mana_gain) + " MP"
            raw_input("\nPress Enter to Continue: ")
            mana_gain = 0
            choice = ""
            continue_option = False
            # selling your last building loses the game
            exec "temp_alive = player%i.isalive()" % (player)
            if not temp_alive:
                set_color(012)
                print "\nGAME OVER\nYou Have No More Buildings, you have lost the game."
                raw_input("Press Enter to Continue: ")
                set_color(COLOR)
                choice = "done"
        # BUILD CONTINUE
        #if one of the menu's need the map shown, all must end with "continue_option = False" and "choice = ''"
        ####################################################################
        elif continue_option == "build":
            # Place the building named in `name`. The inner loop lets the
            # 'continue' keyword keep placing copies until the player cancels
            # or can no longer afford one.
            choice = ""
            # effective ally list = mutual alliances only, including self
            exec "allies = player%i.allies[:]" %(player)
            for ally in allies:
                exec "other_ally_list = player%i.allies[:]" % (ally)
                if player not in other_ally_list:
                    allies.remove(ally)
            if player not in allies:
                allies.append(player)
            allies.sort()
            while 1:
                print ""
                print "%s - " % (name),
                print BUILDING[ name ]["COST"],
                print "MP"
                choice = raw_input("(Type 'cancel' to quit)\n(format: 'x,y')\nSelect Place to Build: ").lower()
                if choice == "cancel" or choice == "quit" or choice == "q" or choice == "exit":
                    continue_option = False
                    continue_building_till_stop = False
                    break
                choice = choice.split(",")
                # check if input was correct
                can_build = False
                if len(choice) == 2 and choice[0].isdigit() and choice[1].isdigit():
                    if field.cell(int(choice[0]), int(choice[1])) == "":
                        x = int(choice[0])
                        y = int(choice[1])
                        # if it is a building check only adjacent spots
                        if "B" in BUILDING[name]["TYPE"] and x % 2 == 0 and y % 2 == 0:
                            if field.cell(x+1,y) in allies or field.cell(x-1,y) in allies or field.cell(x,y+1) in allies or field.cell(x,y-1) in allies:
                                can_build = True
                        # if it is an AL check adjacent, and spots connected by other AL's
                        if "AL" in BUILDING[name]["TYPE"] and not can_build:
                            if field.cell(x+1,y) in allies or field.cell(x-1,y) in allies or field.cell(x,y+1) in allies or field.cell(x,y-1) in allies:
                                can_build = True
                            elif field.cell(x+1,y+1) in allies or field.cell(x-1,y-1) in allies or field.cell(x-1,y+1) in allies or field.cell(x+1,y-1) in allies:
                                can_build = True
                            elif (x % 2 == 0 and y % 2 != 0) and (field.cell(x,y+2) in allies or field.cell(x,y-2) in allies):
                                can_build = True
                            elif (x % 2 != 0 and y % 2 == 0) and (field.cell(x+2,y) in allies or field.cell(x-2,y) in allies):
                                can_build = True
                            # astral lines may only sit on mixed-parity
                            # squares; even/even and odd/odd are rejected
                            if x % 2 == 0 and y % 2 == 0:
                                can_build = False
                            if x % 2 != 0 and y % 2 != 0:
                                can_build = False
                            # NOTE(review): the y bound checks sizex() -- it
                            # looks like it should be sizey(); verify on
                            # non-square fields.
                            if x < 0 or x >= field.sizex() or y < 0 or y >= field.sizex():
                                can_build = False
                    if not can_build:
                        set_color(12)
                        print "Square Not Possible"
                        raw_input("Press Enter to Continue: ")
                        set_color(COLOR)
                        break
                    else:
                        # pay, register the building, and claim the square
                        exec "player%i.gold -= BUILDING[name]['COST']" % (player)
                        exec "player%i.add_building(BUILDING[name].copy(), x, y)" % (player)
                        field.setcell(x, y, player)
                        exec "cur_gold = player%i.gold" % (player)
                        if not continue_building_till_stop or cur_gold < BUILDING[name]['COST']:
                            continue_option = False
                            continue_building_till_stop = False
                            break
                elif choice == ["continue"]:
                    # keep placing this building type until cancelled or broke
                    continue_building_till_stop = True
                    break
                else:
                    set_color(12)
                    print "\nWrong Format"
                    raw_input("Press Enter to Continue: ")
                    set_color(COLOR)
                    break
            choice = ""
        #SURRENDER CONTINUE
        ####################################################################
        elif continue_option == "done":
            # pass-through state used by game-over paths to end the turn loop
            choice = "done"
            continue_option = False
        # HELP
        ####################################################################
        if len(choice) >= 4 and choice[:4].lower() == "help":
            help_menu(choice)
        #BUILD
        ####################################################################
        elif choice.lower() == "build":
            # Interactive build menu: first list everything buildable, then
            # show the chosen building's stats and confirm.
            exec "temp = player%i.poss_building()" % (player)
            build_choice = ""
            name = ""
            clear()
            while build_choice != "0":
                exec "cur_gold = player%i.gold" % (player)
                exec "player_color = player%i.color" % (player)
                # if a buildings was not chosen
                if len(name) == 0:
                    print "--> Build Menu <--"
                    # column width = longest building name
                    length = 0
                    for item in temp:
                        if len(item) > length:
                            length = len(item)
                    for item in range(len(temp)):
                        # unaffordable entries are shown in red
                        if BUILDING[ temp[item] ]["COST"] > cur_gold:
                            set_color(12)
                            #red
                        print "%i) %s" % (item + 1, temp[item]),
                        print " " * (length - len(temp[item])) + "-",
                        print BUILDING[ temp[item] ]["COST"],
                        print "MP"
                        set_color(COLOR)
                    print "\n0) QUIT"
                    build_choice = raw_input("\n: ").lower()
                    if build_choice.isdigit() and int(build_choice) > 0 and int(build_choice) <= len(temp):
                        name = temp[int(build_choice) - 1]
                # if a building was chosen
                elif len(name) > 0:
                    clear()
                    print "--> %s <--" % (name)
                    # the place it can be built on
                    print "Type:"
                    if "AL" in BUILDING[name]["TYPE"]:
                        print " Astral Line"
                    if "B" in BUILDING[name]["TYPE"]:
                        print " Building"
                    if len(BUILDING[name]["PRE"]) > 0:
                        print "\nPre-requisites:"
                        for item in BUILDING[name]["PRE"]:
                            print " -> " + item
                    print ""
                    print "Cost : ",
                    print BUILDING[name]["COST"],
                    print "MP"
                    # for astral lines to show the two symbol types
                    if "AL" in BUILDING[name]["TYPE"] and "B" not in BUILDING[name]["TYPE"]:
                        print "Symbol - Horizontal:",
                        set_color(color_num(player_color))
                        print BUILDING[name]["SYMB"][0]
                        set_color(COLOR)
                        print "Symbol - Vertical: ",
                        set_color(color_num(player_color))
                        print BUILDING[name]["SYMB"][1]
                        set_color(COLOR)
                    # to show the type of symbol if it is a building
                    else:
                        print "Symbol : ",
                        set_color(color_num(player_color))
                        print BUILDING[name]["SYMB"]
                        set_color(COLOR)
                    #print "",
                    #print BUILDING[name]["IMAGE"]
                    # for when images are implemented
                    print "HP: ",
                    print BUILDING[name]["HP"],
                    print "/",
                    print BUILDING[name]["MAXHP"]
                    if BUILDING[name]["MP"] > 0:
                        print "MP Per Turn: +",
                        print BUILDING[name]["MP"],
                        print "MP"
                    if BUILDING[name]["RES"] > 0:
                        print "Resistance: ",
                        print BUILDING[name]["RES"],
                        print "DMG"
                    print "Cost to Fix 1 HP: ",
                    print BUILDING[name]["FIX"]
                    print ""
                    print BUILDING[name]["DESC"]
                    print ""
                    if BUILDING[ temp[int(build_choice) - 1] ]["COST"] <= cur_gold:
                        print "1) Build"
                    else:
                        set_color(12)
                        print "--> NOT ENOUGH MANA <--"
                        set_color(COLOR)
                    print "0) Cancel"
                    build_choice = raw_input("\n: ").lower()
                    # hand off to the 'build' continue branch for placement
                    if build_choice == "1" and BUILDING[name]["COST"] <= cur_gold:
                        continue_option = "build"
                        build_choice = "0"
        # BUILD PART 2
        # for the build menu if no long list of options is needed
        ####################################################################
        elif len(choice) >= 5 and choice[:5].lower() == "build":
            #splits off the name part, and capitalizes it
            # "build <name>" shortcut: title-case the typed name and jump
            # straight to placement if it exists, is unlocked, and affordable
            choice = choice[6:]
            choice = choice.split(" ")
            name = ""
            for line in range(len(choice)):
                name += choice[line].capitalize()
                name += " "
            name = name[:-1]
            # Finds possible buildings, then checks if there is such a building, and if it is possible to build it
            exec "temp = player%i.poss_building()" % (player)
            exec "cur_gold = player%i.gold" % (player)
            # Syntax Error Messages
            if name not in BUILDING:
                set_color(12)
                raw_input("Not Possible Building\n\nPress Enter to Continue: ")
                set_color(COLOR)
            elif name not in temp:
                set_color(12)
                raw_input("Pre-requisites not met\n\nPress Enter to Continue: ")
                set_color(COLOR)
            elif cur_gold < BUILDING[name]["COST"]:
                set_color(12)
                raw_input("Not Enough Mana\n\nPress Enter to Continue: ")
                set_color(COLOR)
            else:
                # valid: placement happens in the 'build' continue branch
                continue_option = "build"
                choice = ""
        # SELL
        ####################################################################
        elif len(choice) >= 4 and choice[:4].lower() == "sell" and not did_player_attack:
            # "sell x,y" -- parse coordinates; the actual sale happens in the
            # 'sell' continue branch on the next loop pass (choice stays a
            # list so that branch can read choice[1])
            choice = choice.split(" ")
            if len(choice) == 3 or len(choice) == 2:
                choice[1] = choice[1].split(",")
                if len(choice[1]) == 2 and choice[1][0].isdigit() and choice[1][1].isdigit():
                    choice[1][0] = int(choice[1][0])
                    choice[1][1] = int(choice[1][1])
                    if field.cell( choice[1][0], choice[1][1]) == player:
                        continue_option = "sell"
                    else:
                        choice = ""
        # STATS
        # Shows stats of buildings
        ####################################################################
        elif len(choice) >= 5 and choice[:5].lower() == "stats":
            # "stats x,y" -- inspect a building; the owner (and allies) also
            # get sell / fix / building-specific research options from here
            choice = choice.split(" ")
            if len(choice) == 2:
                choice[1] = choice[1].split(",")
                if len(choice[1]) == 2 and choice[1][0].isdigit() and choice[1][1].isdigit():
                    choice[1][0] = int(choice[1][0])
                    choice[1][1] = int(choice[1][1])
                    loc_player = field.cell(choice[1][0], choice[1][1])
                    if loc_player != "":
                        exec "people = player%i.allies" % (loc_player)
                        exec "loc_building = player%i.buildings[ (choice[1][0], choice[1][1]) ].copy()" % (loc_player)
                        if player in people or player == loc_player:
                            #Displays stats
                            clear()
                            print "--> %s <--" % (loc_building["NAME"])
                            # the place it can be built on
                            print "Type:"
                            if "AL" in loc_building["TYPE"]:
                                print " Astral Line"
                            if "B" in loc_building["TYPE"]:
                                print " Building"
                            if len(loc_building["PRE"]) > 0:
                                print "\nPre-requisites:"
                                for item in loc_building["PRE"]:
                                    print " -> " + item
                            print ""
                            # for astral lines to show the two symbol types
                            if "AL" in loc_building["TYPE"] and "B" not in loc_building["TYPE"]:
                                print "Symbol - Horizontal:",
                                set_color(color_num(player_color))
                                print loc_building["SYMB"][0]
                                set_color(COLOR)
                                print "Symbol - Vertical: ",
                                set_color(color_num(player_color))
                                print loc_building["SYMB"][1]
                                set_color(COLOR)
                            # to show the type of symbol if it is a building
                            else:
                                print "Symbol : ",
                                set_color(color_num(player_color))
                                print loc_building["SYMB"][-1]
                                set_color(COLOR)
                            #print "",
                            #print loc_building["IMAGE"]
                            # for when images are implemented
                            print "HP: ",
                            print loc_building["HP"],
                            print "/",
                            print loc_building["MAXHP"]
                            if loc_building["MP"] > 0:
                                print "MP Per Turn: +",
                                print loc_building["MP"],
                                print "MP"
                            if loc_building["RES"] > 0:
                                print "Resistance: ",
                                print loc_building["RES"],
                                print "DMG"
                            print "Cost to Fix 1 HP: ",
                            print loc_building["FIX"]
                            print ""
                            print loc_building["DESC"]
                            print ""
                            # selling is blocked after attacking this turn
                            if not did_player_attack:
                                print "1) Sell"
                                exec "temp_cost = player%i.buildings[ (choice[1][0], choice[1][1]) ]['COST']" % (player)
                                exec "temp_hp = player%i.buildings[ (choice[1][0], choice[1][1]) ]['HP']" % (player)
                                exec "temp_maxhp = player%i.buildings[ (choice[1][0], choice[1][1]) ]['MAXHP']" % (player)
                                mana_gain = int((float(temp_hp) / temp_maxhp) * temp_cost) - 1
                                print " FOR: " + str(mana_gain) + " MP"
                                mana_gain = 0
                            if loc_building["FIX"] < cur_gold and loc_building["HP"] != loc_building["MAXHP"]:
                                print "2) Fix"
                                print "3) Max Possible Fix"
                            # building-specific research entries (options 4..N)
                            exec "cur_pos_research = player%i.poss_research(include_special = True)" % (player)
                            for item in range(len(loc_building["OPT"])):
                                if loc_building["OPT"][item] not in loc_building["OPT-DONE"] and loc_building["OPT"][item] in cur_pos_research:
                                    print "%i) %s" % (item + 4, loc_building["OPT"][item]),
                                    print ": COST " + str(RESEARCH[loc_building["OPT"][item]]["COST"]) + " MP"
                                    print ""
                                    print RESEARCH[loc_building["OPT"][item]]["DESC"]
                                else:print ""
                            print "\n0) Cancel"
                            stats_choice = raw_input("\n: ").lower()
                            if stats_choice.isdigit():
                                stats_choice = int(stats_choice)
                            else:
                                stats_choice = 0
                            if stats_choice == 1 and not did_player_attack:
                                if raw_input("Are You Sure You Wish to Sell? (Y/N)\n: ").lower() == "y":
                                    choice[0] = "sell"
                                    continue_option = "sell"
                            elif stats_choice == 2 and loc_building["HP"] != loc_building["MAXHP"]:
                                continue_option = "fix"
                                max_heal = 0
                            elif stats_choice == 3 and loc_building["HP"] != loc_building["MAXHP"]:
                                continue_option = "fix"
                                max_heal = "max"
                            elif stats_choice > 3 and stats_choice <= len(loc_building["OPT"]) + 3:
                                # NOTE(review): the last clause reuses `item`
                                # left over from the loop above instead of
                                # stats_choice - 4 -- verify this is intended.
                                if loc_building["OPT"][stats_choice - 4] not in loc_building["OPT-DONE"] and RESEARCH[ loc_building["OPT"][stats_choice - 4] ]["COST"] <= cur_gold and loc_building["OPT"][item] in cur_pos_research:
                                    name = loc_building["OPT"][stats_choice - 4]
                                    # Research for options for these buildings
                                    # special research carries exec-able lines
                                    if "EXEC" in RESEARCH[name].keys():
                                        lines = RESEARCH[name]["EXEC"].split("\n")
                                        for line in lines:
                                            exec line % (player)
            else:fail = True
        # ALLY-MENU
        # changes of alliances menu
        ####################################################################
        elif len(choice) >= 4 and choice[:4].lower() == "ally":
            # "ally" opens the interactive menu; "ally+..."/"ally-..." is
            # shorthand where character 5 (+ or -) is the action
            people = {}
            for person in Player.live_players[:]:
                exec "people[player%s.name] = person" % (person)
            choice = choice.split(" ")
            if len(choice[0]) >= 5:
                choice.append( choice[0][4] )
            name = ""
            exec "allies = player%i.allies[:]" % (player)
            # interactive selection when no target name was supplied
            while len(choice) < 2 and name != "q" and name != "quit" and name != "cancel" and name != "exit":
                clear()
                exec "allies = player%i.allies[:]" % (player)
                # shows the current players
                for person in range(1, options[3] + 1):
                    print ""
                    exec "print player%s.name" % (person)
                    exec "set_color(color_num(player%s.color))" % (person)
                    print " COLOR"
                    set_color(COLOR)
                    if person not in Player.live_players:
                        set_color(012)
                        print " DEAD"
                        set_color(COLOR)
                    elif person in allies:
                        print " ALLY"
                    else:
                        print " ENEMY"
                name = raw_input("(EXACT name is required)\n(Type cancel to quit)\nChange Which Player: ")
                if name not in people.keys():
                    set_color(012)
                    print "Not a Player"
                    set_color(COLOR)
                else:
                    choice.append(name)
                    name = ""
            if len(choice) < 2 or choice[1] not in people.keys():
                set_color(012)
                print "Not a Player"
                set_color(COLOR)
            else:
                # ask add/remove when the +/- was not given on the command line
                if len(choice) < 3:
                    print ""
                    print choice[1]
                    exec "set_color(color_num(player%s.color))" % (people[choice[1]])
                    print " COLOR"
                    set_color(COLOR)
                    if people[choice[1]] in allies:
                        print " ALLY"
                        name = raw_input("Do you wish to remove(-) him as an ally?\n:")
                    else:
                        print " ENEMY"
                        name = raw_input("Do you wish to add(+) him as an ally?\n:")
                    # NOTE(review): "" in "+-" is True in Python, so pressing
                    # Enter slips through this membership check
                    if name in "+-":
                        choice.append(name)
                    else: name = ""
                if len(choice) >= 3 and choice[2] in "+-":
                    # queue an invitation (+player) or cancellation (-player)
                    # for the target to confirm at the start of their turn
                    if choice[2] == "+" and people[choice[1]] not in allies:
                        if player * -1 in ally_invite[people[choice[1]]]:
                            ally_invite[people[choice[1]]].remove(player * -1)
                        ally_invite[people[choice[1]]].append(player)
                        exec "player%i.add_ally(people[choice[1]])" % (player)
                    elif choice[2] == "-" and people[choice[1]] in allies:
                        if people[choice[1]] != player:
                            ally_invite[people[choice[1]]].append(-1 * player)
                        if player in ally_invite[people[choice[1]]]:
                            ally_invite[people[choice[1]]].remove(player)
                        exec "player%i.remove_ally(people[choice[1]])" % (player)
                else:
                    set_color(012)
                    print "Format Error Try Again"
                    set_color(COLOR)
            choice = ""
        # TRIBUTE
        ####################################################################
        elif len(choice) >= 7 and choice.lower()[:7] == "tribute":
            # "tribute [name] [amount]" -- give MP to another living player;
            # missing parts are prompted for interactively
            people = {}
            for person in Player.live_players[:]:
                exec "people[player%s.name] = person" % (person)
            choice = choice.split(" ")
            if len(choice) < 2:
                choice.append("")
            name = ""
            # prompt until an exact player name (or cancel) is given
            while choice[1] not in people.keys():
                for person in people.keys():
                    print ""
                    exec "print player%s.name" % (people[person])
                    exec "set_color(color_num(player%s.color))" % (people[person])
                    print " COLOR"
                    set_color(COLOR)
                print "\nMake sure to check name spelling."
                name = raw_input("(EXACT name)\n('Cancel to quit')\nTribute Which Player: ")
                if name == "cancel" or name == "q":
                    break
                choice[1] = name
            # normalize the amount; cur_gold + 1 forces the prompt loop below
            if len(choice) < 3:
                choice.append(cur_gold + 1)
            elif type(choice[2]) == type("") and choice[2].isdigit():
                choice[2] = int(choice[2])
            elif type(choice[2]) != type(0):
                choice[2] = cur_gold + 1
            while int(choice[2]) > cur_gold and name != "cancel" and name != "q":
                print "(Type cancel to quit)"
                exec "cur_gold = player%i.gold" % (player)
                print "MANA: " + str(cur_gold)
                choice[2] = raw_input("How Much: ")
                if choice[2] == "cancel" or choice[2] == "q":break
                if not choice[2].isdigit():
                    choice[2] = cur_gold + 1
            # transfer only if neither prompt was cancelled
            if name != "cancel" and name != "q" and choice[2] != "cancel" and choice[2] != "q":
                exec "player%i.gold += int(choice[2])" % (people[choice[1]])
                exec "player%i.gold -= int(choice[2])" % (player)
        # RESEARCH
        ####################################################################
        elif len(choice) >= 5 and choice[:5].lower() == "learn":
            #splits off the name part, and capitalizes it
            # "learn" alone shows the research menu; "learn <name>" researches
            # directly. Names are normalized to "FIRSTWORD Rest Of Name"
            # (first word upper-cased, remaining words title-cased).
            name = ""
            if choice != "learn":
                choice = choice[6:]
                choice = choice.split(" ")
                name = ""
                for line in range(len(choice)):
                    if line == 0:
                        name += choice[line].upper()
                        name += " "
                    else:
                        name += choice[line].capitalize()
                        name += " "
                name = name[:-1]
            # if a buildings was not chosen
            exec "cur_gold = player%i.gold" % (player)
            exec "cur_pos_research = player%i.poss_research()[:]" % (player)
            if len(name) == 0:
                exec "cur_research = player%i.research.copy()" % (player)
                print "--> Completed Research <--"
                for item in cur_research:
                    print item
                print "\n--> Research Menu <--"
                # column width = longest research name
                length = 0
                for item in cur_pos_research:
                    if len(item) > length:
                        length = len(item)
                for item in range(len(cur_pos_research)):
                    # unaffordable entries are shown in red
                    if RESEARCH[ cur_pos_research[item] ]["COST"] > cur_gold:
                        set_color(12)
                        #red
                    print "%i) %s" % (item + 1, cur_pos_research[item]),
                    print " " * (length - len(cur_pos_research[item])) + "-",
                    print RESEARCH[ cur_pos_research[item] ]["COST"],
                    print "MP"
                    print "" + RESEARCH[ cur_pos_research[item] ]["DESC"]
                    set_color(COLOR)
                print "\n0) QUIT"
                build_choice = raw_input("\n: ").lower()
                if build_choice.isdigit() and int(build_choice) > 0 and int(build_choice) <= len(cur_pos_research):
                    name = cur_pos_research[int(build_choice) - 1]
            # if a building was chosen
            if len(name) > 0:
                if name in cur_pos_research and RESEARCH[name]["COST"] <= cur_gold:
                    exec "player%i.research[RESEARCH[name]['NAME']] = RESEARCH[name].copy()" % (player)
                    exec "player%i.gold -= RESEARCH[name]['COST']" % (player)
                    # attack research is also registered in the attack list
                    if name[:6] == "ATTACK":
                        exec "player%i.attacks[RESEARCH[name]['NAME']]= RESEARCH[name].copy()" % (player)
                    #adds item etc.
        # FIX
        ####################################################################
        elif len(choice) >= 3 and choice[:3].lower() == "fix":
            # "fix x,y [amount|max]" -- parse and hand off to the 'fix'
            # continue branch on the next loop pass
            choice = choice.split(" ")
            if len(choice) == 3 or len(choice) == 2:
                choice[1] = choice[1].split(",")
                if len(choice[1]) == 2 and choice[1][0].isdigit() and choice[1][1].isdigit():
                    choice[1][0] = int(choice[1][0])
                    choice[1][1] = int(choice[1][1])
                    if len(choice) == 3 and choice[2].isdigit():
                        choice[2] = int(choice[2])
                        continue_option = "fix"
                        max_heal = choice[2]
                    elif len(choice) == 3 and choice[2] == "max":
                        max_heal = "max"
                        continue_option = "fix"
                    elif len(choice) == 2:
                        # no amount given: the continue branch will prompt
                        continue_option = "fix"
                        max_heal = 0
        # INCOME
        ####################################################################
        elif choice.lower() == "income":
            # preview next turn's mana income, building by building
            clear(num = 20)
            print "--> INCOME <--"
            # adds mana to the next player
            exec "items = player%i.buildings.copy()" % (player)
            mana_gain = 0
            num = 1
            for item in items:
                exec 'next_mana = player%i.gold + mana_gain' % (player)
                # print a running subtotal separator every five lines
                if num % 5 == 0:
                    print "--------------------"
                    print mana_gain
                    print ""
                    num += 1
                # a string MP of the form "<N><suffix>" yields current mana
                # divided by N (the trailing character is stripped); plain
                # ints are flat income
                if type(items[item]["MP"]) == type("str"):
                    print items[item]["NAME"]
                    print " + " + items[item]["MP"]
                    print " + " + str(next_mana / int(items[item]["MP"][:-1])) + " MP"
                    num += 1
                    mana_gain += next_mana / int(items[item]["MP"][:-1])
                else:
                    if items[item]["MP"] != 0:
                        mana_gain += items[item]["MP"]
                        print items[item]["NAME"]
                        print " + " + str(items[item]["MP"]) + " MP"
                        num += 1
            exec 'next_mana = player%i.gold + mana_gain' % (player)
            print "\n--------------------"
            print "TOTAL: " + str(mana_gain) + " " * (10 - len(str(mana_gain))) + " MP"
            print "\nMana Next Turn:"
            print str(next_mana) + " " * (17 - len(str(next_mana))) + " MP"
            raw_input("\n\nPress Enter to Continue: ")
        # CONSOLE
        ####################################################################
        elif choice.lower() == "~" and raw_input("There is NO console in this game\n:Press Enter to Continue:") == "~":
            # Hidden debug console: lines are collected (with crude indent
            # tracking for blocks ending in ':') and run through exec.
            # NOTE(review): exec of raw user input -- debug use only; never
            # expose to untrusted players.
            clear()
            console_choice = "start"
            # NOTE(review): with `or` this condition is only false for the
            # exact empty string; whitespace-only input keeps looping --
            # `and` was probably intended (same inside the inner loop)
            while console_choice != "" or console_choice != (" " * len(console_choice)):
                console_choice = raw_input("\n (Type '#<--' to decrease indent)\n (Type '' to quit)\n>>> ")
                if len(console_choice) > 0 and console_choice[-1] == ":":
                    temp = raw_input("...... ")
                    loop_num = 1
                    while temp != "" or temp != (" " * len(temp)):
                        if temp == "#<--":
                            loop_num -= 1
                        console_choice += "\n" + " " * loop_num + temp
                        if console_choice[-1] == ":":
                            loop_num += 1
                        if loop_num < 0:
                            break
                        temp = raw_input("..." * (loop_num + 1) + " ")
                try:
                    exec console_choice
                except:
                    print "Error!\n"
            clear(100)
        #END TURN
        ####################################################################
        elif choice.lower() == "end turn" or choice.lower() == "done":
            # changes to next player
            # reset the per-turn "already attacked" markers
            attack_field = Maze("#")
            attack_field.clear(field.sizex(), field.sizey(), "")
            player += 1
            # advance to the next living player, wrapping past the last seat
            while player not in Player.live_players:
                player += 1
                if player > int(options[3]):
                    player = 1
            # adds mana to the next player
            exec "items = player%i.buildings.copy()" % (player)
            mana_gain = 0
            for item in items:
                # string MP "<N><suffix>" adds current mana / N (trailing
                # character stripped); plain ints are flat income
                if type(items[item]["MP"]) == type("str"):
                    exec 'next_mana = player%i.gold + mana_gain' % (player)
                    mana_gain += next_mana / int(items[item]["MP"][:-1])
                else:
                    mana_gain += items[item]["MP"]
            exec "player%i.gold += mana_gain" % (player)
            # shows any alliance cancellations and invitations
            # positive entries are invitations, negative are cancellations
            for person in ally_invite[player]:
                if person >= 0:exec "name = player%i.name" % (person)
                else: exec "name = player%i.name" % (person * -1)
                # player information, gold, color, whose turn it is
                print "\n\n\n\n\n"
                exec "player_color = player%i.color" % (player)
                exec "player_name = player%i.name" % (player)
                print " %s - " % (player_name),
                set_color(color_num(player_color))
                print "COLOR"
                set_color(COLOR)
                if person >= 0:
                    print '%s wants to invite you to an alliance.' % (name)
                    exec "set_color(color_num(player%s.color))" % (person)
                    print "COLOR"
                    set_color(COLOR)
                    yn_choice = raw_input("Do you wish to ally with him? (Y/N)\n: ").lower()
                    if yn_choice == "y":
                        exec "player%i.add_ally(person)" % (player)
                elif person < 0:
                    person = person * -1
                    print '%s has cancelled your allience.' % (name)
                    exec "set_color(color_num(player%s.color))" % (person)
                    print "COLOR"
                    set_color(COLOR)
                    yn_choice = raw_input("Do you wish to remain an ally to him? (Y/N)\n: ").lower()
                    if yn_choice == "n":
                        exec "player%i.remove_ally(person)" % (player)
            ally_invite[player] = []
            # adds 1 turn to next player
            exec "player%i.turn += 1" % (player)
            # shows the player did not attack yet
            did_player_attack = False
            #adds score
        # UPDATE
# SURRENDER
####################################################################
elif choice.lower() == "surrender":
exec "temp = player%i.buildings.keys()" % (player)
for key in temp:
field.setcell(key[0], key[1], "")
exec "player%i.buildings = {}" % (player)
exec "player%i.build_list = {}" % (player)
print Player.live_players
exec "player%i.isalive()" % (player)
continue_option = "done"
# SAVE
####################################################################
elif len(choice) >= 4 and choice.lower()[:4].lower() == "save":
choice = choice.split(" ")
if len(choice) == 2:
choice = choice[1]
else:
choice = "Auto Save" #+ strftime("%Y%m%d%H%M%S")
save_data = ""
save_data += "options = %s\n" % (str(options[:]))
save_data += "field = Maze('#')\n"
save_data += 'field.clear(options[0], options[1], value = "")\n'
save_data += "effect_field = Maze('#')\n"
save_data += 'effect_field.clear(options[0], options[1], value = "")\n'
save_data += "attack_field = Maze('#')\n"
save_data += 'attack_field.clear(options[0], options[1], value = "")\n'
for x in range(field.sizex()):
for y in range(field.sizey()):
if field.cell(x,y) != "":
save_data += "field.setcell(%i, %i, %s)\n" % (x, y, field.cell(x,y))
if effect_field.cell(x,y) != "":
save_data += "effect_field.setcell(%i, %i, %s)\n" % (x, y, effect_field.cell(x,y))
if attack_field.cell(x,y) != "":
save_data += "attack_field.setcell(%i, %i, %s)\n" % (x, y, attack_field.cell(x,y))
for person in Player.live_players[:]:
exec "save_color = player%i.color" % (person)
exec "save_name = player%i.name" % (person)
exec "save_gold = player%i.gold" % (person)
exec "save_allies = str(player%i.allies[:])" % (person)
exec "save_research = str(player%i.research.copy())" % (person)
exec "save_attacks = str(player%i.attacks.copy())" % (person)
exec "save_buildings = str(player%i.buildings.copy())" % (person)
exec "save_build_list = str(player%i.build_list.copy())" % (person)
exec "save_turn = player%i.turn" % (person)
exec "save_score = player%i.score" % (person)
save_data += "player%i = Player(%i, '%s', '%s', gold = %i, allies = %s, research = %s, attacks = %s, buildings = %s, build_list = %s, turn = %i, score = %i)\n" % (person, person, save_color, save_name, save_gold, save_allies, save_research, save_attacks, save_buildings, save_build_list, save_turn, save_score)
save_data += "player = %i\n" % (player)
save_data += "ally_invite = %s\n" %(str(ally_invite))
save_data += "did_player_attack = %s" % (str(did_player_attack))
savegame(choice, save_data, loc = getcwd() + "\\Save Games" ,ext = ".sav")
# LOAD
####################################################################
# if game is loaded
elif len(choice) >= 4 and choice.lower()[:4].lower() == "load":
if raw_input("Are you sure you wish to abandon this game? (Y/N)\n: ").lower() == "y":
choice = choice.split(" ")
if len(choice) >= 2:
choice = " ".join(choice[1:])
else:
choice = "Auto Save" #+ strftime("%Y%m%d%H%M%S")
directory = getcwd()
name = choice
try:
save_data = loadgame(name, loc = getcwd() + "\\Save Games", ext = ".sav")
exec save_data
game = True
except:
print "Game could not load..."
raw_input("Press Enter to Continue: ")
chdir(directory)
choice = ""
# checks if the player truly wants to exit
####################################################################
elif choice.lower() == "quit" or choice.lower() == "q" or choice.lower() == "exit":
set_color(12)
if raw_input("\nAre You Sure You Wish to Exit? (Y/N)\n:").lower() != "y":
choice = ""
set_color(COLOR)
# if the person misspelled somethin, not a known combination
####################################################################
elif choice.lower() == "":pass
# ATTACK
####################################################################
elif len(choice) >= 6 and choice[:6].lower() == "attack":
name = choice.split(" ")
# choosing of the attack
if len(name) > 2:
name[0] = name[0].upper()
for times in range(len(name) - 1):
name[times+1] = name[times+1].capitalize()
temp = ""
for item in name:
temp += item + " "
temp = temp[:-1]
name = temp
else:
# choosing name, becomes "" if cancelled
name = ""
exec "cur_gold = player%i.gold" % (player)
exec "cur_attacks = player%i.attacks.keys()" % (player)
if len(name) == 0:
print "\n--> Attack Menu <--"
length = 0
for item in cur_attacks:
if len(item) > length:
length = len(item)
for item in range(len(cur_attacks)):
if RESEARCH[ cur_attacks[item] ]["COST"] > cur_gold:
set_color(12)
#red
print "%i) %s" % (item + 1, cur_attacks[item]),
print " " * (length - len(cur_attacks[item])) + "-",
print RESEARCH[ cur_attacks[item] ]["COST"],
print "MP"
print "" + RESEARCH[ cur_attacks[item] ]["DESC"]
set_color(COLOR)
print "\n0) QUIT"
name = raw_input("\n: ").lower()
if name.isdigit() and int(name) > 0 and int(name) <= len(cur_attacks):
name = cur_attacks[int(name) - 1]
exec "cur_attacks = player%i.attacks.copy()" % (player)
if name in cur_attacks and continue_option != "attack2" and (x_loc == "" or y_loc == "" or direction == ""):
player_did_attack = True
continue_option = "attack"
choice = name
x_loc = ""
y_loc = ""
name = ""
elif name in cur_attacks and continue_option == "attack2":
choice = name
name = ""
if name not in cur_attacks:
name = ""
#choose power
if name != "":
exec "cur_power_max = player%i.attacks[name]['MAX']" % (player)
while 1:
print "Max Power: " + str(cur_power_max) + " MP"
print "How Much Power Do You Want To Use?"
power = raw_input(": ")
if power == "cancel" or power == "q" or power == "quit":
power = ""
name = ""
break
if power.isdigit() and int(power) <= cur_power_max and int(power) > 0 :
power = int(power)
break
if name != "":
attack_field.setcell(x_loc, y_loc, 1)
power_field = Maze("#")
power_field.clear(field.sizex(), field.sizey(), 0)
################################################################
if name == "ATTACK: Basic Magics":
exec "cur_allies = player%i.allies[:]" % (player)
while power > 0:
temp = direction
temp = replace(temp, "+","!")
temp = replace(temp, "-","+")
temp = replace(temp, "!","-")
exec direction
possible = pos_dir(x_loc, y_loc)
if temp in possible:
possible.remove( temp )
power_field.setcell(x_loc, y_loc, power)
if field.cell(x_loc, y_loc) != "" and field.cell(x_loc, y_loc) not in cur_allies:
exec "temp_build = player%i.buildings[ (x_loc, y_loc) ].copy()"% (field.cell(x_loc, y_loc))
power -= temp_build["RES"]
power -= temp_build["HP"]
elif field.cell(x_loc, y_loc) in cur_allies:
if x_loc % 2 == 0 and y_loc % 2 == 0:
power = 0
if len(possible) >= 1:
direction = randint(0,len(possible) - 1)
direction = possible[direction]
else:
power = 0
if name != "":
# Distributes power
###############################################################
exec "cur_allies = player%i.allies[:]" % (player)
for x in range(field.sizex()):
for y in range(field.sizey()):
if field.cell(x,y) != "" and power_field.cell(x,y) != 0:
power = power_field.cell(x,y)
person = field.cell(x,y)
if person not in cur_allies:
exec "cur_hp = player%i.buildings[ (x,y) ]['HP']" % (person)
exec "cur_res = player%i.buildings[ (x,y) ]['RES']" % (person)
if power - (cur_res + cur_hp) >= 0:
exec "player%i.capt_building( x, y, player%i )" % (player, person)
exec "player%i.buildings[ (x,y) ]['HP'] = (player%i.buildings[ (x,y) ]['HP'] / 10) + 1" % (player, player)
field.setcell(x,y,player)
elif power - cur_res >= 0:
exec "player%i.buildings[ (x,y) ]['HP'] = player%i.buildings[ (x,y) ]['HP'] - (power - cur_res)" % (person, person)
direction = ""
x_loc = ""
y_loc = ""
####################################################################
else:
fail = True
game = False
|
"""Stores group membership information as secrets, for faster lookups."""
import asyncio
import json
import os
import urllib
import urllib.parse  # explicit submodule import: urllib.parse.quote is used below
from typing import List, Optional

import aiohttp

import cpg_utils.cloud
from flask import Flask
ANALYSIS_RUNNER_PROJECT_ID = 'analysis-runner'
app = Flask(__name__)
async def _groups_lookup(access_token: str, group_name: str) -> Optional[str]:
    """Resolve a group key (email) to its Cloud Identity resource name.

    Returns the resource name (e.g. 'groups/ABC123'), or None when the lookup
    fails — typically because the key does not denote a group.
    """
    # https://cloud.google.com/identity/docs/reference/rest/v1/groups/lookup
    lookup_url = (
        'https://cloudidentity.googleapis.com/v1/groups:lookup?'
        f'groupKey.id={urllib.parse.quote(group_name)}'
    )
    auth_headers = {'Authorization': f'Bearer {access_token}'}
    async with aiohttp.ClientSession() as session:
        async with session.get(lookup_url, headers=auth_headers) as resp:
            if resp.status != 200:
                return None
            body = await resp.text()
            return json.loads(body)['name']
async def _groups_memberships_list(access_token: str, group_parent: str) -> List[str]:
    """Return the direct members of a group, across all result pages.

    :param access_token: OAuth2 bearer token for the Cloud Identity API.
    :param group_parent: group resource name, e.g. 'groups/ABC123'.
    :return: list of member keys (usually email addresses).
    """
    result = []
    async with aiohttp.ClientSession() as session:
        page_token = None
        while True:
            # https://cloud.google.com/identity/docs/reference/rest/v1/groups.memberships/list
            async with session.get(
                f'https://cloudidentity.googleapis.com/v1/{group_parent}/memberships?'
                f'pageToken={page_token or ""}',
                headers={'Authorization': f'Bearer {access_token}'},
            ) as resp:
                content = await resp.text()
                decoded = json.loads(content)
                # The API omits 'memberships' for an empty page (e.g. a group
                # with no members); defaulting to [] avoids a KeyError here.
                for member in decoded.get('memberships', []):
                    result.append(member['preferredMemberKey']['id'])
                page_token = decoded.get('nextPageToken')
                if not page_token:
                    break
    return result
async def _transitive_group_members(access_token: str, group_name: str) -> List[str]:
    """Expand *group_name* into the sorted list of its transitive members.

    Breadth-first expansion: keys that resolve as groups are expanded further;
    keys that cannot be resolved are treated as individuals and collected.
    """
    groups = [group_name]
    seen = set()
    result = set()
    while groups:
        # Filter out keys already expanded so membership cycles terminate.
        remaining_groups = []
        for group in groups:
            if group in seen:
                continue  # Break cycles.
            seen.add(group)
            remaining_groups.append(group)
        # Resolve all keys of this BFS level concurrently.
        group_parents = await asyncio.gather(
            *(_groups_lookup(access_token, group) for group in remaining_groups)
        )
        memberships_aws = []
        for group, group_parent in zip(remaining_groups, group_parents):
            if group_parent:
                # It's a group, so add its members for the next round.
                memberships_aws.append(
                    _groups_memberships_list(access_token, group_parent)
                )
            else:
                # Group couldn't be resolved, which usually means it's an individual.
                result.add(group)
        # Fetch membership pages for all groups of this level concurrently.
        memberships = await asyncio.gather(*memberships_aws)
        groups = []
        for members in memberships:
            groups.extend(members)
    return sorted(list(result))
async def _get_service_account_access_token() -> str:
    """Fetch an OAuth2 access token for the default service account.

    Queries the GCE/Cloud Run metadata server, which is only reachable from
    inside Google Cloud.
    """
    # https://cloud.google.com/compute/docs/access/create-enable-service-accounts-for-instances#applications
    token_url = (
        'http://metadata.google.internal/computeMetadata/v1/instance/'
        'service-accounts/default/token'
    )
    async with aiohttp.ClientSession() as session:
        async with session.get(
            token_url, headers={'Metadata-Flavor': 'Google'}
        ) as resp:
            payload = json.loads(await resp.text())
    return payload['access_token']
async def _get_group_members(group_names: List[str]) -> List[List[str]]:
    """Resolve every group in *group_names* to its transitive member list.

    All groups share one freshly fetched access token; the expansions run
    concurrently and results come back in input order.
    """
    token = await _get_service_account_access_token()
    expansions = [_transitive_group_members(token, name) for name in group_names]
    return await asyncio.gather(*expansions)
@app.route('/', methods=['POST'])
def index():
    """Cloud Run entry point.

    Rebuilds the per-group membership cache: for every dataset in the server
    config and every group type, resolves the Google Group's transitive
    membership and stores it (comma-separated) in a '<group>-members-cache'
    secret in the dataset's GCP project, skipping the write when unchanged.
    """
    config = json.loads(
        cpg_utils.cloud.read_secret(ANALYSIS_RUNNER_PROJECT_ID, 'server-config')
    )
    group_types = [
        'access',
        'web-access',
        'test',
        'standard',
        'full',
    ]
    # add SM group types
    group_types.extend(
        f'sample-metadata-{env}-{rs}'
        for env in ('main', 'test')
        for rs in ('read', 'write')
    )
    # One group per (dataset, group type); remember which dataset owns each
    # group so we know which project to write the cache secret into.
    groups = []
    dataset_by_group = {}
    for dataset in config:
        for group_type in group_types:
            group = f'{dataset}-{group_type}'
            groups.append(group)
            dataset_by_group[group] = dataset
    # Google Groups API queries are ridiculously slow, on the order of a few hundred ms
    # per query. That's why we use async processing here to keep processing times low.
    all_group_members = asyncio.run(
        _get_group_members([f'{group}@populationgenomics.org.au' for group in groups])
    )
    for group, group_members in zip(groups, all_group_members):
        secret_value = ','.join(group_members)
        dataset = dataset_by_group[group]
        project_id = config[dataset]['projectId']
        # Check whether the current secret version is up-to-date.
        secret_name = f'{group}-members-cache'
        current_secret = cpg_utils.cloud.read_secret(project_id, secret_name)
        if current_secret == secret_value:
            print(f'Secret {secret_name} is up-to-date')
        else:
            cpg_utils.cloud.write_secret(project_id, secret_name, secret_value)
            print(f'Updated secret {secret_name}')
    # 204 No Content: success with nothing to return.
    return ('', 204)
if __name__ == '__main__':
    # Local development entry point; in Cloud Run the PORT env var is set.
    port = int(os.environ.get('PORT', 8080))
    app.run(host='0.0.0.0', port=port)
|
"""
*
* Author: Juarez Paulino(coderemite)
* Email: juarez.paulino@gmail.com
*
"""
# Reads n and an array a[0..n-1]; prints the maximum number of fixed points
# (positions with a[i] == i) reachable with at most one swap of two elements.
n=int(input())
a=[*map(int,input().split())]
# Count positions that are already fixed.
x=sum(a[i]==i for i in range(n))
# Unless everything is already fixed, one swap can fix at least one position.
x+=(x<n)
# If some pair forms a 2-cycle (a[a[i]] == i with a[i] != i), swapping it
# fixes two positions at once — one better than the generic +1 above.
x+=any(i!=a[i] and i==a[a[i]] for i in range(n))
print(x)
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import functools
import traceback
import sys
from multiprocessing import Value, Lock
from synapseclient.utils import printTransferProgress
def notifyMe(syn, messageSubject='', retries=0):
    """Function decorator that notifies you via email whenever a function completes running or
    there is a failure.
    :param syn: A synapse object as obtained with syn = synapseclient.login()
    :param messageSubject: A string with subject line for sent out messages.
    :param retries: Number of retries to attempt on failure (default=0)
    Example::
        # to decorate a function that you define
        from synapseutils import notifyMe
        import synapseclient
        syn = synapseclient.login()
        @notifyMe(syn, 'Long running function', retries=2)
        def my_function(x):
            doing_something()
            return long_runtime_func(x)
        my_function(123)
        #############################
        # to wrap a function that already exists
        from synapseutils import notifyMe
        import synapseclient
        syn = synapseclient.login()
        notify_decorator = notifyMe(syn, 'Long running query', retries=2)
        my_query = notify_decorator(syn.tableQuery)
        results = my_query("select id from syn1223")
        #############################
    """
    def notify_decorator(func):
        @functools.wraps(func)
        def with_retry_and_messaging(*args, **kwargs):
            attempt = 0
            # Send notifications to the logged-in user's own account.
            destination = syn.getUserProfile()['ownerId']
            while attempt<=retries:
                try:
                    output = func(*args, **kwargs)
                    syn.sendMessage([destination], messageSubject,
                                    messageBody='Call to %s completed successfully!' %func.__name__)
                    return output
                except Exception as e:
                    # Log the traceback locally, then email it along with the
                    # number of retries still remaining.
                    sys.stderr.write(traceback.format_exc())
                    syn.sendMessage([destination], messageSubject,
                                    messageBody = ('Encountered a temporary Failure during upload. '
                                                   'Will retry %i more times. \n\n Error message was:\n%s\n\n%s'
                                                   %(retries-attempt, e, traceback.format_exc())))
                    attempt +=1
            # NOTE(review): when all retries are exhausted, the last exception
            # is swallowed and the wrapper implicitly returns None — confirm
            # this silent-failure behavior is intended.
        return with_retry_and_messaging
    return notify_decorator
def with_progress_bar(func, totalCalls, prefix = '', postfix='', isBytes=False):
    """Wraps a function to add a progress bar based on the number of calls to that function.

    :param func: Function being wrapped with progress Bar
    :param totalCalls: total number of items/bytes when completed
    :param prefix: String printed before progress bar
    :param postfix: String printed after progress bar
    :param isBytes: A boolean indicating whether to convert bytes to kB, MB, GB etc.

    :return: a wrapped function that contains a progress bar
    """
    # multiprocessing primitives so the counter stays consistent when the
    # wrapped function is invoked from multiple processes ('d' = C double).
    completed = Value('d', 0)
    lock = Lock()
    def progress(*args, **kwargs):
        with lock:
            completed.value +=1
        # NOTE(review): the progress print happens outside the lock, so two
        # concurrent callers may render out of order — confirm acceptable.
        printTransferProgress(completed.value, totalCalls, prefix, postfix, isBytes)
        return func(*args, **kwargs)
    return progress
|
"""Frequency Finder
Analyzes frequency of letters in given message compared to the most common occurring
letters to determine if message is in the English language.
Attributes:
ETAOIN (str): String containing uppercase latin letters in order from most to least common.
LETTERS (str): String containing uppercase latin letters in alphabetical order.
Note:
* Compares six most and six least common letters in the English language.
* https://www.nostarch.com/crackingcodes/ (BSD Licensed)
"""
ETAOIN = 'ETAOINSHRDLCUMWFGYPBVKJXQZ'
LETTERS = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ'
def getLetterCount(message: str) -> dict:
    """Get letter count

    Counts the frequency of all latin letters in a given message
    (case-insensitively).

    Args:
        message: String containing message to analyze letter frequency.

    Returns:
        Dictionary with keys for all 26 uppercase letters and values of the
        count of how many times each appears in the message parameter.
    """
    # Local import keeps the module's public surface unchanged.
    import string
    # Seed every letter with 0 so letters absent from the message still
    # appear in the result (replaces the hand-written 26-entry literal).
    letterCount = dict.fromkeys(string.ascii_uppercase, 0)
    for letter in message.upper():
        # Non-letter characters (digits, punctuation, spaces) are ignored.
        if letter in letterCount:
            letterCount[letter] += 1
    return letterCount
def getItemAtIndexZero(items: tuple):
    """Return the element stored at index zero of *items*.

    Used as a ``key=`` function when sorting (frequency, letters) pairs.

    Args:
        items: Tuple whose first element is the sort key.

    Returns:
        The first element of the given tuple.
    """
    first = items[0]
    return first
def getFrequencyOrder(message: str) -> str:
    """Get frequency order

    Returns the 26 latin letters ordered from most to least frequent in the
    given message.

    Ties are broken by *reverse* ETAOIN position, so that within a frequency
    group the letters least common in English come first.

    Args:
        message: String containing message to analyze frequency.

    Returns:
        String of the alphabet letters arranged in order of most frequently
        occurring in the message parameter.
    """
    counts = getLetterCount(message)

    # Group letters that share the same frequency count.
    freqToLetter = {}
    for letter in LETTERS:
        freqToLetter.setdefault(counts[letter], []).append(letter)

    # Within each group, order letters by reverse ETAOIN position, then
    # collapse the group into a single string.
    for freq in freqToLetter:
        freqToLetter[freq].sort(key=ETAOIN.find, reverse=True)
        freqToLetter[freq] = ''.join(freqToLetter[freq])

    # Highest frequency group first; concatenate the group strings.
    orderedPairs = sorted(freqToLetter.items(),
                          key=getItemAtIndexZero, reverse=True)
    return ''.join(group for _, group in orderedPairs)
def englishFreqMatchScore(message: str) -> int:
    """English Frequency Match Score

    Scores how closely the message's letter frequency resembles English.

    Args:
        message: String containing message to calculate English match score.

    Returns:
        Number of the message's six most/least frequent letters that are also
        among English's six most/least frequent letters (max 12).
    """
    freqOrder = getFrequencyOrder(message)
    mostFrequent = freqOrder[:6]
    leastFrequent = freqOrder[-6:]
    # One point per shared letter in each group; bools sum as 0/1.
    score = sum(letter in mostFrequent for letter in ETAOIN[:6])
    score += sum(letter in leastFrequent for letter in ETAOIN[-6:])
    return score
|
import os
import time
# noinspection PyPackageRequirements
import ffmpeg
from radikoplaylist import MasterPlaylistClient, LiveMasterPlaylistRequest, TimeFreeMasterPlaylistRequest
ENVIRONMENT_VALIABLE_KEY_AREA_ID = "RADIKO_AREA_ID"
AREA_ID_DEFAULT = "JP13"
def record(station, recording_time, outfilename):
    """Record a live radiko stream into an mp4 file.

    :param station: radiko station id.
    :param recording_time: recording length in minutes.
    :param outfilename: output file path.
    """
    playlist = MasterPlaylistClient.get(
        LiveMasterPlaylistRequest(station), area_id=get_area_id())
    # Copy the HLS stream without re-encoding; copytb keeps input timestamps.
    live_input = ffmpeg.input(playlist.media_playlist_url,
                              headers=playlist.headers,
                              copytb='1')
    stream = ffmpeg.output(live_input, outfilename, f='mp4', c='copy')
    record_stream(stream, recording_time)
def record_time_free(station, outfilename, start_at, end_at, timeout=None):
    """Record a time-free (catch-up) radiko programme into an mp4 file.

    :param station: radiko station id.
    :param outfilename: output file path.
    :param start_at: programme start timestamp.
    :param end_at: programme end timestamp.
    :param timeout: optional cap in minutes; when None, ffmpeg runs until the
        stream ends on its own.
    """
    request = TimeFreeMasterPlaylistRequest(station, start_at, end_at)
    playlist = MasterPlaylistClient.get(request, area_id=get_area_id())
    # Copy the HLS stream without re-encoding; copytb keeps input timestamps.
    tf_input = ffmpeg.input(playlist.media_playlist_url,
                            headers=playlist.headers,
                            copytb='1')
    stream = ffmpeg.output(tf_input, outfilename, f='mp4', c='copy')
    if timeout is None:
        ffmpeg.run(stream)
    else:
        record_stream(stream, timeout)
def get_area_id():
    """Return the radiko area id, overridable via the RADIKO_AREA_ID env var."""
    return os.environ.get(ENVIRONMENT_VALIABLE_KEY_AREA_ID, AREA_ID_DEFAULT)
def record_stream(stream, recording_time):
    """Run the prepared ffmpeg *stream* for *recording_time* minutes, then stop it.

    :param stream: configured ffmpeg stream (output of ffmpeg.output).
    :param recording_time: recording length in minutes.
    """
    # Launch the recording asynchronously so we can stop it ourselves.
    popen = ffmpeg.run_async(stream, pipe_stdin=True)
    time.sleep(recording_time * 60)
    # Ask ffmpeg to stop gracefully by sending 'q' on its stdin.
    popen.communicate(str.encode("q"))  # Equivalent to pressing Q
    # Give the process 3 seconds to flush and exit, then terminate it
    # (a hard stop, closer to kill -9).
    time.sleep(3)
    popen.terminate()
|
from dataclasses import dataclass, field
from typing import List
from enum import Enum
@dataclass
class Task:
    """
    TODO: add supported_file_types

    NOTE(review): this class is re-declared verbatim further down in the
    module; the later definition shadows this one — remove one of the two.
    """
    name: str
    supported: bool = field(default=False)
    supported_metrics: List[str] = field(default_factory=list)
@dataclass
class TaskCategory:
    """A named group of related tasks.

    NOTE(review): this class is re-declared verbatim further down in the
    module; the later definition shadows this one — remove one of the two.
    """
    name: str
    description: str
    tasks: List[Task]
class TaskType(str, Enum):
    """Enumeration of supported task identifiers (string-valued)."""

    text_classification = "text-classification"
    named_entity_recognition = "named-entity-recognition"
    extractive_qa = "extractive-qa"
    summarization = "summarization"
    text_pair_classification = "text-pair-classification"
    hellaswag = "hellaswag"
    aspect_based_sentiment_classification = "aspect-based-sentiment-classification"

    @staticmethod
    def list():
        """Return every task type's string value, in declaration order."""
        return [member.value for member in TaskType]
@dataclass
class Task:
    """
    TODO: add supported_file_types

    NOTE(review): this is a verbatim duplicate of the Task dataclass declared
    earlier in the module; this definition shadows the earlier one — remove
    one of the two.
    """
    name: str
    supported: bool = field(default=False)
    supported_metrics: List[str] = field(default_factory=list)
@dataclass
class TaskCategory:
    """A named group of related tasks.

    NOTE(review): this is a verbatim duplicate of the TaskCategory dataclass
    declared earlier in the module; this definition shadows the earlier one —
    remove one of the two.
    """
    name: str
    description: str
    tasks: List[Task]
_task_categories: List[TaskCategory] = [
TaskCategory("conditional-text-generation",
"data-to-text and text transduction tasks such as translation or summarization",
[
Task("machine-translation"),
Task("sentence-splitting-fusion"),
Task(TaskType.summarization, True, [
"bleu", "rouge1", "rouge2", "rougel"])
]),
TaskCategory("text-classification", "predicting a class index or boolean value",
[Task(TaskType.text_classification, True, ["F1score", "Accuracy"])]),
TaskCategory("structure-prediction", "predicting structural properties of the text, such as syntax",
[Task(TaskType.named_entity_recognition, True, ["f1_score_seqeval"])]),
TaskCategory("question-answering", "question answering tasks",
[Task(TaskType.extractive_qa, True, ["f1_score_qa", "exact_match_qa"])]),
TaskCategory("span-text-prediction", "prediction based on span and text",
[Task(TaskType.aspect_based_sentiment_classification, True, ["F1score", "Accuracy"])]),
TaskCategory("text-pair-classification", "predicting a class of two texts",
[Task(TaskType.text_pair_classification, True, ["F1score", "Accuracy"])]),
]
def get_task_categories():
    """Return the module-level registry of task categories.

    Note: this function was previously defined twice back to back with
    identical bodies; the redundant duplicate definition has been removed.
    """
    return _task_categories
|
import numpy as np
def convolucion(A, B):
    """Return three partial correlation sums of kernel B against matrix A.

    The three sums pair B[k][j] with A at offsets (0, 0), (0, +1) and
    (+1, +1) respectively.

    NOTE(review): both loop bounds come from len(A) - 1 (not from B's size),
    and the (+1, 0) offset is never computed — presumably intentional for the
    original exercise, but worth confirming.
    """
    span = len(A) - 1
    pairs = [(k, j) for k in range(span) for j in range(span)]
    c1 = sum(A[k][j] * B[k][j] for k, j in pairs)
    c2 = sum(A[k][j + 1] * B[k][j] for k, j in pairs)
    c3 = sum(A[k + 1][j + 1] * B[k][j] for k, j in pairs)
    return [c1, c2, c3]
# Module-level copies kept for interactive use; main() builds its own locals.
Matriz1 = [[6,9,0,3],[8,4,9,1],[4,1,3,12],[3,2,1,100]]
Filtro = [[1,0,2],[5,0,9],[6,2,1]]

def main():
    """Run the convolution demo on the fixed 4x4 matrix and 3x3 filter."""
    Matriz1 = [[6,9,0,3],[8,4,9,1],[4,1,3,12],[3,2,1,100]]
    Filtro = [[1,0,2],[5,0,9],[6,2,1]]
    A = np.array(Matriz1)
    B = np.array(Filtro)
    # The original 'C = np.zeros((2, 2))' placeholder was dead code — it was
    # immediately overwritten below — so it has been removed.
    C = np.array(convolucion(A, B))
    print(C)

main()
|
"""
The aws module is intended to provide an interface to aws
It tightly interfaces with boto. Indeed, many functions require a boto connection object parameter
While it exposes boto objects (espcially instances) to callees, it provides the following ease of use ability:
* Unpacking reservations into reservations
* Functionality to wait until servers are booted
* Retrying aws commands on errors
* Defining aws parameters
* Other aws utility functions
"""
import datetime
import operator
import random
import socket
import time
import boto
import logging
logger = logging.getLogger('aws')
meta_url = 'http://instance-data/latest/meta-data/'
meta_data = ['ami-id', 'hostname', 'instance-id', 'instance-type', 'kernel-id',
'local-hostname', 'local-ipv4', 'public-hostname', 'public-ipv4']
# TODO(DEVELOPER): THIS DATA IS REALLY OUTDATED!!
# AWS API doesn't provide an easy way to access on-demand instance costs
# AWS definitions
REGION_US_EAST_1 = 'us-east-1'
REGION_US_WEST_1 = 'us-west-1'
REGION_US_WEST_2 = 'us-west-2'
REGION_AP_NORTHEAST_1 = 'ap-northeast-1'
REGION_AP_SOUTHEAST_1 = 'ap-southeast-1'
REGION_EU_WEST_1 = 'eu-east-1'
# information incomplete for regions other than us_east_1 and instances we don't use
od_instance_costs = {REGION_US_EAST_1: {'m1.small' : 0.06,
'm1.medium' : 0.12,
'm1.large' : 0.24,
'm1.xlarge' : 0.48,
't1.micro' : 0.02,
'm2.xlarge' : 0.41,
'm2.2xlarge' : .820,
'm2.4xlarge' : 1.640,
'c1.medium' : 0.145,
'c1.xlarge' : 0.58,
'cc1.4xlarge' : 1.3,
'cc2.8xlarge' : 2.4,
'cg1.4xlarge' : 2.1,
'hi1.4xlarge' : 3.1,
'cr1.8xlarge' : 3.5
},
REGION_US_WEST_1: {
},
}
#Definiton of instance boot AMIs we have on EC2
AMIs = {REGION_US_EAST_1: {'karmic32': 'ami-bb709dd2',
'karmic64': 'ami-55739e3c',
'lucid32': 'ami-4fd00726',
'lucid64': 'ami-35de095c',
'oneiric32' : 'ami-d1a671b8',
'oneiric64' : 'ami-4fa37426',
'precise64' : 'ami-cf5e2ba6',
'raring64' : 'ami-9597e1fc',
'setup_server': 'ami-2eff6047', # precise with aufs and wsshd, created 03/08/13
},
REGION_US_WEST_1: {
},
}
#Definition of EBS boot AMIs we have on EC2
AMIs_ebs = {REGION_US_EAST_1: {
'karmic32': 'ami-6743ae0e',
'karmic64': 'ami-7d43ae14',
'lucid32': 'ami-71dc0b18',
'lucid64': 'ami-55dc0b3c',
'oneiric32' : 'ami-6ba27502',
'oneiric64' : 'ami-6fa27506',
'precise64' : 'ami-e7582d8e',
'raring64' : 'ami-e995e380',
},
REGION_US_WEST_1: {
},
}
#Definition of HVM AMIs we have on EC2
# (All of these are also EBS-boot)
AMIs_hvm = {REGION_US_EAST_1: {
'natty64': 'ami-f1589598',
'oneiric64' : 'ami-beba68d7',
'precise64' : 'ami-f9582d90',
'raring64' : 'ami-eb95e382',
},
REGION_US_WEST_1: {
},
}
def get_ami(ami, zone, ebs_boot=False, instance_type=None):
    """Resolve a named AMI (e.g. 'precise64') for the region of *zone*.

    Returns the AMI id string from the appropriate table, or None when the
    name is not in that table.
    """
    if not zone:
        zone = REGION_US_EAST_1 + 'a'
    if instance_type == 't1.micro':
        # t1.micro lacks instance storage, so it must boot from EBS.
        ebs_boot = True
    region = zone[:-1]  # dropping the trailing AZ letter yields the region
    # Cluster-compute instances use EBS-backed HVM images.
    if instance_type in ('cc1.4xlarge', 'cc2.8xlarge', 'cg1.4xlarge'):
        table = AMIs_hvm
    elif ebs_boot:
        table = AMIs_ebs
    else:
        table = AMIs
    return table[region].get(ami, None)
imaged_amis = []  # flat list of custom AMI ids; populated once at import time
def gen_custom_image_table():
    """Collect custom AMI ids from all AMI tables into imaged_amis.

    NOTE(review): ami_keys is a placeholder ([''] matches nothing), so the
    table stays empty until a developer fills in real AMI keys.
    """
    global imaged_amis
    ami_dcts = [AMIs, AMIs_ebs, AMIs_hvm]
    region_defs = [REGION_US_EAST_1]
    # DEVELOPER: PUT YOUR CUSTOM AMI KEYS BELOW (e.g. ami-..)
    ami_keys = ['']
    for ami_dct in ami_dcts:
        for region_def in region_defs:
            region_dct = ami_dct.get(region_def)
            if not region_dct:
                continue
            for ami_key in ami_keys:
                ami = region_dct.get(ami_key)
                if ami:
                    imaged_amis.append(ami)
gen_custom_image_table() #needed for below function
def is_custom_image(ami_id):
    """Return True when *ami_id* (e.g. 'ami-63be790a') is one of our imaged
    AMIs collected by gen_custom_image_table().
    """
    # Reading a module global needs no 'global' declaration.
    return ami_id in imaged_amis
def retry_n_times(func, n, caller, *args, **kwargs):
    """Call func(*args, **kwargs), retrying up to n times on EC2ResponseError.

    caller is a string specifying who called this (for logging).
    Re-raises the last EC2ResponseError once the retry budget is exhausted.
    """
    # NOTE: Python 2 'except X, e' syntax below; this module predates Python 3.
    i= -1
    while True:
        try:
            return func(*args,**kwargs)
        except boto.exception.EC2ResponseError, e: #aws hiccups sometimes
            n-=1
            i+=1
            logger.error('%s: EC2ResponseError: %s', caller, e)
            if n <= 0:
                raise
            else:
                time.sleep(min(10,0.2 + (1<<i) * 0.5 * random.random())) # randomized exponential backoff, capped at 10s
                continue
"""
Generic server spawning with boto
"""
def get_instances(connection, instance_ids=None, filters=None):
    """Fetch instances, optionally restricted by ids and/or a filter dict.

    See http://docs.amazonwebservices.com/AWSEC2/latest/APIReference/
    """
    reservations = retry_n_times(
        connection.get_all_instances, 3, 'get_instances',
        instance_ids=instance_ids, filters=filters)
    return extract_instances(reservations)
def extract_instances(reservations):
    """Flatten a list of reservations into a single list of instances.

    Each instance is annotated with a ``groups`` attribute listing its
    security group names, falling back through the attribute names used by
    successively older boto releases.
    """
    instances = []
    for reservation in reservations:
        try:
            names = [g.groupName for g in reservation.groups]
        except AttributeError:  # boto version < 2.0rc1
            try:
                names = [g.name for g in reservation.groups]
            except AttributeError:
                names = [g.id for g in reservation.groups]
        for inst in reservation.instances:
            inst.groups = names
            instances.append(inst)
    return instances
"""Below need a boto EC2Connection object, connection, to work"""
def run_instances(connection, ami, ebs_boot=False, num=1, min_count=None, groups=None,
                  key_name='team', zone='us-east-1a', type='m1.small'):
    """Start on-demand instances and return the boto reservation.

    reservation.instances accesses the actual instances.

    :param groups: security group names; defaults to ['default'].  The None
        default replaces the previous mutable list default (shared-mutable
        default pitfall) with identical behavior.
    """
    if groups is None:
        groups = ['default']
    # Fall back to treating *ami* as a literal AMI id when it isn't a known name.
    my_ami = get_ami(ami, zone, ebs_boot, type) or ami
    if min_count is None:
        min_count = num
    reservation = connection.run_instances(image_id=my_ami, security_groups=groups, max_count=num,
                                           min_count=min_count, instance_type=type, placement=zone,
                                           key_name=key_name)
    return reservation
rin = run_instances
def get_spot_requests(connection, request_ids=None, filters=None):
    """Fetch spot instance requests by id, optionally filtered.

    http://docs.amazonwebservices.com/AWSEC2/latest/APIReference/index.html?ApiReference-query-DescribeSpotInstanceRequests.html
    """
    return retry_n_times(
        connection.get_all_spot_instance_requests, 3, 'get_spot_requests',
        request_ids=request_ids, filters=filters)
def get_spot_price_history(connection,start_time=None, end_time=None,
                           instance_type=None, product_description=None,
                           availability_zone=None):
    """Get spot price history.
    start_time and end_time should be datetime objects in UTC or None
    See boto's get_spot_price_history
    encode timestamp as a datetime.datetime object
    """
    """
    internally has a loop to handle boto returning 1,000 results max (no idea why)
    """
    start_times = {} # dictionary maps instance_type + az to times
    result_set = []
    extra_loops = 0
    # Page backwards in time: each pass fetches a batch, prepends it to
    # result_set, then moves end_time to the batch's earliest timestamp.
    while True:
        start_time_str = start_time.isoformat() if start_time else None
        end_time_str = end_time.isoformat() if end_time else None
        price_hist = retry_n_times(connection.get_spot_price_history, 3, 'get_spot_price_history',
                                   start_time=start_time_str, end_time=end_time_str,
                                   instance_type=instance_type, product_description=product_description,
                                   availability_zone=availability_zone)
        # Parse timestamps in place and track the earliest timestamp seen per
        # (instance_type, availability zone) pair.
        for ph in price_hist:
            ph.timestamp = ts = datetime.datetime.strptime(ph.timestamp, '%Y-%m-%dT%H:%M:%S.000Z')
            key = '^'.join([ph.instance_type,ph.availability_zone])
            if key not in start_times or ts < start_times[key]:
                start_times[key] = ts
        price_hist.sort(key=lambda ph: ph.timestamp )
        result_set = price_hist + result_set
        if not price_hist:
            break
        if not start_time and not end_time: # just show 1000..
            break
        if end_time and price_hist[0].timestamp >= end_time: # can't go earlier!
            break
        if start_time: # verify that all az have been found
            if price_hist[0].timestamp <= start_time: # at least one instance time has been resolved
                extra_loops += 1
                if extra_loops > 20:
                    # sanity check - don't go too far back
                    break
            # for/else: the else runs only when no record is still newer than
            # start_time, i.e. every (type, az) pair has been fully covered.
            for record_start_time in start_times.values():
                if record_start_time > start_time: # fail case
                    break
            else: # all resolved successfully
                break
        end_time = price_hist[0].timestamp
    return result_set
def request_spot_instances(connection, max_price, ami, ebs_boot=False, num=1,
                           groups=None, key_name='team', zone='us-east-1a', type='m1.small'):
    """Place spot instance requests and return the list of boto spot requests.

    Price is a string, e.g. '0.08' for $0.08.

    :param groups: security group names; defaults to ['default'].  The None
        default replaces the previous mutable list default (shared-mutable
        default pitfall) with identical behavior.
    """
    if groups is None:
        groups = ['default']
    # Fall back to treating *ami* as a literal AMI id when it isn't a known name.
    my_ami = get_ami(ami, zone, ebs_boot, type) or ami
    spot_requests = connection.request_spot_instances(image_id=my_ami, price=max_price,
                                                      security_groups=groups, count=num, instance_type=type,
                                                      placement=zone, key_name=key_name)
    return spot_requests
def cancel_spot_requests(connection, spot_request_ids):
    """Cancel the given spot instance request(s).

    Accepts a single request id or an iterable of ids; does nothing when
    *spot_request_ids* is empty or None.
    """
    if not spot_request_ids:
        return
    ids = spot_request_ids if hasattr(spot_request_ids, '__iter__') else [spot_request_ids]
    return retry_n_times(connection.cancel_spot_instance_requests, 3,
                         'cancel_spot_requests', request_ids=ids)
def get_reserved_instances(connection, filters = None):
    """Return all reserved instances matching *filters* (retried up to 3x)."""
    return retry_n_times(connection.get_all_reserved_instances, 3,
                         'get_reserved_instances', filters=filters)
def terminate_instances(connection, instance_ids):
    """Terminate the given instance(s); accepts one id or an iterable of ids."""
    ids = instance_ids if hasattr(instance_ids, '__iter__') else [instance_ids]
    return retry_n_times(connection.terminate_instances, 3,
                         'terminate_instances', instance_ids=ids)
def set_instance_or_req_tags(connection, ids, tag_dict):
    """Apply *tag_dict* (tag name -> value) to instance(s) or spot
    request(s) identified by *ids* (a single id or an iterable).

    Retried more aggressively (9 attempts) than the other calls here.
    """
    resource_ids = ids if hasattr(ids, '__iter__') else [ids]
    return retry_n_times(connection.create_tags, 9, 'set_instance_or_req_tags',
                         resource_ids=resource_ids, tags=tag_dict)
# Short/compatibility aliases for the common instance operations.
tin = terminate_instances
describe_instances = get_instances
din = describe_instances
"""
Generic ebs volume management with boto
"""
def wait_for_status(ec2obj, status, num_polls=10, sleep_time=0.5,
                    do_raise=True):
    """Poll until ec2obj.status (or ec2obj.state) equals *status*.

    Expects a boto ec2 object exposing a status (or state) attribute and an
    update() method.  Polls at most *num_polls* + 1 extra times, sleeping
    *sleep_time* seconds between polls.  When *do_raise* is true and the
    target status was never reached, raises an Exception.
    """
    for attr in ('status', 'state'):
        if hasattr(ec2obj, attr):
            field = attr
            break
    else:
        raise Exception('ec2obj has no status or state attribute')
    read_state = operator.attrgetter(field)
    attempts = 0
    # Continue while the status is wrong and the poll budget is not spent.
    while read_state(ec2obj) != status and attempts <= num_polls:
        time.sleep(sleep_time)
        ec2obj.update()
        attempts += 1
    if do_raise and read_state(ec2obj) != status:
        raise Exception('ec2obj status %s != %s' % (read_state(ec2obj), status))
def get_volumes(connection, volume_ids=None, filters=None):
    """Return all ebs volumes satisfying the criteria.

    * connection: An ec2 connection instance.
    * volume_ids: Optional IDs of volumes to retrieve.
    * filters: Optional additional filtering criteria.
    """
    return retry_n_times(connection.get_all_volumes, 3, 'get_volumes',
                         volume_ids=volume_ids, filters=filters)
def get_volume(connection, volume_id):
    """Return the single ebs volume identified by *volume_id*.

    Raises when the lookup does not return exactly one volume.
    """
    matches = get_volumes(connection, [volume_id])
    if not matches or len(matches) != 1:
        raise Exception('unexpected result from get_volumes')
    return matches[0]
def create_volume(connection, size, zone, snapshot=None, block=False,
                  num_polls=120, sleep_time=1, do_raise=True):
    """Create an ebs volume and return the volume object.

    * connection: An ec2 connection instance.
    * size: Size of volume to create in GiB.
    * zone: Availability zone in which the volume should be created.
    * snapshot: Optional snapshot (or id) from which to create the volume.
    * block: If True, waits until the volume becomes 'available'.
    * num_polls: Max number of polls to perform while blocking.
    * sleep_time: Seconds to wait between polls while blocking.
    * do_raise: Raises exception if creation is not successful after block.
    """
    volume = connection.create_volume(size, zone, snapshot)
    if not block:
        return volume
    wait_for_status(volume, 'available', num_polls, sleep_time, do_raise)
    return volume
def delete_volume(connection, volume_id=None):
    """Delete the ebs volume *volume_id* via *connection*.

    Returns True when the deletion succeeded.
    """
    return connection.delete_volume(volume_id)
def attach_volume(connection, volume_id, instance_id, device, block=False,
                  num_polls=60, sleep_time=0.5, do_raise=True):
    """Attach an ebs volume to an ec2 instance.

    * connection: An ec2 connection instance.
    * volume_id: ID of volume to attach.
    * instance_id: ID of instance where volume will be attached.
    * device: Device file where volume will be accessible.
    * block: If True, waits until the volume reports 'in-use'.
    * num_polls: Max number of polls to perform while blocking.
    * sleep_time: Seconds to wait between polls while blocking.
    * do_raise: Raises exception if attachment is not successful after block.
    Returns the (truthy) API result when successful.
    """
    attached = connection.attach_volume(volume_id, instance_id, device)
    # Only poll when the attach call itself reported success.
    if attached and block:
        wait_for_status(get_volume(connection, volume_id), 'in-use',
                        num_polls, sleep_time, do_raise)
    return attached
def detach_volume(connection, volume_id, instance_id, device, force=False,
                  block=False, num_polls=120, sleep_time=0.5, do_raise=True):
    """Detach an ebs volume from an instance.

    * connection: An ec2 connection instance.
    * volume_id: ID of volume to detach.
    * instance_id: ID of instance from which volume will be detached.
    * device: Device file where volume is accessible.
    * force: Forces detachment even if the volume appears busy.
    * block: If True, waits until the volume reports 'available'.
    * num_polls: Max number of polls to perform while blocking.
    * sleep_time: Seconds to wait between polls while blocking.
    * do_raise: Raises exception if detachment is not successful after block.
    Returns the (truthy) API result when successful.
    """
    detached = connection.detach_volume(volume_id, instance_id, device, force)
    # Only poll when the detach call itself reported success.
    if detached and block:
        wait_for_status(get_volume(connection, volume_id), 'available',
                        num_polls, sleep_time, do_raise)
    return detached
def get_volume_tags(connection, volume_id):
    """Return the tag dictionary of the ebs volume *volume_id*."""
    return get_volume(connection, volume_id).tags
def add_volume_tag(connection, volume_id, key, value=''):
    """Tag the ebs volume *volume_id* with *key* -> *value*."""
    return get_volume(connection, volume_id).add_tag(key, value)
def remove_volume_tag(connection, volume_id, key, value=None):
    """Remove tag *key* (optionally matching *value*) from the ebs volume."""
    return get_volume(connection, volume_id).remove_tag(key, value)
"""
Generic snapshot management with boto
"""
def get_snapshots(connection, snapshot_ids=None, filters=None):
    """Return all snapshots satisfying the criteria.

    * connection: An ec2 connection instance.
    * snapshot_ids: Optional IDs of snapshots to retrieve.
    * filters: Optional additional filtering criteria.
    """
    return retry_n_times(connection.get_all_snapshots, 3, 'get_snapshots',
                         snapshot_ids=snapshot_ids, filters=filters)
def get_snapshot(connection, snapshot_id):
    """Return the single snapshot identified by *snapshot_id*.

    Raises when the lookup does not return exactly one snapshot.
    """
    matches = get_snapshots(connection, [snapshot_id])
    if not matches or len(matches) != 1:
        raise Exception('unexpected result from get_snapshots')
    return matches[0]
def create_snapshot(connection, volume_id, description=None, block=False,
                    num_polls=720, sleep_time=5, do_raise=True):
    """Create a snapshot of an ebs volume and return the snapshot object.

    * connection: An ec2 connection instance.
    * volume_id: ID of the ebs volume which should be snapshotted.
    * description: Optional description for the snapshot.
    * block: If True, waits until the snapshot reports 'completed'.
    * num_polls: Max number of polls to perform while blocking.
    * sleep_time: Seconds to wait between polls while blocking.
    * do_raise: Raises exception if creation is not successful after block.
    """
    snapshot = connection.create_snapshot(volume_id, description)
    if not block:
        return snapshot
    wait_for_status(snapshot, 'completed', num_polls, sleep_time, do_raise)
    return snapshot
def delete_snapshot(connection, snapshot_id=None):
    """Delete the snapshot *snapshot_id* via *connection*.

    Returns True when the deletion succeeded.
    """
    return connection.delete_snapshot(snapshot_id)
def get_snapshot_tags(connection, snapshot_id):
    """Return the tag dictionary of snapshot *snapshot_id*."""
    return get_snapshot(connection, snapshot_id).tags
def add_snapshot_tag(connection, snapshot_id, key, value=''):
    """Tag snapshot *snapshot_id* with *key* -> *value*."""
    return get_snapshot(connection, snapshot_id).add_tag(key, value)
def remove_snapshot_tag(connection, snapshot_id, key, value=None):
    """Remove tag *key* (optionally matching *value*) from the snapshot."""
    return get_snapshot(connection, snapshot_id).remove_tag(key, value)
"""
Useful utilities
"""
def is_ec2_instance(hostname=None):
    """Return True when *hostname* looks like an ec2 instance, i.e. its last
    domain component is 'internal'.  Without an argument, checks the local
    machine's fully-qualified domain name.
    """
    if hostname is None:
        hostname = socket.getfqdn()
    return hostname.rsplit('.', 1)[-1] == 'internal'
|
import re
from setuptools import setup
with open('wumpus/__init__.py') as f:
    contents = f.read()

# Pull packaging metadata out of the package itself so it is defined in
# exactly one place.
try:
    version = re.search(
        r'^__version__\s*=\s*[\'"]([^\'"]*)[\'"]', contents, re.M
    ).group(1)
except AttributeError:
    raise RuntimeError('Could not identify version') from None

try:
    author = re.search(
        r'^__author__\s*=\s*[\'"]([^\'"]*)[\'"]', contents, re.M
    ).group(1)
except AttributeError:
    author = 'jay3332'

with open('README.md', encoding='utf-8') as f:
    readme = f.read()

with open('requirements.txt', encoding='utf-8') as f:
    requirements = f.readlines()

setup(
    name='wumpus.py',
    author=author,
    url='https://github.com/jay3332/wumpus.py',
    project_urls={
        "Issue tracker": "https://github.com/jay3332/wumpus.py/issues",
        "Discord": "https://discord.gg/FqtZ6akWpd"
    },
    # Use the version parsed from wumpus/__init__.py; it was previously
    # extracted above (with an error raised on failure) and then ignored in
    # favour of a hard-coded '0.0.0'.
    version=version,
    packages=[
        'wumpus',
        'wumpus.core',
        'wumpus.models',
        'wumpus.typings'
    ],
    license='MIT',
    description="An asynchronous wrapper around Discord's API.",
    long_description=readme,
    long_description_content_type="text/markdown",
    include_package_data=True,
    install_requires=requirements,
    extras_require={
        'docs': [
            'sphinx>=4.1.1',
            'furo',
        ],
        'performance': [
            'orjson>=1.3.0'
        ]
    },
    python_requires='>=3.8.0',
    classifiers=[
        'License :: OSI Approved :: MIT License',
        'Intended Audience :: Developers',
        'Natural Language :: English',
        'Operating System :: OS Independent',
        'Programming Language :: Python :: 3.8',
        'Programming Language :: Python :: 3.9',
        'Topic :: Internet',
        'Topic :: Software Development :: Libraries',
        'Topic :: Software Development :: Libraries :: Python Modules',
        'Topic :: Utilities',
    ]
)
|
# This is an auto-generated Django model module.
# You'll have to do the following manually to clean this up:
# * Rearrange models' order
# * Make sure each model has one field with primary_key=True
# * Make sure each ForeignKey has `on_delete` set to the desired behavior.
# * Remove `managed = False` lines if you wish to allow Django to create, modify, and delete the table
# Feel free to rename the models, but don't rename db_table values or field names.
from django.db import models
class Recurso(models.Model):
    """Inspectdb-generated model for the 'Recurso' table (unmanaged)."""
    # An AutoField must be the primary key: Django's system check
    # (fields.E100) rejects an AutoField without primary_key=True, and a
    # blank/null auto field was invalid as generated.
    id = models.AutoField(primary_key=True)
    descricao = models.CharField(max_length=255, blank=True, null=True)
    tipo_laboratorio = models.CharField(max_length=255, blank=True, null=True)
    numero = models.IntegerField(blank=True, null=True)
    tipo_recurso = models.CharField(max_length=255, blank=True, null=True)

    class Meta:
        managed = False  # table is owned by the legacy database
        db_table = 'Recurso'
class Reserva(models.Model):
    """Inspectdb-generated model for the 'Reserva' table (unmanaged)."""
    data_hora_chegada = models.DateTimeField(blank=True, null=True)
    data_hora_saida = models.DateTimeField(blank=True, null=True)
    disciplina = models.CharField(max_length=255, blank=True, null=True)
    justificativa = models.CharField(max_length=255, blank=True, null=True)
    observacao = models.CharField(max_length=255, blank=True, null=True)
    curso = models.CharField(max_length=255, blank=True, null=True)
    nome_professor = models.CharField(max_length=255, blank=True, null=True)
    turno = models.CharField(max_length=255, blank=True, null=True)
    primeira_aula = models.BooleanField(blank=True, null=True)
    segunda_aula = models.BooleanField(blank=True, null=True)
    confirmacao = models.BooleanField(blank=True, null=True)
    # NOTE(review): plain integer, not a ForeignKey to Recurso -- referential
    # integrity is not enforced at the ORM level; confirm against the schema.
    id_recurso = models.IntegerField(blank=True, null=True)
    situacao = models.CharField(max_length=255, blank=True, null=True)
    tipo_recurso = models.CharField(max_length=255, blank=True, null=True)
    class Meta:
        managed = False
        db_table = 'Reserva'
|
#!/usr/bin/env python
# coding: utf-8
# Copyright (c) 2015
# Gmail:liuzheng712
#
from ansible import playbook, callbacks
import tornado.web
import tornado.ioloop
import tornado.websocket
import os, sys
import subprocess
import time
import StringIO
import json
import tornado.escape
class Index(tornado.web.RequestHandler):
    """Serves the static landing page."""
    def get(self):
        # GET / -> render the bundled HTML UI.
        self.render('./static/index.html')
def run_playbook(playbook_path, hosts_path):
    """Run an ansible playbook synchronously.

    * playbook_path: path to the playbook yml file.
    * hosts_path: path to the inventory (hosts) file.
    Returns the AggregateStats object collected during the run.
    """
    stats = callbacks.AggregateStats()
    # verbose=0 keeps ansible's own console output quiet.
    playbook_cb = callbacks.PlaybookCallbacks(verbose=0)
    runner_cb = callbacks.PlaybookRunnerCallbacks(stats, verbose=0)
    playbook.PlayBook(
        playbook=playbook_path,
        host_list=hosts_path,
        stats=stats,
        forks=4,
        callbacks=playbook_cb,
        runner_callbacks=runner_cb,
    ).run()
    return stats
class ChatSocketHandler(tornado.websocket.WebSocketHandler):
    """Broadcast chat channel: every message received is fanned out to all
    currently connected chat sockets."""
    waiters = set()   # all open chat connections (shared, class-level)
    cache = []        # message history buffer (currently unused)
    cache_size = 300  # intended cap for the cache - TODO confirm/enforce
    def allow_draft76(self):
        # for iOS 5.0 Safari
        return True
    def check_origin(self, origin):
        # Accept cross-origin websocket connections.
        return True
    def open(self):
        # Parenthesized print is identical for a single argument on
        # Python 2 and also parses on Python 3.
        print("Chat WebSocket Open")
        ChatSocketHandler.waiters.add(self)
    def on_close(self):
        # Fixed typo in the log message ("Clost" -> "Close").
        print("Chat WebSocket Close")
        ChatSocketHandler.waiters.remove(self)
    def on_message(self, msg):
        print("on_message ")
        # msg = tornado.escape.json_decode(msg)
        chat = {
            # "id": str(uuid.uuid4()),
            "time": time.mktime(time.localtime()),
            "msg": msg
        }
        # Fan the message out to every connected client.
        for waiter in ChatSocketHandler.waiters:
            waiter.write_message(chat)
class PLSocketHandler(tornado.websocket.WebSocketHandler):
    """Websocket endpoint that runs a whitelisted shell command or an
    ansible playbook and streams the output back to the client."""
    def allow_draft76(self):
        # for iOS 5.0 Safari
        return True
    def check_origin(self, origin):
        # Accept cross-origin websocket connections.
        return True
    def open(self):
        print "Playbook WebSocket Open"
    def on_close(self):
        print "Playbook WebSocket Close"
    def on_message(self, e):
        # Only two exact command strings are accepted; anything else is
        # rejected, so this is not an arbitrary remote shell.
        command = e
        print command
        if command.strip() == "ping -c 4 localhost":
            # stdout = sys.stdout
            # sys.stdout = file = StringIO.StringIO()
            # self.write_message(file.getvalue())
            # for nextline in iter(child.stdout.readline, b''):
            #     self.write_message("[ "+time.strftime('%Y-%m-%d %H:%M:%S',time.localtime())+"] "+nextline)
            # child.stdout.close()
            # child.wait()
            child = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE)
            # Stream stdout line by line until the child has exited and its
            # output is drained.
            while True:
                nextline = child.stdout.readline()
                if nextline.strip() == "" and child.poll() != None:
                    break
                self.write_message("[ " + time.strftime('%Y-%m-%d %H:%M:%S', time.localtime()) + "] " + nextline)
        elif command.strip() == "ansible-playbook -i hosts test.yml":
            # Monkey-patch AggregateStats.compute so each per-host result is
            # forwarded over this websocket as it arrives.
            callbacks.AggregateStats.compute = self.compute
            stats = run_playbook(
                playbook_path='./test.yml',
                hosts_path='./hosts',
            )
        else:
            self.write_message("Input Error")
    def compute(self, runner_results, setup=False, poll=False, ignore_errors=False):
        ''' walk through all results and increment stats '''
        # Bound in place of AggregateStats.compute (see on_message); pushes
        # each contacted host's result to the client instead of counting.
        for (host, value) in runner_results.get('contacted', {}).iteritems():
            # if not ignore_errors and (('failed' in value and bool(value['failed'])) or
            #     ('failed_when_result' in value and [value['failed_when_result']] or ['rc' in value and value['rc'] != 0])[0]):
            #     callbacks.AggregateStats()._increment('failures', host)
            # elif 'skipped' in value and bool(value['skipped']):
            #     callbacks.AggregateStats()._increment('skipped', host)
            # elif 'changed' in value and bool(value['changed']):
            #     if not setup and not poll:
            #         callbacks.AggregateStats()._increment('changed', host)
            #     callbacks.AggregateStats()._increment('ok', host)
            # else:
            #     if not poll or ('finished' in value and bool(value['finished'])):
            #         callbacks.AggregateStats()._increment('ok', host)
            # Keep the last raw result around for debugging.
            global lazy_out
            lazy_out = runner_results
            self.write_message("liuzheng: " + json.dumps(host) + " => " + json.dumps(value))
        # for (host, value) in runner_results.get('dark', {}).iteritems():
        #     callbacks.AggregateStats()._increment('dark', host)
if __name__ == '__main__':
    # Route both websocket endpoints and serve on port 8001.
    app = tornado.web.Application([
        ('/playbook', PLSocketHandler),
        ('/chat', ChatSocketHandler),
    ])
    app.listen(8001)
    tornado.ioloop.IOLoop.instance().start()
|
from ..greengraph import Greengraph
from ..map import Map
|
from typing import Dict, List, Optional, Tuple
import symro.src.mat as mat
from symro.src.prob.problem import Problem, BaseProblem
import symro.src.handlers.metaentitybuilder as eb
# Default symbols for the decomposed problems and the GBD bookkeeping
# entities.  Each is passed through Problem.generate_unique_symbol at build
# time so it cannot clash with symbols already used by the source problem.
DEFAULT_MP_SYMBOL = "Master"
DEFAULT_PRIMAL_SP_SYMBOL = "Primal"
DEFAULT_FBL_SP_SYMBOL = "Feasibility"
CUT_COUNT_PARAM_SYMBOL = "CUT_COUNT"
CUTS_SET_SYMBOL = "CUTS"
IS_FEASIBLE_PARAM_SYMBOL = "is_feasible"
STORED_OBJ_PARAM_SYMBOL = "obj_stored"
ETA_VAR_SYMBOL = "eta"
MASTER_OBJ_SYMBOL = "MASTER_OBJ"
PRIMAL_OBJ_SYMBOL = "PRIMAL_OBJ"
FBL_OBJ_SYMBOL = "FBL_OBJ"
OPT_CUT_CON_SYMBOL = "GBD_OPT_CUT"
FBL_CUT_CON_SYMBOL = "GBD_FBL_CUT"
CAN_INT_CUT_CON_SYMBOL = "GBD_CANON_INT_CUT"
class GBDSubproblemContainer:
    """Groups one primal subproblem with its feasibility counterpart and the
    element indexing this subproblem, plus the per-subproblem index sets."""
    def __init__(self,
                 primal_sp: Optional[BaseProblem],
                 fbl_sp: Optional[BaseProblem],
                 sp_index: Optional[mat.Element]):
        self.primal_sp: Optional[BaseProblem] = primal_sp
        self.fbl_sp: Optional[BaseProblem] = fbl_sp
        self.sp_index: Optional[mat.Element] = sp_index
        # "comp" presumably means "complicating" (GBD terminology) - confirm.
        # key: variable symbol -> indexing subset restricted to this sp
        self.comp_var_idx_sets: Dict[str, Optional[mat.IndexingSet]] = {}
        # key: constraint symbol -> indexing subset restricted to this sp
        self.mixed_comp_con_idx_set: Dict[str, Optional[mat.IndexingSet]] = {}
    def get_primal_meta_obj(self) -> mat.MetaObjective:
        """Return the first meta-objective of the primal subproblem."""
        return self.primal_sp.model_meta_objs[0]
    def get_fbl_meta_obj(self) -> mat.MetaObjective:
        """Return the first meta-objective of the feasibility subproblem."""
        return self.fbl_sp.model_meta_objs[0]
class GBDProblem(Problem):
    """Problem subclass holding every construct of a Generalized Benders
    Decomposition of a source problem: derived unique symbols, auxiliary
    meta-entities (cut set and counter, stored decisions, multipliers), and
    the master/primal/feasibility subproblem definitions."""
    def __init__(self,
                 problem: Problem,
                 mp_symbol: str,
                 default_primal_sp_symbol: str,
                 default_fbl_sp_symbol: str,
                 primal_sp_obj_sym: str,
                 working_dir_path: str = None):
        """Copy *problem* and derive collision-free symbols for every GBD
        entity.  None symbols fall back to the module-level defaults."""
        super(GBDProblem, self).__init__(symbol=None,
                                         description=problem.description,
                                         working_dir_path=working_dir_path)
        Problem.copy(problem, self)
        # --- Name ---
        self.symbol = problem.symbol + ".gbd"
        # --- Script ---
        # Drop included scripts copied from the source problem; the GBD
        # script is generated from scratch.
        self.compound_script.included_scripts.clear()
        # --- Symbols ---
        # Every symbol goes through generate_unique_symbol so GBD entities
        # cannot clash with names already present in the source problem.
        self.mp_symbol: str = mp_symbol if mp_symbol is not None else DEFAULT_MP_SYMBOL
        self.mp_symbol = self.generate_unique_symbol(self.mp_symbol)
        self.default_primal_sp_sym: str = default_primal_sp_symbol if default_primal_sp_symbol is not None \
            else DEFAULT_PRIMAL_SP_SYMBOL
        self.default_primal_sp_sym = self.generate_unique_symbol(self.default_primal_sp_sym)
        self.default_fbl_sp_sym: str = default_fbl_sp_symbol if default_fbl_sp_symbol is not None \
            else DEFAULT_FBL_SP_SYMBOL
        self.default_fbl_sp_sym = self.generate_unique_symbol(self.default_fbl_sp_sym)
        # Dummy (unbound) symbol used to iterate over the cut set.
        self.cuts_unb_sym: str = "ct"
        self.cuts_sym: str = self.generate_unique_symbol(CUTS_SET_SYMBOL)
        self.cut_count_sym = self.generate_unique_symbol(CUT_COUNT_PARAM_SYMBOL)
        self.is_feasible_sym = self.generate_unique_symbol(IS_FEASIBLE_PARAM_SYMBOL)
        self.stored_obj_sym = self.generate_unique_symbol(STORED_OBJ_PARAM_SYMBOL)
        self.eta_sym = self.generate_unique_symbol(ETA_VAR_SYMBOL)
        self.mp_obj_sym: str = self.generate_unique_symbol(MASTER_OBJ_SYMBOL)
        self.primal_sp_obj_sym: str = primal_sp_obj_sym
        self.default_fbl_sp_obj_sym: str = self.generate_unique_symbol(FBL_OBJ_SYMBOL)
        self.opt_cut_con_sym = self.generate_unique_symbol(OPT_CUT_CON_SYMBOL)
        self.fbl_cut_con_sym = self.generate_unique_symbol(FBL_CUT_CON_SYMBOL)
        self.can_int_cut_con_sym = self.generate_unique_symbol(CAN_INT_CUT_CON_SYMBOL)
        # --- Algorithm Meta-Entities ---
        # Meta-Sets
        self.idx_meta_sets: Dict[str, mat.MetaSet] = {}
        self.cuts: Optional[mat.MetaSet] = None
        # Meta-Parameters
        self.cut_count: Optional[mat.MetaParameter] = None
        self.is_feasible: Optional[mat.MetaParameter] = None
        self.stored_obj: Optional[mat.MetaParameter] = None
        self.stored_comp_decisions: Dict[str, mat.MetaParameter] = {}
        self.duality_multipliers: Dict[int, mat.MetaParameter] = {}
        # Meta-Variables
        self.comp_meta_vars: Dict[str, mat.MetaVariable] = {}
        self.eta: Optional[mat.MetaVariable] = None
        self.slack_vars: Dict[str, mat.MetaVariable] = {}
        self.aux_f_meta_var: Optional[mat.MetaVariable] = None
        self.aux_g_meta_vars: Optional[Dict[int, mat.MetaVariable]] = {}
        # Meta-Objectives
        self.master_obj: Optional[mat.MetaObjective] = None
        self.primal_sp_objs: Dict[str, mat.MetaObjective] = {}  # key: subproblem symbol; value: meta-objective
        self.fbl_sp_objs: Dict[str, mat.MetaObjective] = {}  # key: subproblem symbol; value: meta-objective
        # Meta-Constraints
        self.pure_comp_cons: Dict[str, mat.MetaConstraint] = {}
        self.mixed_comp_cons: Dict[str, mat.MetaConstraint] = {}
        self.non_comp_cons: Dict[str, mat.MetaConstraint] = {}
        self.origin_to_std_con_map: Optional[Dict[str, List[mat.MetaConstraint]]] = None
        self.std_to_sl_map: Dict[str, Tuple[List[mat.MetaVariable], mat.MetaConstraint]] = {}
        self.sl_fbl_cons: Dict[str, mat.MetaConstraint] = {}
        self.aux_f_meta_con: Optional[mat.MetaConstraint] = None
        self.aux_g_meta_cons: Optional[Dict[int, mat.MetaConstraint]] = {}
        self.gbd_cuts: Dict[str, mat.MetaConstraint] = {}
        # Problems
        self.primal_sps: List[BaseProblem] = []
        self.fbl_sps: List[BaseProblem] = []
        self.sp_containers: List[GBDSubproblemContainer] = []
        self.mp: Optional[BaseProblem] = None
    def build_mp_constructs(self, init_lb: float):
        """Instantiate the master-problem bookkeeping entities: the cut
        counter, the cut index set {1..cut_count}, the per-cut feasibility
        flag and stored objective parameters, and the eta variable with
        lower bound *init_lb*.  All are registered as auxiliary entities."""
        self.cut_count = eb.build_meta_param(
            problem=self,
            symbol=self.cut_count_sym,
            default_value=0)
        self.add_meta_parameter(self.cut_count, is_auxiliary=True)
        self.cuts_unb_sym = self.generate_unique_symbol("ct")
        # Ordered set 1..CUT_COUNT indexing all cuts generated so far.
        ord_set_node = mat.OrderedSetNode(start_node=mat.NumericNode(value=1),
                                          end_node=mat.DeclaredEntityNode(self.cut_count_sym,
                                                                          type=mat.PARAM_TYPE))
        self.cuts = eb.build_meta_set(
            problem=self,
            symbol=self.cuts_sym,
            dimension=1,
            dummy_symbols=[self.cuts_unb_sym],
            reduced_dummy_symbols=[self.cuts_unb_sym],
            defined_value_node=ord_set_node)
        self.add_meta_set(self.cuts, is_auxiliary=True)
        self.is_feasible = eb.build_meta_param(
            problem=self,
            symbol=self.is_feasible_sym,
            idx_meta_sets=[self.cuts],
            default_value=0)
        self.add_meta_parameter(self.is_feasible, is_auxiliary=True)
        self.stored_obj = eb.build_meta_param(
            problem=self,
            symbol=self.stored_obj_sym,
            idx_meta_sets=[self.cuts],
            default_value=0)
        self.add_meta_parameter(self.stored_obj, is_auxiliary=True)
        # Meta-Variables
        self.eta = eb.build_meta_var(
            problem=self,
            symbol=self.eta_sym,
            lower_bound=init_lb)
        self.add_meta_variable(self.eta, is_auxiliary=True)
    def get_idx_meta_sets(self) -> List[mat.MetaSet]:
        """Return the subproblem indexing meta-sets as a list."""
        return [ms for ms in self.idx_meta_sets.values()]
    def get_comp_var_syms(self) -> List[str]:
        """Return the symbols of the comp (complicating) meta-variables."""
        return [mv.symbol for mv in self.comp_meta_vars.values()]
|
from pyrfuniverse.envs import NailCardEnv
import numpy as np
from stable_baselines3.common.env_checker import check_env
import gym
# Scripted (non-learned) demo episode for NailCardEnv.  Actions are
# 4-component vectors consumed by env.step; the exact meaning of each
# component is defined inside the env - TODO confirm semantics.
env = NailCardEnv(
    rotation_factor=0,
    goal_baseline=0.02,
)
# Check environment
# check_env(env)
# exit()
env.reset()
# Phase 1: repeat the same action for 10 steps.
for i in range(10):
    action = np.array([0, -1, 0, 1])
    obs, reward, done, info = env.step(action)
    # for i in range(10):
    #     env._step()
# Phase 2: two steps of a second action.
for i in range(2):
    action = np.array([0, 0, -1, 0])
    obs, reward, done, info = env.step(action)
    # for i in range(3):
    #     env._step()
# Phase 3: five steps of the final action.
for i in range(5):
    action = np.array([0, 1, -0.1, 0])
    obs, reward, done, info = env.step(action)
    # for i in range(10):
    #     env._step()
# info['is_success'] is set by the env's goal check on the last step.
if info['is_success'] > 0:
    print('Success')
|
"""
Copyright 2020 The OneFlow Authors. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import os
import random
import unittest
from collections import OrderedDict
from typing import Dict
import numpy as np
from test_util import GenArgList
import oneflow.compatible.single_client.unittest
from oneflow.compatible import single_client as flow
from oneflow.compatible.single_client import typing as tp
def _compare_elu_with_np(
    input_shape, alpha, device_type, value_type, machine_ids, device_counts
):
    """Compare flow.nn.elu (forward and backward) against a NumPy reference.

    * input_shape: shape of the random test input.
    * alpha: ELU alpha parameter.
    * device_type: "cpu" or "gpu".
    * value_type: (numpy dtype, oneflow dtype) pair; for float16 the
      computation is staged through float32 casts and checked with a looser
      tolerance.
    * machine_ids / device_counts: oneflow placement configuration.
    """
    if value_type[1] == flow.float16:
        # Sample in float16 first so the values are representable, then
        # widen to the numpy compute dtype.
        input_1 = np.random.uniform(-1, 1, size=input_shape).astype(np.float16)
        input_1 = np.array(input_1, dtype=value_type[0])
    else:
        input_1 = np.random.uniform(-1, 1, size=input_shape).astype(value_type[0])
    assert device_type in ["cpu", "gpu"]
    flow.clear_default_session()
    if device_type == "cpu":
        flow.config.cpu_device_num(device_counts)
    else:
        flow.config.gpu_device_num(device_counts)
    func_config = flow.FunctionConfig()
    func_config.default_placement_scope(flow.scope.placement(device_type, machine_ids))
    if value_type[1] == flow.float16:
        func_config.default_data_type(flow.float32)
    else:
        func_config.default_data_type(value_type[1])
    def np_elu(input, alpha):
        # Reference forward pass: x if x > 0 else alpha * (exp(x) - 1).
        elem_cnt = input.size
        init_shape = input.shape
        input = input.flatten()
        out = np.zeros_like(input)
        for i in range(elem_cnt):
            if input[i] > 0:
                out[i] = input[i]
            else:
                out[i] = alpha * (np.exp(input[i]) - 1)
        out = np.reshape(out, init_shape)
        return np.array(out).astype(value_type[0])
    np_out_elu = np_elu(input_1, alpha)
    def np_diff(input, alpha):
        # Reference gradient: 1 if x > 0 else alpha * exp(x).
        input_shape = input.shape
        input = input.flatten()
        elem_cnt = input.size
        diff = np.zeros(shape=(elem_cnt,))
        for i in range(elem_cnt):
            if input[i] > 0:
                diff[i] = 1
            else:
                diff[i] = alpha * np.exp(input[i])
        diff = np.reshape(diff, newshape=input_shape)
        diff = np.array(diff, dtype=value_type[0])
        return diff
    _np_grad = np_diff(input_1, alpha)
    def assert_prediction_grad(blob: tp.Numpy):
        # Backward check, invoked by flow.watch_diff on the input variable.
        if value_type[1] == flow.float16:
            assert np.allclose(blob, _np_grad, atol=0.001)
        else:
            assert np.allclose(blob, _np_grad, atol=1e-05)
    if value_type[1] == flow.float16:
        @flow.global_function(type="train", function_config=func_config)
        def oneflow_elu(
            of_input_1: tp.Numpy.Placeholder(shape=input_1.shape, dtype=flow.float32)
        ) -> tp.Numpy:
            with flow.scope.placement(device_type, "0:0"):
                # Zero variable added so the input participates in autograd.
                v = flow.get_variable(
                    shape=input_1.shape,
                    dtype=flow.float32,
                    initializer=flow.zeros_initializer(),
                    name="x_var",
                )
                x_var = of_input_1 + v
            # Compute elu in float16, then widen back for the optimizer.
            x_f16 = flow.cast(x_var, flow.float16)
            of_elu_out_f16 = flow.nn.elu(x_f16, alpha)
            of_elu_out_f32 = flow.cast(of_elu_out_f16, flow.float32)
            with flow.scope.placement(device_type, "0:0"):
                flow.optimizer.SGD(
                    flow.optimizer.PiecewiseConstantScheduler([], [0.001]), momentum=0
                ).minimize(of_elu_out_f32)
            flow.watch_diff(x_var, assert_prediction_grad)
            return of_elu_out_f32
    else:
        @flow.global_function(type="train", function_config=func_config)
        def oneflow_elu(
            of_input_1: tp.Numpy.Placeholder(shape=input_1.shape, dtype=value_type[1])
        ) -> tp.Numpy:
            with flow.scope.placement(device_type, "0:0"):
                v = flow.get_variable(
                    shape=input_1.shape,
                    dtype=value_type[1],
                    initializer=flow.zeros_initializer(),
                    name="x_var",
                )
                x_var = of_input_1 + v
                flow.watch_diff(x_var, assert_prediction_grad)
            of_elu_out = flow.nn.elu(x_var, alpha)
            with flow.scope.placement(device_type, "0:0"):
                flow.optimizer.SGD(
                    flow.optimizer.PiecewiseConstantScheduler([], [0.001]), momentum=0
                ).minimize(of_elu_out)
            return of_elu_out
    of_out_elu = oneflow_elu(input_1)
    # Forward check against the NumPy reference.
    if value_type[1] == flow.float16:
        assert np.allclose(of_out_elu, np_out_elu, atol=0.001)
    else:
        assert np.allclose(of_out_elu, np_out_elu, atol=1e-05)
def _gen_arg_dict(shape, alpha, device_type, value_type, machine_ids, device_counts):
    """Build the OrderedDict of argument lists consumed by GenArgList."""
    arg_dict = OrderedDict()
    arg_dict["input_shape"] = [shape]
    arg_dict["alpha"] = [alpha]
    arg_dict["device_type"] = [device_type]
    # float16 is only exercised when not running plain-float on cpu.
    if value_type == "float" and device_type == "cpu":
        dtype_pairs = [
            (np.float32, flow.float32),
            (np.float64, flow.float64),
        ]
    else:
        dtype_pairs = [
            (np.float32, flow.float16),
            (np.float32, flow.float32),
            (np.float64, flow.float64),
        ]
    arg_dict["value_type"] = dtype_pairs
    arg_dict["machine_ids"] = [machine_ids]
    arg_dict["device_counts"] = [device_counts]
    return arg_dict
@flow.unittest.skip_unless_1n1d()
class Testelu1n1d(flow.unittest.TestCase):
    """Single-node, single-device ELU tests."""
    def test_elu_cpu(test_case):
        # 3x3 input, alpha=1.0 on cpu (float32/float64 only).
        arg_dict = _gen_arg_dict(
            shape=(3, 3),
            alpha=1.0,
            device_type="cpu",
            value_type="float",
            machine_ids="0:0",
            device_counts=1,
        )
        for arg in GenArgList(arg_dict):
            _compare_elu_with_np(*arg)
    @unittest.skipIf(os.getenv("ONEFLOW_TEST_CPU_ONLY"), "only test cpu cases")
    def test_elu_gpu(test_case):
        # 4x4 input, alpha=2.0 on gpu (adds the float16 case).
        arg_dict = _gen_arg_dict(
            shape=(4, 4),
            alpha=2.0,
            device_type="gpu",
            value_type="float",
            machine_ids="0:0",
            device_counts=1,
        )
        for arg in GenArgList(arg_dict):
            _compare_elu_with_np(*arg)
@flow.unittest.skip_unless_1n2d()
class Testelu1n2d(flow.unittest.TestCase):
    """Single-node, two-device ELU tests."""
    @unittest.skipIf(os.getenv("ONEFLOW_TEST_CPU_ONLY"), "only test cpu cases")
    def test_elu_gpu_1n2d(test_case):
        # 3-D input spread over two gpus ("0:0-1").
        arg_dict = _gen_arg_dict(
            shape=(4, 8, 4),
            alpha=1.0,
            device_type="gpu",
            value_type="float",
            machine_ids="0:0-1",
            device_counts=2,
        )
        for arg in GenArgList(arg_dict):
            _compare_elu_with_np(*arg)
if __name__ == "__main__":
    # Run all test cases in this module.
    unittest.main()
|
"""
Spotify OAuth integration
"""
from base64 import b64encode
from datetime import datetime
from typing import Dict
from typing import Optional
from typing import Tuple
from urllib.parse import urlencode
from fastapi import Depends
from httpx import HTTPError
from httpx import Response
from app.extensions import http_client
from app.models.db.user import AuthAccount
from app.models.db.user import AuthProvider
from app.services.auth.base import OAuthRoute
from app.services.auth.base import bearer_auth
from app.settings import SPOTIFY_ID
from app.settings import SPOTIFY_REDIRECT_URI
from app.settings import SPOTIFY_SCOPE
from app.settings import SPOTIFY_SECRET
from app.utils.exceptions import UnauthorizedError
class SpotifyAuth(OAuthRoute):
    """Spotify auth integration"""
    provider = AuthProvider.SPOTIFY
    auth_endpoint = "https://accounts.spotify.com/api/token"
    account_endpoint = "https://api.spotify.com/v1/me/"
    sign_in_endpoint = "https://accounts.spotify.com/authorize"
    async def code_auth(self, code: str) -> Tuple[str, str, int]:
        """Exchange an authorization *code* for tokens.

        Returns (access_token, refresh_token, expiry unix timestamp).
        Raises UnauthorizedError when Spotify rejects the exchange.
        """
        # Client credentials are sent base64-encoded in a Basic auth header.
        authorization = b64encode(
            f"{SPOTIFY_ID}:{SPOTIFY_SECRET}".encode("utf-8")
        ).decode("utf-8")
        headers: Dict[str, str] = {"Authorization": f"Basic {authorization}"}
        data: Dict[str, str] = {
            "redirect_uri": SPOTIFY_REDIRECT_URI,
            "code": code,
            "grant_type": "authorization_code",
        }
        response: Response = await http_client.post(
            url=self.auth_endpoint, data=data, headers=headers
        )
        try:
            response.raise_for_status()
        except HTTPError:
            raise UnauthorizedError
        auth_data = response.json()
        # NOTE(review): utcnow().timestamp() treats the naive datetime as
        # local time; it is used consistently across this module - confirm.
        now_seconds = int(datetime.utcnow().timestamp())
        access_token: str = auth_data["access_token"]
        refresh_token: str = auth_data["refresh_token"]
        expires: int = now_seconds + int(auth_data["expires_in"])
        return access_token, refresh_token, expires
    async def get_account_info(self, access_token: str) -> Dict[str, str]:
        """Fetch the user's Spotify profile and normalize it to the fields
        used by this app (_id, name, image, url)."""
        headers = {"Authorization": f"Bearer {access_token}"}
        response = await http_client.get(url=self.account_endpoint, headers=headers)
        try:
            response.raise_for_status()
        except HTTPError:
            raise UnauthorizedError
        profile_info = response.json()
        # Last image in the list is used; empty string when none exist.
        profile_image: str = profile_info["images"][-1]["url"] if profile_info.get(
            "images"
        ) else ""
        formatted_data = {
            "_id": str(profile_info.get("id")),
            "name": profile_info.get("display_name"),
            "image": profile_image,
            "url": profile_info.get("external_urls", {}).get("spotify"),
        }
        return formatted_data
    async def create_auth_link(self) -> str:
        """Build the Spotify authorize URL the user should be redirected to."""
        params: Dict[str, str] = {
            "response_type": "code",
            "client_id": SPOTIFY_ID,
            "scope": SPOTIFY_SCOPE,
            "redirect_uri": SPOTIFY_REDIRECT_URI,
        }
        query: str = urlencode(params)
        url: str = f"{self.sign_in_endpoint}?{query}"
        return url
async def spotify_auth(
    user_id: Optional[str] = Depends(bearer_auth),
) -> str:
    """Spotify auth dependence"""
    # FastAPI dependency: yields a valid Spotify access token for the
    # authenticated user, transparently refreshing an expired one.
    if not user_id:
        raise UnauthorizedError
    auth_account: Optional[AuthAccount] = await AuthAccount.filter(
        user_id=user_id, provider=AuthProvider.SPOTIFY
    ).first()
    if not auth_account:
        raise UnauthorizedError
    now_seconds = int(datetime.utcnow().timestamp())
    # Refresh and persist the tokens when the stored one has expired.
    if auth_account.expires < now_seconds:
        access_token, refresh_token, expires = await refresh_spotify_token(
            refresh_token=auth_account.refresh_token
        )
        auth_account.access_token = access_token  # type: ignore
        auth_account.refresh_token = refresh_token  # type: ignore
        auth_account.expires = expires  # type: ignore
        await auth_account.save()
    return auth_account.access_token
async def refresh_spotify_token(refresh_token: str) -> Tuple[str, str, int]:
    """
    Exchange refresh token to new access token

    :param refresh_token: spotify user's refresh token
    :return: access token, refresh token, when token expires in timestamp
    """
    token_url = "https://accounts.spotify.com/api/token"
    payload: Dict[str, str] = {
        "grant_type": "refresh_token",
        "refresh_token": refresh_token,
    }
    # Client credentials go in a Basic auth header, per the Spotify token API.
    basic = b64encode(f"{SPOTIFY_ID}:{SPOTIFY_SECRET}".encode("utf-8")).decode(
        "utf-8"
    )
    response: Response = await http_client.post(
        url=token_url,
        data=payload,
        headers={"Authorization": f"Basic {basic}"},
    )
    try:
        response.raise_for_status()
    except HTTPError:
        raise UnauthorizedError
    body = response.json()
    # NOTE(review): utcnow() is naive and .timestamp() interprets it as local
    # time; the value is consistent with the rest of this module's expiry
    # bookkeeping but is not a true Unix timestamp on non-UTC hosts — confirm
    # before exposing it outside this module.
    issued_at = int(datetime.utcnow().timestamp())
    # Spotify does not rotate the refresh token here, so the caller's one is
    # returned unchanged.
    return body["access_token"], refresh_token, issued_at + int(body["expires_in"])
|
# Copyright 2019, Google LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Federated Stack Overflow next word prediction library using TFF."""
import functools
import tensorflow as tf
import tensorflow_federated as tff
from optimization.shared import keras_metrics
from optimization.shared import training_specs
from utils import training_utils
from utils.datasets import stackoverflow_word_prediction
from utils.models import stackoverflow_models
def configure_training(
    task_spec: training_specs.TaskSpec,
    vocab_size: int = 10000,
    num_oov_buckets: int = 1,
    sequence_length: int = 20,
    max_elements_per_user: int = 1000,
    num_validation_examples: int = 10000,
    embedding_size: int = 96,
    latent_size: int = 670,
    num_layers: int = 1,
    shared_embedding: bool = False) -> training_specs.RunnerSpec:
    """Configures training for Stack Overflow next-word prediction.

    This method will load and pre-process datasets and construct a model used for
    the task. It then uses `iterative_process_builder` to create an iterative
    process compatible with `federated_research.utils.training_loop`.

    Args:
      task_spec: A `TaskSpec` class for creating federated training tasks.
      vocab_size: Integer dictating the number of most frequent words to use in
        the vocabulary.
      num_oov_buckets: The number of out-of-vocabulary buckets to use.
      sequence_length: The maximum number of words to take for each sequence.
      max_elements_per_user: The maximum number of elements processed for each
        client's dataset.
      num_validation_examples: The number of test examples to use for validation.
      embedding_size: The dimension of the word embedding layer.
      latent_size: The dimension of the latent units in the recurrent layers.
      num_layers: The number of stacked recurrent layers to use.
      shared_embedding: Boolean indicating whether to tie input and output
        embeddings.

    Returns:
      A `RunnerSpec` containing attributes used for running the newly created
      federated task.
    """
    # Deferred model constructor: a fresh Keras RNN is built each time this is
    # called (federated learning needs new model instances per computation).
    model_builder = functools.partial(
        stackoverflow_models.create_recurrent_model,
        vocab_size=vocab_size,
        num_oov_buckets=num_oov_buckets,
        embedding_size=embedding_size,
        latent_size=latent_size,
        num_layers=num_layers,
        shared_embedding=shared_embedding)
    # from_logits=True: the model emits raw scores, not probabilities.
    loss_builder = functools.partial(
        tf.keras.losses.SparseCategoricalCrossentropy, from_logits=True)
    # Token ids for padding / OOV buckets / end-of-sentence, used below to
    # mask out positions that should not count toward accuracy.
    special_tokens = stackoverflow_word_prediction.get_special_tokens(
        vocab_size, num_oov_buckets)
    pad_token = special_tokens.pad
    oov_tokens = special_tokens.oov
    eos_token = special_tokens.eos

    def metrics_builder():
        # Three accuracy variants differing only in which tokens are masked,
        # plus counters used for weighting/diagnostics.
        return [
            keras_metrics.MaskedCategoricalAccuracy(
                name='accuracy_with_oov', masked_tokens=[pad_token]),
            keras_metrics.MaskedCategoricalAccuracy(
                name='accuracy_no_oov', masked_tokens=[pad_token] + oov_tokens),
            # Notice BOS never appears in ground truth.
            keras_metrics.MaskedCategoricalAccuracy(
                name='accuracy_no_oov_or_eos',
                masked_tokens=[pad_token, eos_token] + oov_tokens),
            keras_metrics.NumBatchesCounter(),
            keras_metrics.NumTokensCounter(masked_tokens=[pad_token])
        ]

    # Only the train split is used in federated form; eval is centralized.
    train_clientdata, _, _ = tff.simulation.datasets.stackoverflow.load_data()
    # TODO(b/161914546): consider moving evaluation to use
    # `tff.learning.build_federated_evaluation` to get metrics over client
    # distributions, as well as the example weight means from this centralized
    # evaluation.
    _, validation_dataset, test_dataset = stackoverflow_word_prediction.get_centralized_datasets(
        vocab_size=vocab_size,
        max_sequence_length=sequence_length,
        num_validation_examples=num_validation_examples,
        num_oov_buckets=num_oov_buckets)
    # TFF computation that tokenizes/batches a raw client dataset according to
    # the task's batching and per-client truncation settings.
    train_dataset_preprocess_comp = stackoverflow_word_prediction.create_preprocess_fn(
        vocab=stackoverflow_word_prediction.create_vocab(vocab_size),
        num_oov_buckets=num_oov_buckets,
        client_batch_size=task_spec.client_batch_size,
        client_epochs_per_round=task_spec.client_epochs_per_round,
        max_sequence_length=sequence_length,
        max_elements_per_client=max_elements_per_user)
    # The model's input spec is derived from the preprocessing output type.
    input_spec = train_dataset_preprocess_comp.type_signature.result.element

    def tff_model_fn() -> tff.learning.Model:
        # Wrap the Keras model for TFF; must construct everything fresh here.
        return tff.learning.from_keras_model(
            keras_model=model_builder(),
            input_spec=input_spec,
            loss=loss_builder(),
            metrics=metrics_builder())

    iterative_process = task_spec.iterative_process_builder(tff_model_fn)
    if hasattr(train_clientdata, 'dataset_computation'):
        # Fast path: the ClientData can build a client's dataset inside a TFF
        # computation, so preprocessing is fused into the training process and
        # clients are sampled by id (strings) rather than by dataset.

        @tff.tf_computation(tf.string)
        def train_dataset_computation(client_id):
            client_train_data = train_clientdata.dataset_computation(client_id)
            return train_dataset_preprocess_comp(client_train_data)

        training_process = tff.simulation.compose_dataset_computation_with_iterative_process(
            train_dataset_computation, iterative_process)
        client_ids_fn = training_utils.build_sample_fn(
            train_clientdata.client_ids,
            size=task_spec.clients_per_round,
            replace=False,
            random_seed=task_spec.client_datasets_random_seed)
        # We convert the output to a list (instead of an np.ndarray) so that it can
        # be used as input to the iterative process.
        client_sampling_fn = lambda x: list(client_ids_fn(x))
    else:
        # Fallback: compose only the preprocessing and sample actual client
        # datasets in Python.
        training_process = tff.simulation.compose_dataset_computation_with_iterative_process(
            train_dataset_preprocess_comp, iterative_process)
        client_sampling_fn = training_utils.build_client_datasets_fn(
            dataset=train_clientdata,
            clients_per_round=task_spec.clients_per_round,
            random_seed=task_spec.client_datasets_random_seed)
    # Forward model-weight extraction from the original process so callers can
    # pull weights out of the composed process's state.
    training_process.get_model_weights = iterative_process.get_model_weights

    evaluate_fn = training_utils.build_centralized_evaluate_fn(
        model_builder=model_builder,
        eval_dataset=validation_dataset,
        loss_builder=loss_builder,
        metrics_builder=metrics_builder)
    # The training loop passes a round number, which centralized eval ignores.
    validation_fn = lambda model_weights, round_num: evaluate_fn(model_weights)

    test_fn = training_utils.build_centralized_evaluate_fn(
        model_builder=model_builder,
        # Use both val and test for symmetry with other experiments, which
        # evaluate on the entire test set.
        eval_dataset=validation_dataset.concatenate(test_dataset),
        loss_builder=loss_builder,
        metrics_builder=metrics_builder)

    return training_specs.RunnerSpec(
        iterative_process=training_process,
        client_datasets_fn=client_sampling_fn,
        validation_fn=validation_fn,
        test_fn=test_fn)
|
# Copyright 2020 Sebastian Ahmed
# This file, and derivatives thereof are licensed under the Apache License, Version 2.0 (the "License");
# Use of this file means you agree to the terms and conditions of the license and are in full compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software distributed under the License is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, EITHER EXPRESSED OR IMPLIED.
# See the License for the specific language governing permissions and limitations under the License.
import importlib
import adventure_pkg.modules.Utils as utils
import game
from adventure_pkg.modules.TestClasses import GameConfig,PlayScript
def main():
    """Run every level's bundled test script and print a pass/fail summary."""
    outcomes = {}
    levels = utils.readLevelJSON()
    for level_key in levels:
        print(f"Loading level:{level_key}")
        module_path = f"adventure_pkg.levels.{level_key}"
        level_obj = importlib.import_module(module_path).level
        # Default until a script is found and executed for this level.
        outcomes[level_obj.name] = 'No script found'
        if level_obj.testScript:
            cfg = GameConfig(levelName=level_key, disableDamage=False)
            script = PlayScript(level_obj.testScript, cfg)
            #print(script._script)
            passed = game.main(
                guiEnable=False, scriptedMode=True, scriptObj=script, configObj=cfg
            )
            print(f"Test result = {passed}")
            outcomes[level_obj.name] = passed
    print("Test Results Summary:")
    print("=====================")
    for level_name, outcome in outcomes.items():
        print(f"Level: {level_name} : Pass={outcome}")


if __name__ == '__main__':
    main()
|
import numpy as np
import tensorflow as tf
import cv2
class Waifu2x:
    """TF1/Keras wrapper around an exported Waifu2x super-resolution model."""

    # Path to the exported Keras SavedModel, relative to the process's
    # working directory (note the stray double slash — harmless to the OS).
    __MODEL_PATH__ = "./GUI//src/saved_model/Waifu2x"

    def __init__(self, sess):
        # Reuse the caller's session when given; otherwise create one capped
        # at 20% of GPU memory so other components can share the device.
        # NOTE(review): self.sess is stored but never explicitly wired to the
        # Keras model below, which relies on the default session — confirm.
        if sess is None:
            config = tf.ConfigProto()
            config.gpu_options.per_process_gpu_memory_fraction = 0.2
            self.sess = tf.Session(config=config)
        else:
            self.sess = sess
        self.__build_model()
        self.__zero_init()

    def __build_model(self):
        # Load the saved Keras model via the TF1 contrib API.
        self.__model = tf.contrib.saved_model.load_keras_model(self.__MODEL_PATH__)

    def __zero_init(self):
        # Warm-up inference on a zero image so graph-construction and weight
        # loading costs are paid here rather than on the first real frame.
        feed = np.zeros(shape=(1, 512, 512, 3))
        self.__model.predict(feed)

    def __image_preprocessing(self, image):
        # Upscale 2x with OpenCV first; the network then refines the result.
        # NOTE(review): assumes `image` is an HxWx3 array — confirm callers.
        x, y, _ = image.shape
        image = cv2.resize(image, dsize=(2 * y, 2 * x))
        # Add a batch dimension and normalize pixel values to [0, 1].
        return np.array([image]) / 255.0

    def __image_postprocessing(self, image):
        # Drop the batch dimension, clamp to the valid range, back to uint8.
        image = np.clip(image[0], 0, 1) * 255
        return image.astype(np.uint8)

    def upscale(self, image):
        # Returns `image` upscaled 2x (resize done in preprocessing).
        # NOTE(review): predict() is applied twice back-to-back — presumably a
        # deliberate double refinement pass, but it could be an accidental
        # duplicate line; confirm against the model's intended usage.
        image = self.__image_preprocessing(image=image)
        image = self.__model.predict(image)
        image = self.__model.predict(image)
        return self.__image_postprocessing(image=image)
|
"""Dummy test."""
def test_dummy():
    """Sanity check that the test harness itself runs."""
    expected = 2
    assert 1 + 1 == expected
|
# Initialize an instance of the following class. Use a variable to store the object and then call the info function to print out the attributes.
class Dog(object):
    """A simple dog record with a helper that prints its attributes."""

    def __init__(self, name, height, weight, breed):
        self.name = name
        self.height = height
        self.weight = weight
        self.breed = breed

    def info(self):
        """Print the dog's attributes, one per line."""
        print(f"Name: {self.name}")
        print(f"Weight: {self.weight} Pounds")
        print(f"Height: {self.height} Inches")
        print(f"Breed: {self.breed}")
|
#!/usr/bin/env python
# found on <http://files.majorsilence.com/rubbish/pygtk-book/pygtk-notebook-html/pygtk-notebook-latest.html#SECTION00430000000000000000>
# simple example of a tray icon application using PyGTK
import gtk
def message(data=None):
    "Function to display messages to the user."
    # Modal info dialog: run() blocks until the user dismisses it.
    dialog = gtk.MessageDialog(None, gtk.DIALOG_MODAL,
                               gtk.MESSAGE_INFO, gtk.BUTTONS_OK, data)
    dialog.run()
    dialog.destroy()
def open_app(data=None):
    # "Open App" menu callback: just echo the label to the user for now.
    message(data)
def close_app(data=None):
    # "Close App" menu callback: notify the user, then stop the GTK loop.
    message(data)
    gtk.main_quit()
def make_menu(event_button, event_time, data=None):
    """Build the tray icon's context menu and pop it up at the click."""
    menu = gtk.Menu()
    open_item = gtk.MenuItem("Open App")
    close_item = gtk.MenuItem("Close App")
    # Append, wire the activate callback, and show each entry in order.
    for item, callback, label in (
        (open_item, open_app, "Open App"),
        (close_item, close_app, "Close App"),
    ):
        menu.append(item)
        item.connect_object("activate", callback, label)
        item.show()
    # Pop up the menu at the position/time of the triggering event.
    menu.popup(None, None, None, event_button, event_time)
def on_right_click(data, event_button, event_time):
    # 'popup-menu' signal handler: show the context menu for the tray icon.
    make_menu(event_button, event_time)
def on_left_click(event):
    # 'activate' signal handler: acknowledge the left click with a dialog.
    message("Status Icon Left Clicked")
if __name__ == '__main__':
    # Create the tray icon from a stock image, wire left/right click
    # handlers, and enter the GTK main loop (runs until close_app quits it).
    icon = gtk.status_icon_new_from_stock(gtk.STOCK_ABOUT)
    icon.connect('popup-menu', on_right_click)
    icon.connect('activate', on_left_click)
    gtk.main()
|
import os
import configparser
def get_param(section, name):
    """
    Return the value of parameter "name" from section "section"
    of the user's ~/.mpython_conf file.

    Args:
        section: section name
        name: parameter name

    Returns:
        the parameter value, or None when the config file is missing.
    """
    path = os.path.expanduser("~/.mpython_conf")
    # Guard clause: without a config file there is nothing to read.
    if not os.path.exists(path):
        print("{} not found".format(path))
        return
    parser = configparser.ConfigParser()
    parser.read(path)
    return parser.get(section, name)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.