text stringlengths 8 6.05M |
|---|
#!/usr/bin/env python3
# Copyright (c) 2011, the Dart project authors. Please see the AUTHORS file
# for details. All rights reserved. Use of this source code is governed by a
# BSD-style license that can be found in the LICENSE file.
'''
This script finds all HTML pages in a folder and downloads all images, replacing
the urls with local ones.
'''
import os, sys, optparse, subprocess, multiprocessing
from os.path import abspath, basename, dirname, join
# Resolve the client/tools directory relative to this script's location.
SWARM_PATH = dirname(abspath(__file__))
CLIENT_PATH = dirname(dirname(SWARM_PATH))
CLIENT_TOOLS_PATH = join(CLIENT_PATH, 'tools')
# Add the client tools directory so we can find htmlconverter.py.
sys.path.append(CLIENT_TOOLS_PATH)
import htmlconverter
# Path to the converter script itself (kept around for reference).
converter = CLIENT_TOOLS_PATH + '/htmlconverter.py'
# This has to be a top level function to use with multiprocessing
def convertImgs(infile):
    """Convert one HTML file in place, localizing/inlining its images.

    Reads the module-level `options` set in main(). Must stay a top-level
    function so multiprocessing can pickle it for worker processes.
    """
    global options
    try:
        htmlconverter.convertForOffline(infile,
                                        infile,
                                        verbose=options.verbose,
                                        encode_images=options.inline_images)
        print('Converted ' + infile)
    except BaseException as e:
        # NOTE(review): BaseException also swallows KeyboardInterrupt inside
        # the worker — presumably deliberate so one bad file doesn't kill the
        # pool, but confirm before narrowing to Exception.
        print('Caught error: %s' % e)
def Flags():
    """Build the optparse parser for this tool's command-line flags."""
    parser = optparse.OptionParser()
    parser.add_option(
        "--inline_images",
        default=False,
        action='store_true',
        help=("Encode img payloads as data:// URLs rather than local files."))
    parser.add_option(
        "--verbose",
        default=False,
        action="store_true",
        help="Print verbose output")
    return parser
def main():
    """Convert every .html file under the directory named on the command line.

    Returns 1 on usage error; otherwise converts all files in a process pool.
    """
    global options
    parser = Flags()
    options, args = parser.parse_args()
    print("args: %s" % args)
    if len(args) < 1 or 'help' in args[0]:
        print('Usage: %s DIRECTORY' % basename(sys.argv[0]))
        return 1
    # NOTE(review): shadows os.path.dirname imported at the top of the file.
    dirname = args[0]
    print('Searching directory ' + dirname)
    files = []
    for root, dirs, fnames in os.walk(dirname):
        for fname in fnames:
            if fname.endswith('.html'):
                files.append(join(root, fname))
    # Oversubscribe the pool: workers are mostly network/IO bound.
    count = 4 * multiprocessing.cpu_count()
    pool = multiprocessing.Pool(processes=count)
    # Note: need a timeout to get keyboard interrupt due to a Python bug
    pool.map_async(convertImgs, files).get(3600) # one hour
if __name__ == '__main__':
    main()
|
# Convert the str-typed CSV text below into a list of dicts.
csv_values = """
이름, 연락처, 나이, 이메일
철수, "010-1234-4567", 23, "chulsu@gmail.com"
영희, "010-1234-2345", 30, "234@naver.com"
"""
# Remove the leading and trailing blank lines.
csv_values = csv_values.strip('\n')
# Split the remaining text into one string per row.
csv_list = csv_values.split('\n')
print(csv_list)
# Build the list of header keys (whitespace-trimmed).
keys = []
for el in csv_list[0].split(','):
    keys.append(el.strip(' '))
print(keys)
# One dict per data row, keyed by the header names.
# NOTE(review): values keep their leading spaces and surrounding quotes.
results = []
for val in csv_list[1:]:
    result_dict = {}
    i = 0
    for el in val.split(','):
        result_dict[keys[i]] = el
        i += 1
    results.append(result_dict)
print(results)
# Set data structure (sets):
# union, difference (the - operator also works), intersection.
# Collect the multiples of 3, 5, and 15 among 1..100.
set1 = set()
set2 = set()
set3 = set()
for i in range(1, 101):
    if i % 3 == 0:
        set1.add(i)
    if i % 5 == 0:
        set2.add(i)
    if i % 15 == 0:
        set3.add(i)
print(set1)
print(set2)
print(set3)
# List comprehension: squares of 1..10, first as a plain loop.
a = []
for i in range(1,11):
    a.append(i**2)
print(a)
# Even numbers in 1..10 via a comprehension.
a2 = [ i for i in range(1, 10+1) if i % 2 ==0]
print(a2)
# list info
# .. |
import api.helpers.endpoint_checks as endpoint_checks
from seat.models.token import Token
from django.http import HttpResponseServerError, JsonResponse
def validate_token_success_json_model(exam_id):
    """JSON body returned when a token validates successfully."""
    payload = {
        'success': True,
        'error': False,
        'exam_id': exam_id,
    }
    return JsonResponse(payload)
def validate_token_failure_json_model(message):
    """JSON body returned when token validation fails, carrying `message`."""
    payload = {
        'success': False,
        'error': True,
        'message': str(message),
    }
    return JsonResponse(payload)
def validate_token_logic(student, request):
    """Check the POSTed token; on success remember it in the session.

    Returns a success JsonResponse carrying the exam id, a failure
    JsonResponse with an explanatory message, or a 500 on unexpected errors.
    `student` is unused here but required by the endpoint-check wrapper.
    """
    try:
        token_set = Token.objects.filter(token=request.POST['token']).all()
        if not token_set:
            return validate_token_failure_json_model("Invalid token")
        token = token_set[0]
        if not token.open:
            return validate_token_failure_json_model("Token not open anymore")
        else:
            request.session['token'] = request.POST['token']
            return validate_token_success_json_model(token.exam.id)
    except Exception as error:
        # NOTE(review): broad catch hides the root cause; consider logging `error`.
        return HttpResponseServerError("Server error")
def validate_token(request):
    """Endpoint: validate a POSTed exam token for the current student.

    Delegates auth/parameter checks to the standard student endpoint
    wrapper, which then invokes validate_token_logic.
    """
    return endpoint_checks.standard_student_endpoint(
        "validate_token",
        ['token'],
        'POST',
        request,
        validate_token_logic)
from __future__ import unicode_literals
from django.conf import settings
from django.contrib.auth.models import User
from django.contrib.contenttypes.fields import GenericForeignKey
from django.contrib.contenttypes.models import ContentType
from django.core.urlresolvers import reverse
from django.db.models.signals import pre_delete, post_save
from django.db import models
import os
def media_upload(instance, filename):
    """Upload path for Post media: <username>/<filename>."""
    return '%s/%s' % (instance.user.username, filename)
class Post(models.Model):
    """A user post with optional media attachment and a denormalized like count."""
    content = models.TextField(null=True, blank=True)
    updated = models.DateTimeField(auto_now=True, auto_now_add=False)
    timestamp = models.DateTimeField(auto_now=False, auto_now_add=True)
    user = models.ForeignKey(User, on_delete=models.CASCADE, related_name="post_user")
    media = models.FileField(upload_to=media_upload, null=True, blank=True)
    # Denormalized counter; nothing in this module keeps it in sync.
    like_count = models.PositiveIntegerField(default=0)

    def __unicode__(self):
        # Python 2 string representation (this module targets old Django).
        return str(self.user)

    def get_absolute_url(self):
        """URL of this post's detail page."""
        return reverse("posts:detail", kwargs={"id": self.id})

    def extension(self):
        """File extension of the attached media, without the leading dot."""
        name, extension = os.path.splitext(self.media.name)
        return extension[1:]

    def liked_by_user(self, user):
        """True if `user` has liked this post."""
        if Like.objects.filter(post=self, user=user).exists():
            return True
        return False

    class Meta:
        # Newest posts first.
        ordering = ["-timestamp","-updated"]
class Like(models.Model):
    """A user's like of a post (no uniqueness constraint on user/post here)."""
    user=models.ForeignKey(User)
    post=models.ForeignKey(Post, on_delete=models.CASCADE)

    def __unicode__(self):
        return str(self.user)
class Comment(models.Model):
    """A comment attached to any model via a generic FK; supports threaded replies."""
    user=models.ForeignKey(User)
    content_type = models.ForeignKey(ContentType, on_delete=models.CASCADE)
    object_id = models.PositiveIntegerField()
    content_object = GenericForeignKey('content_type', 'object_id')
    content=models.TextField()
    timestamp=models.DateTimeField(auto_now_add=True, auto_now=False)
    # Self-reference: replies point at their parent comment.
    parent=models.ForeignKey("self", null=True, blank=True)

    class Meta:
        # Newest comments first.
        ordering=["-timestamp"]

    def __unicode__(self):
        return str(self.user.username)+" - "+self.content[:10]

    def children(self):
        """Direct replies to this comment."""
        return Comment.objects.filter(parent=self)

    @property
    def is_parent(self):
        """True when this is a top-level comment (has no parent)."""
        if self.parent is None:
            return True
        return False
def profile_pic_upload(instance, filename):
    """Upload path for profile pictures: <username>/profile_pictures/<filename>."""
    parts = (instance.user.username, 'profile_pictures', filename)
    return '/'.join(str(part) for part in parts)
# Choice tuples for UserProfile fields: (stored value, human-readable label).
GENDER_CHOICES = (
    # Fixed: was ('male,Male') — a single string, not a (value, label) pair.
    ('male', 'Male'),
    ('female', 'Female'),
    ('other', 'Other'),
)
RELATIONSHIP_CHOICES = (
    ('single', 'Single'),
    ('married', 'Married'),
    ('complicated', "It's complicated"),
)
class UserProfile(models.Model):
    """Extra per-user data; created automatically by the post_save handler below."""
    user = models.OneToOneField(User)
    profile_pic = models.ImageField(upload_to=profile_pic_upload, default='default.png', null=True, blank=True)
    gender = models.CharField(max_length=10, null=True, blank=True)#, choices=GENDER_CHOICES)
    address = models.TextField(null=True, blank=True)
    phone = models.CharField(max_length=10, null=True, blank=True)
    dob = models.DateField(null=True, blank=True)
    relationship_status = models.CharField(max_length=20, null=True, blank=True)#, choices=RELATIONSHIP_CHOICES)

    def __unicode__(self):
        return self.user.username
def user_profile_post_save_reciever(instance, created, *args, **kwargs):
    """post_save handler on User: create a profile for each newly created user."""
    if not created:
        return
    UserProfile(user=instance).save()
def post_pre_delete_reciever(sender, instance, *args, **kwargs):
    """pre_delete handler on Post: delete attached comments and the media file.

    Replies are removed before their parent comments; the media file removal
    is best-effort (missing files are ignored).
    """
    content_type = ContentType.objects.get_for_model(sender)
    object_id = instance.id
    comments = Comment.objects.filter(content_type=content_type, object_id=object_id)
    for c in comments:
        # Delete replies first so no child outlives its parent.
        for r in c.children():
            r.delete()
        c.delete()
    # Fixed: the original hard-coded a Windows "\\" separator, which produced
    # an invalid path on POSIX so the media file was silently never removed.
    path = os.path.join(settings.MEDIA_PATH, str(instance.media))
    try:
        os.remove(path)
    except OSError:
        # Best-effort: the file may never have existed on disk.
        pass
# Wire up the signal handlers defined above.
post_save.connect(user_profile_post_save_reciever, sender=User)
pre_delete.connect(post_pre_delete_reciever, sender=Post)
|
from rdflib.namespace import RDF
from source.utils import id2uri, g_add_with_valid
import json
import csv
import glob
def create_ttl(g, u, row):
    """Add one pathway row's entities and their links to graph `g`.

    Expected row keys: "cids", "geneids", "protacxns", "pmids" —
    pipe-separated id lists (example row: name "AP-1 transcription factor
    network", source "Pathway Interaction Database", taxname "Homo sapiens").
    `u` supplies the vocabulary terms (cid, gid, protein, pmid, gid2pmid,
    protein2gid). Returns the mutated graph.
    """
    cid_uri = id2uri(row["cids"], "cid")
    gene_uri = id2uri(row["geneids"], "gid")
    protein_uri = id2uri(row["protacxns"], "protein")
    pmid_uri = id2uri(row["pmids"], "pmid")
    # Type assertions and cross-links, validated by g_add_with_valid.
    g_add_with_valid(g, cid_uri, RDF.type, u.cid)
    g_add_with_valid(g, gene_uri, RDF.type, u.gid)
    g_add_with_valid(g, gene_uri, u.gid2pmid, pmid_uri)
    g_add_with_valid(g, protein_uri, RDF.type, u.protein)
    g_add_with_valid(g, protein_uri, u.protein2gid, gene_uri)
    g_add_with_valid(g, pmid_uri, RDF.type, u.pmid)
    return g
|
"""
John Eslick, Carnegie Mellon University, 2013
See LICENSE.md for license and copyright details.
"""
import os
from PyQt5 import uic
mypath = os.path.dirname(__file__)
# Load the (form class, Qt base class) pair from the Designer .ui file.
_optMessageWindowUI, _optMessageWindow = \
    uic.loadUiType(os.path.join(mypath, "optMessageWindow_UI.ui"))
class optMessageWindow(_optMessageWindow, _optMessageWindowUI):
    """Optimization message log window (Qt widget pair loaded from the .ui file)."""

    def __init__(self, parent=None):
        '''
        Constructor for optimization message window
        '''
        super(optMessageWindow, self).__init__(parent=parent)
        self.setupUi(self)  # Create the widgets

    def closeEvent(self, e):
        # Swallow close requests so the window can only be hidden by its owner.
        e.ignore()

    def clearMessages(self):
        """Remove all messages from the text browser."""
        self.msgTextBrowser.clear()
|
from sqlalchemy import Column, Integer, String, Text, DateTime, Float, Boolean, PickleType
from sqlalchemy.ext.declarative import declarative_base
Base = declarative_base()
class Member(Base):
    """SQLAlchemy model for a site member."""
    __tablename__ = 'member'

    id = Column(Integer, primary_key=True, nullable=False)
    name = Column(String(100), nullable=False)
    description = Column(Text, nullable=True)
    join_date = Column(DateTime, nullable=False)
    vip = Column(Boolean, nullable=False)
    number = Column(Float, nullable=False)

    def __repr__(self):
        # Fixed: previously reported "<UserModel model ...>" (copy-paste slip
        # from another model), which was misleading in logs and debuggers.
        return '<Member model {}>'.format(self.id)
#!/usr/bin/python3
# Copyright (C) 2019 Aleksa Sarai <cyphar@cyphar.com>
# Licensed under MIT.
import requests
def STORE_URL(key):
    """Return the key-value store URL for `key` (was a named lambda; PEP 8
    prefers a def, and a def gives a useful traceback name)."""
    return "https://store.ncss.cloud/%s" % (key,)
def fetch(key):
    """Fetch and JSON-decode the value stored under `key`.

    Raises KeyError when the store does not hold the key.
    """
    resp = requests.get(STORE_URL(key))
    if resp.ok:
        return resp.json()
    raise KeyError("key not present in store")
def store(key, value):
    """Store `value` (JSON-encoded) under `key`.

    Raises ValueError when the store rejects the value.
    """
    resp = requests.post(STORE_URL(key), json=value)
    if resp.ok:
        return
    raise ValueError("value was rejected by store")
def delete(key):
    """Delete `key` from the store (best-effort: the response is not checked)."""
    requests.delete(STORE_URL(key))
|
# Interactive walkthrough of Python dict basics; every step prints the
# command it demonstrates alongside the live result.
print("""
Short Tutorial on Dictionaries
------------------------------
Dictionaries are a poor man's object, akin to structs in other
languages. Think of them like arrays (or lists), except instead
of each element having an index that defines its position in the
array, each element is defined and accessed by a keyword. Thus
Dictionaries are defined by specifying a set of keys and values.
Dicts are mutable and implemented as hash tables under the hood.
This means they are very fast at accessing and storing elements.
""")
# Creating dicts: several equivalent constructor forms.
print("""
Creating Dictionaries
---------------------
""")
print('Create dictionary by a = dict(one=1, two=2, three=3)')
print("Create dictionary by a = {'one': 1, 'two': 2, 'three': 3}")
print("Create dictionary by a = dict(zip(['one', 'two', 'three'], [1, 2, 3]))")
print("Create dictionary by a = dict([('two', 2), ('one', 1), ('three', 3)])")
print("Create dictionary by a = dict({'three': 3, 'one': 1, 'two': 2})")
print('Create empty dictionaries by a = {} or a = dict()')
# Accessing elements: indexing, .get(), and membership tests.
print("""
Accessing Elements
------------------
""")
a = dict(a=1,b=2,c=3)
print('All of dict:', a)
print("Second element.\tCmd: a['b']\tReturns:", a['b'])
print("Get third element.\tCmd: a.get('c')\tReturns:", a.get('c'))
print("Check if key exists.\tCmd: 'a' in a\tReturns:", 'a' in a)
# Adding to a dict via key assignment.
print("""
Adding to List
--------------
""")
print('Current state of list. a =', a)
a['d'] = 4
print("Add new key/elem pair.\tCmd: a['d'] = 4\tResult:", a)
# Deleting from a dict: del, pop(), and clear().
print("""
Deleting from List
------------------
""")
print('Current state of list. a =', a)
del a['d']
print("Delete 'd' element.\t\tCmd: del a['d']\tResult:", a)
print("Delete and return 'c' element.\tCmd: a.pop('c')\tReturns:", a.pop('c'))
print("Clear all elements.\t\tCmd: a.clear()\tReturns:", a.clear())
# Common whole-dict operations: len and the keys/values/items views.
print("""
Various Operations
------------------
""")
a = dict(one=1, two=2, three=3, four=4)
print('Current state of dictionary. a =', a)
print('Length of dict.\t\t\tCmd: len(a)\tReturns:', len(a))
print("Get all keys.\t\t\tCmd: a.keys()\tReturns:", a.keys())
print("Get all values.\t\t\tCmd: a.values()\tReturns:", a.values())
print("Get all key, values pairs.\tCmd: a.items()\tReturns:", a.items())
# Closing notes on key hashability and allowed value types.
print("""
Notes on dict properties
------------------------
- All key names must be "hashable". Essentially any name which is valid for a
variable name is hashable.
- Values can be any python object or datatype.
""")
|
# -*- coding: utf-8 -*-
from typing import List
class Solution:
    def countGoodRectangles(self, rectangles: List[List[int]]) -> int:
        """Count rectangles achieving the largest inscribable square.

        Each [length, width] rectangle admits a square of side
        min(length, width); return how many rectangles reach the maximum
        such side over the whole input.
        """
        best_side = float("-inf")
        count = 0
        for length, width in rectangles:
            side = min(length, width)
            if side > best_side:
                best_side = side
                count = 1
            elif side == best_side:
                count += 1
        return count
if __name__ == "__main__":
    # Sanity checks taken from the problem statement's examples.
    solution = Solution()
    assert 3 == solution.countGoodRectangles([[5, 8], [3, 9], [5, 12], [16, 5]])
    assert 3 == solution.countGoodRectangles([[2, 3], [3, 7], [4, 3], [3, 7]])
|
def test(n):
for i in range(1, n, 2):
if n == i*(i + 2):
return True
return False
# Search j in [0, 1000) for which 4*j^3 - 3*j equals i*(i+2) for some odd i.
for j in range(0, 1000):
    a = (4*(j**3)) - (3*j)
    if test(a) == True:
        print(j)
# Unbounded variant of the same search, kept disabled.
'''
j = 0
while(1):
a = (4*(j**3)) - (3*j)
if test(a) == True:
print(j)
j = j + 1
'''
|
# # Print out list of students.
students = [
    {'first_name': 'Michael', 'last_name' : 'Jordan'},
    {'first_name' : 'John', 'last_name' : 'Rosales'},
    {'first_name' : 'Mark', 'last_name' : 'Guillen'},
    {'first_name' : 'KB', 'last_name' : 'Tonel'}
]
def classroom(dict):
    """Print "<first_name> <last_name>" for every record in the list.

    NOTE(review): the parameter name shadows the builtin `dict`; kept
    unchanged for interface compatibility.
    """
    for item in range(0, len(dict)):
        # Parenthesized print: valid under both Python 2 and Python 3
        # (the original bare print statement was Python-2-only).
        print(dict[item]["first_name"] + " " + dict[item]["last_name"])
classroom(students)
# Print out list of students and instructors, the array index, and the length of the characters.
users = {
    'Students': [
        {'first_name': 'Michael', 'last_name' : 'Jordan'},
        {'first_name' : 'John', 'last_name' : 'Rosales'},
        {'first_name' : 'Mark', 'last_name' : 'Guillen'},
        {'first_name' : 'KB', 'last_name' : 'Tonel'}
    ],
    'Instructors': [
        {'first_name' : 'Michael', 'last_name' : 'Choi'},
        {'first_name' : 'Martin', 'last_name' : 'Puryear'}
    ]
}
def classroom(dict):
    """For each group, print its key then "<n> - First Last - <name length>" lines.

    NOTE(review): the parameter name shadows the builtin `dict`; kept
    unchanged for interface compatibility.
    """
    for key in dict:
        # Parenthesized print: valid under both Python 2 and Python 3
        # (the original bare print statements were Python-2-only).
        print(key)
        for item in range(len(dict[key])):
            char = len(dict[key][item]["first_name"]) + len(dict[key][item]["last_name"])
            print("{} - ".format(item + 1) + dict[key][item]["first_name"] + " " + dict[key][item]["last_name"] + " - {}".format(char))
classroom(users)
#!/usr/bin/env python
# Copyright (C) 2015 Dmitry Rodionov
# This software may be modified and distributed under the terms
# of the MIT license. See the LICENSE file for details.
from ..dtrace.apicalls import apicalls
import inspect
from sets import Set
from os import sys, path
def choose_package_class(file_type, file_name, suggestion=None):
    """Resolve the analysis-package class for a sample.

    Uses `suggestion` as the package name when given, otherwise guesses one
    from the libmagic `file_type` string and the file name. Returns the
    class, or None when no package name could be guessed. Raises Exception
    when the package module cannot be imported or contains no matching class.
    """
    if suggestion is not None:
        name = suggestion
    else:
        name = _guess_package_name(file_type, file_name)
    if not name:
        return None
    full_name = "modules.packages.%s" % name
    try:
        # FIXME(rodionovd):
        # I couldn't figure out how to make __import__ import anything from
        # the (grand)parent package, so here I just patch the PATH
        sys.path.append(path.abspath(path.join(path.dirname(__file__), '..', '..')))
        # Since we don't know the package class yet, we'll just import everything
        # from this module and then try to figure out the required member class
        module = __import__(full_name, globals(), locals(), ['*'])
    except ImportError:
        raise Exception("Unable to import package \"{0}\": it does not "
                        "exist.".format(name))
    try:
        pkg_class = _found_target_class(module, name)
    except IndexError as err:
        raise Exception("Unable to select package class (package={0}): "
                        "{1}".format(full_name, err))
    return pkg_class
def _found_target_class(module, name):
""" Searches for a class with the specific name: it should be
equal to capitalized $name.
"""
members = inspect.getmembers(module, inspect.isclass)
return [x[1] for x in members if x[0] == name.capitalize()][0]
def _guess_package_name(file_type, file_name):
if "Bourne-Again" in file_type or "bash" in file_type:
return "bash"
elif "Mach-O" in file_type and "executable" in file_type:
return "macho"
elif "directory" in file_type and (file_name.endswith(".app") or file_name.endswith(".app/")):
return "app"
elif "Zip archive" in file_type and file_name.endswith(".zip"):
return "zip"
else:
return None
class Package(object):
    """ Base analysis package """

    # Backward-compatible class-level default. Each instance now gets its own
    # set in __init__: the original bound a single shared `sets.Set()` here,
    # so every Package instance accumulated every other instance's files
    # (and `sets` no longer exists on Python 3).
    touched_files = set()

    def __init__(self, target, host, **kwargs):
        """Create a package for analyzing `target`, reporting to Cuckoo `host`.

        Recognized kwargs: "options" (dict with optional "args", "method",
        "run_as_root" entries) and "timeout" (analysis time cap).
        """
        if not target or not host:
            raise Exception("Package(): `target` and `host` arguments are required")
        self.host = host
        self.target = target
        # Per-instance file tracking (fixes cross-instance shared state).
        self.touched_files = set()
        # Any analysis options?
        self.options = kwargs.get("options", {})
        # A timeout for analysis
        self.timeout = kwargs.get("timeout", None)
        # Command-line arguments for the target.
        self.args = self.options.get("args", [])
        # Choose an analysis method (or fallback to apicalls)
        self.method = self.options.get("method", "apicalls")
        # Should our target be launched as root or not
        self.run_as_root = _string_to_bool(self.options.get("run_as_root", "False"))

    def prepare(self):
        """ Preparation routine. Do anything you want here. """
        pass

    def start(self):
        """ Runs an analysis process.
        This function is a generator.
        """
        self.prepare()
        if self.method == "apicalls":
            self.apicalls_analysis()
        else:
            raise Exception("Unsupported analysis method. Try `apicalls`.")

    def apicalls_analysis(self):
        """Stream the target's API calls to the host, tracking file IO."""
        kwargs = {
            'args': self.args,
            'timeout': self.timeout,
            'run_as_root': self.run_as_root
        }
        for call in apicalls(self.target, **kwargs):
            # Send this API to Cuckoo host
            self.host.send_api(call)
            # Handle file IO APIs
            self.handle_files(call)

    def handle_files(self, call):
        """ Remember what files our target has been working with during the analysis"""
        def makeabs(filepath):
            # Is it a relative path? Suppose it's relative to our dtrace working directory
            if not path.isfile(filepath):
                filepath = path.join(path.dirname(__file__), "..", "dtrace", filepath)
            return filepath
        if call.api in ["fopen", "freopen", "open"]:
            self.open_file(makeabs(call.args[0]))
        if call.api in ["rename"]:
            self.move_file(makeabs(call.args[0]), makeabs(call.args[1]))
        if call.api in ["copyfile"]:
            self.copy_file(makeabs(call.args[0]), makeabs(call.args[1]))
        if call.api in ["remove", "unlink"]:
            self.remove_file(makeabs(call.args[0]))

    def open_file(self, filepath):
        """Record that `filepath` was opened by the target."""
        self.touched_files.add(filepath)

    def move_file(self, frompath, topath):
        """Record a rename: drop the old path (if tracked) and track the new one."""
        # Remove old reference if needed
        if frompath in self.touched_files:
            self.touched_files.remove(frompath)
        self.touched_files.add(topath)

    def copy_file(self, frompath, topath):
        """Record a copy: both source and destination are tracked."""
        # Add both files to the watch list
        self.touched_files.update([frompath, topath])

    def remove_file(self, filepath):
        """Record a deletion."""
        # TODO(rodionovd): we're actually unable to dump this file
        # because well, it was removed
        self.touched_files.add(filepath)
def _string_to_bool(raw):
if not isinstance(raw, basestring):
raise Exception("Unexpected input: not a string :/")
return raw.lower() in ("yes", "true", "t", "1")
|
from testutil import *
import numpy as np
import smat # want module name too
from smat import *
import timeit
import os,os.path
import matplotlib
matplotlib.use('Agg') # off-screen rendering
import matplotlib.pyplot as plt
#######################################################################
def _apply_unary(b,func,repeats,A,*args,**kwargs):
for i in range(repeats):
func(A,*args,**kwargs)
b.sync()
def apply_unary(dt, b, n, m, repeats, func, *args, **kwargs):
    """Best-of-5 average seconds per call of `func` on an n-by-m random matrix.

    `dt` is the dtype to test and `b` the backend module under test.
    """
    A = b.rand(n, m, dt)
    run = lambda: _apply_unary(b, func, repeats, A, *args, **kwargs)
    run()  # warm-up run to stabilize running time
    b.sync()
    timings = sorted(timeit.timeit(run, number=1) / repeats for _ in range(5))
    return timings[0]  # best time
#######################################################################
# Unary-op benchmarks: each returns (best seconds per call, flop-or-None).
def perftest_logistic(dt,b): return apply_unary(dt,b,128,1000,20,b.logistic),None
def perftest_exp(dt,b): return apply_unary(dt,b,128,1000,20,b.exp),None
def perftest_tanh(dt,b): return apply_unary(dt,b,128,1000,20,b.tanh),None
def perftest_softmax(dt,b): return apply_unary(dt,b,1000,10,20,b.softmax),None
def perftest_repeat_x(dt,b): return apply_unary(dt,b,512,256,20,b.repeat,16,axis=1),None
def perftest_tile_x(dt,b): return apply_unary(dt,b,512,256,20,b.tile,(1,16)),None
#######################################################################
# Reduction benchmarks across shapes (name suffix encodes rows x cols).
def perftest_reduce_5Kx1(dt,b): return apply_unary(dt,b,5000,1,100,b.sum,axis=None),None
def perftest_reducex_5Kx10(dt,b): return apply_unary(dt,b,5000,10,100,b.sum,axis=1),None
def perftest_reducey_5Kx10(dt,b): return apply_unary(dt,b,5000,10,100,b.sum,axis=0),None
def perftest_reducex_10x5K(dt,b): return apply_unary(dt,b,10,5000,100,b.sum,axis=1),None
def perftest_reducey_10x5K(dt,b): return apply_unary(dt,b,10,5000,100,b.sum,axis=0),None
def perftest_reduce_1Mx1(dt,b): return apply_unary(dt,b,1000000,1,5,b.sum,axis=None),None
def perftest_reducex_1Mx10(dt,b): return apply_unary(dt,b,1000000,10,5,b.sum,axis=1),None
def perftest_reducey_1Mx10(dt,b): return apply_unary(dt,b,1000000,10,5,b.sum,axis=0),None
def perftest_reducex_10x1M(dt,b): return apply_unary(dt,b,10,1000000,5,b.sum,axis=1),None
def perftest_reducey_10x1M(dt,b): return apply_unary(dt,b,10,1000000,5,b.sum,axis=0),None
#######################################################################
def _apply_binary(b,func,repeats,A,B,*args,**kwargs):
for i in range(repeats):
func(A,B,*args,**kwargs)
b.sync()
def apply_binary(dt,b,n,m,p,q,repeats,func,*args,**kwargs): # dtype to test, backend module to test
    """Best-of-5 average seconds per call of binary `func` on random
    n-by-m and p-by-q operands of dtype `dt`, using backend `b`."""
    A = b.rand(n,m,dt)
    B = b.rand(p,q,dt)
    _apply = lambda: _apply_binary(b,func,repeats,A,B,*args,**kwargs)
    _apply() # run once to stabilize running time
    b.sync()
    trials = []
    for i in range(5):
        # push everything out of the cache, if any
        #X = b.ones((1024*1024,1))
        #X = None
        # do the performance test
        trials.append(timeit.timeit(_apply,number=1)/repeats)
        b.sync()
    trials.sort()
    return trials[0] # return best time
#######################################################################
def mulsum(b,A,B):
    """Elementwise multiply then full reduction; result is discarded."""
    #return
    b.sum(A*B)
# Binary-op benchmarks: each returns (best seconds, flop-count-or-None).
def perftest_mul(dt,b,N): return apply_binary(dt,b,1,2**N,1,2**N,10,b.multiply),2**N
def perftest_dot(dt,b): return apply_binary(dt,b,128,784,784,500,10,b.dot),128*784*500
def perftest_dot_nt(dt,b): return apply_binary(dt,b,128,784,500,784,10,b.dot_nt),128*784*500
def perftest_dot_tn(dt,b): return apply_binary(dt,b,784,128,784,500,10,b.dot_tn),128*784*500
def perftest_dot_tt(dt,b): return apply_binary(dt,b,784,128,500,784,10,b.dot_tt),128*784*500
def perftest_dot_nt_vec(dt,b): return apply_binary(dt,b,1,1024,1,1024,20,b.dot_nt),None
def perftest_mulsum_vec(dt,b): return apply_binary(dt,b,1,1024*1024,1,1024*1024,20,lambda A,B: mulsum(b,A,B)),None
#######################################################################
def perftest_bprop(dt,b):
    """Time two epochs of minibatch backprop on random data.

    Returns (seconds per epoch, None). NOTE(review): Python-2 code —
    `trainsize/batchsize` relies on integer division, and the bare
    `exp`/`sum` names presumably resolve to smat's versions via the
    module's `from smat import *`; confirm before porting.
    """
    # Simulate training a 784-800-800-10 network on subset of MNIST
    trainsize = 2000
    batchsize = 200
    insize = 28*28
    hiddensize = 800
    outsize = 10
    dt_X = uint8 if uint8 in get_supported_dtypes() else float32
    times = {}  # NOTE(review): written but never used
    X = b.rand(trainsize,insize,dtype=dt_X)
    Y = b.rand(trainsize,outsize,dt)
    W1 = b.rand(insize,hiddensize,dt)
    b1 = b.rand(1,hiddensize,dt)
    W2 = b.rand(hiddensize,hiddensize,dt)
    b2 = b.rand(1,hiddensize,dt)
    W3 = b.rand(hiddensize,outsize,dt)
    b3 = b.rand(1,outsize,dt)
    eta = 0.001
    num_epoch = 2
    b.sync()
    tic()
    for epoch in range(num_epoch):
        for i in range(trainsize/batchsize):
            Z0 = X[i*batchsize:i*batchsize+batchsize].astype(dt)
            Y0 = Y[i*batchsize:i*batchsize+batchsize]
            # forward pass
            A1 = b.dot(Z0,W1) + b1
            Z1 = b.logistic(A1)
            A2 = b.dot(Z1,W2) + b2
            Z2 = b.logistic(A2)
            A3 = b.dot(Z2,W3) + b3
            A3 -= b.max(A3,axis=1).reshape((batchsize,1)) # for softmax stability
            Z3 = b.exp(A3)/b.sum(exp(A3),axis=1).reshape((batchsize,1)) # calculate softmax
            # backward pass
            D3 = (Z3-Y0)/trainsize
            dW3 = b.dot_tn(Z2,D3)
            db3 = sum(D3,axis=0)
            D2 = (Z2-Z2**2) * b.dot_nt(D3,W3)
            dW2 = b.dot_tn(Z1,D2)
            db2 = sum(D2,axis=0)
            D1 = (Z1-Z1**2) * b.dot_nt(D2,W2)
            dW1 = b.dot_tn(Z0,D1)
            db1 = sum(D1,axis=0)
            # Take gradient step
            W3 -= eta*dW3
            b3 -= eta*db3
            W2 -= eta*dW2
            b2 -= eta*db2
            W1 -= eta*dW1
            b1 -= eta*db1
    b.sync()
    return toc() / num_epoch, None
#######################################################################
class gridtest_reduce(object):
    """Grid performance test for one reduction op across matrix shapes."""
    def __init__(self,name,reduce,axis):
        self.name = name
        self.reduce = reduce
        self.A = None
        self.b = None
        self.axis = axis
        self.nrepeat = 1
    def configure(self,b,dt,n,m,nrepeat):
        # Allocate a fresh n-by-m random operand on backend b.
        self.A = b.rand(n,m,dt)
        self.b = b
        self.nrepeat = nrepeat
    def __call__(self):
        # Run the reduction nrepeat times; timing is done by the caller.
        #print self.A.shape
        for i in range(self.nrepeat):
            x = self.reduce(self.A,axis=self.axis)
        '''
        y = np.sum(as_numpy(self.A),axis=self.axis)
        try:
        assert_close(x,y)
        except:
        print x.ravel()
        print y
        quit()
        '''
        self.b.sync()
    def nflop(self):
        """Number of additions performed by one reduction pass."""
        n,m = self.A.shape
        if self.axis == 1:
            return (m-1)*n
        else:
            return (n-1)*m
#######################################################################
def run_perftest(log,dt,test,dtypes,argsets=None):
    """Run `test` on both smat and numpy backends, print a comparison line,
    and append best smat/numpy timings to `log`.

    Skips (logging only the test name) when `dt` is not in `dtypes`.
    NOTE(review): Python-2 print statements with trailing commas.
    """
    testname = test.__name__.partition("_")[2]
    if dt not in dtypes:
        log.write(testname+"\n")
        return
    if argsets is None:
        argsets = [()]
    for args in argsets:
        print rpad("%s%s:%s..." % (testname,str(args),dtype_short_name[dt]),24),
        backends = [smat,np]
        best = { backend : np.inf for backend in backends }
        for backend in backends:
            flop = None
            for trial in range(3):
                runtime,flop = test(dt,backend,*args)
                best[backend] = min(best[backend],runtime) # Take the best of three runs
            if flop is None:
                print(rpad("%s=%.4fms," % (backend.__package__,best[backend]*1000),17)), # print out the best milliseconds
            else:
                print(rpad("%s=%.3f GFLOPS," % (backend.__package__,flop/best[backend]/1e9),17)), # print out the best GFLOPS
        if best[np] > best[smat]:
            print("(%.1fx faster)" % (best[np]/best[smat]))
        else:
            print("(%.1fx SLOWER)" % (best[smat]/best[np]))
        log.write( rpad(testname,16)
                  +rpad("%.6f" % best[smat],10)
                  +rpad("%.6f" % best[np],10)
                  +"\n")
def run_gridtest(log,dt,gridtest,dtypes):
    """Sweep `gridtest` over a grid of matrix shapes (powers of `base`),
    logging GFLOPS per shape and saving a heatmap image under log/.

    NOTE(review): Python-2 only (`5L` long literal, print statements).
    """
    if dt not in dtypes:
        log.write(gridtest.name+"\n")
        return
    #backends = [(smat,"smat"),(np,"numpy")]
    backends = [(smat,"smat")]
    base = 5L
    nsteps = 8
    nrepeat = 3
    max_size = 128*1024*1024  # skip shapes with more elements than this
    for b,bname in backends:
        testname = "%s_%s_%s" % (bname,gridtest.name,dtype_short_name[dt])
        print rpad("%s..." % testname,24),
        gflops = np.zeros((nsteps,nsteps))
        #flops[:] = np.nan
        for mexp in range(nsteps):
            for nexp in range(nsteps):
                n,m = base**(nexp+1),base**(mexp+1)
                if n*m > max_size:
                    continue
                gridtest.configure(b,dt,n,m,nrepeat)
                b.sync()
                seconds = timeit.timeit(gridtest,number=1)/nrepeat
                gflops[nexp,mexp] = gridtest.nflop()/seconds/1000/1000/1000
        print
        # Tab-separated GFLOPS table for the log file.
        msg = ""
        for row in gflops:
            for val in row:
                if not np.isnan(val):
                    msg += str(val)
                msg += "\t"
        msg.strip('\t')  # NOTE(review): no-op — strip() returns a new string
        msg += "\n"
        log.write( rpad(testname,16) + "\n")
        log.write(msg)
        # Render the grid as a heatmap and save it next to the text log.
        plt.figure(dpi=60)
        plt.title(testname + " performance (GFLOPS)")
        plt.xlabel('shape.x')
        plt.ylabel('shape.y')
        img = plt.imshow(gflops.squeeze(),origin='lower') #Needs to be in row,col order
        img.set_interpolation('nearest')
        plt.xticks(np.arange(nsteps),[base**(i+1) for i in range(nsteps)])
        plt.yticks(np.arange(nsteps),[base**(i+1) for i in range(nsteps)])
        plt.colorbar()
        #plt.show()
        plt.savefig(os.path.join("log",testname+".png"))
#######################################################################
def perftest():
    """Top-level driver: run the enabled performance tests for each supported
    dtype, writing per-dtype result files under log/.

    NOTE(review): Python-2 print statements; most tests are currently
    commented out inside triple-quoted strings.
    """
    print '\n------------------- PERFORMANCE TESTS ----------------------\n'
    np.random.seed(42)
    set_backend_options(randseed=42,verbose=0,sanitycheck=False)
    if not os.path.exists("log"):
        os.makedirs("log")
    for dt in [float32,float64,int32,bool]:
        if dt not in get_supported_dtypes():
            continue
        # Record the performance results in a text file that can be
        # imported into a spreadsheet if so desired.
        perflog = os.path.join("log","smatperf-%s.txt" % dt.__name__)
        print "----- Generating %s ------" % perflog
        with open(perflog,"w") as log:
            log.write( rpad("test",16)
                      +rpad("smat",10)
                      +rpad("numpy",10)
                      +"\n")
            # Performance tests with dead code elimination disabled
            reset_backend(sanitycheck=False,elimdeadcode=False) # ,verbose=1,log=["exec"]
            #run_perftest(log,dt,perftest_mul,dtypes_float,((i,) for i in range(4,25)))
            run_perftest(log,dt,perftest_mul,dtypes_float,((i,) for i in [5,10,20,26]))
            '''
            run_perftest(log,dt,perftest_logistic ,dtypes_float)
            run_perftest(log,dt,perftest_exp ,dtypes_float)
            run_perftest(log,dt,perftest_tanh ,dtypes_float)
            run_perftest(log,dt,perftest_softmax ,dtypes_float)
            run_perftest(log,dt,perftest_dot ,dtypes_float)
            run_perftest(log,dt,perftest_dot_nt ,dtypes_float)
            run_perftest(log,dt,perftest_dot_tn ,dtypes_float)
            run_perftest(log,dt,perftest_dot_tt ,dtypes_float)
            run_perftest(log,dt,perftest_dot_nt_vec ,dtypes_float)
            '''
            #run_perftest(log,dt,perftest_mulsum_vec ,dtypes_float)
            '''
            run_perftest(log,dt,perftest_repeat_x ,dtypes_generic)
            run_perftest(log,dt,perftest_tile_x ,dtypes_generic)
            run_perftest(log,dt,perftest_reduce_5Kx1 ,dtypes_generic)
            run_perftest(log,dt,perftest_reducex_5Kx10,dtypes_generic)
            run_perftest(log,dt,perftest_reducey_5Kx10,dtypes_generic)
            run_perftest(log,dt,perftest_reducex_10x5K,dtypes_generic)
            run_perftest(log,dt,perftest_reducey_10x5K,dtypes_generic)
            run_perftest(log,dt,perftest_reduce_1Mx1 ,dtypes_generic)
            run_perftest(log,dt,perftest_reducex_1Mx10,dtypes_generic)
            run_perftest(log,dt,perftest_reducey_1Mx10,dtypes_generic)
            run_perftest(log,dt,perftest_reducex_10x1M,dtypes_generic)
            run_perftest(log,dt,perftest_reducey_10x1M,dtypes_generic)
            # More performance tests, where dead code elimination is now allowed (the default)
            reset_backend(elimdeadcode=True)
            run_perftest(log,dt,perftest_bprop,dtypes_float)
            reset_backend(elimdeadcode=True)
            run_gridtest(log,dt,gridtest_reduce("sum",sum,None),dtypes_float)
            run_gridtest(log,dt,gridtest_reduce("sum_y",sum,0),dtypes_float)
            run_gridtest(log,dt,gridtest_reduce("sum_x",sum,1),dtypes_float)
            '''
|
import math
import matplotlib.pyplot as plt
import numpy as np
from scipy import integrate
def EulerIntegrator(h, y0, f):
    """Advance the ODE y' = f(y) by one explicit Euler step.

    h  -- step size,
    y0 -- solution value at t = 0,
    f  -- right-hand side of the differential equation, f(y).

    Returns the approximate value of y(h).
    """
    increment = h * f(y0)
    return y0 + increment
def oneStepErrorPlot(f, y, integrator):
    """Plot the single-step integration error versus step size (log-log).

    f(y) -- right-hand side of the ODE,
    y(t) -- exact solution,
    integrator(h, y0, f) -- same argument convention as EulerIntegrator.
    """
    eps = np.finfo(float).eps
    step_sizes = np.logspace(-10, 0, 50)  # trial integration steps
    start = y(0)                          # initial value
    errors = []
    for h_step in step_sizes:
        exact_val = y(h_step)
        approx_val = integrator(h_step, start, f)
        # floor at machine epsilon so the log scale never sees a zero
        errors.append(np.maximum(np.max(np.abs(exact_val - approx_val)), eps))
    plt.loglog(step_sizes, errors, '-')
    plt.xlabel(u"Шаг интегрирования")
    plt.ylabel(u"Погрешность одного шага")
def firstOrderPlot():
    """Draw the first-order reference line y = x on the current axes."""
    limits = np.asarray(plt.gca().get_xlim())
    plt.loglog(limits, limits, '--r')
# Правая часть уравнения y'=f(y).
'''f = lambda y: np.cos(y)
# Аналитическое решение
c = 2*np.arctanh(np.tan(1/2))
yExact = lambda t: 2 * np.arctan(np.tanh((t + c) / 2))
# Строим график ошибок
oneStepErrorPlot(f, yExact, EulerIntegrator)
firstOrderPlot()
plt.legend([u"метод Эйлера", u"первый порядок"], loc=2)
plt.show()'''
'''f = lambda y: y
# Аналитическое решение
yExact = lambda t: np.exp(t)
# Строим график ошибок
oneStepErrorPlot(f, yExact, EulerIntegrator)
firstOrderPlot()
plt.legend([u"метод Эйлера", u"первый порядок"], loc=2)
plt.show()'''
def integrate(N, delta, f, y0, integrator):
    """Take N steps of size delta with `integrator` for y' = f(y), y(0) = y0.

    Returns the solution value at the end of the interval.
    NOTE(review): this definition shadows `scipy.integrate` imported at the
    top of the module — confirm the scipy import is actually unused.
    """
    state = y0
    for _ in range(N):
        state = integrator(delta, state, f)
    return state
def intervalErrorPlot(f, y, integrator, T=1, maxNumberOfSteps=1000, numberOfPointsOnPlot=16):
    """Plot the whole-interval integration error versus step size.

    The ODE y' = f(y) is integrated over [0, T] with an increasing number
    of steps; the error at t = T is plotted against the step length on
    log-log axes. Arguments mirror oneStepErrorPlot.
    """
    eps = np.finfo(float).eps
    # Fix: `np.int` was removed in NumPy 1.24+ (AttributeError); the builtin
    # int is the documented replacement.
    numberOfSteps = np.logspace(0, np.log10(maxNumberOfSteps), numberOfPointsOnPlot).astype(int)
    steps = T / numberOfSteps  # integration step sizes
    y0 = y(0)  # initial value
    yPrecise = y(T)  # exact solution at the right end of the interval
    yApproximate = [integrate(N, T / N, f, y0, integrator) for N in numberOfSteps]
    # floor the error at machine epsilon so it survives the log scale
    h = [np.maximum(np.max(np.abs(yPrecise - ya)), eps) for ya in yApproximate]
    plt.loglog(steps, h, '.-')
    plt.xlabel("Шаг интегрирования")
    plt.ylabel("Погрешность интегрования на интервале")
# Правая часть уравнения y'=f(y).
'''f = lambda y: np.cos(y)
# Аналитическое решение
c = 2*np.arctanh(np.tan(1/2))
yExact = lambda t: 2 * np.arctan(np.tanh((t + c) / 2))
# Строим график ошибок
intervalErrorPlot(f, yExact, EulerIntegrator, maxNumberOfSteps=1000)
firstOrderPlot()
plt.legend(["интегратор","первый порядок"],loc=2)
plt.show()'''
'''f=lambda y: 1
yExact=lambda t: t
# Строим график ошибок
oneStepErrorPlot(f, yExact, EulerIntegrator)
firstOrderPlot()
plt.legend([u"метод Эйлера",u"первый порядок"],loc=2)
plt.show()'''
def NewtonIntegrator(h, y0, f):
    """Advance y' = f(y) one step with a second-order Taylor expansion.

    h  -- step size,
    y0 -- solution value at t = 0,
    f  -- pair (f, f') of the right-hand side and its derivative.

    Uses y(h) ~ y0 + h*f(y0) + h^2/2 * f(y0)*f'(y0), since y'' = f'(y)*f(y).
    Returns the approximate value of y(h).
    """
    rhs, rhs_prime = f
    slope = rhs(y0)
    return y0 + h * slope + slope * rhs_prime(y0) * h * h / 2
# Правая часть уравнения y'=f(y).
'''f = (lambda y: np.cos(y), lambda y: 2 * np.arctan(np.tanh((0 + c) / 2)))
# Аналитическое решение
c = 2*np.arctanh(np.tan(1/2))
yExact = lambda t: 2 * np.arctan(np.tanh((t + c) / 2))
oneStepErrorPlot(f[0], yExact, EulerIntegrator)
oneStepErrorPlot(f, yExact, NewtonIntegrator)
firstOrderPlot()
plt.legend([u"метод Эйлера",u"метод Ньютона",u"первый порядок"],loc=2)
plt.show()'''
'''f=(lambda y: y, lambda y: 1)
# Аналитическое решение
yExact=lambda t: np.exp(t)
# Строим график ошибок
oneStepErrorPlot(f[0], yExact, EulerIntegrator)
oneStepErrorPlot(f, yExact, NewtonIntegrator)
firstOrderPlot()
plt.legend([u"метод Эйлера",u"метод Ньютона",u"первый порядок"],loc=2)
plt.show()'''
def ModifiedEulerIntegrator(h, y0, f):
    """Midpoint (modified Euler) step for y' = f(y).

    Arguments follow the EulerIntegrator convention: evaluate the slope at
    a half-step predictor, then take the full step with that slope.
    """
    midpoint = y0 + f(y0) * h / 2
    return y0 + h * f(midpoint)
'''f=lambda y: y
yExact=lambda t: np.exp(t)
# Строим график ошибок
oneStepErrorPlot(f, yExact, EulerIntegrator)
oneStepErrorPlot(f, yExact, ModifiedEulerIntegrator)
firstOrderPlot()
plt.legend([u"метод Эйлера",u"мод. Эйлер",u"первый порядок"],loc=2)
plt.show()'''
def RungeKuttaIntegrator(h, y0, f):
    """Classical fourth-order Runge-Kutta step for y' = f(y).

    Arguments follow the EulerIntegrator convention.
    """
    slope1 = f(y0)
    slope2 = f(y0 + slope1 * h / 2)
    slope3 = f(y0 + slope2 * h / 2)
    slope4 = f(y0 + slope3 * h)
    weighted = slope1 + 2 * slope2 + 2 * slope3 + slope4
    return y0 + weighted * h / 6
'''f=lambda y: y
yExact=lambda t: np.exp(t)
# Строим график ошибок
oneStepErrorPlot(f, yExact, EulerIntegrator)
oneStepErrorPlot(f, yExact, ModifiedEulerIntegrator)
oneStepErrorPlot(f, yExact, RungeKuttaIntegrator)
firstOrderPlot()
plt.legend([u"метод Эйлера",u"мод. Эйлер",u"метод Рунге-Кутты",u"первый порядок"],loc=2)
plt.show()'''
def NewtonMethod(F, x0):
    """Solve F(x) = 0 by Newton's method starting from x0.

    F -- pair (F, F') of the function and its derivative.
    Iterates at most 100 times, stopping early once the iterate no longer
    changes (machine precision reached). Returns the last iterate.
    """
    func, deriv = F
    current = x0
    for _ in range(100):  # hard cap on the number of iterations
        nxt = current - func(current) / deriv(current)
        if nxt == current:
            break  # converged to machine precision
        current = nxt
    return current
def BackwardEulerIntegrator(h, y0, f):
    """Implicit (backward) Euler step for y' = f(y).

    Solves y1 = y0 + h*f(y1) for y1 with Newton's method, using y0 as the
    initial guess. f is the pair (f, f'), as in NewtonIntegrator.
    """
    residual = lambda y: y0 + h * f[0](y) - y
    residual_derivative = lambda y: h * f[1](y) - 1
    return NewtonMethod((residual, residual_derivative), y0)
# Stiff linear test problem y' = alpha*y with strongly negative alpha.
alpha = -10
f = (lambda y: alpha * y, lambda y: alpha)  # (rhs, rhs derivative) pair
yExact = lambda t: np.exp(alpha * t)  # analytical solution
# Plot the error graphs
# (the two string literals below are disabled experiment snippets, kept as-is)
'''oneStepErrorPlot(f[0], yExact, EulerIntegrator)
oneStepErrorPlot(f, yExact, BackwardEulerIntegrator)
firstOrderPlot()
plt.legend([u"метод Эйлера",u"неявный Эйлер",u"первый порядок"],loc=2)
plt.show()'''
'''intervalErrorPlot(f[0], yExact, EulerIntegrator, numberOfPointsOnPlot=32)
intervalErrorPlot(f, yExact, BackwardEulerIntegrator, numberOfPointsOnPlot=16)
firstOrderPlot()
plt.legend([u"метод Эйлера", u"неявный Эйлер", u"первый порядок"], loc=2)
plt.show()'''
# Non-linear test problem y' = cos(y), compared across all integrators.
# Analytical solution: y(t) = 2*arctan(tanh((t + c)/2)).
c = 2 * np.arctanh(np.tan(1 / 2))
# (rhs, rhs derivative). Fix: d/dy cos(y) = -sin(y); the original supplied a
# constant (yExact(0)) here, which broke NewtonIntegrator's Taylor correction.
f = (lambda y: np.cos(y), lambda y: -np.sin(y))
yExact = lambda t: 2 * np.arctan(np.tanh((t + c) / 2))
# Interval-error plots: one curve per integrator (the original plotted
# Runge-Kutta twice and had garbled legend labels for five curves).
intervalErrorPlot(f[0], yExact, EulerIntegrator)
intervalErrorPlot(f, yExact, NewtonIntegrator)
intervalErrorPlot(f[0], yExact, ModifiedEulerIntegrator)
intervalErrorPlot(f[0], yExact, RungeKuttaIntegrator)
firstOrderPlot()
plt.legend([u"метод Эйлера", u"метод Ньютона", u"мод. Эйлер",
            u"метод Рунге-Кутты", u"первый порядок"], loc=2)
plt.show()
# Single-step error plots
oneStepErrorPlot(f[0], yExact, EulerIntegrator)
oneStepErrorPlot(f[0], yExact, ModifiedEulerIntegrator)
oneStepErrorPlot(f[0], yExact, RungeKuttaIntegrator)
firstOrderPlot()
plt.legend([u"метод Эйлера", u"мод. Эйлер", u"метод Рунге-Кутты", u"первый порядок"], loc=2)
plt.show()
# Initial-value problem setup: u'' = -u written as a first-order system.
def f(t, u):
    """Acceleration of the harmonic oscillator: f(t, u) = -u."""
    return -u
def exact(u0, du0, t):
    """Analytical solution of u'' = -u with u(0) = u0 and u'(0) = du0."""
    return math.cos(t) * u0 + math.sin(t) * du0
def iterate(func, u, v, tmax, n):
    """Apply the one-step update func(u, v, t, dt) n times over [0, tmax].

    The step size is tmax/(n-1); returns the final u value only.
    """
    dt = tmax / (n - 1)
    t = 0.0
    for _ in range(n):
        u, v = func(u, v, t, dt)
        t += dt
    return u
def euler_iter(u, v, t, dt):
    """One explicit Euler step for the system u' = v, v' = f(t, u)."""
    # Both updates use the values from the start of the step.
    return u + dt * v, v + dt * f(t, u)
def rk_iter(u, v, t, dt):
    """One RK4 step for the system u' = v, v' = f(t, u).

    v is advanced with classical RK4 on f; because u' = v has no explicit
    dependence on u or t, all four slopes for u collapse to the updated v.
    """
    k1 = f(t, u)
    k2 = f(t + dt * 0.5, u + k1 * 0.5 * dt)
    k3 = f(t + dt * 0.5, u + k2 * 0.5 * dt)
    k4 = f(t + dt, u + k3 * dt)
    v = v + dt * (k1 + 2 * k2 + 2 * k3 + k4) / 6
    # every u-slope equals the new v, so the weighted sum collapses to v
    k1 = k2 = k3 = k4 = v
    u = u + dt * (k1 + 2 * k2 + 2 * k3 + k4) / 6
    return u, v
# Convenience drivers: full-interval integration with Euler or RK4.
euler = lambda u, v, tmax, n: iterate(euler_iter, u, v, tmax, n)
runge_kutta = lambda u, v, tmax, n: iterate(rk_iter, u, v, tmax, n)
def plot_result(u, v, tmax, n):
    # Integrate u'' = -u with both Euler and RK4, track each method's
    # absolute error against the analytical solution at every step, and
    # hand the error curves to _plot.
    dt = tmax / (n - 1)
    t = 0.0
    allt = []
    error_euler = []
    error_rk = []
    r_exact = []
    r_euler = []
    r_rk = []
    u0 = u_euler = u_rk = u
    v0 = v_euler = v_rk = v
    for i in range(n):
        u = exact(u0, v0, t)  # reference value at the current time
        u_euler, v_euler = euler_iter(u_euler, v_euler, t, dt)
        u_rk, v_rk = rk_iter(u_rk, v_rk, t, dt)
        allt.append(t)
        error_euler.append(abs(u_euler - u))
        error_rk.append(abs(u_rk - u))
        r_exact.append(u)
        r_euler.append(u_euler)
        r_rk.append(u_rk)
        t += dt
    # NOTE(review): u_euler is passed as _plot's `r` parameter, which _plot
    # never reads — verify whether r_exact was intended here.
    _plot("error.png", "Error", "time t", "error e", allt, error_euler, error_rk, u_euler)
    # _plot("result.png", "Result", "time t", "u(t)", allt, r_euler, r_rk, r_exact)
def _plot(out, title, xlabel, ylabel, allt, euler, rk, r, exact=None):
    """Render the Euler and Runge-Kutta curves, save to `out`, then show.

    allt  -- x values (time axis),
    euler -- y values for the Euler curve,
    rk    -- y values for the Runge-Kutta curve,
    exact -- optional exact-solution y values, drawn as green dots.
    NOTE(review): the `r` parameter is accepted but never used.
    """
    plt.title(title)
    plt.ylabel(ylabel)
    plt.xlabel(xlabel)
    plt.plot(allt, euler, 'b-', label="Euler")
    plt.plot(allt, rk, 'r--', label="Runge-Kutta")
    if exact:
        plt.plot(allt, exact, 'g.', label='Exact')
    plt.legend(loc=4)
    plt.grid(True)
    # Fix: `papertype` and `orientation` were removed from savefig in
    # Matplotlib 3.9; the remaining keywords reproduce the original output.
    plt.savefig(out, dpi=None, facecolor='w', edgecolor='w', transparent=False)
    plt.show()
# Driver: compare Euler and RK4 against the exact solution of u'' = -u.
u0 = 1        # initial displacement
du0 = v0 = 0  # initial velocity
tmax = 10.0
n = 2000
print("t=", tmax)
print("euler =", euler(u0, v0, tmax, n))
print("runge_kutta=", runge_kutta(u0, v0, tmax, n))
print("exact=", exact(u0, v0, tmax))
# Error plot over a doubled interval with a doubled step count.
plot_result(u0, v0, tmax * 2, n * 2)
|
# Fix: the class is `Flask` (capital F), and its constructor requires the
# application's import name.
from flask import Flask

app = Flask(__name__)


@app.route('/')
def index():
    """Home page."""
    return "Home Page"


@app.route('/page2')
def hello():
    """Second static page."""
    return "Welcome to page 2"


@app.route('/user/<username>')
def show_user_profile(username):
    """Greet the user named in the URL."""
    return 'Hey there %s' % username


@app.route('/post/<int:post_id>')
def show_post(post_id):
    """Show a post by its integer id."""
    return 'Post id: %d' % post_id


if __name__ == "__main__":
    # Debug mode enables the reloader and in-browser tracebacks (dev only).
    app.run(debug=True)
class ConfigParser():
    """Minimal INI-style parser backed by a nested dict.

    Lines starting with '#' or ';' are comments, '[section]' opens a new
    section, and 'key = value' pairs are stored under the current section.
    Dict-style access is supported: config[section] and config[section] = {...}.
    """

    def __init__(self):
        # Fix: the original used *class* attributes, so every instance
        # shared one content string and one config_dict.
        self.content = ''
        self.config_dict = {}

    def _parse(self):
        """Populate config_dict from self.content."""
        self.content = self.content.replace('\r', '')
        section = None
        for line in self.content.split('\n'):
            if line.startswith('#') or line.startswith(';'):
                continue  # comment line
            if line.startswith('['):
                section = line.replace('[', '').replace(']', '')
                self.config_dict[section] = {}
                continue
            if line.strip() == '':
                continue
            stmt = line.split('=')
            key = stmt[0].strip()
            value = _getvalue(stmt).strip()
            if section is None:
                # Clearer than the NameError the original raised here.
                raise ValueError('option %r appears before any [section] header' % key)
            self.config_dict[section][key] = value

    def sections(self):
        """Return the list of section names."""
        return list(self.config_dict.keys())

    def options(self, section):
        """Return the option names in `section`."""
        return list(self.config_dict[section].keys())

    def get(self, section, option):
        """Return the value of `option` in `section`."""
        return self.config_dict[section][option]

    def set(self, section, option, value):
        """Set `option` in `section` to `value`."""
        self.config_dict[section][option] = value

    # support config[key] = val
    def __setitem__(self, __key, __val):
        self.config_dict[__key] = __val

    # support val = config[key]
    def __getitem__(self, __key):
        return self.config_dict[__key]

    def items(self, section):
        """Return [key, value] pairs for `section`."""
        return [[key, val] for key, val in self.config_dict[section].items()]

    def __str__(self):
        """Serialize the configuration back to INI-formatted text."""
        chunks = []
        for section, section_dict in self.config_dict.items():
            chunks.append('[' + section + ']\n')
            for key, val in section_dict.items():
                chunks.append(key + ' = ' + val + '\n')
            chunks.append('\n')
        return ''.join(chunks)

    def write(self, file_name):
        """Not implemented; raises instead of the original bare `raise`."""
        raise NotImplementedError('write() method not implemented')

    def read_string(self, content):
        """Parse configuration from a string."""
        self.content = content
        self._parse()

    def read(self, file_name):
        """Not implemented; raises instead of the original bare `raise`."""
        raise NotImplementedError('read() method not implemented')
def _getvalue(stmt):
index = 0
val = ''
for item in stmt:
if index > 0:
if val != '':
val += ('=' + item)
else:
val += item
index = index + 1
return val
|
import os

# Demonstrate resolving a file path relative to this script's location.
print(__file__)
abspath = os.path.abspath(__file__)
print(abspath)
dir_path = os.path.dirname(abspath)
print(dir_path)
# Fix: use a raw string so "\s" is not treated as an escape sequence
# (a SyntaxWarning on Python 3.12+). Note this concatenation is still
# Windows-only; os.path.join below is the portable spelling.
file_path = dir_path + r"\sample.html"
print(file_path)
print(os.path.join(dir_path, "sample.html"))
|
import sqlite3
conn = sqlite3.connect('RRTS_DB.db')
class __ResidentsSchema:
    # Data-access object for resident records (module-level `conn`).
    # NOTE(review): __init__ creates the *Complaints* table while the sibling
    # __ComplainSchema creates *Residents* — the two CREATE TABLE blocks look
    # swapped relative to the class names. Both classes are instantiated at
    # import time (bottom of the module), so both tables do get created;
    # confirm intent before refactoring.
    def __init__(self):
        self.curs = conn.cursor()
        conn.execute('''
        CREATE TABLE IF NOT EXISTS "Complaints" (
        "complaintId" INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
        "roadLocation" TEXT NOT NULL,
        "startLocation" TEXT NOT NULL,
        "endLocation" TEXT NOT NULL,
        "residentID" INTEGER NOT NULL,
        FOREIGN KEY("residentID") REFERENCES "Residents"("ResidentsID")
        )
        ''')
        conn.commit()

    def insertNewResident(self, name, idCardNo, Address, PhoneNo):
        # Insert one resident; returns the new row's autoincremented id.
        curs = self.curs.execute('''INSERT INTO Residents(Name,IdCard,Address,PhoneNo)
        VALUES (? ,? ,? ,?);''', (name, idCardNo, Address, PhoneNo))
        val = curs.lastrowid
        conn.commit()
        return val

    def getAllResident(self):
        # All resident rows, unfiltered.
        curs = self.curs.execute('SELECT * FROM Residents;')
        return curs.fetchall()

    def getResidentById(self,id):
        # Single resident row, or None when the id is unknown.
        curs = self.curs.execute('SELECT * FROM Residents WHERE ResidentsID = ?',(id,))
        return curs.fetchone()
class __ComplainSchema:
    # Data-access object for complaints (module-level `conn`).
    # NOTE(review): __init__ creates the *Residents* table even though this
    # class models complaints — the CREATE TABLE blocks appear swapped with
    # __ResidentsSchema. Confirm intent before refactoring.
    def __init__(self):
        self.curs = conn.cursor()
        conn.execute('''
        CREATE TABLE IF NOT EXISTS "Residents" (
        "ResidentsID" INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
        "Name" TEXT NOT NULL,
        "IdCard" TEXT NOT NULL UNIQUE,
        "Address" TEXT DEFAULT 'Home',
        "PhoneNo" INTEGER NOT NULL UNIQUE
        )
        ''')
        conn.commit()

    def makeComplaint(self, resId, roadLoc, startingPoint, endingPoint):
        # Insert one complaint; returns the new row's complaintId.
        curs = self.curs.execute('''
        INSERT INTO Complaints
        ("roadLocation", "startLocation", "endLocation", "residentID")
        VALUES ( ?, ?, ?, ?);
        ''', (roadLoc, startingPoint, endingPoint, resId))
        conn.commit()
        return curs.lastrowid
class __ComplaintInfoSchema:
    """DAO for the ComplaintInfo table (priority/material details per complaint).

    Uses the module-level sqlite3 connection `conn`. Named parameters keep
    the key spelled `complainId` (sic) because callers pass mappings built
    with that key.
    """

    def __init__(self):
        conn.execute('''
        CREATE TABLE IF NOT EXISTS "ComplaintInfo" (
        "complaintId" INTEGER,
        "priority" INTEGER,
        "rawMaterial" TEXT,
        "machines" TEXT,
        "statistics" TEXT,
        PRIMARY KEY("complaintId"),
        FOREIGN KEY("complaintId") REFERENCES "Complaints"("complaintId")
        )
        ''')
        conn.commit()

    def makeComplaintInfo(self, mapping):
        """Insert a full info row from the given named-parameter mapping."""
        conn.execute('''
        INSERT INTO ComplaintInfo
        ("complaintId", "priority", "rawMaterial", "machines", "statistics")
        VALUES (:complainId, :priority, :rawMaterial, :machines, :statistics);
        ''', mapping)
        conn.commit()

    def updateComplaintInfo(self, mapping):
        """Update every info column for mapping['complainId']."""
        conn.execute('''
        UPDATE ComplaintInfo
        SET priority = :priority,
        rawMaterial = :rawMaterial,
        machines = :machines,
        statistics = :statistics
        WHERE complaintId = :complainId;
        ''', mapping)
        conn.commit()

    def getComplaintInfo(self, id):
        """Return the info row for complaint `id`, or None when absent."""
        curs = conn.execute('''SELECT * FROM ComplaintInfo WHERE complaintId = ? ;''', (id,))
        # Fix: the original called fetchone() without returning it, so this
        # method always returned None.
        return curs.fetchone()

    def makeMaterialInfo(self, mapping):
        """Insert a partial row containing only the material/machine columns."""
        conn.execute('''
        INSERT INTO ComplaintInfo
        ("complaintId", "rawMaterial", "machines")
        VALUES (:complainId, :rawMaterial, :machines);
        ''', mapping)
        conn.commit()

    def updateMaterialInfo(self, mapping):
        """Update only the material/machine columns for mapping['complainId']."""
        conn.execute('''
        UPDATE ComplaintInfo
        SET rawMaterial = :rawMaterial,
        machines = :machines
        WHERE complaintId = :complainId;
        ''', mapping)
        conn.commit()
class __ScheduleSchema:
    # Read-only view that orders complaints for work scheduling.
    def getSchedule(self):
        # Complaints left-joined with their info rows; entries that already
        # have material/machine data sort later, then by priority.
        curs = conn.execute('''
        SELECT * FROM Complaints as c
        LEFT JOIN ComplaintInfo as ci ON c.complaintId = ci.complaintId
        ORDER BY rawMaterial IS NOT NULL , machines IS NOT NULL, priority;
        ''')
        return curs.fetchall()
# Module-level singleton DAOs; instantiating them creates the tables.
residentTable = __ResidentsSchema()
complainTable = __ComplainSchema()
infoTable = __ComplaintInfoSchema()
scheduleTable = __ScheduleSchema()
# -*- coding: utf-8 -*-
# filename: Console.py
import profile
import sys
def run(coroutine):
    """Prime `coroutine` with a single send(None).

    If the coroutine finishes immediately, return its StopIteration value;
    otherwise the function falls through and returns None.
    """
    try:
        coroutine.send(None)
    except StopIteration as stop:
        return stop.value
# Simple echo loop: block on stdin forever and print each line back.
while True:
    print ('input :')
    value = sys.stdin.readline()
    print(value)
from Tkinter import *
import time
#import dbi, odbc
from socket import *
bits = 0 # 0 = 8 bits, 1 = 12 bits (selects ADC resolution below)
# xxx [] = [ {8bits}, {12 bits}]
one_g = [40,624]    # presumably ADC counts per 1 g, indexed by `bits` — confirm
num_loop = 5000     # request count used by the TestConn throughput loop
max_value = one_g[bits]*2       # +/-2 g display range in counts
min_value = one_g[bits]*2*(-1)
shift = [170,2800]  # presumably the ADC zero offset per resolution — confirm
def show2(self):
    # Debug stub: prints a literal "2".
    print "2"
def TestConn(self):
    # Throughput test against the base station: send `num_loop` 'ft' requests
    # over UDP, print each 27-byte reply and a running requests/second figure.
    # Resets node 0's curve buffers before the timed loop.
    import struct, string
    running = 5
    connected = 0
    self.socket.settimeout(5)
    r = struct.pack('h', 100)
    #addr = ('192.168.0.3', 2001)
    addr = (self.IP_Basestation_1 , 2001)
    cmd='ft'
    r = struct.pack('h', 100)
    buf = self.socket.sendto(r+':'+cmd, addr)
    try:
        buf, addr = self.socket.recvfrom(1024)
        print 'Connection between host and base station is ok. Testing connection throughput'
        connected = 1
    except:
        # best-effort: fall through and attempt the timed loop anyway
        pass
    id = 0
    self.file_loaded = 0
    self.x_curve[id] = []
    self.y_curve[id] = []
    self.z_curve[id] = []
    self.start[id] = 0
    self.maxx[id] = 0
    self.maxy[id] = 0
    self.maxz[id] = 0
    begin_time = float(time.clock())
    for i in range(1,num_loop,1):
        buf = self.socket.sendto(r+':'+cmd, addr)
        buf, addr = self.socket.recvfrom(1024)
        receive_time = float(time.clock())
        # unpack the first 27 payload bytes individually
        r1 = int(struct.unpack('B', buf[0])[0])
        r2 = int(struct.unpack('B', buf[1])[0])
        r3 = int(struct.unpack('B', buf[2])[0])
        r4 = int(struct.unpack('B', buf[3])[0])
        r5 = int(struct.unpack('B', buf[4])[0])
        r6 = int(struct.unpack('B', buf[5])[0])
        r7 = int(struct.unpack('B', buf[6])[0])
        r8 = int(struct.unpack('B', buf[7])[0])
        r9 = int(struct.unpack('B', buf[8])[0])
        r10 = int(struct.unpack('B', buf[9])[0])
        r11 = int(struct.unpack('B', buf[10])[0])
        r12 = int(struct.unpack('B', buf[11])[0])
        r13 = int(struct.unpack('B', buf[12])[0])
        r14 = int(struct.unpack('B', buf[13])[0])
        r15 = int(struct.unpack('B', buf[14])[0])
        r16 = int(struct.unpack('B', buf[15])[0])
        r17 = int(struct.unpack('B', buf[16])[0])
        r18 = int(struct.unpack('B', buf[17])[0])
        r19 = int(struct.unpack('B', buf[18])[0])
        r20 = int(struct.unpack('B', buf[19])[0])
        r21 = int(struct.unpack('B', buf[20])[0])
        r22 = int(struct.unpack('B', buf[21])[0])
        r23 = int(struct.unpack('B', buf[22])[0])
        r24 = int(struct.unpack('B', buf[23])[0])
        r25 = int(struct.unpack('B', buf[24])[0])
        r26 = int(struct.unpack('B', buf[25])[0])
        r27 = int(struct.unpack('B', buf[26])[0])
        print "receive:", r1,r2,r3,r4,r5,r6,r7,r8,r9,r10,r11,r12,r13,r14,r15,r16,r17,r18,r19,r20,r21,r22,r23,r24,r25,r26,r27, "Time Spend:", receive_time-begin_time,"second, Max Sample Rate :", int(num_loop/(receive_time-begin_time)), "requests/second \n"
def Firmwareveresion(self):
    # (sic: misspelled "FirmwareVersion") Query the base station's firmware
    # version over UDP and print the 27 reply bytes.
    import struct, string
    running = 5
    connected = 0
    self.socket.settimeout(5)
    addr = (self.IP_Basestation_1 , 2001)
    cmd='fl\x40\x20\x03\x33\x12\x01\x01\x01'
    r = struct.pack('h', 100)
    buf = self.socket.sendto(r+':'+cmd, addr)
    try:
        buf, addr = self.socket.recvfrom(1024)
        print 'Connection testing is successful. Get Firmware Version'
        connected = 1
    except:
        pass
    # second request: this reply carries the version bytes printed below
    buf = self.socket.sendto(r+':'+cmd, addr)
    buf, addr = self.socket.recvfrom(1024)
    r1 = int(struct.unpack('B', buf[0])[0])
    r2 = int(struct.unpack('B', buf[1])[0])
    r3 = int(struct.unpack('B', buf[2])[0])
    r4 = int(struct.unpack('B', buf[3])[0])
    r5 = int(struct.unpack('B', buf[4])[0])
    r6 = int(struct.unpack('B', buf[5])[0])
    r7 = int(struct.unpack('B', buf[6])[0])
    r8 = int(struct.unpack('B', buf[7])[0])
    r9 = int(struct.unpack('B', buf[8])[0])
    r10 = int(struct.unpack('B', buf[9])[0])
    r11 = int(struct.unpack('B', buf[10])[0])
    r12 = int(struct.unpack('B', buf[11])[0])
    r13 = int(struct.unpack('B', buf[12])[0])
    r14 = int(struct.unpack('B', buf[13])[0])
    r15 = int(struct.unpack('B', buf[14])[0])
    r16 = int(struct.unpack('B', buf[15])[0])
    r17 = int(struct.unpack('B', buf[16])[0])
    r18 = int(struct.unpack('B', buf[17])[0])
    r19 = int(struct.unpack('B', buf[18])[0])
    r20 = int(struct.unpack('B', buf[19])[0])
    r21 = int(struct.unpack('B', buf[20])[0])
    r22 = int(struct.unpack('B', buf[21])[0])
    r23 = int(struct.unpack('B', buf[22])[0])
    r24 = int(struct.unpack('B', buf[23])[0])
    r25 = int(struct.unpack('B', buf[24])[0])
    r26 = int(struct.unpack('B', buf[25])[0])
    r27 = int(struct.unpack('B', buf[26])[0])
    print "receive:", r1,r2,r3,r4,r5,r6,r7,r8,r9,r10,r11,r12,r13,r14,r15,r16,r17,r18,r19,r20,r21,r22,r23,r24,r25,r26,r27, "\n"
def SetFrequency(self):
    # Query the base station's current RF channel, show it in a Tk popup,
    # and let the user submit a new channel number via the OK button.
    import struct, string
    running = 5
    connected = 0
    self.socket.settimeout(5)
    addr = (self.IP_Basestation_1 , 2001)
    #cmd='ff\xc0\x20\x04\x33\x42\x0E\x64\x64' #Set Channel data
    cmd='ff\x00\x20\x05\x33\x42\x64\x01\x01' #Query Channel data
    r = struct.pack('h', 100)
    buf = self.socket.sendto(r+':'+cmd, addr)
    try:
        buf, addr = self.socket.recvfrom(1024)
        self.CurrentFrequency = int(struct.unpack('B', buf[0])[0])
        print 'Connection to BS is successful. Get RF channel information'
        connected = 1
    except:
        pass
    print "Current RF Channel is %i"%(self.CurrentFrequency)
    self.CurrentFrequencystr.set(self.CurrentFrequency)
    def setval():
        # popup OK handler: send the channel typed into the Entry widget
        id = int(self.nodestr.get())
        CurrentFrequency = int(self.CurrentFrequencystr.get())
        addr = (self.IP_Basestation_1 , 2001)
        r = struct.pack('h', 100)
        cmd='ff\xc0\x20\x04\x33\x42\x0E' +chr(CurrentFrequency) +'\x64' #Set Channel data
        print cmd
        buf = self.socket.sendto(r+':'+cmd, addr)
        try:
            buf, addr = self.socket.recvfrom(1024)
            r1 = int(struct.unpack('B', buf[0])[0])
            print r1
            print 'The new frequency is %i'%(CurrentFrequency)
        except:
            print 'Changing the new frequency process is failed'
        nf.destroy()
    # open a popup window
    nf = Toplevel()
    t = "Set Frequency"
    nf.title(t)
    self.status.set(t)
    infoFrame = Frame(nf, width = self.nodewin_width,
                      height = self.nodewin_height, bd = 1)
    infoFrame.pack(fill = BOTH, expand = 0)
    Label(infoFrame, text = 'Current RF Frequency:').grid(row = 0, column = 1, sticky = W)
    Entry(infoFrame, textvariable = self.CurrentFrequencystr).grid(row = 0, column = 2)
    Button(infoFrame, text = 'OK', command = setval).grid(row = 2, column = 2)
#def ParserCommand(self, file = "D:/doc/Python/WirelessController/Command.txt"):
def ParserCommand(self, file = ""):
    # Read a text file of whitespace-separated hex byte pairs; for each line
    # build an 'fl'-prefixed binary command, send it to the base station over
    # UDP and print the 27 reply bytes. Prompts for the file when none given.
    import struct, string
    from tkFileDialog import askopenfilename
    running = 5
    connected = 0
    self.socket.settimeout(5)
    addr = (self.IP_Basestation_1 , 2001)
    r = struct.pack('h', 100)
    # maps one hex digit character to its decimal value (as a string)
    hex_table = {
        "0":"0",
        "1":"1",
        "2":"2",
        "3":"3",
        "4":"4",
        "5":"5",
        "6":"6",
        "7":"7",
        "8":"8",
        "9":"9",
        "a":"10",
        "b":"11",
        "c":"12",
        "d":"13",
        "e":"14",
        "f":"15",
        "A":"10",
        "B":"11",
        "C":"12",
        "D":"13",
        "E":"14",
        "F":"15"
    }
    if file =="":
        file = askopenfilename(title="Open Command File",
                               filetypes = (("Plain Text", "*"), #".txt"),
                                            ("All File", "*"))
                               )
    print file
    if file:
        import os
        profileName = os.path.basename(file)
        profilePath = os.path.dirname(file)
        try:
            os.chdir(profilePath)
        except:
            pass
        import string
        fin = open(profileName, 'r')
        line = fin.readline().split()
        while len(line):
            cmd='fl'
            while len(line):
                temp_string = line.pop(0)
                # two hex digits -> one command byte
                a = int(hex_table[temp_string[0]])*16 + int(hex_table[temp_string[1]])
                cmd = cmd + chr(a)
            print cmd
            try:
                buf = self.socket.sendto(r+':'+cmd, addr)
                buf, addr = self.socket.recvfrom(1024)
                r1 = int(struct.unpack('B', buf[0])[0])
                r2 = int(struct.unpack('B', buf[1])[0])
                r3 = int(struct.unpack('B', buf[2])[0])
                r4 = int(struct.unpack('B', buf[3])[0])
                r5 = int(struct.unpack('B', buf[4])[0])
                r6 = int(struct.unpack('B', buf[5])[0])
                r7 = int(struct.unpack('B', buf[6])[0])
                r8 = int(struct.unpack('B', buf[7])[0])
                r9 = int(struct.unpack('B', buf[8])[0])
                r10 = int(struct.unpack('B', buf[9])[0])
                r11 = int(struct.unpack('B', buf[10])[0])
                r12 = int(struct.unpack('B', buf[11])[0])
                r13 = int(struct.unpack('B', buf[12])[0])
                r14 = int(struct.unpack('B', buf[13])[0])
                r15 = int(struct.unpack('B', buf[14])[0])
                r16 = int(struct.unpack('B', buf[15])[0])
                r17 = int(struct.unpack('B', buf[16])[0])
                r18 = int(struct.unpack('B', buf[17])[0])
                r19 = int(struct.unpack('B', buf[18])[0])
                r20 = int(struct.unpack('B', buf[19])[0])
                r21 = int(struct.unpack('B', buf[20])[0])
                r22 = int(struct.unpack('B', buf[21])[0])
                r23 = int(struct.unpack('B', buf[22])[0])
                r24 = int(struct.unpack('B', buf[23])[0])
                r25 = int(struct.unpack('B', buf[24])[0])
                r26 = int(struct.unpack('B', buf[25])[0])
                r27 = int(struct.unpack('B', buf[26])[0])
                print "receive:", r1,r2,r3,r4,r5,r6,r7,r8,r9,r10,r11,r12,r13,r14,r15,r16,r17,r18,r19,r20,r21,r22,r23,r24,r25,r26,r27, "\n"
            except:
                print "UDP Error"
            line = fin.readline().split()
        fin.close()
def runSimuDatabase(self):
    # Toggle acquisition. First press: start the reader thread plus the GUI
    # helper threads (graph refresh, sample-rate/error-rate/auto-scroll
    # monitors). Second press: send the 'fe' stop command to the base
    # station and clear the running flags.
    import struct, string
    if self.fft == 1:
        self.fft = 0
        self.fftbutton.config(text = "FFT")
    t = time.clock()
    if self.simu == 0:
        self.simu = 1
        self.pause = 0
    else:
        self.simu = 0
        self.pause = 1
    self.file_loaded = 1
    if self.simu == 1:
        self.zoom = 0
        self.t0 = t
        self.simubuttondatabase.config(text = ' Stop ')
        import threading
        self.nodethread = []
        self.graphthread = []
        self.list_reset_thread = []
        print 'start reading data...'
        for i in range(0, (self.nodenum)):
            self.start_time[i] = 0
            self.prev_time[i] = -1
            self.last_time[i] = 0
        self.last_update_time = self.t0
        # single reader thread, servicing node 0 only
        self.nodethread.append( threading.Thread(target = self.readData1_database, args =[0]))
        self.nodethread[0].start()
        if not self.nogui:
            self.thread2 = threading.Thread(target = self.runUpdateGraph)
            self.thread2.start()
        else:
            self.status.set("GUI is running under non-displaying mode")
            self.thread6 = threading.Thread(target = self.show_status_in_nongui_mode)
            self.thread6.start()
        if self.Show_Sample_Rate:
            self.thread3 = threading.Thread(target = self.show_sample_rate_thread)
            self.thread3.start()
        if self.Auto_Scrolling:
            self.thread4 = threading.Thread(target = self.autoscrolling_thread)
            self.thread4.start()
        if self.Show_Error_Rate:
            self.thread5 = threading.Thread(target = self.show_error_rate_thread)
            self.thread5.start()
    else:
        self.t1 = t
        self.timeRange = self.t1-self.t0
        self.simubuttondatabase.config(text = ' Start ')
        ####################################
        # stop RIPE-MAC
        r = struct.pack('h', 100)
        addr = ('192.168.2.3', 2001)
        cmd='fe'
        self.socket.sendto(r+':'+cmd, addr)
        self.socket.settimeout(5)
        self.simu = 0
        self.pause = 1
        ####################################
#def run_remote_service():
def stop_remote_service():
    # Stub: only imports telnetlib; no remote shutdown is actually performed.
    import telnetlib
def inititalize():
    # NOTE(review): the name is misspelled and the parameter list is missing
    # `self`, yet the body uses `self` (and the builtin `id` as an index), so
    # calling this as written raises NameError. It is only ever referenced —
    # never called — from readData1_database. Kept as-is pending confirmation.
    self.file_loaded = 0
    self.x_curve[id] = []
    self.y_curve[id] = []
    self.z_curve[id] = []
    self.start[id] = 0
    self.maxx[id] = 0
    self.maxy[id] = 0
    self.maxz[id] = 0
    self.lost = 0
def readData1_database(self, id):
    # Thread entry point: reset the loss counter and run the blocking
    # receive loop for node `id`.
    # NOTE(review): `self.inititalize` is a bare attribute access, not a
    # call — the line has no effect (and inititalize itself lacks a `self`
    # parameter, so calling it would fail anyway).
    self.inititalize
    self.readData_database(id)
    self.number_of_packet_loss = 0
def readData_database(self, id):
    # Receiver loop for node `id` (only node 0 is serviced): send the 'fb'
    # start command to the base station, then keep reading UDP packets,
    # convert payload bytes to scaled values and append them to the node's
    # curve/time lists until self.simu is cleared.
    import socket, struct, string, os, math
    running = 5
    connected = 0
    counter = 0
    table = [0, 0.6,0,-0.6]
    last_sequence_number = 0
    current_sequence_number = 0
    self.number_of_packet_loss = 0
    self.socket.settimeout(15)
    cmd = ''
    addr = (self.IP_Basestation_1 , 2001)
    sys.stderr.write('Connection is successful \n')
    connected = 1
    first = 1
    #########################################
    # start RIPE-MAC
    r = struct.pack('h', 100)
    addr = ('192.168.2.3', 2001)
    cmd='fb'
    self.socket.sendto(r+':'+cmd, addr)
    self.socket.settimeout(1000)
    #########################################
    self.start_time[id] = 0
    packet =[]
    buf = []
    self.t0 = t1 = time.clock()
    print "readData_database id:",id
    Length = 12 #27 #23
    threshold = 0.3
    while (self.simu and id == 0):
        packet = []
        try:
            buf, addr = self.socket.recvfrom(1024)
            for j in range(0, Length):
                packet.append(int(struct.unpack('B', buf[j])[0]))
            i = packet[2] #very important: byte 2 carries the node id
            self.last_pulling_time[i] = time.clock()
            for j in range(3, Length):
                # raw byte -> scaled value; 171 is presumably the ADC zero
                # offset and 0.35225842 the gain — confirm against hardware
                packet[j] = int(float(packet[j]-171)*0.35225842)
                if packet[j] > 50.0:#29.43: # 3g
                    packet[j]= 50.0#29.43
                if packet[j] < -50.0:#-29.43: # -3g
                    packet[j]= -50.0#-29.43
                packet[j] = packet[j] / 30.0#9.81
            for j in range (3, Length):
                self.x_curve[i].append(packet[j])
                self.y_curve[i].append(0)
                self.z_curve[i].append(0)
                self.timelist[i].append(self.last_pulling_time[i]-self.t0)
        except NameError,c:
            print 'error ', c, 'undefined : error while receiving packet '
        except:
            # best-effort: timeouts/short packets are silently dropped
            pass
    try:
        self.socketlist[id].close()
    except:
        pass
def runUpdateGraph(self):  # the name was changed by SM. [8/18/05]
    """Redraw every node's graph while acquiring, then reschedule itself."""
    if self.simu == 1:
        for node_idx in range(self.nodenum):
            self.updateGraph1(node_idx)
    # Re-arm the Tk timer so this method keeps firing periodically.
    self.master.after(self.Thread_Update_Time, self.runUpdateGraph)
def reset_list(self):
    """Periodically flush the per-node data buffers so plot lists and
    memory use do not grow without bound; reschedules itself via Tk."""
    #if (( time.time()- self.last_reset_time ) > (self.List_Reset_Time+ 0.2 ) ): #and self.Auto_Scrolling == 1:
    print "yes!! reset the chain"
    #self.last_reset_time = time.time()
    #
    # Signal readers to pause while the buffers are being swapped out.
    self.pause = 1
    #time.sleep(0.1)
    #for i in range(0, 10000):
    #    j=i
    self._lock.acquire()
    #for i in self.nodethread:
    #    i.join()
    self.thread2.join()
    for i in range(0, (self.nodenum)):
        #
        #    self.x_curve[i].pop(0)# = [0]
        #    self.y_curve[i].pop(0)# = [0]
        #    self.z_curve[i].pop(0)# = [0]
        #    self.fft_x_curve[i].pop(0)# = [0]
        #    self.fft_y_curve[i].pop(0)# = [0]
        #    self.fft_z_curve[i].pop(0)# = [0]
        #    self.timelist[i].pop(0) #= [0]
        #
        # Replace (not pop) each buffer with a fresh single-sample list.
        self.x_curve[i] = [0]
        self.y_curve[i] = [0]
        self.z_curve[i] = [0]
        self.fft_x_curve[i] = [0]
        self.fft_y_curve[i] = [0]
        self.fft_z_curve[i] = [0]
        self.timelist[i] = [0]
        # flush_time counts flushes; elsewhere it is divided by nodenum,
        # which suggests one increment per node -- TODO confirm placement.
        self.flush_time = self.flush_time + 1
    self._lock.release()
    self.pause= 0
    #for i in range(0, (self.nodenum)):
    #    self.nodethread[i].start()
    # Schedule the next flush List_Reset_Time seconds from now (Tk uses ms).
    self.master.after(self.List_Reset_Time*1000, self.reset_list)
# 11/19/2005 Show Sample Rate of Sensor node by ChongJing
def show_sample_rate_thread(self):
    """Background loop: print frame count / uptime twice a second while
    acquisition (self.simu) is active."""
    while self.simu:
        #print "inside show_sample_rate"
        t1=time.time()
        #print "before if"
        # Only report once data exists and the timestamps span is non-zero.
        if len(self.x_curve[0]) and ((t1-self.t0)% self.List_Reset_Time ) and (float(self.timelist[0][-1])-float(self.timelist[0][0])):
            print len(self.x_curve[0]),"th frame", ((t1-self.t0)% self.List_Reset_Time ), " Already up for ", int((t1-self.t0)+(self.flush_time/self.nodenum)*self.List_Reset_Time) , " second"
            #print "Sample rate : %i samples/s "%(int(len(self.timelist[0])/(float(self.timelist[0][-1])-float(self.timelist[0][0])))/10)
            #self.sample_rate = int(len(self.timelist[0])/(float(self.timelist[0][-1])-float(self.timelist[0][0])))
            #print "node 0 receive: %.5f %.5f %f", r1,r2,r3,"Request Rate :", int(len(self.x_curve[0])/(t1-self.t0)), "requests/second"
            #if use reset thread to calculate sample rate
            #print "node 0 sample rate : %i samples/s receive: %4.1f %4.1f %4.1f"%(int(len(self.x_curve[0])/((t1-self.t0)% self.List_Reset_Time )) ,r1,r2,r3)
            #if use pop to calculate sample rate
            #print "node 0 sample rate : %i samples/s receive: %4.1f %4.1f %4.1f"%(int(len(self.x_curve[0])/((t1-self.t0))) ,r1,r2,r3)
        time.sleep(0.5)
def show_error_rate_thread(self):
    """Background loop: once enough samples exist (>200), print the packet
    loss count and loss rate once per second while acquisition is active."""
    while self.simu:
        #print "inside show_sample_rate"
        #t1=time.time()
        #print "before if"
        if len(self.x_curve[0]) >200:
            # print len(self.x_curve[0]),"th frame", ((t1-self.t0)% self.List_Reset_Time ), " Already up for ", int((t1-self.t0)+(self.flush_time/self.nodenum)*self.List_Reset_Time) , " second"
            #print "Sample rate : %i samples/s "%(int(len(self.timelist[0])/(float(self.timelist[0][-1])-float(self.timelist[0][0])))/10)
            print "Number of packet loss is: ", self.number_of_packet_loss
            # Loss rate = lost / (received + lost), as a percentage.
            print "Packet Loss Rate right now is ", float(self.number_of_packet_loss / (len(self.x_curve[0])+self.number_of_packet_loss))*100, " %"
            #self.sample_rate = int(len(self.timelist[0])/(float(self.timelist[0][-1])-float(self.timelist[0][0])))
            #print "node 0 receive: %.5f %.5f %f", r1,r2,r3,"Request Rate :", int(len(self.x_curve[0])/(t1-self.t0)), "requests/second"
            #if use reset thread to calculate sample rate
            #print "node 0 sample rate : %i samples/s receive: %4.1f %4.1f %4.1f"%(int(len(self.x_curve[0])/((t1-self.t0)% self.List_Reset_Time )) ,r1,r2,r3)
            #if use pop to calculate sample rate
            #print "node 0 sample rate : %i samples/s receive: %4.1f %4.1f %4.1f"%(int(len(self.x_curve[0])/((t1-self.t0))) ,r1,r2,r3)
        time.sleep(1)
def show_status_in_nongui_mode(self):
    """Background loop: refresh the status line with uptime while headless."""
    while self.simu:
        elapsed = time.clock() - self.t0
        message = 'GUI is running under non-displaying: Started %i seconds \n' % elapsed
        self.status.set(message)
        time.sleep(1)
def autoscrolling_thread(self):
    """Background loop: cycle the detail view to the next node every
    Auto_Scrolling_Time seconds while acquisition is active."""
    while self.simu:
        idle_for = time.time() - self.last_update_time
        if idle_for > self.Auto_Scrolling_Time:
            self.last_update_time = time.time()
            self.cur_node = (self.cur_node + 1) % self.nodenum
            self.status.set("Now showing Node %i details" % (self.cur_node))
        time.sleep(0.5)
def append_raw_packet(self, buf):
    """Decode each byte of *buf* and append it to the per-byte raw log."""
    import struct, string
    for byte_idx in range(self.Packet_Size):
        value = struct.unpack('B', buf[byte_idx])[0]
        self.raw[byte_idx].append(int(value))
def preference(self):
    """Open a Toplevel 'Preference' dialog; the OK button copies every
    field back into the matching instance attribute via setval()."""
    def setval():
        # Copy each Tk StringVar into its instance attribute. The
        # lowercase locals mirror the values but are otherwise unused
        # (kept exactly as in the original code).
        id = int(self.nodestr.get())
        self.low_resolution = int(self.low_resolutionstr.get())
        self.high_resolution = int(self.high_resolutionstr.get())
        self.sample_rate = float(self.SAMPLING_RATEstr.get())
        SAMPLING_RATE = float(self.SAMPLING_RATEstr.get())
        #print "Sample_rate, self.sample_rate", SAMPLING_RATE, self.sample_rate
        self.Thread_Update_Time = int(self.Thread_Update_Timestr.get())
        thread_update_time = int(self.Thread_Update_Timestr.get())
        self.Request_Sleep_Time = float(self.Request_Sleep_Timestr.get())
        Request_Sleep_Time = float(self.Request_Sleep_Timestr.get())
        self.Auto_Scrolling_Time = int(self.Auto_Scrolling_Timestr.get())
        Auto_Scrolling_Time = int(self.Auto_Scrolling_Timestr.get())
        self.Auto_Scrolling = int(self.Auto_Scrollingstr.get())
        Auto_Scrolling = int(self.Auto_Scrollingstr.get())
        self.Database_enable = int(self.Database_enablestr.get())
        Database_enable = int(self.Database_enablestr.get())
        self.List_Reset_Time = int(self.List_Reset_Timestr.get())
        List_Reset_Time = int(self.List_Reset_Timestr.get())
        # Clamp the FFT recalculation period to a minimum of 2 seconds.
        if (int(self.FFT_Recalculation_Timestr.get())) < 2:
            print "FFT Recalcuation Time should be bigger than 2. \nFFT Recalcuation Time will be set to 2 now"
            #print self.FFT_Recalculation_Timestr
            self.FFT_Recalculation_Timestr.set('2')
            #print self.FFT_Recalculation_Timestr
        self.FFT_Recalculation_Time = int(self.FFT_Recalculation_Timestr.get())
        FFT_Recalculation_Time = int(self.FFT_Recalculation_Timestr.get())
        self.nogui = int(self.noguistr.get())
        NoGUI = int(self.noguistr.get())
        self.Show_Error_Rate = int(self.Show_Error_Ratestr.get())
        Show_Error_Rate = int(self.Show_Error_Ratestr.get())
        #self.IP_Basestation_1 = self.IP_Basestation_1str.get())
        #IP_Basestation_1 = int(self.IP_Basestation_1str.get())
        #print thread_update_time, Sample_rate
        nf.destroy()
    # end of setcali
    # open a popup window
    nf = Toplevel()
    t = "Preference"
    nf.title(t)
    self.status.set(t)
    infoFrame = Frame(nf, width = self.nodewin_width,
                      height = self.nodewin_height, bd = 1)
    infoFrame.pack(fill = BOTH, expand = 0)
    # Row labels for each preference field (rows match the Entry widgets below).
    Label(infoFrame, text = 'Resolution Stepping for each node:(Default:30)').grid(row = 0, column = 1, sticky = W)
    Label(infoFrame, text = 'Resolution Steping for node detail:(Default:15)').grid(row = 1, column = 1, sticky = W)
    if not self.simu:
        Label(infoFrame, text = 'Sample Rate:(Default:0.005)').grid(row = 2, column = 1, sticky = W)
    #Label(infoFrame, text = 'Graph Thread Update Period:(Default:1)').grid(row = 3, column = 1, sticky = W)
    Label(infoFrame, text = 'Thread Sleep').grid(row = 4, column = 1, sticky = W)
    Label(infoFrame, text = 'Request Thread Sleep Time:(Default:0.01)').grid(row = 5, column = 1, sticky = W)
    Label(infoFrame, text = 'Auto Scrolling').grid(row = 6, column = 1, sticky = W)
    Label(infoFrame, text = 'Auto Scrolling Period(Second)').grid(row = 7, column = 1, sticky = W)
    Label(infoFrame, text = 'Database Logging Enable').grid(row = 8, column = 1, sticky = W)
    Label(infoFrame, text = 'Graph List Rest Time').grid(row = 9, column = 1, sticky = W)
    Label(infoFrame, text = 'Show Sample Rate').grid(row = 10, column = 1, sticky = W)
    Label(infoFrame, text = 'Base Station IP Address').grid(row = 11, column = 1, sticky = W)
    Label(infoFrame, text = 'FFT7 Recalcuation Time(in second and should be grater than 2)').grid(row = 12, column = 1, sticky = W)
    if not self.simu:
        Label(infoFrame, text = 'Display Error Rate').grid(row = 13, column = 1, sticky = W)
    Label(infoFrame, text = 'Drawing').grid(row = 14,column = 1, sticky = W)
    #Label(infoFrame, text = 'Max').grid(row = 1, column = 2, sticky = W)
    #Label(infoFrame, text = 'X').grid(row = 2, column = 0, sticky = W)
    #Label(infoFrame, text = 'Y').grid(row = 3, column = 0, sticky = W)
    #Label(infoFrame, text = 'Z').grid(row = 4, column = 0, sticky = W)
    # Entry widgets bound to the StringVars read back in setval().
    Entry(infoFrame, textvariable = self.low_resolutionstr).grid(row = 0, column = 2)
    Entry(infoFrame, textvariable = self.high_resolutionstr).grid(row = 1, column = 2)
    if not self.simu:
        Entry(infoFrame, textvariable = self.SAMPLING_RATEstr).grid(row = 2, column = 2)
    #Entry(infoFrame, textvariable = self.Thread_Update_Timestr).grid(row = 3, column = 2)
    Entry(infoFrame, textvariable = self.Request_Sleep_Timestr).grid(row = 5, column = 2)
    #Entry(infoFrame, textvariable = self.Auto_Scrollingstr).grid(row = 6, column = 2)
    Entry(infoFrame, textvariable = self.Auto_Scrolling_Timestr).grid(row = 7, column = 2)
    Entry(infoFrame, textvariable = self.List_Reset_Timestr).grid(row = 9, column = 2)
    #Entry(infoFrame, textvariable = self.Show_Sample_Ratestr).grid(row = 10, column = 2)
    #Entry(infoFrame, textvariable = 'TEXT2').grid(row = 10, column = 2)
    Entry(infoFrame, textvariable = self.IP_Basestation_1str).grid(row = 11, column = 2)
    #Entry(infoFrame, text = 'TEXT').grid(row = 11, column = 2)
    Entry(infoFrame, textvariable = self.FFT_Recalculation_Timestr).grid(row = 12, column = 2)
    # Checkbuttons for the boolean-style options (stored as "0"/"1" strings).
    Checkbutton(infoFrame, text="", variable= self.Request_Sleep, onvalue="1", offvalue="0").grid(row = 4, column = 0)
    Checkbutton(infoFrame, text="", variable= self.Auto_Scrollingstr, onvalue="1", offvalue="0").grid(row = 6, column = 0)
    Checkbutton(infoFrame, text="", variable= self.Database_enablestr, onvalue="1", offvalue="0").grid(row = 8, column = 0)
    Checkbutton(infoFrame, text="", variable= self.Show_Sample_Ratestr, onvalue="1", offvalue="0").grid(row = 10, column = 0)
    if not self.simu:
        Checkbutton(infoFrame, text="", variable= self.Show_Error_Ratestr, onvalue="1", offvalue="0").grid(row = 13, column = 0)
    # Note inverted on/off values: checking "Drawing" clears the no-GUI flag.
    Checkbutton(infoFrame, text="", variable= self.noguistr, onvalue="0", offvalue="1").grid(row = 14, column = 0)
    #Entry(infoFrame, textvariable = self.xmaxOnY).grid(row = 2, column = 2)
    #Entry(infoFrame, textvariable = self.yminOnY).grid(row = 3, column = 1)
    #Entry(infoFrame, textvariable = self.ymaxOnY).grid(row = 3, column = 2)
    #Entry(infoFrame, textvariable = self.zminOnY).grid(row = 4, column = 1)
    #Entry(infoFrame, textvariable = self.zmaxOnY).grid(row = 4, column = 2)
    Button(infoFrame, text = 'OK', command = setval).grid(row = 15, column = 2)
def testing(self):
    """Open the 'MSP Testing' popup with the diagnostic command buttons
    (communication test, firmware version, parser command, set frequency)."""
    #def setval():
    # open a popup window
    nf = Toplevel()
    t = "MSP Testing"
    nf.title(t)
    self.status.set(t)
    infoFrame = Frame(nf, width = self.nodewin_width,
                      height = self.nodewin_height, bd = 1)
    infoFrame.pack(fill = BOTH, expand = 0)
    # Each button is kept as an attribute so other methods can reconfigure it.
    #self.TestConnbutton = Button(infoFrame, text='Communication Testing',relief= RAISED, command=self.TestConn, fg=self.buttonfg , bg=self.buttonbg).grid(row=0, column =0)
    self.TestConnbutton = Button(infoFrame, text='Communication Testing',relief= RAISED, command=self.TestConn, fg=self.buttonfg , bg=self.buttonbg)
    self.TestConnbutton.pack(side = LEFT, fill = X, expand = 1)
    self.Firmwarebutton = Button(infoFrame, text='Firmware Version',relief= RAISED, command=self.Firmwareveresion, fg=self.buttonfg , bg=self.buttonbg)
    #self.Firmwarebutton = Button(infoFrame, text='Firmware Version',relief= RAISED, command=self.Firmwareveresion, fg=self.buttonfg , bg=self.buttonbg).grid(row=1, column =0)
    self.Firmwarebutton.pack(side = LEFT, fill = X, expand = 1)
    self.Parsercommandbutton = Button(infoFrame, text='Parser Command',relief= RAISED, command=self.ParserCommand, fg=self.buttonfg , bg=self.buttonbg)
    #self.Firmwarebutton = Button(infoFrame, text='Firmware Version',relief= RAISED, command=self.Firmwareveresion, fg=self.buttonfg , bg=self.buttonbg).grid(row=1, column =0)
    self.Parsercommandbutton.pack(side = LEFT, fill = X, expand = 1)
    self.SetFrequencybutton = Button(infoFrame, text='Set Frequency',relief= RAISED, command=self.SetFrequency, fg=self.buttonfg , bg=self.buttonbg)
    self.SetFrequencybutton.pack(side = LEFT, fill = X, expand = 1)
|
"""django_informixdb: Django Informix database driver"""
from .version import VERSION
|
from __future__ import absolute_import
import torch
import torch.nn as nn
import torch.nn.functional as F
import numpy as np
from models.basic_conv import BasicGraphConv
from models.graph_atrous_conv import GraphConv
from models.enc_dec import Enc_Dec
from models.graph.h36m_graph import AdjMatrixGraph
from models.graph.h36m_graph_j import AdjMatrixGraph_J
from models.graph.h36m_graph_b import AdjMatrixGraph_B
from models.graph.h36m_graph_p import AdjMatrixGraph_P
def conv_init(conv):
    """Apply Kaiming/He normal initialization (fan_out mode) to a conv layer's
    weights; the bias is deliberately left at its default initialization."""
    nn.init.kaiming_normal_(conv.weight, mode='fan_out')
def bn_init(bn, scale):
    """Initialize a batch-norm layer to weight=scale and bias=0."""
    nn.init.constant_(bn.bias, 0)
    nn.init.constant_(bn.weight, scale)
def fc_init(fc):
    """Initialize a linear layer: Xavier-normal weight, zero bias (if present).

    Bug fix: the original guard ``if fc.bias:`` evaluated the bias *tensor's*
    truth value, which raises ``RuntimeError`` for any bias with more than one
    element (i.e. every ordinary ``nn.Linear``). Test against ``None`` instead,
    which is how optional parameters are represented in PyTorch modules.
    """
    nn.init.xavier_normal_(fc.weight)
    if fc.bias is not None:
        nn.init.constant_(fc.bias, 0)
class PoseGTAC(nn.Module):
    """
    PoseGTAC: Graph Transformer Encoder-Decoder with Atrous Convolution.

    Lifts per-joint features of dimension coords_dim[0] (default 2, i.e. 2D
    keypoints) to coords_dim[1] (default 3) over the Human3.6M skeleton
    graphs, via an input graph conv, an encoder-decoder stack, and an
    output graph conv.
    """
    def __init__(self, hid_dim, coords_dim=(2, 3), p_dropout=None):
        super(PoseGTAC, self).__init__()
        # Full-skeleton adjacency used by the input/output graph convolutions.
        graph = AdjMatrixGraph()
        adj = torch.from_numpy(graph.A)
        # Joint-, part- and body-level adjacency variants consumed by Enc_Dec.
        graph_j = AdjMatrixGraph_J()
        graph_p = AdjMatrixGraph_P()
        graph_b = AdjMatrixGraph_B()
        A_binary = graph_j.A_binary
        B_binary = graph_p.A_binary
        C_binary = graph_b.A_binary
        self.gconv_input = GraphConv(adj, coords_dim[0], hid_dim, p_dropout=p_dropout)
        self.gconv_layers = Enc_Dec(hid_dim, A_binary, B_binary, C_binary, p_dropout=p_dropout)
        self.gconv_output = BasicGraphConv(hid_dim, coords_dim[1], adj)
        # Custom re-initialization of every conv / batch-norm / linear submodule.
        for m in self.modules():
            if isinstance(m, nn.Conv1d):
                conv_init(m)
            elif isinstance(m, nn.BatchNorm1d):
                bn_init(m, 1)
            elif isinstance(m, nn.Linear):
                fc_init(m)
    def forward(self, x):
        # x: per-joint input features; exact tensor layout is defined by
        # GraphConv -- NOTE(review): confirm expected shape with models/.
        out = self.gconv_input(x)
        out = self.gconv_layers(out)
        out = self.gconv_output(out)
        return out
|
class BankAccount:
    """Minimal in-memory bank account with a class-wide account counter."""

    # Next account number to hand out; shared across all instances.
    NEXT_ACC_NUMBER = 1

    def __init__(self):
        self.cash = 0.0
        # Bug fix: the account number was never stored, so __str__ raised
        # AttributeError on self.number. Record it, then advance the counter.
        self.number = BankAccount.NEXT_ACC_NUMBER
        BankAccount.NEXT_ACC_NUMBER += 1

    def deposit_cash(self, amount):
        """Add *amount* to the balance.

        Raises:
            ValueError: if *amount* is not positive. (Bug fix: the original
            condition ``not isinstance(amount, float) and amount <= 0.0``
            silently accepted negative floats.)
        """
        if amount <= 0.0:
            raise ValueError("Deposit can't be negative!")
        self.cash += amount

    def withdraw_cash(self, amount):
        """Remove up to *amount* from the balance; the balance never goes
        negative (withdrawing more than the balance empties the account)."""
        if amount < 0.0:
            raise ValueError("Withdraw can't be negative")
        self.cash -= min(self.cash, amount)

    def __str__(self):
        return f'Account: {self.number}, balance: {self.cash}'
# Demo: each instantiation advances the shared class-level counter,
# so the three prints show successive values.
print(BankAccount.NEXT_ACC_NUMBER)
myAccount = BankAccount()
print(BankAccount.NEXT_ACC_NUMBER)
myAccount2 = BankAccount()
print(BankAccount.NEXT_ACC_NUMBER)
# myaccount = BankAccount(1496454578)
# print(myaccount)
# myaccount.deposit_cash(429)
# print(myaccount)
# myaccount.deposit_cash(156)
# print(myaccount)
# myaccount.withdraw_cash(1000)
# print(myaccount)
|
import pandas as pd
import numpy as np
import talib as ta
import tushare as ts
import matplotlib.pyplot as plt
def BBANDS(ts_code, timeperiod=14, k=0.5):
    """Backtest a simple Bollinger-band strategy on one security.

    Fetches daily bars via tushare, computes Bollinger bands with TA-Lib,
    then simulates buying 100 shares whenever the open is below the lower
    band and selling (100 shares, or everything if holding <= 100) whenever
    the open is above the upper band. Prints the running position each day.

    Args:
        ts_code: tushare security code, e.g. "600848".
        timeperiod: Bollinger moving-average window in days.
        k: number of standard deviations for the upper/lower bands.

    Improvements over the original: the local ``sum`` no longer shadows the
    builtin, and ``dw['open'].values`` is materialized once instead of being
    rebuilt on every access inside the loop.
    """
    dw = ts.get_k_data(ts_code)
    dw = dw[10:]
    dw.index = range(len(dw))
    dw['upper'], dw['middle'], dw['lower'] = ta.BBANDS(
        dw.close.values,
        timeperiod=timeperiod,
        # number of non-biased standard deviations from the mean
        nbdevup=k,
        nbdevdn=k,
        # Moving average type: simple moving average here
        matype=0)
    shares = 0          # current position, in shares
    total = 10000       # cash on hand
    asset = 10000       # last marked-to-market total (cash + position)
    plt.plot(dw['open'].values)
    plt.show()
    # Hoist the column arrays: .values allocates a fresh array per access.
    opens = dw['open'].values
    lowers = dw['lower'].values
    uppers = dw['upper'].values
    for i in range(0, len(dw) - 1):
        if opens[i] < lowers[i]:
            # Open below the lower band: buy 100 shares at the open.
            total = total - opens[i] * 100
            shares = shares + 100
            asset = opens[i] * shares + total
        elif opens[i] > uppers[i]:
            # Open above the upper band: sell 100 shares, or the whole
            # position when holding 100 or fewer.
            if shares > 100:
                total = total + opens[i] * 100
                shares = shares - 100
            elif shares <= 100:
                total = total + opens[i] * shares
                shares = 0
            asset = opens[i] * shares + total
        print("day: "+str(i)+"sum:"+str(shares)+"total:"+str(total)+"asset:"+str(asset))
    print("total:"+str(total)+" K: "+str(k))
# Run the backtest for one security; the commented loop below swept k.
BBANDS("600848",k=0.5)
#for k in range(1,10):
#BBANDS("600600",k=k)#
from ..actions.session import init_session, get_session_analysis, get_session_analysis_deprecated
from ..actions.elo import get_player_info
# Maps each bot command name to the handler callable that services it.
FUNCTION_PER_COMMAND = {
    'faceit_elo': get_player_info,
    # TODO: Replace this with get_session_analysis when FACEIT API gets FIXED
    'faceit_session': get_session_analysis_deprecated,
    'faceit_start_session': init_session
}
|
""" Testing brain segmentation module
"""
import numpy as np
from numpy.testing import (assert_almost_equal,
assert_array_equal)
from nose.tools import (assert_true, assert_false, assert_raises,
assert_equal, assert_not_equal)
from ..brain_segmentation import brain_segmentation
from ....io.files import load as load_image
from ....testing import anatfile
def test_bseg():
    # Very crude smoke test: segment the bundled anatomical image and
    # check only the dimensionality of the outputs (4D probability maps,
    # 3D label volume), not their contents.
    anat_img = load_image(anatfile)
    ppm_img, label_img = brain_segmentation(anat_img)
    assert_equal(ppm_img.ndim, 4)
    assert_equal(label_img.ndim, 3)
|
from flask import Flask, request
from flask_cors import CORS, cross_origin
from flask_restful import Resource, Api
from json import dumps
from flask_jsonpify import jsonify
import psycopg2
import jinja2
# Application wiring: Flask app, flask-restful API wrapper, and
# permissive CORS (all origins) for browser clients.
app = Flask(__name__)
api = Api(app)
CORS(app)
def initDB():
    """Connect to the Postgres database and return (connection, cursor).

    SECURITY NOTE(review): the host, database name, user and password are
    hard-coded (and printed) here -- these credentials are exposed in source
    control and logs; they should be moved to environment variables or a
    secrets store and rotated.
    """
    conn_string = "host='ec2-54-83-50-145.compute-1.amazonaws.com' dbname='dad8agdskdaqda' port='5432' user='bxzszdjesssvjx' password='30a8521fc6b32229540335c47af5265bb684216e4f58fa81520a91e1d086a5de'"
    print ("Connecting to database\n ->%s" % (conn_string))
    conn = psycopg2.connect(conn_string)
    cursor = conn.cursor()
    print ("Connected!")
    return conn, cursor
# Create the shared module-level connection/cursor at import time.
conn, cur = initDB()
@app.route("/")
def hello():
    """Health-check endpoint: confirms the app imported and is serving."""
    greeting = "Hello World and DB!!"
    return jsonify(greeting)
@app.route("/dbinfo")
def dbinfo():
    """Report the repr of the module-level DB connection and cursor."""
    details = f"Con: {conn}Curr: {cur}"
    return jsonify(details)
class Employees(Resource):
    """REST resource returning the (static, hard-coded) employee roster."""
    def get(self):
        roster = [{'id': 1, 'name': 'Balram'}, {'id': 2, 'name': 'Tom'}]
        return {'employees': roster}
class Employees_Name(Resource):
    """REST resource returning a single (hard-coded) employee record."""
    def get(self, employee_id):
        print('Employee id:' + employee_id)
        payload = {'data': {'id': 1, 'name': 'Balram'}}
        return jsonify(payload)
# Register the REST resources and start the development server.
api.add_resource(Employees, '/employees') # Route_1
api.add_resource(Employees_Name, '/employees/<employee_id>') # Route_3
if __name__ == '__main__':
    # Debug mode: auto-reload and interactive tracebacks (dev only).
    app.run(debug=True)
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Fri Dec 13 17:01:32 2019
@author: thomas
"""
#In this script, we will be using the expansion and compression displacement
# data created by PRQuantitative.py
#We will attempt to find a fit for the data SSL and LSL
#The fit will be based on 3 parameters: Re = A_r*r/delta^2, St_r = r/A, and d_0?
#MODULES
import os,sys
import re
import numpy as np
import pandas as pd
from scipy import stats
from sklearn import linear_model
import matplotlib as mpl
#mpl.use('Agg')
import matplotlib.pyplot as plt
from mpl_toolkits.axes_grid1.inset_locator import inset_axes
from matplotlib.ticker import (MultipleLocator,AutoMinorLocator)
from scipy.signal import savgol_filter
#CONSTANTS
cwd_PYTHON = os.getcwd()
PERIOD = 0.1       # oscillation period (s); FREQ = 1/PERIOD
FREQ = 10.0
DENS = 2.0
MAXAMP = 0.3
RSMALL = 0.15      # small-sphere radius; used throughout to nondimensionalize
RLARGE = 0.3
RSL_DEFAULT = 0.75 # baseline sphere separation; scaled by each rsl value
rslList = [1.0,1.1,1.2,1.3,1.4,1.5,1.6,1.8,2.0]
# Fit coefficient tables: row 0 = viscous regime, row 1 = inertial regime;
# columns = (Re exponent, eps exponent, d_0 exponent, intercept, R^2).
WholeExpFit = np.zeros((2,5))
WholeComFit = np.zeros((2,5))
def StoreData(cwd_DATA):
    """Load the shifted expansion/compression dataset, derive log-scaled and
    nondimensionalized columns, plot the raw strokes, and write the augmented
    table to v2_ExpComAllVar.txt. Returns the augmented DataFrame."""
    global RSL_DEFAULT
    #Load position data
    #Columns
    #Re rsl amp y_e y_c y_net
    data = pd.read_csv(cwd_DATA+'/Shifted/ExpCom.txt',delimiter = ' ')
    data = data[data.Re <= 150.0].copy()
    data = data.sort_values(by=['Re','rsl','amp'])
    data = data.reset_index(drop=True)
    #Renormalize displacements by the small-sphere radius
    data['y_e'] /= RSMALL
    data['y_c'] /= RSMALL
    #Let's create 5 new variables (log values)
    data['logRe'] = np.log10(data.Re)
    data['logy_e'] = np.log10(abs(data['y_e']))
    data['logy_c'] = np.log10(abs(data['y_c']))
    data['epsilon'] = data.amp/RSMALL
    data['logeps'] = np.log10(data['epsilon'])
    #data['d_0'] = RSL_DEFAULT*data.rsl
    data['d_0'] = RSL_DEFAULT*data.rsl/RSMALL
    data['logd_0'] = np.log10(data['d_0'])
    data['Diff'] = abs(data['y_e'] + data['y_c'])
    #Attempt2
    data['M2'] = data['Re']/data['epsilon']
    data['logM2'] = np.log10(data['M2'])
    PlotAllStrokes(data)
    data.to_csv(cwd_DATA + '/v2_ExpComAllVar.txt',index=None,header=True)
    #sys.exit(0)
    return data
def PlotAllStrokes(data):
    """Plot raw expansion (left axis) and min-shifted compression (right axis)
    displacement vs Re for every (amp, rsl) combination, on log-log axes,
    and save the figure as an SVG."""
    ampList = [0.12,0.15,0.18,0.21,0.24]
    rslList = [1.0,1.1,1.2,1.3,1.4,1.5,1.6,1.8,2.0]
    #Color Palette for plotting (one base RGB per amplitude)
    R1=[255/255,255/255,153/255,153/255,204/255]
    G1=[153/255,204/255,255/255,204/255,153/255]
    B1=[204/255,153/255,153/255,255/255,255/255]
    ShadeValue = 1 #used to scale line color so there is a gradient as rsl changes
    #Create Figure for each amp value
    fig = plt.figure(figsize=(9,4),dpi=200,num=100)
    axPos = fig.add_subplot(121)
    axNeg = fig.add_subplot(122)
    #Labels and Such
    axPos.set_xlabel('Re',fontsize=14)
    axNeg.set_xlabel('Re',fontsize=14)
    axPos.set_ylabel(r'$\Delta \hat{y}_{exp}$ (m)',fontsize=14)
    axNeg.set_ylabel(r'$\Delta \hat{y}_{com} - \Delta \hat{y}_{min}$ (m)',fontsize=14)
    for idxAmp in range(5):
        ampValue = ampList[idxAmp]
        ampData = data[data.amp == ampValue].copy()
        '''#Create Figure for each amp value
        fig = plt.figure(figsize=(9,4),dpi=200,num=100)
        axPos = fig.add_subplot(121)
        axNeg = fig.add_subplot(122)
        #Labels and Such
        axPos.set_xlabel('Re',fontsize=14)
        axNeg.set_xlabel('Re',fontsize=14)
        axPos.set_ylabel(r'$\Delta y_{exp}$ (m)',fontsize=14)
        axNeg.set_ylabel(r'$\Delta y_{com} - \Delta y_{min}$ (m)',fontsize=14)'''
        #axPos.set_title(r'Expansion and Compression: A%.2fm'%ampValue)
        ShadeValue = 1.0
        for idxRSL in range(len(rslList)):
            rslValue = rslList[idxRSL]
            rslData = ampData[ampData.rsl == rslValue].copy()
            rslData = rslData.sort_values(by=['Re'])
            rslData = rslData.reset_index(drop=True)
            #ShadeValue = 1.0 - 0.05*idxRSL/(len(rslList))
            #Select RGB Color
            R=R1[idxAmp]*ShadeValue
            G=G1[idxAmp]*ShadeValue
            B=B1[idxAmp]*ShadeValue
            # Shift compression so its minimum sits at zero before log-plotting.
            seriesCMin = rslData[['y_c']].min()
            miny_c = seriesCMin[0]
            rslData['y_c'] -= miny_c
            rslData['logy_c'] = np.log10(rslData['y_c'])
            rslData = rslData.loc[1:]
            #Plot Re_c (critical Reynolds number marker at Re = 20)
            axNeg.plot([20.0,20.0],[-10,10],color='gray',ls=':')
            axPos.plot([20.0,20.0],[-10,10],color='gray',ls=':')
            #Expansion
            axPos.plot(rslData['Re'],rslData['y_e']/RSMALL,color=(R,G,B),zorder=5)
            axPos.scatter(rslData['Re'],rslData['y_e']/RSMALL,color=(R,G,B),s=9,zorder=5)
            #Compression
            axNeg.plot([0.05,150.0],[0.0,0.0],color='k')
            axNeg.plot(rslData['Re'],rslData['y_c']/RSMALL,color=(R,G,B),zorder=5)
            axNeg.scatter(rslData['Re'],rslData['y_c']/RSMALL,color=(R,G,B),s=9,zorder=5)
            ShadeValue -=0.05
    axPos.set_xlim(0.5,200.0)
    axNeg.set_xlim(0.5,200.0)
    axNeg.set_ylim(1.0e-2,5.0e0)
    axPos.set_ylim(1.0e-2,5.0e0)
    axPos.set_xscale('log')
    axPos.set_yscale('log')
    axNeg.set_xscale('log')
    axNeg.set_yscale('log')
    #Axes Parameters
    axPos.tick_params(which='major',axis='both',direction='in',length=6,width=1)
    axPos.tick_params(which='minor',axis='both',direction='in',length=4,width=0.75)
    axNeg.tick_params(which='major',axis='both',direction='in',length=6,width=1)
    axNeg.tick_params(which='minor',axis='both',direction='in',length=4,width=0.75)
    fig.tight_layout()
    #figName = '../PR/Shifted/Fits/TestCompression/ExpComStroke_A'+str(ampValue)+'.png'
    figName = cwd_PYTHON+'/../Version2/PaperFigures/Images/ExpComStroke_RawData_All_v2.svg'
    fig.savefig(figName)
    fig.clf()
    return
def FindRoot(data):
    """Return the index of the last negative-to-positive crossing in *data*.

    Scans the interior points and records every index i where the sign
    changes across i-1 -> i, agrees in sign with i+1, and data[i] itself is
    non-negative. The LAST such index is returned, or None when the series
    never crosses from - to +.
    """
    last_root = None
    for idx in range(1, len(data) - 1):
        sign_flip_before = data[idx] * data[idx - 1] <= 0.0
        same_sign_after = data[idx] * data[idx + 1] >= 0.0
        if sign_flip_before and same_sign_after and data[idx] >= 0.0:
            #- to + root has been found
            print('- to +: b4y_c = %.3f\ty_c = %.3f\ta4y_c = %.5f'%(data[idx-1],
                                                                    data[idx],
                                                                    data[idx+1]))
            last_root = idx
    return last_root
def FindWholeFit(data,y_name):
    """Fit log(y) ~ a*log(Re) + b*log(eps) + c*log(d_0) + intercept via
    ordinary least squares (a power law in the original variables).

    Args:
        data: DataFrame containing 'logRe', 'logeps', 'logd_0' and *y_name*.
        y_name: column to regress ('logy_e' or 'logy_c').
    Returns:
        (Re exponent, eps exponent, d_0 exponent, intercept, R^2 score).
    """
    #In this function, we will use sklearn.linear_model to find
    # a mutivariable linear regression of the expansion or compression data
    '''#Remove Outliers
    filteredData = RejectOutliers(data,y_name)'''
    #Variables to be fit
    xData = data[['logRe','logeps','logd_0']]
    #Data to be power fit
    yData = data[y_name]
    #Construct linear model for Expansion/Compression
    lm = linear_model.LinearRegression()
    model = lm.fit(xData,yData)
    #Calculate predicted values from model
    predictions = lm.predict(xData)
    #Calculate R^2 value
    score = lm.score(xData,yData)
    #Return Coefficients of linear model
    coef = lm.coef_
    #Return intercept of linear model
    intercept = lm.intercept_
    #Print Stats
    print("="*40)
    print('Mult Vairable Whole Model:')
    print(y_name)
    print('M2 exponent = %.3f'%coef[0])
    print('eps exponent = %.3f'%coef[1])
    print('d0 exponent = %.3f'%coef[2])
    print('intercept = %.3f'%intercept)
    print('R^2 value = %.3f'%score)
    return (coef[0],coef[1],coef[2],intercept, score)
def PlotWholeFit(ax,data,fitList,var,fitColor):
    """Overlay the fitted power law (dashed line, *fitColor*) and the raw
    data rescaled by eps^a * d_0^b on axis *ax*; returns the axis."""
    #Color Palette for plotting (one base RGB per amplitude)
    R1=[255/255,255/255,153/255,153/255,204/255]
    G1=[153/255,204/255,255/255,204/255,153/255]
    B1=[204/255,153/255,153/255,255/255,255/255]
    ShadeValue = 1.0 #used to scale line color so there is a gradient as rsl changes
    #Calculate Fit Expression from (Re exp, eps exp, d_0 exp, intercept)
    powRe, poweps, powd_0, intercept = fitList[0], fitList[1], fitList[2], fitList[3]
    xFit = np.linspace(0.5,250.0,1000)
    epsilon = 0.18/RSMALL
    #d_0 = rsl*RSL_DEFAULT
    d_0 = 1.0*RSL_DEFAULT/RSMALL
    yFit = 10.0**(intercept)*xFit**(powRe)*(epsilon)**(poweps)*(d_0)**(powd_0)
    #Plot Re_c line (critical Reynolds number marker at Re = 20)
    ax.plot([20.0,20.0],[-1,1],color='gray',ls=':')
    #Plot Fit, collapsed by dividing out the eps and d_0 dependence
    ax.plot(xFit,yFit/((epsilon)**(poweps)*(d_0)**(powd_0)),color=fitColor,lw=2,ls='--')
    for rsl in rslList:
        rslData = data[data.rsl == rsl].copy()
        rslData = rslData.reset_index(drop=True)
        for idxAmp in range(0,5):
            ampValue = 0.12 + 0.03*idxAmp
            ampData = rslData[rslData.amp == ampValue].copy()
            #Select RGB Color
            R=R1[idxAmp]*ShadeValue
            G=G1[idxAmp]*ShadeValue
            B=B1[idxAmp]*ShadeValue
            #ax.plot(xFit,yFit/((epsilon)**(poweps)),color=(R,G,B),lw=2,ls='--')
            #Plot Raw Data
            ax.scatter(ampData['Re'],abs(ampData[var])/(ampData['epsilon']**(poweps)*ampData['d_0']**(powd_0)),color=(R,G,B),s=20,zorder=5)#,edgecolor='k',zorder=5,linewidth=0.5)
            #ax.scatter(ampData['Re'],abs(ampData[var])/(ampData['epsilon']**(poweps)),color=(R,G,B),s=20,zorder=5)#,edgecolor='k',zorder=5,linewidth=0.5)
        ShadeValue -= 0.05
    '''if(name=='exp'):
        ax.legend(title=r'%.3flog(Re)+%.3flog(St)+%.3flog($d_0$)+%.3f'%(powRe,powSt,powd_0,intercept),loc='best',fontsize='x-small')
    else:
        ax.legend(title=r'%.3flog(Re)+%.3flog(St)+%.3flog($d_0$)+%.3f'%(powCRe,powCSt,powCd_0,interceptC),loc='best',fontsize='x-small')'''
    #Axes Parameters
    ax.tick_params(which='major',axis='both',direction='in',length=6,width=1)
    ax.tick_params(which='minor',axis='both',direction='in',length=4,width=0.75)
    return ax
def PlotAllCompression(ax,data):
    """Plot the unfitted compression displacement |y_c| vs Re for every
    (rsl, amp) combination on axis *ax*; returns the axis."""
    #Color Palette for plotting (one base RGB per amplitude)
    R1=[255/255,255/255,153/255,153/255,204/255]
    G1=[153/255,204/255,255/255,204/255,153/255]
    B1=[204/255,153/255,153/255,255/255,255/255]
    ShadeValue = 1.0 #used to scale line color so there is a gradient as rsl changes
    for rsl in rslList:
        rslData = data[data.rsl == rsl].copy()
        rslData = rslData.reset_index(drop=True)
        for idxAmp in range(0,5):
            ampValue = 0.12 + 0.03*idxAmp
            ampData = rslData[rslData.amp == ampValue].copy()
            #Select RGB Color
            R=R1[idxAmp]*ShadeValue
            G=G1[idxAmp]*ShadeValue
            B=B1[idxAmp]*ShadeValue
            #Plot Re_c line (critical Reynolds number marker at Re = 20)
            ax.plot([20.0,20.0],[-1,1],color='gray',ls=':')
            #Plot Raw Data
            ax.plot(ampData['Re'],abs(ampData['y_c']),color=(R,G,B),lw=1)
            ax.scatter(ampData['Re'],abs(ampData['y_c']),color=(R,G,B),s=20,zorder=5)
        ShadeValue -= 0.05
    '''if(name=='exp'):
        ax.legend(title=r'%.3flog(Re)+%.3flog(St)+%.3flog($d_0$)+%.3f'%(powRe,powSt,powd_0,intercept),loc='best',fontsize='x-small')
    else:
        ax.legend(title=r'%.3flog(Re)+%.3flog(St)+%.3flog($d_0$)+%.3f'%(powCRe,powCSt,powCd_0,interceptC),loc='best',fontsize='x-small')'''
    #Axes Parameters
    ax.tick_params(which='major',axis='both',direction='in',length=6,width=1)
    ax.tick_params(which='minor',axis='both',direction='in',length=4,width=0.75)
    return ax
if __name__ == '__main__':
    #This is where the main part of the code will be conducted
    #Obtain the directory where data is stored
    cwd_DATA = cwd_PYTHON + '/../PR'
    allData = StoreData(cwd_DATA)
    csfont = {'fontname':'Times New Roman'}
    #Here we need to split up the data based on the following criteria
    #1) Expansion: Split based on minimum value
    # Do not include minimum and value to the left or right
    #2) Compression: Split based on when sign switches (root)
    # Do not include root and value to the left or right
    #But first we need to find the root for each set of (A,d_0) data
    #Create DataFrames which will store the 2 regions of info
    # (nrows=0 reads only the header, giving empty frames with the schema;
    #  v* = viscous regime, i* = inertial regime, E = expansion, C = compression)
    vEData = pd.read_csv(cwd_DATA+'/v2_ExpComAllVar.txt',delimiter = ' ',nrows=0)
    iEData = vEData.copy()
    vCData = iEData.copy()
    iCData = vCData.copy()
    #Find root and split data up based on rsl and A.
    #Loop over RSL
    for idxRSL in range(len(rslList)):
        rslValue = rslList[idxRSL]
        rslData = allData[allData.rsl == rslValue].copy()
        #Loop over A
        for idxAmp in range(0,5):
            ampValue = 0.12 + 0.03*idxAmp
            print('A = %.2f: RSL = %.2f'%(ampValue,rslValue))
            ampData = rslData[rslData.amp == ampValue].copy()
            #Sort based on Re
            ampData = ampData.sort_values(by=['Re'])
            ampData = ampData.reset_index(drop=True)
            print(ampData['y_e'])
            #Now we should have data for 20 Re
            print('# of Re = ',len(ampData.Re))
            #Expansion Data
            #Split data up based on minimum value
            seriesMin = ampData[['y_e']].idxmin()
            idxMin = seriesMin[0]
            print('idxMin = ',idxMin)
            vEData = pd.concat([vEData,ampData.iloc[:idxMin+1]], ignore_index=True, sort=True)
            iEData = pd.concat([iEData,ampData.iloc[idxMin+1:]], ignore_index=True, sort=True)
            #Compression Data: shift so the minimum sits at zero, drop row 0
            seriesCMin = ampData[['y_c']].min()
            miny_c = seriesCMin[0]
            ampData['y_c'] -= miny_c
            ampData['logy_c'] = np.log10(ampData['y_c'])
            ampData = ampData.loc[1:]
            vCData = pd.concat([vCData,ampData.iloc[:idxMin+1]], ignore_index=True,sort=True)
            iCData = pd.concat([iCData,ampData.iloc[idxMin+1:]], ignore_index=True,sort=True)
    # Re-sort the four region tables for deterministic downstream fitting.
    vEData = vEData.sort_values(by=['Re','amp','rsl'])
    vEData = vEData.reset_index(drop=True)
    vCData = vCData.sort_values(by=['Re','amp','rsl'])
    vCData = vCData.reset_index(drop=True)
    iEData = iEData.sort_values(by=['Re','amp','rsl'])
    iEData = iEData.reset_index(drop=True)
    iCData = iCData.sort_values(by=['Re','amp','rsl'])
    iCData = iCData.reset_index(drop=True)
    #Data has been split up appropriately
    #For each dataframe, we will perform 4 linear regressions
    #1) logRe only
    #2) logRe and logSt
    #3) logRe and logd_0
    #4) logRe, logSt, and logd_0
    #There will be 5 sets of figures (1 for viscous region, 1 for inertial region)
    #1) logRe only (sorted by 9 d_0) 5 amps on each
    #2) logRe only (sorted by 5 A) 9 RSL each
    #3) logRe and logSt (sorted by 9 d_0)
    #4) logRe and log_d_0 (sorted by 5 A)
    #5) logRe, logSt, and logd_0 (1 Figure of all)
    #Make a figure for each Whole Fit
    #First Find fits
    #Variables of interest: Re and St and d_0
    #Plot Fits for Whole Data set, (Re, St, and d_0)
    WholeExpFit[0] = FindWholeFit(vEData,'logy_e')
    WholeComFit[0] = FindWholeFit(vCData[vCData.Re <= 2.0],'logy_c')
    WholeExpFit[1] = FindWholeFit(iEData,'logy_e')
    WholeComFit[1] = FindWholeFit(vCData[vCData.Re > 2.0],'logy_c')
    #Important Figure Data
    cwd_PLOT = cwd_PYTHON + '/../Version2/PaperFigures/Images'
    #Create a Figure for ReStd_0 Fit Data
    fig, ax = plt.subplots(nrows=1, ncols=2, num=1,
                           figsize=(9,4),dpi=200)
    #Axes Labels for ReSt Fit
    ax[0].set_xlabel(r'Re',fontsize=14,**csfont)
    ax[0].set_ylabel(r'$\Delta \hat{y}_{exp}$ / $\epsilon^a \hat{d}_0^b$',fontsize=14,**csfont)
    ax[1].set_xlabel(r'Re',fontsize=14,**csfont)
    ax[1].set_ylabel(r'($\Delta \hat{y}_{com} - \Delta \hat{y}_{min}$) / $\epsilon^a \hat{d}_0^b$',fontsize=14,**csfont)
    #vEData
    imgName = 'Final'
    ax[0] = PlotWholeFit(ax[0],vEData,WholeExpFit[0],'y_e','k')
    #iEData
    ax[0] = PlotWholeFit(ax[0],iEData,WholeExpFit[1],'y_e','tab:red')
    ax[0].set(xlim=(0.5,200.0),ylim=(5.0e-5/RSMALL,5.0e-2/RSMALL))
    #ax[0].set(xlim=(0.5,200.0),ylim=(1.0e-4,1.0e-1))
    ax[0].set_xscale('log')
    ax[0].set_yscale('log')
    #ax[0].set_aspect(1.0)
    #cData
    #Reg1Test
    ax[1] = PlotWholeFit(ax[1],vCData[vCData.Re <= 2.0],WholeComFit[0],'y_c','k')
    #Reg2Test
    ax[1] = PlotWholeFit(ax[1],vCData[vCData.Re > 2.0],WholeComFit[1],'y_c','tab:blue')
    #Reg3 Test (note: WholeComFit[1] is recomputed here for the inertial data)
    WholeComFit[1] = FindWholeFit(iCData,'logy_c')
    ax[1] = PlotWholeFit(ax[1],iCData,WholeComFit[1],'y_c','tab:red')
    #Plot All of Compression Unfit
    #ax[1] = PlotAllCompression(ax[1],vCData)
    ax[1].set(xlim=(0.5,200.0),ylim=(5.0e-5/RSMALL,5.0e-2/RSMALL))
    #ax[1].set(xlim=(0.5,150.0),ylim=(1.0e-4,8.0e-2))
    ax[1].set_xscale('log')
    ax[1].set_yscale('log')
    #ax[1].set_aspect(0.5)
    #ax[1].invert_yaxis()
    fig.tight_layout()
    figName = cwd_PLOT+'/'+imgName+'_WholeFitv2_l.svg'
    fig.savefig(figName)
    fig.clf()
    plt.close()
|
#!/usr/bin/python
def displayPathtoPrincess(n,grid):
    """Print the moves taking a bot from the center of an n x n grid to the
    princess ('p' or 'P'), who must be located in one of the four corners.

    Prints n//2 repetitions of a horizontal move followed by a vertical move.

    Raises:
        ValueError: if no corner of the grid contains the princess
            (previously this case crashed with an UnboundLocalError).
    """
    if grid[0][0] == 'p' or grid[0][0] == 'P' :
        dir1 = "LEFT"
        dir2 = "UP"
    elif grid[n-1][n-1] == 'p' or grid[n-1][n-1] == 'P' :
        dir1 = "RIGHT"
        dir2 = "DOWN"
    elif grid[n-1][0] == 'p' or grid[n-1][0] == 'P' :
        dir1 = "LEFT"
        dir2 = "DOWN"
    elif grid[0][n-1] == 'p' or grid[0][n-1] == 'P' :
        dir1 = "RIGHT"
        dir2 = "UP"
    else:
        # Fail loudly instead of referencing unbound dir1/dir2 below.
        raise ValueError("princess ('p'/'P') not found in any corner of the grid")
    # From the center, n//2 steps in each direction reach a corner.
    for i in range(0, n//2):
        print(dir1)
        print(dir2)
# Read the grid size and then one row per line from stdin, and print the path.
m = int(input())
grid = []
for i in range(0, m):
    grid.append(input().strip())
displayPathtoPrincess(m,grid)
|
from __future__ import (absolute_import, division, print_function, unicode_literals)
import copy
import numpy as np
class CommunityDetector(object):
    """Greedy modularity-based community detection (Louvain-style).

    Alternates phase1 (move nodes between communities while modularity
    improves) and phase2 (collapse each community into a single node)
    until phase1 makes no further change.
    """
    def __init__(self, adj_dict):
        """
        adj_dict is an adjacency list of node: neighbors.
        make sure adj_dict has no duplicates.
        num_stubs = 2 * number of edges.
        In this algorithm, we think in terms of stubs
        instead of in terms of edges.
        node_comm_associations is a list containing
        which nodes are in which communities
        e.g say node 1 and 2 are in a community and node 3 in a diff. community
        then node_comm_associations = [[1,2], [3]]
        """
        # De-duplicate each neighbour list up front.
        self.adj_dict = {k : list(set(v)) for (k,v) in adj_dict.items()}
        # flatten the adjacency dict to get a list of all nodes.
        flat_vals = [x for xs in adj_dict.values() for x in xs]
        self.nodes = sorted(list(set(list(adj_dict.keys()) + flat_vals)))
        self.A = self.get_adjacency_matrix(self.nodes, directed=False)
        # initialize S such that each node is in its own community
        # (S[i, c] == 1 iff node i belongs to community c).
        self.S = np.zeros((len(self.nodes), len(self.nodes)))
        np.fill_diagonal(self.S, 1)
        self.node_comm_associations = [[i] for i in range(len(self.nodes))]
    @property
    def num_stubs(self):
        # Sum of all adjacency-matrix entries; for the symmetric (undirected)
        # matrix this equals twice the number of edges.
        return np.sum(self.A)
    def get_adjacency_matrix(self, nodes, directed=False):
        """
        converts an adjacency dictionary into a symmetric adjacency matrix,
        if the directed flag is False, otherwise not.
        """
        A = np.zeros((len(nodes), len(nodes)))
        for i, _ in enumerate(A):
            for j in range(i+1):
                node1 = nodes[i]
                node2 = nodes[j]
                # An edge exists if either endpoint lists the other.
                flag = False
                if node1 in self.adj_dict and node2 in self.adj_dict[node1]:
                    flag = True
                elif node2 in self.adj_dict and node1 in self.adj_dict[node2]:
                    flag = True
                if not directed:
                    A[i,j] = A[j,i] = 1 if flag else 0
                else:
                    # NOTE(review): only the lower triangle (j <= i) is ever
                    # visited, so the directed variant fills at most half the
                    # matrix — confirm before relying on directed=True.
                    if flag:
                        A[i,j] = 1
        return A
    def delta_modularity(self, node_i, community):
        """
        formula:
        sum over all nodes j in community
        (1/num_stubs) * (2 * (A_ij - (k_i * k_j) / num_stubs) + (A_ii - (k_i*k_i)/num_stubs))
        returns the value of adding node_i to community
        simply multiply the value by negative 1 to get the value
        of removing node i from the community
        """
        k_dict = {}  # NOTE(review): unused — the memoization below is commented out
        def k(node_idx):
            """
            returns k_i, the number of stubs that a node has, aka its outdegree
            """
            return np.sum(self.A[node_idx])
        # if node_idx in k_dict:
        #     return k_dict[node_idx]
        # else:
        #     val = np.sum(self.A[node_idx])
        #     k_dict[node_idx] = val
        #     return val
        # loop over members of community and get cumulative sum
        cum_sum = 2 * sum(self.A[node_i,j] - ((k(node_i) * k(j)) / self.num_stubs)\
                          for j in np.nonzero(self.S[:,community])[0])
        cum_sum += self.A[node_i, node_i] - ((k(node_i)**2) / self.num_stubs)
        # add in value for node_i
        cum_sum = cum_sum / self.num_stubs
        return cum_sum
    def phase1(self):
        """
        phase1 takes the graph A and S and returns a better S
        phase2 then takes S and squashes communities, returning a new S and A
        S[i,c] = 1 if node i belongs to community c else 0
        """
        # loop over nodes, finding a local max of Q
        counter = 0
        wasChangedInFunction = False
        wasChangedInLoop = True
        # Keep sweeping over all nodes until a full sweep changes nothing.
        while wasChangedInLoop:
            wasChangedInLoop = False
            #print(' phase1 counter: %d' % counter)
            counter+=1
            # loop over each node
            # this for loop takes fooooorever
            for i, S_row in enumerate(self.S):
                cur_community = best_community = np.nonzero(S_row)[0][0]
                # remove node from its former community
                self.S[i, cur_community] = 0
                best_delta_Q = self.delta_modularity(i, cur_community)
                # find best delta Q for all other communities
                for j, _ in enumerate(S_row):
                    delta_Q = self.delta_modularity(i, j)
                    if delta_Q > best_delta_Q:
                        best_delta_Q = delta_Q
                        best_community = j
                if cur_community != best_community:
                    wasChangedInLoop= True
                    wasChangedInFunction= True
                # Re-insert the node into whichever community won.
                self.S[i, best_community] = 1
            # remove columns that are all zeros via a mask
            # this removes irrelevant communities
            self.S = np.transpose(self.S)
            self.S = np.transpose(self.S[(self.S!=0).any(axis=1)])
        return wasChangedInFunction
    def phase2(self):
        """
        squash communities
        """
        #print(' starting phase2')
        # So S = num_nodes by num_communities
        # so we are going to have
        # define node_comm_associations
        num_communities = self.S.shape[1]
        new_A = np.zeros((num_communities, num_communities))
        # fill new_A
        # new_A[i, j] = number of edge endpoints between community i and j.
        for i, row in enumerate(new_A):
            for j, _ in enumerate(row):
                # get set of nodes in community i and
                comm_i_nodes = np.nonzero(self.S[:,i])[0]
                comm_j_nodes = np.nonzero(self.S[:,j])[0]
                # get number of edge intersections
                edge_sum = 0
                for comm_i_node in comm_i_nodes:
                    for comm_j_node in comm_j_nodes:
                        edge_sum += self.A[comm_i_node, comm_j_node]
                new_A[i,j] = edge_sum
            # I think this should be commented out
            # Halve the diagonal so internal edges are not double-counted.
            new_A[i,i] = 0.5 * new_A[i,i]
        # update node_comm_associations
        new_node_comm_associations = []
        # loop over columns
        self.S = np.transpose(self.S)
        for row in self.S:
            nodes = np.nonzero(row)[0]
            # combine old nodes of node_comm_associations
            temp_list = [x for y in nodes for x in self.node_comm_associations[y]]
            new_node_comm_associations.append(temp_list)
        # also need a list of all original nodes associated with each community
        # Reset S to the identity: each squashed community is its own node now.
        new_S = np.zeros((num_communities, num_communities))
        for i, _ in enumerate(new_S):
            new_S[i,i] = 1
        self.A = new_A
        self.S = new_S
        self.node_comm_associations = new_node_comm_associations
        return self.A, self.S, self.node_comm_associations
    def run(self, node_names=True, verbose=False):
        """Alternate phase1/phase2 until stable; return the communities found.

        If node_names is True, communities contain original node labels
        rather than internal indices.
        """
        counter = 0
        while True:
            #print ('go counter: %d' % counter)
            counter+=1
            wasChanged = self.phase1()
            if wasChanged == False:
                break
            self.phase2()
        self.communities = copy.deepcopy(self.node_comm_associations)
        if node_names:
            # Map internal indices back to the caller's node labels.
            self.communities = [
                list(map(
                    lambda x: self.nodes[x],
                    community))
                for community in self.communities
            ]
        if verbose:
            for c in self.communities:
                print(c)
        return self.communities
|
import requests
import re
import time
from multiprocessing import Pool
# Desktop-Chrome User-Agent so the site serves the normal HTML markup.
headers = {
    'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/71.0.3578.98 Safari/537.36'
}
def re_scraper(url):
    """Fetch one listing page and regex-parse author ids and post contents.

    Returns the last parsed {'id', 'content'} dict, or None when the page
    yields no matches (previously that case crashed with an
    UnboundLocalError because `info` was never bound).
    """
    res = requests.get(url, headers=headers)
    ids = re.findall('<h2>(.*?)</h2>', res.text, re.S)
    contents = re.findall('<div class="content">.*?<span>(.*?)</span>', res.text, re.S)
    info = None  # stays None when nothing matched
    for post_id, content in zip(ids, contents):  # renamed: don't shadow builtin id()
        info = {
            'id': post_id.strip(),
            'content': content.strip(),
        }
    return info
if __name__ == '__main__':
    # Benchmark: serial scraping vs. 2-process and 4-process pools.
    urls = ['https://www.qiushibaike.com/text/page/{}/'.format(str(i)) for i in range(1,13)]
    start_1 = time.time()
    for url in urls:
        re_scraper(url)
    end_1 = time.time()
    print('one',end_1-start_1)
    start_2 = time.time()
    pool = Pool(processes=2)
    pool.map(re_scraper,urls)
    end_2 = time.time()
    print('two', end_2 - start_2)
    start_3 = time.time()
    pool = Pool(processes=4)
    pool.map(re_scraper,urls)
    end_3 = time.time()
    print('four', end_3 - start_3)
from plotter import plotter
import sys
from Jsonreader import Jsonreader
from Csvreader import Csvreader
def plotWithGroups(plotting, apartmentdict, xaxis, yaxis, group, IsGroupBuildingdict, selectbypair):
    """Collect (x, y, group-label) triples from the apartment records and
    render them via plotting.pandaplot_3groups.

    Records are skipped when the x value is 0, for the hard-coded outlier
    apartment "1021896", and when the selectbypair key/value filter does
    not match. The group label comes from the building dict when
    IsGroupBuildingdict is true, otherwise from the apartment record.
    """
    xs = []
    ys = []
    labels = []
    for key in apartmentdict:
        apartment = apartmentdict[key]
        # Building data is loaded per apartment, before any filtering.
        buildingdict = Jsonreader(apartment["building"]).GetBuildingDict()
        if apartment[xaxis] == 0:
            continue
        if key == "1021896":
            continue
        if not apartment[selectbypair[0]] == selectbypair[1]:
            continue
        xs.append(apartment[xaxis])
        ys.append(apartment[yaxis])
        if IsGroupBuildingdict:
            labels.append(buildingdict[group])
        else:
            labels.append(group + " " + apartment[group])
    # The plot title is the filter value; the x-axis title is the x key.
    plotting.pandaplot_3groups(xs, ys, labels, selectbypair[1], xaxis)
    return
def main(*args):
    """Load apartment data, scatter-plot consumption by building year for
    primary residences, then draw three grouped plots.

    Fix: the bare Python-2-only `print x` statements were parenthesized —
    single-argument `print(x)` behaves identically on Python 2 and also
    runs on Python 3.
    """
    #buildingreader = Jsonreader("Conso_appartements.csv")
    apartmentreader = Csvreader("Conso_appartements.csv")
    apartmentdict = apartmentreader.dictOfAllApartments()
    print(apartmentdict)
    xpoints = []
    ypoints = []
    for a in apartmentdict:
        ap = apartmentdict[a]
        buildingnumber = ap["building"]
        buildingdict = Jsonreader(buildingnumber).GetBuildingDict()
        print(buildingdict)
        aptype = "secondaire"
        # Skip secondary residences; only primary homes are plotted.
        if ap["type"] == aptype:
            continue
        #xpoints.append(ap["rooms"])
        xpoints.append(buildingdict["Annee"])
        ypoints.append(ap["conso_2017"])
    plotting = plotter()
    plotting.scatterplot_onevariable(xpoints,ypoints, aptype)
    #
    plotWithGroups(plotting,apartmentdict, "rooms", "conso_2017", "EfficaciteGlobale", True, ["type", "principale"] )
    plotWithGroups(plotting,apartmentdict, "rooms", "conso_2017", "building", False , ["type", "principale"] )
    plotWithGroups(plotting,apartmentdict, "rooms", "conso_2017", "Annee", True, ["type", "principale"] )
if __name__ == "__main__":
    main(*sys.argv)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import logging
from common.desired_caps import appium_desired
from selenium.common.exceptions import NoSuchElementException
from selenium.webdriver.common.by import By
from selenium.webdriver.support.wait import WebDriverWait
from common.common_fun import Commom
class LoginView(Commom):
    """Appium page object for the app's login flow."""
    # Log in
    def login_action(self,username,password):
        """Dismiss the startup dialogs, then submit the given credentials."""
        # Location / update / skip dialogs may cover the login form.
        self.check_locationBtn()
        self.check_updateBtn()
        self.check_skipBtn()
        # Element locators are stored in loginView.yaml.
        data=self.get_yaml_data('loginView.yaml')
        self.find_element(By.ID,data['username_type']).send_keys(username)
        self.find_element(By.ID,data['password_type']).send_keys(password)
        self.find_element(By.ID,data['loginBtn']).click()
        # error_message = "用户名或密码错误,你还可以尝试"
        # limit_message = "验证失败次数过多"
        # message='//*[contains(@text,\'{}\')]'.format(error_message)
        # toast_text=WebDriverWait(self.driver,5).until(lambda x: x.find_element(By.XPATH,message))
        # logging.info(toast_text.text)
    # Check whether an account-offline alert pops up after login
    def check_account_alert(self):
        """Dismiss the forced-logout alert if it is present."""
        data=self.get_yaml_data('loginView.yaml')
        try:
            element = self.find_element(By.ID,data['commitBtn'])
        except NoSuchElementException:
            # No alert shown; nothing to dismiss.
            pass
        else:
            element.click()
    # Check the login state and, if logged in, log out
    def check_loginStatus(self):  # check login status
        """Return True (after logging out) if a user is logged in, else False."""
        self.check_market_ad()
        self.check_account_alert()
        data=self.get_yaml_data('loginView.yaml')
        try:
            self.find_element(By.ID,data['button_myself']).click()
            self.find_element(By.ID,data['username'])
        except NoSuchElementException:
            return False
        else:
            self.logout_action()
            return True
    # Log out
    def logout_action(self):
        """Navigate settings -> logout -> confirmation."""
        data=self.get_yaml_data('loginView.yaml')
        self.find_element(By.ID,data['settingBtn']).click()
        self.find_element(By.ID,data['logoutBtn']).click()
        self.find_element(By.ID,data['tip_commit']).click()
if __name__ == '__main__':
    # Manual smoke test: start an Appium session and attempt a login.
    driver = appium_desired()
    c = LoginView(driver)
    c.login_action('自学网2018', 'zxw')
|
import glob, os, pexpect, pytest, sys, time
from forge.tests.common import mktree, defuzz, match
from forge.tasks import sh
DIR = os.path.dirname(__file__)
# Discover *.spec files one directory below DIR and directly inside DIR.
SPECS = [os.path.relpath(n, DIR) for n in glob.glob(os.path.join(DIR, "*/*.spec"))] + \
        [os.path.relpath(n, DIR) for n in glob.glob(os.path.join(DIR, "*.spec"))]
# Unique per-run id; dots replaced since the id is substituted into names
# (e.g. docker tags) that may not allow them.
TEST_ID = ("test_id_%s" % time.time()).replace(".", "_")
@pytest.mark.parametrize("spec", SPECS)
def test(spec):
    """Run a single .spec file through the Runner in a scratch tree."""
    print
    test_spec = os.path.join(DIR, spec)
    test_dir = os.path.dirname(test_spec)
    if not os.path.samefile(DIR, test_dir):
        # Specs in their own directory get that directory's files copied
        # into the scratch tree, plus this canned global forge config.
        tree = {
            "forge.yaml": """
# Global forge configuration
# Normally you would not want to check this into git, but this is here
# for testing purposes.
docker-repo: registry.hub.docker.com/forgeorg
user: forgetest
password: >
  Zm9yZ2V0ZXN0
"""
        }
        for path, dirs, files in os.walk(test_dir):
            for name in files:
                key = os.path.join(os.path.relpath(path, test_dir), name)
                if key.startswith("./"):
                    key = key[2:]
                with open(os.path.join(path, name), "r") as fd:
                    tree[key] = fd.read()
    else:
        # Top-level specs start from an empty tree.
        tree = {}
    root = mktree(tree, TEST_ID=TEST_ID)
    print "TEST_ID: %s" % TEST_ID
    print "TEST_BASE: %s" % root
    with open(test_spec) as fd:
        ops = fd.read()
    runner = Runner(root, ops)
    runner.run()
# Parser modes for Runner.run: DEFAULT handles one op per statement;
# MULTILINE accumulates a body until an END line.
MULTILINE = "MULTILINE"
DEFAULT = "DEFAULT"
class Runner(object):
multiline = ('MATCH', 'FILE')
def __init__(self, base, spec):
self.base = base
self.cwd = base
self.environ = os.environ.copy()
self.timeout = 30
self.spec = spec
self.child = None
def run(self):
mode = DEFAULT
for line in self.spec.splitlines():
if mode == DEFAULT:
if not line.strip(): continue
for stmt in line.split(";"):
if mode == MULTILINE:
raise Exception("multiline op must be last in line")
parts = stmt.split(None, 1)
op = parts.pop(0)
arg = parts.pop(0) if parts else None
if op in self.multiline:
mode = MULTILINE
body = ""
continue
else:
self.dispatch(op, arg)
elif mode == MULTILINE:
if line.rstrip() == "END":
mode = DEFAULT
self.dispatch(op, arg, body)
else:
body += line + "\n"
if mode == MULTILINE:
raise Exception("unterminated multiline op")
self.wait()
def dispatch(self, op, arg, body=None):
attr = getattr(self, "do_%s" % op, None)
if attr is None:
assert False, "unrecognized op: %s" % op
elif op in self.multiline:
attr(arg, body)
else:
attr(arg)
def wait(self):
if self.child is not None:
self.child.expect(pexpect.EOF, timeout=self.timeout)
assert self.child.wait() == 0
def do_RUN(self, arg):
self.wait()
arg = arg.replace("TEST_ID", TEST_ID).replace("TEST_BASE", self.base)
print "RUN", arg
self.child = pexpect.spawn("sh", ["-c", arg], cwd=self.cwd, env=self.environ)
self.child.logfile = sys.stdout
def do_CWD(self, arg):
self.cwd = os.path.join(self.base, arg)
def do_ENV(self, arg):
parts = arg.split(None, 1)
if len(parts) > 1:
key, value = parts
value = sh("sh", "-c", "echo -n " + value, env=self.environ).output
else:
key = parts[0]
value = ""
self.environ[key] = value
def do_TIMEOUT(self, arg):
self.timeout = float(arg)
def do_OUT(self, arg):
self.child.expect_exact(arg.strip(), timeout=self.timeout)
def do_NOT(self, arg):
self.child.expect(pexpect.EOF, timeout=self.timeout)
assert arg not in self.child.before
def do_TYPE(self, arg):
if arg.strip().lower() == "<enter>":
self.child.sendline()
elif arg.strip().lower() == "<esc>":
self.child.send("\x1B")
else:
self.child.sendline(arg)
def do_EOF(self, arg):
self.child.sendeof()
def do_ERR(self, arg):
self.child.expect(pexpect.EOF, timeout=self.timeout)
assert self.child.wait() != 0
self.child = None
def do_MATCH(self, _, pattern):
pattern = unicode(pattern).strip()
self.child.expect(pexpect.EOF, timeout=self.timeout)
output = self.child.before.strip()
defuzzed = defuzz(output.replace(TEST_ID, "TEST_ID").replace(self.base, "TEST_BASE"))
if not match(defuzzed, pattern.strip()):
print "OUTPUT:"
print output
print "DEFUZZED OUTPUT:"
print defuzzed
print "PATTERN:"
print pattern
assert False
def do_FILE(self, name, body):
self.wait()
path = os.path.join(self.cwd, name)
directory = os.path.dirname(path)
if not os.path.exists(directory):
os.makedirs(directory)
with open(path, "write") as fd:
fd.write(body.replace("TEST_ID", TEST_ID))
|
#--encoding: utf-8 --
import numpy as np
import scipy as sp
class Lattice(object):
    """Grid of dipoles with nearest-neighbour interactions

    2D grid of dipoles which can either point up or down. Every pair of
    anti-parallel neighbours adds one unit of energy. On every simulation step,
    the lattice evolves by always minimizing energy and occasionally (depending
    on the temperature) gaining energy."""
    def __init__(self, state):
        """Initialize the lattice to the specified state (2D array of 1,0).

        Raises:
            Exception: if either side length is odd — the checkerboard
                update in step() needs even dimensions.
        """
        if state.shape[0] % 2 != 0 or state.shape[1] % 2 != 0:
            raise Exception('Lattice side lengths must be even')
        # NOTE: the former sp.byte/sp.empty/sp.sum/sp.random/sp.exp aliases
        # were deprecated and removed from SciPy's top-level namespace; they
        # were plain re-exports of NumPy, so numpy is used directly here.
        self.state = state.astype(np.byte)
        # Stores info on the neighbours of the slice I'm updating.
        # Floor division (//) keeps the shape integral on Python 3 as well;
        # the old "/ 2" produced floats there and broke array creation.
        self.dUpdown = np.empty((self.state.shape[0] // 2, self.state.shape[1] // 2),
                                dtype=np.byte)
        self.updateUp()
        self.updateUpDown()
    def updateUp(self):
        """Recount the number of up spins from scratch."""
        self._up = np.sum(self.state)
    def updateUpDown(self):
        """Recount the number of anti-parallel neighbour pairs from scratch."""
        # up ^ up = down ^ down = 0
        # up ^ down = down ^ up = 1
        # So if I sum the xor of all pairs, I get the number of up-down pairs.
        # The last two terms handle the periodic wrap-around edges.
        self._updown = np.sum(self.state[:-1,:] ^ self.state[1:,:]) \
                + np.sum(self.state[:,:-1] ^ self.state[:,1:]) \
                + np.sum(self.state[-1,:] ^ self.state[0,:]) \
                + np.sum(self.state[:,-1] ^ self.state[:,0])
    @property
    def up(self):
        """Number of spins pointing up"""
        return self._up
    @property
    def updown(self):
        """Number of up-down neighbours"""
        return self._updown
    def step(self, beta):
        """Simulate one time step with a temperature such that kT = 1/beta

        The unit of energy is that required to make two neighbours antiparallel
        """
        # Update spins by slices (the four 2x2-checkerboard offsets, in random
        # order). Cells within one slice are never neighbours, so their flips
        # can be computed and applied simultaneously (vectorized).
        for x, y in np.random.permutation([(0,0), (1,1), (1,0), (0,1)]):
            # Change in up-down neighbours for each cell I'm updating:
            # -2 plus the number of 'up' neighbours (sign-adjusted below).
            self.dUpdown[:,:] = -2
            # Handle wrapping (different for each slice)
            if x == 0:
                self.dUpdown += self.state[1::2,y::2]
                self.dUpdown[1:,:] += self.state[1:-1:2,y::2]
                self.dUpdown[0,:] += self.state[-1,y::2]
            else:
                self.dUpdown[:-1,:] += self.state[2::2,y::2]
                self.dUpdown[-1,:] += self.state[0,y::2]
                self.dUpdown += self.state[0::2,y::2]
            if y == 0:
                self.dUpdown += self.state[x::2,1::2]
                self.dUpdown[:,1:] += self.state[x::2,1:-1:2]
                self.dUpdown[:,0] += self.state[x::2,-1]
            else:
                self.dUpdown[:,:-1] += self.state[x::2,2::2]
                self.dUpdown[:,-1] += self.state[x::2,0]
                self.dUpdown += self.state[x::2,0::2]
            # Change sign if the cell is pointing down
            self.dUpdown *= self.state[x::2, y::2]*2-1
            # Flip spins with probability min{exp(-beta*dE), 1}
            flip = np.random.random(size=self.dUpdown.shape) \
                    < np.exp(-beta*self.dUpdown)
            self.state[x::2,y::2] ^= flip
            # Maintain the pair count incrementally (cheaper than recounting).
            self._updown += np.sum(self.dUpdown * flip)
        self.updateUp()
|
import requests
import os
from twilio.rest import Client
STOCK_NAME = "TSLA"
COMPANY_NAME = "Tesla Inc"
STOCK_ENDPOINT = "https://www.alphavantage.co/query"
NEWS_ENDPOINT = "https://newsapi.org/v2/everything"
# API credentials and phone numbers are taken from the environment.
STOCK_API_KEY = os.environ.get("STOCK_API_KEY")
NEWS_API_KEY = os.environ.get("NEWS_API_KEY")
SMS_API_KEY = os.environ.get("SMS_API_KEY")
SMS_SID = os.environ.get("SMS_SID")
MY_NUMBER = os.environ.get("MY_NUMBER")
SND_NUMBER = os.environ.get("SND_NUMBER")
# Fetch daily closing prices from Alpha Vantage.
stock_parmas = {
    "function": "TIME_SERIES_DAILY",
    "symbol": STOCK_NAME,
    "apikey": STOCK_API_KEY
}
response = requests.get(url=STOCK_ENDPOINT, params=stock_parmas)
response.raise_for_status()
data = response.json()['Time Series (Daily)']
# Entries are newest-first: index 0 = most recent close, 1 = the day before.
data_list = [value for (key, value) in data.items()]
yesterday_close = data_list[0]['4. close']
day_before_close = data_list[1]['4. close']
difference = float(yesterday_close) - float(day_before_close)
diff_percent = round((difference / float(yesterday_close)) * 100)
# NOTE(review): abs(...) >= 0 is always true, so news is fetched and texted on
# every run — presumably a real threshold (e.g. >= 5 percent) was intended.
if abs(diff_percent) >= 0:
    news_params = {
        "qInTitle": COMPANY_NAME,
        "apiKey": NEWS_API_KEY
    }
    news_response = requests.get(NEWS_ENDPOINT, params=news_params)
    news_response.raise_for_status()
    articles = news_response.json()['articles']
    # Text the three most recent headlines via Twilio.
    content_list = articles[:3]
    article_list = [f"Headline: {article['title']}. \nBrief: {article['description']}" for article in content_list]
    client = Client(SMS_SID, SMS_API_KEY)
    for article in article_list:
        # NOTE(review): Twilio normally sends *from* the Twilio-owned number —
        # confirm from_=MY_NUMBER / to=SND_NUMBER are not swapped.
        message = client.messages.create(
            body=article,
            from_=MY_NUMBER,
            to=SND_NUMBER
        )
|
from govr.test_runner import TestRunner
def coverage_report(weights):
    """Format per-package coverage results as a banner-framed report.

    weights: dict mapping package name -> {"coverage": <pct>}, plus a
    special "total_coverage" key holding the overall percentage.
    Returns the report as a single newline-joined string.

    Fix: dict.iteritems() is Python-2-only; .items() behaves the same
    there and also works on Python 3.
    """
    report = [
        "============================================================",
        "                      GOVRAGE REPORT                        ",
        "============================================================"
    ]
    report.extend(["[ %s ] -> [ %s%% ]" % (pkg, v["coverage"])
                   for (pkg, v) in weights.items() if pkg != "total_coverage"])
    report.extend([
        "Total Coverage: [ %s%% ]" % weights["total_coverage"],
        "============================================================"
    ])
    return '\n'.join(report)
class Report:
    """Runs the project's tests and formats the resulting coverage weights."""
    def __init__(self, args):
        # Only the project name is needed from the parsed CLI arguments.
        self.project = args.project
        self.runner = TestRunner(self.project)
    def run(self):
        """Execute the test runner and return the formatted coverage report."""
        return coverage_report(self.runner.run())
|
"""
ゼロから学ぶスパイキングニューラルネットワーク
- Spiking Neural Networks from Scratch
Copyright (c) 2020 HiroshiARAKI. All Rights Reserved.
"""
import numpy as np
import matplotlib.pyplot as plt
def stdp_ltp(dt, a=1.0, tc=20):
    """Long-term Potentiation: weight change for post-after-pre timing dt."""
    decay = np.exp(-dt / tc)
    return a * decay
def stdp_ltd(dt, a=-1.0, tc=20):
    """Long-term Depression: weight change for pre-after-post timing dt."""
    growth = np.exp(dt / tc)
    return a * growth
def stdp(dt, pre=-1.0, post=1.0, tc_pre=20, tc_post=20):
    """STDP rule: apply LTD to the negative part of dt and LTP to the rest.

    Returns a (ltd_values, ltp_values) pair for dt < 0 and dt >= 0.
    """
    ltd_values = stdp_ltd(dt[dt < 0], pre, tc_pre)
    ltp_values = stdp_ltp(dt[dt >= 0], post, tc_post)
    return ltd_values, ltp_values
if __name__ == '__main__':
    # Spike-time differences (t_post - t_pre) to evaluate.
    dt = np.arange(-50, 50, 0.5)
    # LTD (dt < 0) and LTP (dt >= 0) branches of the STDP curve.
    ltd, ltp = stdp(dt)
    plt.plot(dt[dt<0], ltd, label=r'LTD: $\Delta t < 0$')
    # Fixed legend label: this branch covers dt >= 0 (it was mislabeled
    # as \leq 0).
    plt.plot(dt[dt>=0], ltp, label=r'LTP: $\Delta t \geq 0$')
    plt.xlabel(r'$\Delta t = t_{post} - t_{pre}$')
    plt.ylabel(r'$\Delta w$')
    plt.grid()
    plt.legend()
    plt.show()
|
import dash_bootstrap_components as dbc
# Static three-item list group (dash-bootstrap-components demo component).
list_group = dbc.ListGroup(
    [
        dbc.ListGroupItem("Item 1"),
        dbc.ListGroupItem("Item 2"),
        dbc.ListGroupItem("Item 3"),
    ]
)
|
def number_of_occurrences(element, sample):
    """Return the number of occurrences of *element* in *sample*."""
    occurrences = sample.count(element)
    return occurrences
'''
Write a function that returns the number of occurrences of an element in an array.
Examples
sample = [0, 1, 2, 2, 3]
number_of_occurrences(0, sample) == 1
number_of_occurrences(4, sample) == 0
number_of_occurrences(2, sample) == 2
number_of_occurrences(3, sample) == 1
'''
|
import os
def clear(archiveDir):
    """Delete every file directly inside *archiveDir*, logging each deletion.

    Uses os.path.join instead of manual "/" concatenation so the path is
    built portably and trailing separators are handled.

    Note: os.remove raises on subdirectories; this assumes the archive
    contains only regular files, as the original did.
    """
    print("CLEARING ARCHIVE")
    for filename in os.listdir(archiveDir):
        os.remove(os.path.join(archiveDir, filename))
        print(" -- Deleted " + filename)
|
# -*- coding: utf-8 -*-
{
    # Odoo module manifest for the login-page reCAPTCHA addon.
    'name': "EHCS Login Captcha",
    'summary': """
        Add reCAPTCHA in your login page.""",
    # Fixed typo in the user-facing description: "It's goal" -> "Its goal".
    'description': """
        CAPTCHA stands for Completely Automated Public Turing Test to Tell Computers and Humans Apart.
        Its goal is to check if a user is a real person or a bot.
    """,
    'author': "ERP Harbor Consulting Services",
    'website': "http://www.erpharbor.com",
    'license': 'AGPL-3',
    'category': 'Web',
    'version': '13.0.1.0.0',
    # Only the core web module is required (the login template is extended).
    'depends': ['web'],
    'data': [
        'views/login_templates.xml',
    ],
    'images': [
        'static/description/banner.png',
    ],
}
|
# '##::::'##::::'###::::'##::: ##:'####:'########:'##:::'##:'##::::'##:'##::: ##:'########:'##::::'##:
# ##:::: ##:::'## ##::: ###:: ##:. ##::... ##..::. ##:'##:: ###::'###: ###:: ##: ##.....:: ###::'###:
# ##:::: ##::'##:. ##:: ####: ##:: ##::::: ##:::::. ####::: ####'####: ####: ##: ##::::::: ####'####:
# ##:::: ##:'##:::. ##: ## ## ##:: ##::::: ##::::::. ##:::: ## ### ##: ## ## ##: ######::: ## ### ##:
# . ##:: ##:: #########: ##. ####:: ##::::: ##::::::: ##:::: ##. #: ##: ##. ####: ##...:::: ##. #: ##:
# :. ## ##::: ##.... ##: ##:. ###:: ##::::: ##::::::: ##:::: ##:.:: ##: ##:. ###: ##::::::: ##:.:: ##:
# ::. ###:::: ##:::: ##: ##::. ##:'####:::: ##::::::: ##:::: ##:::: ##: ##::. ##: ########: ##:::: ##:
# :::...:::::..:::::..::..::::..::....:::::..::::::::..:::::..:::::..::..::::..::........::..:::::..::
# VanityMnem - create your vanity mnemonics - 2020 Valerio Vaccaro
# https://github.com/valerio-vaccaro/vanitymnem
# Trivial hacks by Rhea Myers <rhea@hey.com> .
# Don't use this version, it's been modified for a very specific purpose. :-)
import argparse
import os
import wallycore as wally
import re
import time
banner = """
'##::::'##::::'###::::'##::: ##:'####:'########:'##:::'##:'##::::'##:'##::: ##:'########:'##::::'##:
##:::: ##:::'## ##::: ###:: ##:. ##::... ##..::. ##:'##:: ###::'###: ###:: ##: ##.....:: ###::'###:
##:::: ##::'##:. ##:: ####: ##:: ##::::: ##:::::. ####::: ####'####: ####: ##: ##::::::: ####'####:
##:::: ##:'##:::. ##: ## ## ##:: ##::::: ##::::::. ##:::: ## ### ##: ## ## ##: ######::: ## ### ##:
. ##:: ##:: #########: ##. ####:: ##::::: ##::::::: ##:::: ##. #: ##: ##. ####: ##...:::: ##. #: ##:
:. ## ##::: ##.... ##: ##:. ###:: ##::::: ##::::::: ##:::: ##:.:: ##: ##:. ###: ##::::::: ##:.:: ##:
::. ###:::: ##:::: ##: ##::. ##:'####:::: ##::::::: ##:::: ##:::: ##: ##::. ##: ########: ##:::: ##:
:::...:::::..:::::..::..::::..::....:::::..::::::::..:::::..:::::..::..::::..::........::..:::::..::
VanityMnem - create your vanity mnemonics - 2020 Valerio Vaccaro
https://github.com/valerio-vaccaro/vanitymnem"""
def str2bool(v):
    """Parse a CLI boolean-ish value.

    Accepts an actual bool (returned unchanged) or a case-insensitive
    string such as yes/no, true/false, t/f, y/n, 1/0.

    Raises:
        argparse.ArgumentTypeError: for any unrecognized string.
    """
    if isinstance(v, bool):
        return v
    lowered = v.lower()
    if lowered in ('yes', 'true', 't', 'y', '1'):
        return True
    if lowered in ('no', 'false', 'f', 'n', '0'):
        return False
    raise argparse.ArgumentTypeError('Boolean value expected.')
# ---- command-line interface ----
parser = argparse.ArgumentParser(description='Create a valid Bitcoin mnemonic with a vanity address in a specific derivation.', epilog='MIT License - Copyright (c) 2020 Valerio Vaccaro')
parser.add_argument('-v', '--verbose', action='count', default=0, help='Be more verbose. Can be used multiple times.')
parser.add_argument('-n', '--network', help=' main, test (default=main)', default='main')
parser.add_argument('-p', '--pattern', help='Regex for pattern', default='^1Doge') #'^.*[vV][aA][lL][eE]')
parser.add_argument('-d', '--derivation', help="Base derivation (default=m/44'/0'/0')", default="m/44'/0'/0'")
parser.add_argument('-c', '--children', help='Check in children derivations from 0 to this value (default=100).', type=int, default=100)
feature_parser = parser.add_mutually_exclusive_group(required=True)
feature_parser.add_argument('--hardened', help='Add for have hardened child.', dest='hardened', action='store_true')
feature_parser.add_argument('--no-hardened', help='Add for have not hardened child.', dest='hardened', action='store_false')
parser.set_defaults(feature=True)
# Legacy for the 1 prefix.
parser.add_argument('-a', '--address', help='native_segwit, nested_segwit or legacy (default=legacy).', default='legacy')
args = parser.parse_args()
print(banner)
# check net
# Map the chosen network onto wallycore key-version and address-prefix flags.
if args.network == 'main':
    master_key_flags = wally.BIP32_VER_MAIN_PRIVATE
    native_segwit_address_flags = 'bc'
    nested_segwit_address_flags = wally.WALLY_ADDRESS_VERSION_P2SH_MAINNET
    legacy_address_flags = wally.WALLY_ADDRESS_VERSION_P2PKH_MAINNET
elif args.network == 'test':
    master_key_flags = wally.BIP32_VER_TEST_PRIVATE
    # NOTE(review): the bech32 HRP for testnet is normally 'tb', not 'bc' —
    # confirm this is intentional for the test network.
    native_segwit_address_flags = 'bc'
    nested_segwit_address_flags = wally.WALLY_ADDRESS_VERSION_P2SH_TESTNET
    legacy_address_flags = wally.WALLY_ADDRESS_VERSION_P2PKH_TESTNET
else:
    print('Wrong network type, choose between main or test.')
    exit(1)
# check address
if args.address not in ['native_segwit', 'nested_segwit', 'legacy']:
    print('Wrong address type, choose between native_segwit or nested_segwit or legacy.')
    exit(1)
# convert derivation
if args.derivation[0] != 'm':
    print("Use a correct derivation prefix like m/44'/0'/0'.")
    exit(1)
# Parse "m/44'/0'/0'" into a list of child indices; a trailing apostrophe
# marks a hardened index (0x80000000 offset).
path = []
for c in args.derivation.split('/'):
    der = c.split("'")
    if (der[0] == 'm'):
        continue
    if len(der) == 2:
        path = path + [0x80000000 + int(der[0])]
    else:
        path = path + [int(der[0])]
# Apostrophe suffix used when printing hardened child derivations.
hardened_notation = ''
if args.hardened == True:
    hardened_notation = '\''
pattern = re.compile(args.pattern)
i = 0
start = time.time()
# Brute-force search: draw entropy, build a mnemonic, derive child addresses,
# and stop as soon as one matches the vanity pattern.
while(True):
    i = i + 1
    # get entropy
    # 27 = 32 - prefix
    # NOTE(review): forcing the first bytes to b'1Doge' makes the entropy
    # partly predictable — deliberate in this modified version (see header).
    entropy = b'1Doge' + os.urandom(27)
    # calculate mnemonic
    mnemonic = wally.bip39_mnemonic_from_bytes(None, entropy)
    # calculate the seed
    seed = bytearray(64)
    password = ''
    wally.bip39_mnemonic_to_seed(mnemonic, password, seed)
    # calculate master key
    master_key = wally.bip32_key_from_seed(seed, master_key_flags, wally.BIP32_FLAG_SKIP_HASH)
    if args.verbose > 1:
        print('::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::')
        print('Seed: {}'.format(seed.hex()))
        print('Mnemonic: {}'.format(mnemonic))
        print('Master key: {}'.format(wally.bip32_key_to_base58(master_key, 0)))
        print('::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::')
    # derive a children
    # Try every child index up to --children under the base derivation.
    found = False
    for x in range(0, args.children + 1):
        child = x
        if args.hardened == True:
            child = child + 0x80000000
        derived = wally.bip32_key_from_parent_path(master_key, path + [child], wally.BIP32_FLAG_KEY_PRIVATE);
        if args.verbose > 1:
            print('Derivation: {}/{}{}'.format(args.derivation, x, hardened_notation))
        if args.address == 'native_segwit':
            # calculate native segwit address
            native_segwit = wally.bip32_key_to_addr_segwit(derived, native_segwit_address_flags, 0);
            if args.verbose > 1:
                print('Native segwit address: {}'.format(native_segwit))
            if pattern.match(native_segwit):
                found = True
        if args.address == 'nested_segwit':
            # calculate nested segwit address - base_58
            nested_segwit = wally.bip32_key_to_address(derived, wally.WALLY_ADDRESS_TYPE_P2SH_P2WPKH, nested_segwit_address_flags);
            if args.verbose > 1:
                print('Nested segwit addres: {}'.format(nested_segwit))
            if pattern.match(nested_segwit):
                found = True
        if args.address == 'legacy':
            # calculate legacy address - base_58
            legacy_address = wally.bip32_key_to_address(derived, wally.WALLY_ADDRESS_TYPE_P2PKH, legacy_address_flags);
            if args.verbose > 1:
                print('Legacy address: {}'.format(legacy_address))
            if pattern.match(legacy_address):
                found = True
        if args.verbose > 1:
            print('::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::')
        if found:
            break
    if found:
        break
    # Periodic progress report every 1000 tested mnemonics.
    if i%1000 == 0:
        if args.verbose > 0:
            end = time.time()
            print('::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::')
            print(' Processed {} mnemonics in {} seconds ({} mnemonics per second).'.format(i, round(end-start), round(i/(end-start))))
# Final report: the matching mnemonic, keys, and address.
end = time.time()
if args.verbose > 0:
    print('::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::')
    print(' Processed {} mnemonics in {} seconds ({} mnemonics per second).'.format(i, round(end-start), round(i/(end-start))))
print('::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::')
print('Tested mnemonics: {}'.format(i))
print('::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::')
print('Entropy: {}'.format(entropy))
print('Seed: {}'.format(seed.hex()))
print('Mnemonic: {}'.format(mnemonic))
print('Master key: {}'.format(wally.bip32_key_to_base58(master_key, 0)))
print('Derivation: {}/{}{}'.format(args.derivation, x, hardened_notation))
if args.address == 'native_segwit':
    print('Native segwit address: {}'.format(native_segwit))
if args.address == 'nested_segwit':
    print('Nested segwit addres: {}'.format(nested_segwit))
if args.address == 'legacy':
    print('Legacy address: {}'.format(legacy_address))
print('::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::')
|
from pyramid.config import Configurator
from pyramid.authentication import AuthTktAuthenticationPolicy
from pyramid.authorization import ACLAuthorizationPolicy
from pyramid.exceptions import NotFound
from frontend_manager.py.utils.security import groupfinder, RootFactory
from shared.BaseConfig import BaseConfig
from shared.InstData import InstData
from shared.utils import has_acs
from shared.LogParser import LogParser
from frontend_manager.py.utils.ViewManager import ViewManager
from shared.server_args import parse_args
from frontend_manager.py.utils.ConnectionManager import extend_app_to_asgi
from frontend_manager.py.utils.ConnectionManager import WebsocketManager
# ------------------------------------------------------------------
def setup_app():
    """Assemble and return the Pyramid WSGI application.

    Configures authentication/authorization policies, the Mako renderer,
    login/logout/not-found routes, the index page, one route per widget
    page, and static asset views.

    NOTE(review): relies on module-level globals (base_config, settings,
    app_name, app_prefix, log) being initialised before it is called —
    confirm call order in the start-up block below.
    """
    try:
        view_manager = ViewManager(base_config=base_config)
        config = Configurator(settings=settings, root_factory=RootFactory)
        # cookie-ticket authentication; groupfinder maps a user to its groups
        authn_policy = AuthTktAuthenticationPolicy(
            'sosecret',
            callback=groupfinder,
            hashalg='sha512',
        )
        authz_policy = ACLAuthorizationPolicy()
        config.set_authentication_policy(authn_policy)
        config.set_authorization_policy(authz_policy)
        config.include('pyramid_mako')
        # one shared Mako template renders every view
        renderer = app_name + ':templates/view_common.mak'
        # ------------------------------------------------------------------
        # forbidden view, which simply redirects to the login
        # ------------------------------------------------------------------
        config.add_forbidden_view(view_manager.view_forbidden)
        # ------------------------------------------------------------------
        # basic views, open to everyone, for login/logout/redirect
        # ------------------------------------------------------------------
        config.add_route('login', '/' + app_prefix + '/login')
        config.add_view(
            view_manager.view_login,
            route_name='login',
            renderer=renderer,
        )
        config.add_route('logout', '/' + app_prefix + '/logout')
        config.add_view(
            view_manager.view_logout,
            route_name='logout',
            renderer=renderer,
        )
        config.add_route('not_found', '/' + app_prefix + '/not_found')
        config.add_view(
            view_manager.view_not_found,
            context=NotFound,
            renderer=renderer,
        )
        # bare site root and the app prefix both land on the empty view
        config.add_route('/', '/')
        config.add_view(
            view_manager.view_empty,
            route_name='/',
            renderer=renderer,
        )
        config.add_route(app_prefix, '/' + app_prefix)
        config.add_view(
            view_manager.view_empty,
            route_name=app_prefix,
            renderer=renderer,
        )
        # ------------------------------------------------------------------
        # permission to view index page and sockets (set in Models.py for
        # all groups of logged-in users)
        # ------------------------------------------------------------------
        perm = 'permit_all'
        # ------------------------------------------------------------------
        # the index page
        config.add_route('index', '/' + app_prefix + '/' + 'index')
        config.add_view(
            view_manager.view_index,
            route_name='index',
            renderer=renderer,
            permission=perm,
        )
        # # the uri for sockets
        # config.add_route('ws', '/ws')
        # config.add_view(
        #     # view_manager.view_index,
        #     view_manager.socket_view,
        #     route_name='ws',
        #     permission=perm,
        # )
        # ------------------------------------------------------------------
        # privileged views (right now, only for pre-defined users in Models.init_user_passes)
        # ------------------------------------------------------------------
        perm = 'permit_a'
        # ------------------------------------------------------------------
        # one route + view per configured widget page
        for view_name in base_config.widget_info:
            config.add_route(view_name, '/' + app_prefix + '/' + view_name)
            config.add_view(
                view_manager.view_common,
                route_name=view_name,
                permission=perm,
                renderer=renderer,
            )
        # ------------------------------------------------------------------
        # add paths the server will be able to access for resources
        # see: http://docs.pylonsproject.org/projects/pyramid/en/latest/narr/assets.html
        # see: http://docs.pylonsproject.org/projects/pyramid_cookbook/en/latest/pylons/static.html
        # ------------------------------------------------------------------
        rel_path = '../../'
        static_views = ['js', 'fonts', 'static', 'styles', 'templates']
        for static_view in static_views:
            config.add_static_view(static_view, rel_path + static_view, cache_max_age=1)
        wsgi_app = config.make_wsgi_app()
    except Exception as e:
        log.info([['c', e]])
        raise e
    return wsgi_app
# ------------------------------------------------------------------
# ------------------------------------------------------------------
# Module start-up: parse the server settings, build the shared config
# and logging objects, then create the WSGI app and extend it to ASGI
# so the websocket route can be served alongside it.
# NOTE(review): if parse_args/BaseConfig/LogParser raise before `log`
# is bound, the except clause itself fails with NameError — confirm.
# ------------------------------------------------------------------
try:
    app_name = 'frontend_manager'
    settings = parse_args(app_name=app_name)
    # the app name (corresponding to the directory name)
    app_name = settings['app_name']
    # southern or northern CTA sites have different telescope configurations
    site_type = settings['site_type']
    # the address for the site
    app_host = settings['app_host']
    # local log file location
    log_file = settings['log_file']
    # logging level
    log_level = settings['log_level']
    # the port for the site
    app_port = settings['app_port']
    # the redis port used for this site
    redis_port = settings['redis_port']
    # define the prefix to all urls (must be a non-empty string)
    app_prefix = settings['app_prefix']
    # global setting to allow panel synchronization
    allow_panel_sync = bool(settings['allow_panel_sync'])
    # is this a simulation
    is_simulation = settings['is_simulation']
    # development mode
    debug_opts = settings['debug_opts']
    # do we flush redis on startup
    do_flush_redis = settings['do_flush_redis']
    # all allowed view names
    widget_info = settings['widget_info']
    # all allowed widget types (class names)
    allowed_widget_types = settings['allowed_widget_types']
    # server-side path and client-side URL of the websocket endpoint
    websocket_postfix = '/websockets'
    websocket_route = {
        'server':
            '/' + app_prefix + websocket_postfix,
        'client':
            'ws://' + app_host + ':' + str(app_port) + '/' + app_prefix + websocket_postfix,
    }
    # ------------------------------------------------------------------
    # instantiate the general settings class (must come first!)
    # ------------------------------------------------------------------
    base_config = BaseConfig(
        site_type=site_type,
        redis_port=redis_port,
        app_port=app_port,
        app_prefix=app_prefix,
        app_host=app_host,
        log_level=log_level,
        websocket_route=websocket_route,
        allow_panel_sync=allow_panel_sync,
        debug_opts=debug_opts,
        is_simulation=is_simulation,
        widget_info=widget_info,
        allowed_widget_types=allowed_widget_types,
    )
    log = LogParser(
        base_config=base_config,
        title=__name__,
        log_level=log_level,
        log_file=log_file,
    )
    log.info([['wg', ' - Starting pyramid app -', app_name, '...']])
    log.info([['c', ' - has_acs = '], [('g' if has_acs else 'r'), has_acs]])
    # dump the parsed settings to the log as colour-coded key/value pairs
    settings_log = [['g', ' - server settings:\n']]
    for k, v in settings.items():
        settings_log += [['b', str(k)], [': ']]
        settings_log += [['c', str(v)], [', ']]
    log.info(settings_log)
    # # do_flush_redis = True
    # if do_flush_redis:
    #     from shared.RedisManager import RedisManager
    #     log.warn([['wr', ' ---- flushing redis ... ----']])
    #     _redis = RedisManager(name='_init_', base_config=base_config, log=log)
    #     _redis.redis.flushall()
    # set the list of telescopes for this particular site and attach it to base_config
    InstData(base_config=base_config)
    # expose the shared configuration to all websocket handlers
    setattr(WebsocketManager, 'base_config', base_config)
    # ------------------------------------------------------------------
    wsgi_app = setup_app()
    app = extend_app_to_asgi(wsgi_app, websocket_route)
except Exception as e:
    log.info([['c', e]])
    raise e
|
# -*- coding: utf-8 -*-
from app.models.meta import metadata, Base
from app.models.users import User
from app.utils import Enum
from sqlalchemy import Table, Column, Integer, String, ForeignKey, DateTime
from sqlalchemy.orm import mapper, relationship
import datetime
import web
# RESULTS: one row per (tournament, user) pair.  `status` is a single
# character (see Result.STATUSES); buyin/rank/profit are NULL until the
# result is actual.
results_table = Table(
    "RESULTS",
    metadata,
    Column("id", Integer, primary_key=True, nullable=False),
    Column("tournament_id", Integer, ForeignKey("TOURNAMENTS.id"), nullable=False),
    Column("user_id", Integer, ForeignKey("USERS.id"), nullable=False),
    Column("status", String(1), nullable=False),
    Column("buyin", Integer, nullable=True),
    Column("rank", Integer, nullable=True),
    Column("profit", Integer, nullable=True),
    Column("last_registration_dt", DateTime, nullable=True)
)
class _Result(Base):
    """Behaviour shared by tournament and season results."""

    def __repr__(self):
        # TODO: display the actual class
        return "<Result(%s,%s,%s)>" % (self.user.pseudonym, self.buyin, self.rank)

    @property
    def net_profit(self):
        """Profit minus buy-in, or None when unknown or not strictly positive."""
        if self.buyin is None or self.profit is None:
            return None
        if self.profit <= self.buyin:
            return None
        return self.profit - self.buyin
class Result(_Result):
    """Represents a single tournament result for one player."""
    # A = Absent, M = Missing, P = Present
    STATUSES = Enum(["A", "M", "P"])
    # The last player of the game does not get 0 but MIN_SCORE instead
    MIN_SCORE = 5

    def __init__(self, user=None, status=None, buyin=None, rank=None, profit=None, last_registration_dt=None):
        self.user = user
        self.status = status
        self.buyin = buyin
        self.rank = rank
        self.profit = profit
        self.last_registration_dt = last_registration_dt

    def __eq__(self, other):
        # BUG FIX: comparing against a non-Result (e.g. None) used to raise
        # AttributeError; returning NotImplemented follows the Python
        # convention and lets `==` fall back gracefully.
        if not isinstance(other, Result):
            return NotImplemented
        return self.user == other.user \
            and self.status == other.status \
            and self.buyin == other.buyin \
            and self.rank == other.rank \
            and self.profit == other.profit

    @property
    def actual(self):
        """ Is the result actual, i.e. does it represent real data (to be displayed, for instance) """
        return self.status == Result.STATUSES.P

    @property
    def score(self):
        """Score on a 100-point scale by rank; the last player gets MIN_SCORE, not 0."""
        if self.rank is None:
            return None
        # parentheses make the original precedence explicit: when the
        # computed score is 0 (last place), `or` substitutes MIN_SCORE
        return (100 - 100 * self.rank / self.tournament.nb_attending_players) or self.MIN_SCORE
# Handy function which returns a tuple composed of the sort keys of an instance.
# May also be used with the type parameter (Result) so that the ORDER BY clause
# can be easily set up on the ORM side.
def result_sort_keys(r):
    """Return the sort-key tuple (status, rank, last registration, user id).

    A missing last_registration_dt sorts first (datetime.min stand-in).
    Converted from a named lambda (PEP 8 E731) — same callable interface.
    """
    return (r.status, r.rank, r.last_registration_dt or datetime.datetime(datetime.MINYEAR, 1, 1), r.user_id)
class SeasonResult(_Result):
    """A player's aggregated result over a whole season."""

    def __init__(self, user, attended, buyin, rank, profit, score):
        # bulk tuple-unpack keeps the attribute list in one place
        (self.user, self.attended, self.buyin,
         self.rank, self.profit, self.score) = (user, attended, buyin,
                                                rank, profit, score)

    @property
    def actual(self):
        """ Is the result actual, i.e. does it represent real data (to be displayed, for instance) ? """
        return True
# Classical-mapping wire-up: bind Result to RESULTS, eagerly joining the
# related User so repr()/display never triggers a lazy load.
mapper(Result, results_table, properties={
    "user": relationship(User, lazy="joined")
})
web.debug("[MODEL] Successfully mapped Result class")
|
from django.db import models
from django.urls import reverse
# from django.utils.text import slugify
# Create your models here.
class Posts(models.Model):
    """A blog post authored by an authenticated user."""
    user = models.ForeignKey('auth.User', on_delete=models.CASCADE)
    title = models.CharField(max_length=50)
    content = models.TextField()

    def publish(self):
        """Persist the post."""
        self.save()

    def comments(self):
        # NOTE(review): this method name clashes with the reverse accessor
        # that Comments.post (related_name='comments') installs on Posts;
        # Django's related descriptor replaces class attributes, so this
        # method is likely shadowed/dead — confirm before relying on it.
        return self.comments.all()

    def get_absolute_url(self):
        """URL of this post's detail page."""
        return reverse('blogem:posts_detail', kwargs={'pk': self.pk})

    def __str__(self):
        return self.title
class Comments(models.Model):
    """A comment on a Posts instance; `user` is a free-text name, not a FK."""
    post = models.ForeignKey(
        'blogem.Posts', related_name='comments', on_delete=models.CASCADE)
    user = models.CharField(max_length=25)
    content = models.TextField()
    # True when the comment was made from the app itself — TODO confirm semantics
    app_comment = models.BooleanField(default=True)

    def get_absolute_url(self):
        """After acting on a comment, go back to the post list."""
        return reverse('blogem:post_list')

    def cmt_app(self):
        """Persist the comment."""
        self.save()

    def __str__(self):
        return self.user
|
import sys
import os
import glob
# NOTE(review): sys/os/glob appear unused in this setup script — possibly
# leftovers from an earlier revision; left untouched here.
from distutils.core import setup

# Package metadata for the `experiments` numerical-experiments engine.
setup(
    name="experiments",
    version='0.1',
    description='Python numerical experiments engine',
    author='Tigran Saluev',
    author_email='tigran.saluev(at)gmail.com',
    url='http://github.com/Saluev/python-experiments',
    packages = ['experiments', 'experiments.computations', 'experiments.computations.caching'],
)
|
import PagePost
import RandomMeme
import time
import syslog

# Posts a random meme to the page every `minutes` minutes, logging
# progress to syslog.  Runs forever.
actual_meme = 1
folder = "/home/marisa/Programas/AutismBot/"
meme = RandomMeme.Conscious_Meme(folder)
minutes = 30
ver = "AutismBot v1.3"
seconds = minutes * 60
syslog.openlog('AutismBot')
syslog.syslog("Bot iniciado")
while True:
    try:
        # BUG FIX: the original did `meme = meme.make()`, rebinding the
        # generator object to its own result, so every iteration after the
        # first raised and the bot spun logging "Erro" forever.
        result = meme.make()
        random_meme = result[0]
        text = ""
        post_id = PagePost.photo(random_meme, text)
        syslog.syslog(str(actual_meme) + " memes feitos")
        actual_meme += 1
        # one sleep instead of `seconds` one-second sleeps
        time.sleep(seconds)
    except Exception:
        # BUG FIX: a bare `except:` also swallowed KeyboardInterrupt and
        # SystemExit, making the bot impossible to stop with Ctrl-C.
        syslog.syslog("Erro")
        continue
|
import json
from unittest import TestCase
from unittest.mock import patch
from app.adapter import PokemonsRequest
from tests.fixtures.mocks import API_MOCK_RESPONSE
from app import app
class ServiceTestCase(TestCase):
    """Tests the /pokemon endpoint with the upstream API call mocked out."""

    def setUp(self):
        # push an app context so request handlers can run outside a server
        self.app = app
        self.app_context = self.app.app_context()
        self.app_context.push()
        self.client = self.app.test_client(use_cookies=True)

    @patch.object(PokemonsRequest, "request_data", return_value=API_MOCK_RESPONSE)
    def test_all_pokemons(self, mocked_request_data):
        # return_value set again here — redundant with the decorator, but harmless
        mocked_request_data.return_value = API_MOCK_RESPONSE
        response = self.client.get('/pokemon')
        data = response.json
        print(data)
        self.assertTrue(mocked_request_data.called)
        self.assertTrue(response.status_code == 200)
#self.assertEqual(data,) |
import youtube_dl
import variables as var
def get_playlist_info(url, start_index=0, user=""):
    """Extract flat playlist info from `url` and return a list of track dicts.

    Each dict carries the track title/url plus the playlist title and the
    original playlist url, capped at the configured max_track_playlist.
    Retries extraction up to `download_attempts` times on failure.
    """
    items = []
    ydl_opts = {
        'extract_flat': 'in_playlist'
    }
    with youtube_dl.YoutubeDL(ydl_opts) as ydl:
        attempts = var.config.getint('bot', 'download_attempts', fallback=2)
        for _ in range(attempts):
            try:
                info = ydl.extract_info(url, download=False)
                playlist_title = info['title']
                max_tracks = var.config.getint('bot', 'max_track_playlist')
                for j in range(start_index, min(len(info['entries']), start_index + max_tracks)):
                    entry = info['entries'][j]
                    # "Unknown Title" when the json carries no title
                    title = entry['title'] if 'title' in entry else "Unknown Title"
                    # BUG FIX: the original rebound the `url` parameter here,
                    # so 'playlist_url' below stored a *track* url.  Use a
                    # separate name and keep the playlist url intact.
                    track_url = entry['url'] if entry['url'][0:4] == 'http' else "https://www.youtube.com/watch?v=" + entry['url']
                    music = {'type': 'url',
                             'title': title,
                             'url': track_url,
                             'user': user,
                             'from_playlist': True,
                             'playlist_title': playlist_title,
                             'playlist_url': url,
                             'ready': 'validation'}
                    items.append(music)
                # BUG FIX: without this break, a successful extraction was
                # repeated `attempts` times, duplicating every track.
                break
            except Exception:
                # extraction failed — retry (bare `except:` would also have
                # swallowed KeyboardInterrupt)
                pass
    return items
|
from utility import generate_data, average
from linear import linear_sort, linear_inline_sort
from time import time

# Benchmark linear_sort over many small random datasets and report the
# mean per-call wall time.
times = []
num_tests = 1000000
for i in range(num_tests):
    # renamed from `set`, which shadowed the builtin
    data = generate_data(5)
    end_index = len(data) - 1
    start = time()
    # linear_inline_sort(data, 0, end_index)
    linear_sort(data)
    end = time()
    times.append(end - start)
print('Average time: ' + str(average(times)))
|
## Pre-processing and library importing.
import csv
import cv2
import matplotlib.image as mpimg
import numpy as np
import os
import tensorflow as tf
current_dir = os.getcwd()
lines = []
from keras.preprocessing import image
from keras.models import Model
from keras.layers import Dense, GlobalAveragePooling2D, Dropout, Flatten
from keras import backend as K
from keras.layers import Input, Lambda
import tensorflow as tf
import sklearn
from sklearn.model_selection import train_test_split
## Extracting data from csv file (one row per recorded frame triplet).
with open('./data/driving_log.csv') as csvfile:
    reader = csv.reader(csvfile)
    for line in reader:
        lines.append(line)
## Train-validation split (80-20%); lines[1:] skips the CSV header row.
train_samples, validation_samples = train_test_split(lines[1:], test_size=0.2)
## Creating the generator.
def generator(lines, batch_size):
    """Endless batch generator for Keras fit_generator.

    Each CSV row contributes five samples: the center/left/right camera
    frames (side-camera steering corrected by +/-0.2) plus horizontally
    flipped left/right frames with negated angles.  All frames are
    cropped to rows 60+ so only the road is kept.
    """
    num_samples = len(lines)
    while True:
        # BUG FIX: sklearn.utils.shuffle returns a shuffled *copy*; the
        # original discarded the return value, so the sample order was
        # never actually reshuffled between epochs.
        lines = sklearn.utils.shuffle(lines)
        for offset in range(0, num_samples, batch_size):
            batch_samples = lines[offset:offset + batch_size]
            images = []
            angles = []
            for batch_sample in batch_samples:
                center_name = current_dir + '/my_data/IMG/' + batch_sample[0].split('/')[-1]
                left_name = current_dir + '/my_data/IMG/' + batch_sample[1].split('/')[-1]
                right_name = current_dir + '/my_data/IMG/' + batch_sample[2].split('/')[-1]
                center_image = mpimg.imread(center_name)
                left_image = mpimg.imread(left_name)
                right_image = mpimg.imread(right_name)
                center_angle = float(batch_sample[3])
                left_angle = center_angle + 0.2
                right_angle = center_angle - 0.2
                ### Flipped copies balance left/right turns in the data.
                left_flip = np.fliplr(left_image)
                right_flip = np.fliplr(right_image)
                lf_angle = (-1.0) * left_angle
                rf_angle = (-1.0) * right_angle
                ## array subsetting crops the top 60 rows (sky/scenery),
                ## keeping only the road lanes
                images.extend([center_image[60:, :], left_image[60:, :], right_image[60:, :], left_flip[60:, :], right_flip[60:, :]])
                angles.extend([center_angle, left_angle, right_angle, lf_angle, rf_angle])
            X_train = np.array(images)
            y_train = np.array(angles)
            yield sklearn.utils.shuffle(X_train, y_train)
train_generator = generator(train_samples,512)
validation_generator = generator(validation_samples,512)
from keras.models import Sequential
from keras.layers import Dense, Dropout, Flatten, Lambda, ELU, ReLU,Conv2D
model = Sequential()
## Normalizing the incoming image data to [-1, 1]; 100 rows because the
## generator crops 60 rows off the original 160-row frames.
model.add(Lambda(lambda x: x/127.5 -1.,
                 input_shape = (100,320,3),
                 output_shape = (100,320,3)))
## First convolution layer with 16 filters and kernel size = 8, stride = 4
## larger stride is used to downsample the image.
model.add(Conv2D(16, kernel_size= (8, 8), strides=(4, 4), padding="same"))
## Added an activation layer for adding nonlinearity.
model.add(ReLU())
## Second convolution layer with 32 filters and kernel size = 5, stride = 2
## stride = 2, again downsamples the image.
model.add(Conv2D(32, kernel_size= (5, 5), strides=(2, 2), padding="same"))
## Added an activation layer for adding nonlinearity.
model.add(ReLU())
## Third convolution layer with 64 filters and kernel size = 5, stride = 2
## stride = 2, again downsamples the image.
model.add(Conv2D(64, kernel_size= (5, 5), strides=(2, 2), padding="same"))
## Added an activation layer for adding nonlinearity.
model.add(ReLU())
## Flattening the model so we can connect the dense layers.
model.add(Flatten())
## Dropout layer to avoid overfitting after flattening.
model.add(Dropout(.2))
## Dense (fully connected) layer with 512 nodes.
model.add(Dense(512))
model.add(ReLU())
## Dense layer with 256 nodes.
model.add(Dense(256))
model.add(ReLU())
## Dense layer with 10 nodes.
model.add(Dense(10))
model.add(ReLU())
## Final dense layer with 1 node — the model predicts a steering angle.
model.add(Dense(1))
# from keras.models import load_model
from keras.models import load_model
## Iterative training: load the previously trained weights, then train
## on the new dataset.
## NOTE(review): load_model REPLACES the Sequential model assembled
## above — the freshly built architecture is discarded.  Confirm this
## is the intended fine-tuning workflow.
model = load_model('model.h5')
## Loss is mean-squared error with the 'Adam' optimizer.
model.compile(loss = 'mse',optimizer='adam')
## A larger batch_size helped optimize this architecture faster.
batch_size = 512
## 80-20% train-validation split reduces overfitting; a generator is
## used so the full dataset never has to fit in RAM at once.
model.fit_generator(train_generator, \
                    steps_per_epoch=len(train_samples)//batch_size,\
                    validation_data = validation_generator, \
                    validation_steps = len(validation_samples)//batch_size,\
                    epochs=4,verbose=1)
## Save the model
model.save('model.h5')
|
from django.urls import path, re_path
from .views import (
Dashboard,
DashboardPrint,
# InputUndss,
InputUndssView,
InputMasterIncidentView,
# ImportDataView,
UndssDetailView,
MasterIncidentDetailView,
get_district,
get_area_city,
get_incident_subtype,
load_district,
load_subtype,
load_area,
load_cityillage,
UndssImportView,
MasterIncidentsImportView,
)
from . import views
# URL routing for the dashboard app: dashboard + print view, incident
# input/detail views, CSV import flows (with confirm step), and the
# AJAX endpoints backing the chained location/incident-type dropdowns.
urlpatterns = [
    path('', Dashboard, name='dashboard'),
    # path('input/', InputUndss, name='inputdashboard'),
    re_path(r'^print$', DashboardPrint, name='dashboard_print'),
    path('input/', InputUndssView.as_view(), name='inputdashboard'),
    path('inputmaster/', InputMasterIncidentView.as_view(), name='inputmasterincident'),
    path('master_incident_detail/<int:pk>/', MasterIncidentDetailView.as_view(), name='master_incident_detail'),
    path('incident_detail/<int:pk>/', UndssDetailView.as_view(), name='incident_detail'),
    # path('import/', ImportDataView.as_view(), name='importdata'),
    # two-step import: preview, then the confirm=True variant commits
    path('import_bysource/', UndssImportView.as_view(), name='importdataundss'),
    path('confirm_import_bysource/', UndssImportView.as_view(confirm=True), name='confirmimportdataundss'),
    path('import_master/', MasterIncidentsImportView.as_view(), name='importmasterdataundss'),
    path('confirm_import_master/', MasterIncidentsImportView.as_view(confirm=True), name='confirmimportmasterdataundss'),
    # chained_dropdown_url
    path('get_district/<int:province_id>/', get_district, name='get_district'),
    path('get_area_city/<int:province_id>/<int:district_id>/', get_area_city, name='get_area_city'),
    path('get_incident_subtype/<int:incidenttype_id>/', get_incident_subtype, name='get_incident_subtype'),
    path('get/district/', load_district, name='ajax_load_district'),
    path('get/subtype/', load_subtype, name='ajax_load_subtype'),
    path('get/area/', load_area, name='ajax_load_area'),
    path('get/cityvlillage/', load_cityillage, name='ajax_load_cityillage'),
] |
# Generated by Django 3.2.8 on 2021-10-28 06:58
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: make IntentSamples.product_ref nullable."""
    dependencies = [
        ('chatbot', '0001_initial'),
    ]
    operations = [
        migrations.AlterField(
            model_name='intentsamples',
            name='product_ref',
            field=models.IntegerField(null=True),
        ),
    ]
|
import unittest
import os
import json
import time
from six.moves import urllib
from click.testing import CliRunner
import unfurl.configurators # python2.7 workaround
import unfurl.configurators.shell # python2.7 workaround
import unfurl.configurators.supervisor # python2.7 workaround
from unfurl.yamlmanifest import YamlManifest
from unfurl.job import Runner, JobOptions
manifest = """\
apiVersion: unfurl/v1alpha1
kind: Ensemble
spec:
service_template:
imports:
- repository: unfurl
file: configurators/supervisor-template.yaml
topology_template:
node_templates:
# localhost:
# directives:
# - select
# type: tosca.nodes.Compute
supervisord:
type: unfurl.nodes.Supervisor
properties:
homeDir: . # unix socket paths can't be > 100 characters
# requirements:
# - host: localhost
unfurl_run_runner:
type: unfurl.nodes.ProcessController.Supervisor
properties:
name: test
program:
command: python3 -m http.server -b 127.0.0.1 8012
redirect_stderr: true
stdout_logfile: '%(here)s/test.log'
requirements:
- host: supervisord
"""
class SupervisorTest(unittest.TestCase):
    """Deploys the supervisor ensemble, verifies the managed HTTP server
    responds, then undeploys — cleaning up any stray supervisord."""

    def test_supervisor(self):
        cliRunner = CliRunner()
        with cliRunner.isolated_filesystem():
            runner = Runner(YamlManifest(manifest))
            try:
                job = runner.run(JobOptions(startTime=1))  # deploy
                assert not job.unexpectedAbort, job.unexpectedAbort.getStackTrace()
                summary = job.jsonSummary()
                self.assertEqual(
                    {
                        "id": "A01110000000",
                        "status": "ok",
                        "total": 4,
                        "ok": 4,
                        "error": 0,
                        "unknown": 0,
                        "skipped": 0,
                        "changed": 3,
                    },
                    summary["job"],
                )
                # print(json.dumps(summary, indent=2))
                # give supervisord a moment to start the managed server
                time.sleep(0.25)
                f = urllib.request.urlopen("http://127.0.0.1:8012/")
                expected = b"Directory listing for /"
                self.assertIn(expected, f.read())
                runner = Runner(YamlManifest(job.out.getvalue()))
                job = runner.run(JobOptions(workflow="undeploy", startTime=2))
                assert not job.unexpectedAbort, job.unexpectedAbort.getStackTrace()
                summary = job.jsonSummary()
                # print(json.dumps(summary, indent=2))
                self.assertEqual(
                    {
                        "id": "A01120000000",
                        "status": "ok",
                        "total": 3,
                        "ok": 3,
                        "error": 0,
                        "unknown": 0,
                        "skipped": 0,
                        "changed": 3,
                    },
                    summary["job"],
                )
            finally:
                # stop a stray supervisord left behind by a failed run
                if os.path.exists("supervisord/local/supervisord.pid"):
                    with open("supervisord/local/supervisord.pid") as f:
                        pid = f.read()
                    print("killing", pid)
                    import signal
                    # BUG FIX: os.kill() requires an integer pid AND a
                    # signal number; the original passed the raw string
                    # and no signal, raising TypeError instead of killing.
                    os.kill(int(pid.strip()), signal.SIGTERM)
|
import asyncio
import os
import unittest
from urllib.error import URLError
from aiohttp import ClientConnectorError
from integration_tests.env_variable_names import SLACK_SDK_TEST_BOT_TOKEN
from integration_tests.helpers import async_test
from slack import WebClient
class TestWebClient(unittest.TestCase):
    """Verifies that an unreachable proxy surfaces the expected
    connection error from both the sync and async Slack clients."""

    def setUp(self):
        # deliberately unresolvable proxy host
        self.proxy = "http://invalid-host:9999"
        self.bot_token = os.environ[SLACK_SDK_TEST_BOT_TOKEN]

    def tearDown(self):
        pass

    def test_proxy_failure(self):
        # sync path: urllib-based transport raises URLError
        client: WebClient = WebClient(
            token=self.bot_token,
            run_async=False,
            proxy=self.proxy,
            loop=asyncio.new_event_loop())
        with self.assertRaises(URLError):
            client.auth_test()

    @async_test
    async def test_proxy_failure_async(self):
        # async path: aiohttp raises ClientConnectorError instead
        client: WebClient = WebClient(
            token=self.bot_token,
            proxy=self.proxy,
            run_async=True
        )
        with self.assertRaises(ClientConnectorError):
            await client.auth_test()
|
#1 Quadratic equation a*x^2 + b*x + c = 0
import math

a = int(input("enter a "))
b = int(input("enter b "))
c = int(input("enter c "))
# discriminant
D = b**2 - 4 * a * c
print(D)
if D < 0:
    print("The result is a complex value")
elif D == 0:
    # BUG FIX: was `-b / 2 * a`, which evaluates as (-b / 2) * a.
    # The double root is -b / (2*a); also print it (it was silently dropped).
    x = -b / (2 * a)
    print(x)
else:
    x1 = ((-b + math.sqrt(D)) / (2 * a))
    x2 = ((-b - math.sqrt(D)) / (2 * a))
    print(x1, x2)
#2 Median of three numbers
a = int(input("enter a "))
b = int(input("enter b "))
c = int(input("enter c "))
l_list = [c, b, a]
l_list.sort()
print(f"Medium is: {l_list[1]}")
# Num
# The user enters two numbers; print them on one line, comma-separated, using formatting
a = int(input("enter a "))
b = int(input("enter b "))
x = '{0}, {1}'.format(a, b)
print(x)
# next
|
'''Common dataclasses'''
from dataclasses import dataclass
from datetime import datetime
@dataclass
class NewsArticle:
    """A news article: headline, publication timestamp, and body text.

    The module docstring declares these 'Common dataclasses', but the
    decorator was missing — bare annotations alone create no fields, so
    instances never actually carried this data.
    """
    headline: str
    publish_on: datetime
    content: str
|
import sys
from lib.intcode import Machine

# Read the Intcode program: filename from argv, or prompted on stdin
# when absent (or when only the -v verbosity flag is given).
if len(sys.argv) == 1 or sys.argv[1] == '-v':
    print('Input filename:')
    f = str(sys.stdin.readline()).strip()
else:
    f = sys.argv[1]
verbose = sys.argv[-1] == '-v'
# BUG FIX: `for l in open(f)` leaked the file handle; use a context
# manager.  Semantics preserved: each line is a comma-separated Intcode
# program, and the last line wins.
with open(f) as program_file:
    for l in program_file:
        mreset = [int(x) for x in l.strip().split(',')]
class SpringDroid:
    """Wrapper around an Intcode Machine that feeds it springscript programs."""

    def __init__(self, m):
        # keep the pristine program; the CPU runs on a copy
        self.__m = m
        self.__cpu = Machine(self.__m[:])
        if verbose: self.__cpu.toggle_verbose()

    def run(self, program):
        """Boot the CPU, feed all but the last line silently, then run the
        final command (WALK/RUN) which triggers the droid."""
        cpu = self.__cpu
        cpu.boot()
        cpu.run()
        self.out()
        final = program.pop()
        for line in list(program):
            self.instruct(line, silent=True)
        self.instruct(final)

    def out(self, tostring: bool = True, silent: bool = False):
        """Drain CPU output: values > 127 are numeric answers, the rest ASCII."""
        cpu = self.__cpu
        o = []
        while cpu.has_output():
            b = cpu.output()
            o.append(b if b > 127 else chr(b))
        so = ''.join(map(str, o))
        if not silent:
            print(so)
        return so if tostring else o

    def instruct(self, command: str, tostring: bool = True, silent: bool = False):
        '''
        - convert text command to ASCII and feed to computer.
        - return output
        '''
        if not silent:
            print('INSTR:', command)
        cpu = self.__cpu
        for c in map(ord, command):
            cpu.run(c)
        cpu.run(10)  # newline terminates the command
        return self.out(tostring, silent)
def one(d):
    '''
    Solution 1
    '''
    print('Solution 1 \n--------------------------------------------------')
    droid = SpringDroid(d)
    '''
    notes:
    -------------------------------
    1. droid jumps 4 steps at a time
    2. always check to see that tile D (4th tile) is solid (for landing)
    3. if you want to land on an island (###.##..####), jump 2 tiles before the first
       hole: so basically jump whenever C (3rd tile ahead) is a hole.
    '''
    # springscript: jump if (not C and D) or not A
    p = [
        'NOT C J',
        'AND D J',
        'NOT A T',
        'OR T J',
        'WALK'
    ]
    droid.run(p)
def two(d):
    '''
    Solution 2
    '''
    print('Solution 2 \n--------------------------------------------------')
    droid = SpringDroid(d)
    '''
    notes:
    -------------------------------
    1. droid stills jumps 4 steps at a time
    2. always check to see that tile D (4th tile) is solid (for landing)
    3. if you want to land on an island (###.##..####), jump 2 tiles before the first
       hole: so basically jump whenever C (3rd tile ahead) is a hole.
    4. watch where you landing next after leaving the island.
    '''
    # springscript: jump if (not C and D and H) or (not B and D) or not A
    p = [
        #| @ CD   H        |
        #|#####.##.##.#.###|
        'NOT C J',
        'AND D J',
        'AND H J',
        #| @ B D           |
        #|#####.##.##.#.###|
        'NOT B T',
        'AND D T',
        'OR T J',
        #| @A              |
        #|#####.##.##.#.###|
        'NOT A T',
        'OR T J',
        'RUN'
    ]
    droid.run(p)
'''
Selector
'''
# interactive entry point: pick solution 1 or 2, each gets a fresh
# copy of the loaded program
print('Select solution (enter 1 or 2 and press return):')
if 2 == int(sys.stdin.readline()):
    two(mreset[:])
else:
    one(mreset[:])
|
""""
Tarea # 1: Tarea1
##COMENTARIO
integrantes:
Almaraz Garcia Iori Alejandro(AGIA)
Carrillo Medina Alexis Adrian(CMAA)
Nombre del programa: Tarea1
"""
# ----- seccion de bibliotecas .
import numpy as np
import matplotlib.pylab as plt
# -----
#------------------------------------------------------------------------------------------------------------------------------------------------
#AGIA
##SUCESION DE FIBONACCI
def fibonacci(n):
    """Return the n-th Fibonacci number (1-indexed: 1, 1, 2, 3, 5, ...).

    Raises Exception when n is not a natural number (n < 1).
    """
    if n < 1:
        raise Exception("El indice es invalido")
    # iterate with a tuple swap instead of an explicit temporary
    prev, curr = 0, 1
    for _ in range(int(n - 1)):
        prev, curr = curr, prev + curr
    return curr
#------------------------------------------------------------------------------------------------------------------------------------------------
#CMAA Y AGIAA
##INVERSA DE UNA MATRIZ ( PUSIMOS DOS METODOS, UNO QUE RECIBE UNA MATRIZ Y OTRO DONDE LA CREA ADENTRO DEL METODO)
# metodo "CHAFA" : NO RECIBE UNA MATRIZ Y SOLO CALCULA CUANDO ES MATRIZ 2x2
def inversachafa():
    ## Interactive "quick" version: prompts for a 2x2 matrix and prints
    ## its inverse whenever det != 0.  (Prompts/messages are in Spanish.)
    A = np.zeros((2,2)) ## start from a zero-filled square matrix
    # fill the matrix entry by entry from user input, hence the two loops
    print ("Ingrese los elementos de la matriz")
    for i in range(2):
        for j in range(2):
            A[i][j] =float(input('Elemento (%2d,%2d): ' % (i, j)))
    print( "su matriz es:")
    print(A)
    determinante= float(A[0][0]*A[1][1] - A[1][0]*A[0][1]) ### 2x2 determinant: ad - bc
    if determinante !=0:
        # closed-form 2x2 inverse: swap diagonal, negate off-diagonal, scale by 1/det
        inversa = np.zeros((2,2))
        factor1=1/determinante
        inversa[0][0]=factor1*A[1][1]
        inversa[0][1]=factor1*-A[0][1]
        inversa[1][0]=factor1*-A[1][0]
        inversa[1][1]=factor1*A[0][0]
        print(f'la matriz inversa de una matriz cuadrada es: \n {inversa}')
        print('comprobando que es matriz inversa \n')
        print(np.dot(A,inversa))
    else:
        print('LO SIENTO SOLO SE CALCULAR LA MATRIZ INVERSA DE UNA MATRIZ 2X2 CUANDO EL DETERMINANTE ES DISTINTO DE CERO')
#---------------------------------------------------------------------------------------------------------------------------------------------------------#
## INVERSA DE UNA MATRIZ CUADRADA (SIN NUMPY(EXCEPTO PARA LA MATRIZ DE MENORES) Y CON METODOS AUXILIARES)
### Para todos los metodos estamos suponiendo que recibimos una matriz cuadrada
## --------------------------- METODO AUXILIARES --------------------------- ##
### Estos metodos solo se ejecutan si la matriz es de mas de 2x2
def matrizMenores(m, r, c):
    """Return a copy of matrix m with row r and column c removed (the minor)."""
    sin_fila = np.delete(np.copy(m), r, axis=0)
    return np.delete(sin_fila, c, axis=1)
def matrizTranspuesta(m):
    """Return the transpose of square matrix m as a new list of lists.

    NOTE: like the original, the direct m[j][i] indexing assumes a
    square matrix.
    """
    return [[m[j][i] for j in range(len(m[0]))] for i in range(len(m))]
def determinant(m):
    """Determinant of square matrix m by recursive Laplace expansion.

    Generalized to also accept 1x1 matrices (the original recursed
    incorrectly below 2x2); existing 2x2-and-up behaviour is unchanged.
    """
    if len(m) == 1:
        return m[0][0]
    if len(m) == 2:  # base case
        return m[0][0]*m[1][1]-(m[0][1]*m[1][0])
    determinante = 0
    for i in range(len(m)):
        # expand along the first row; sign alternates with column parity
        determinante += ((-1)**i)*(m[0][i])*determinant(matrizMenores(m,0,i))
    return determinante
def matrizCofactores(m):
    # Returns the (transposed) cofactor matrix, i.e. the adjugate of m
    cofactores = [] # auxiliary result matrix
    for r in range(len(m)):
        cofactoresRen = [] # auxiliary list holding one row of cofactors
        for c in range(len(m)): # cofactor definition, entry by entry
            cofactoresRen.append(((-1)**(r+c)) * determinant(matrizMenores(m,r,c))) # signed determinant of the (r, c) minor
        cofactores.append(cofactoresRen)
    cofactores = matrizTranspuesta(cofactores) # transpose -> adjugate
    return cofactores
## --------------------------- METODO PRINCIPAL --------------------------- ##
def matrizInversa(m):
    # Returns the inverse of square matrix m (raises if det == 0)
    det = determinant(m) # determinant of the matrix
    if(det==0): # zero determinant -> no inverse exists
        raise Exception("La matriz no tiene inversa")
    if(len(m)==2):
        # closed-form 2x2 inverse: swap diagonal, negate off-diagonal, divide by det
        inversa=[[0,0],[0,0]]
        inversa[0][0]=m[1][1]/det
        inversa[0][1]=-m[0][1]/det
        inversa[1][0]=-m[1][0]/det
        inversa[1][1]=m[0][0]/det
        return np.array(inversa)
    inversa = matrizCofactores(m) # adjugate (transposed cofactor matrix)
    for i in range(len(inversa)):
        for j in range(len(inversa[0])):
            inversa[i][j]=inversa[i][j]/det # inverse = adjugate / det
    return np.array(inversa)
#----------------------------------------------------------------------------------------------------------------------------------------------------
###DISTRIBUCION UNIFORME
def distribucion(N, M):
    """Average of M uniform(-1, 1) draws per slot, over N slots.

    Seeds numpy's RNG with 2 so results are reproducible.
    """
    np.random.seed(2)
    promedio = np.zeros(N)
    for _ in range(M):
        muestra = np.random.uniform(-1, 1, N)
        # vectorized accumulation; element-wise identical to the
        # original per-index loop
        promedio += muestra / M
    return promedio
|
import matplotlib.pyplot as plt
import numpy as np
from sklearn.datasets import fetch_mldata
from chainer import cuda, Variable, FunctionSet, optimizers
import chainer.functions as F
from xlwings import xrange
plt.style.use('ggplot')
# Training hyperparameters.
batchsize = 100
n_epoch = 20
n_units = 1000
# Download the MNIST handwritten-digit dataset.
# Cached at $HOME/scikit_learn_data/mldata/mnist-original.mat
print('fetch MNIST dataset')
mnist = fetch_mldata('MNIST original')
# mnist.data : 70,000 samples of 784-dimensional vectors
mnist.data = mnist.data.astype(np.float32)
mnist.data /= 255 # scale pixel values to the 0-1 range
# mnist.target : ground-truth labels (teacher data)
mnist.target = mnist.target.astype(np.int32)
# Use N samples for training and the remainder for validation.
N = 60000
x_train, x_test = np.split(mnist.data, [N])
y_train, y_test = np.split(mnist.target, [N])
N_test = y_test.size
# Prepare multi-layer perceptron model
# 784-dimensional input, 10-dimensional output, two hidden layers.
model = FunctionSet(l1=F.Linear(784, n_units),
                    l2=F.Linear(n_units, n_units),
                    l3=F.Linear(n_units, 10))
def forward(x_data, y_data, train=True):
    """Forward pass of the 2-hidden-layer MLP.

    Returns a (softmax cross-entropy loss, accuracy) pair; dropout is only
    active when ``train`` is True.
    """
    x, t = Variable(x_data), Variable(y_data)
    h1 = F.dropout(F.relu(model.l1(x)), train=train)
    h2 = F.dropout(F.relu(model.l2(h1)), train=train)
    y = model.l3(h2)
    # Multi-class classification: use softmax cross-entropy as the loss.
    return F.softmax_cross_entropy(y, t), F.accuracy(y, t)
# Setup optimizer
optimizer = optimizers.Adam()
optimizer.setup(model.collect_parameters())
train_loss = []
train_acc = []
test_loss = []
test_acc = []
l1_W = []
l2_W = []
l3_W = []
# Learning loop
for epoch in xrange(1, n_epoch+1):
print('epoch', epoch)
# training
# N個の順番をランダムに並び替える
perm = np.random.permutation(N)
sum_accuracy = 0
sum_loss = 0
# 0〜Nまでのデータをバッチサイズごとに使って学習
for i in xrange(0, N, batchsize):
x_batch = x_train[perm[i:i+batchsize]]
y_batch = y_train[perm[i:i+batchsize]]
# 勾配を初期化
optimizer.zero_grads()
# 順伝播させて誤差と精度を算出
loss, acc = forward(x_batch, y_batch)
# 誤差逆伝播で勾配を計算
loss.backward()
optimizer.update()
sum_loss += float(cuda.to_cpu(loss.data)) * batchsize
sum_accuracy += float(cuda.to_cpu(acc.data)) * batchsize
# 訓練データの誤差と正解精度を表示
print('train mean loss={}, accuracy={}'.format(sum_loss / N, sum_accuracy / N))
train_loss.append(sum_loss / N)
train_acc.append(sum_accuracy / N)
# evaluation
# テストデータで誤差と、正解精度を算出し汎化性能を確認
sum_accuracy = 0
sum_loss = 0
for i in xrange(0, N_test, batchsize):
x_batch = x_test[i:i+batchsize]
y_batch = y_test[i:i+batchsize]
# 順伝播させて誤差と精度を算出
loss, acc = forward(x_batch, y_batch, train=False)
sum_loss += float(cuda.to_cpu(loss.data)) * batchsize
sum_accuracy += float(cuda.to_cpu(acc.data)) * batchsize
# テストデータでの誤差と、正解精度を表示
print('test mean loss={}, accuracy={}'.format(sum_loss / N_test, sum_accuracy / N_test))
test_loss.append(sum_loss / N_test)
test_acc.append(sum_accuracy / N_test)
# 学習したパラメーターを保存
l1_W.append(model.l1.W)
l2_W.append(model.l2.W)
l3_W.append(model.l3.W)
# 精度と誤差をグラフ描画
plt.figure(figsize=(8,6))
plt.plot(range(len(train_acc)), train_acc)
plt.plot(range(len(test_acc)), test_acc)
plt.legend(["train_acc", "test_acc"], loc=4)
plt.title("Accuracy of digit recognition.")
plt.plot()
print('end')
|
from django.db import models
# Create your models here.
class Eleicao (models.Model):
    """An election: where it takes place and its voting window."""
    local = models.CharField(max_length=20)  # polling location name
    dataInicio = models.DateTimeField(blank=False, null=False)  # voting opens
    dataFim = models.DateTimeField(blank=False, null=False)  # voting closes
class Token (models.Model):
    """A unique 5-character access code used to identify a voter."""
    # unique=True: each code can be issued at most once.
    codigo = models.CharField(max_length=5,blank=False,null=False,unique=True)

    def __str__ (self):
        return self.codigo
class Candidato (models.Model):
    """A candidate with Brazilian identity documents (RG, CPF)."""
    nome = models.CharField(max_length=40)
    rg = models.CharField(max_length=9)
    cpf = models.CharField(max_length=14)
    # Age stored as free text (up to 3 characters).
    idade = models.CharField(max_length=3)

    def __str__ (self):
        return self.nome
class Vaga (models.Model):
    """An electoral position (cargo) and the candidates running for it."""
    cargo = models.CharField(max_length=40)
    # Many candidates can run for a position; a candidate can run for several.
    candidato = models.ManyToManyField (Candidato)

    def __str__(self):
        return self.cargo
class Eleitor (models.Model):
    """A registered voter, identified by CPF and tied to an access Token."""
    nome = models.CharField(max_length=40)
    cpf = models.CharField(max_length=14)
    # on_delete made explicit: CASCADE was the implicit default before Django
    # 2.0 made the argument mandatory, so behavior is unchanged.
    token = models.ForeignKey (Token, on_delete=models.CASCADE)

    def __str__ (self):
        return self.nome
class Votacao (models.Model):
    """A single cast vote: either for a candidate or blank (votoBranco)."""
    # on_delete made explicit: CASCADE was the implicit default before Django
    # 2.0 made the argument mandatory, so behavior is unchanged.
    eleitor = models.ForeignKey (Eleitor, on_delete=models.CASCADE)
    # Nullable: blank votes carry no candidate.
    candidato = models.ForeignKey (Candidato,blank=True,null=True, on_delete=models.CASCADE)
    votoBranco = models.BooleanField("Voto em branco",default=False)
    dataHora = models.DateTimeField(blank=False, null=False)  # when the vote was cast

    def __str__ (self):
        return "Eleitor: "+self.eleitor.nome
|
import tensorflow as tf
import tensorflow_compression as tfc
import functions
def one_step_rnn(tensor, state_c, state_h, Height, Width, num_filters, scale, kernal, act):
    """Run one ConvLSTM step over ``tensor`` and return (output, new_c, new_h).

    A length-1 time axis is added so tf.nn.dynamic_rnn can be used for a
    single step, then removed again.  ``scale`` is the spatial downsampling
    factor already applied to the feature map relative to Height/Width.
    """
    tensor = tf.expand_dims(tensor, axis=1)
    cell = functions.ConvLSTMCell(shape=[Height // scale, Width // scale], activation=act,
                                  filters=num_filters, kernel=kernal)
    state = tf.nn.rnn_cell.LSTMStateTuple(state_c, state_h)
    tensor, state = tf.nn.dynamic_rnn(cell, tensor, initial_state=state, dtype=tensor.dtype)
    state_c, state_h = state
    # Drop the artificial time axis.
    tensor = tf.squeeze(tensor, axis=1)
    return tensor, state_c, state_h
def MV_analysis(tensor, num_filters, out_filters, Height, Width, c_state, h_state, act):
    """Builds the analysis transform.

    Four stride-2 convolutions (16x total downsampling) with GDN activations
    and a ConvLSTM after the second layer.  Returns the transformed tensor
    plus the updated ConvLSTM (c, h) state.
    """
    with tf.variable_scope("MV_analysis", reuse=tf.AUTO_REUSE):
        with tf.variable_scope("layer_0"):
            layer = tfc.SignalConv2D(
                num_filters, (3, 3), corr=True, strides_down=2, padding="same_zeros",
                use_bias=True, activation=tfc.GDN())
            tensor = layer(tensor)
        with tf.variable_scope("layer_1"):
            layer = tfc.SignalConv2D(
                num_filters, (3, 3), corr=True, strides_down=2, padding="same_zeros",
                use_bias=True, activation=tfc.GDN())
            tensor = layer(tensor)
        with tf.variable_scope("recurrent"):
            # scale=4: two stride-2 layers have already shrunk H and W by 4.
            tensor, c_state_out, h_state_out = one_step_rnn(tensor, c_state, h_state,
                                                            Height, Width, num_filters,
                                                            scale=4, kernal=[3, 3], act=act)
        with tf.variable_scope("layer_2"):
            layer = tfc.SignalConv2D(
                num_filters, (3, 3), corr=True, strides_down=2, padding="same_zeros",
                use_bias=True, activation=tfc.GDN())
            tensor = layer(tensor)
        with tf.variable_scope("layer_3"):
            # Final layer: linear (no activation), out_filters channels.
            layer = tfc.SignalConv2D(
                out_filters, (3, 3), corr=True, strides_down=2, padding="same_zeros",
                use_bias=True, activation=None)
            tensor = layer(tensor)
    return tensor, c_state_out, h_state_out
def MV_synthesis(tensor, num_filters, Height, Width, c_state, h_state, act):
    """Builds the synthesis transform.

    Mirror of MV_analysis: four stride-2 transposed convolutions (16x total
    upsampling) with inverse-GDN activations and a ConvLSTM after the second
    layer.  The last layer emits 2 channels (a motion-vector field, per the
    MV naming -- TODO confirm against the caller).
    """
    with tf.variable_scope("MV_synthesis", reuse=tf.AUTO_REUSE):
        with tf.variable_scope("layer_0"):
            layer = tfc.SignalConv2D(
                num_filters, (3, 3), corr=False, strides_up=2, padding="same_zeros",
                use_bias=True, activation=tfc.GDN(inverse=True))
            tensor = layer(tensor)
        with tf.variable_scope("layer_1"):
            layer = tfc.SignalConv2D(
                num_filters, (3, 3), corr=False, strides_up=2, padding="same_zeros",
                use_bias=True, activation=tfc.GDN(inverse=True))
            tensor = layer(tensor)
        with tf.variable_scope("recurrent"):
            # scale=4: the map is still 4x smaller than full resolution here.
            tensor, c_state_out, h_state_out = one_step_rnn(tensor, c_state, h_state,
                                                            Height, Width, num_filters,
                                                            scale=4, kernal=[3, 3], act=act)
        with tf.variable_scope("layer_2"):
            layer = tfc.SignalConv2D(
                num_filters, (3, 3), corr=False, strides_up=2, padding="same_zeros",
                use_bias=True, activation=tfc.GDN(inverse=True))
            tensor = layer(tensor)
        with tf.variable_scope("layer_3"):
            layer = tfc.SignalConv2D(
                2, (3, 3), corr=False, strides_up=2, padding="same_zeros",
                use_bias=True, activation=None)
            tensor = layer(tensor)
    return tensor, c_state_out, h_state_out
def Res_analysis(tensor, num_filters, out_filters, Height, Width, c_state, h_state, act):
    """Builds the analysis transform.

    Same layout as MV_analysis but with 5x5 kernels and its own "analysis"
    variable scope; used for the residual signal (per the Res naming).
    """
    with tf.variable_scope("analysis", reuse=tf.AUTO_REUSE):
        with tf.variable_scope("layer_0"):
            layer = tfc.SignalConv2D(
                num_filters, (5, 5), corr=True, strides_down=2, padding="same_zeros",
                use_bias=True, activation=tfc.GDN())
            tensor = layer(tensor)
        with tf.variable_scope("layer_1"):
            layer = tfc.SignalConv2D(
                num_filters, (5, 5), corr=True, strides_down=2, padding="same_zeros",
                use_bias=True, activation=tfc.GDN())
            tensor = layer(tensor)
        with tf.variable_scope("recurrent"):
            # scale=4: two stride-2 layers have already shrunk H and W by 4.
            tensor, c_state_out, h_state_out = one_step_rnn(tensor, c_state, h_state,
                                                            Height, Width, num_filters,
                                                            scale=4, kernal=[5, 5], act=act)
        with tf.variable_scope("layer_2"):
            layer = tfc.SignalConv2D(
                num_filters, (5, 5), corr=True, strides_down=2, padding="same_zeros",
                use_bias=True, activation=tfc.GDN())
            tensor = layer(tensor)
        with tf.variable_scope("layer_3"):
            # Final layer: linear (no activation), out_filters channels.
            layer = tfc.SignalConv2D(
                out_filters, (5, 5), corr=True, strides_down=2, padding="same_zeros",
                use_bias=True, activation=None)
            tensor = layer(tensor)
    return tensor, c_state_out, h_state_out
def Res_synthesis(tensor, num_filters, Height, Width, c_state, h_state, act):
    """Builds the synthesis transform.

    Mirror of Res_analysis with 5x5 kernels; the last layer emits 3 channels
    (an RGB-sized residual image -- TODO confirm against the caller).
    """
    with tf.variable_scope("synthesis", reuse=tf.AUTO_REUSE):
        with tf.variable_scope("layer_0"):
            layer = tfc.SignalConv2D(
                num_filters, (5, 5), corr=False, strides_up=2, padding="same_zeros",
                use_bias=True, activation=tfc.GDN(inverse=True))
            tensor = layer(tensor)
        with tf.variable_scope("layer_1"):
            layer = tfc.SignalConv2D(
                num_filters, (5, 5), corr=False, strides_up=2, padding="same_zeros",
                use_bias=True, activation=tfc.GDN(inverse=True))
            tensor = layer(tensor)
        with tf.variable_scope("recurrent"):
            # scale=4: the map is still 4x smaller than full resolution here.
            tensor, c_state_out, h_state_out = one_step_rnn(tensor, c_state, h_state,
                                                            Height, Width, num_filters,
                                                            scale=4, kernal=[5, 5], act=act)
        with tf.variable_scope("layer_2"):
            layer = tfc.SignalConv2D(
                num_filters, (5, 5), corr=False, strides_up=2, padding="same_zeros",
                use_bias=True, activation=tfc.GDN(inverse=True))
            tensor = layer(tensor)
        with tf.variable_scope("layer_3"):
            layer = tfc.SignalConv2D(
                3, (5, 5), corr=False, strides_up=2, padding="same_zeros",
                use_bias=True, activation=None)
            tensor = layer(tensor)
    return tensor, c_state_out, h_state_out
def rec_prob(tensor, num_filters, Height, Width, c_state, h_state, k=3, act=tf.tanh):
    """Recurrent probability model: CNN -> ConvLSTM -> CNN.

    Emits 2 * num_filters channels (presumably the sigma/mu pairs that
    bpp_est splits -- verify against the caller) and the updated LSTM state.
    """
    with tf.variable_scope("CNN_input"):
        # recurrent_cnn expects a time axis; add a length-1 one.
        tensor = tf.expand_dims(tensor, axis=1)
        y1 = functions.recurrent_cnn(tensor, 1, layer=4, num_filters=num_filters, stride=1,
                                     out_filters=num_filters, kernel=[k, k], act=tf.nn.relu, act_last=None)
        y1 = tf.squeeze(y1, axis=1)
    with tf.variable_scope("RNN"):
        # scale=16 matches the 16x downsampling of the analysis transforms.
        y2, c_state_out, h_state_out = one_step_rnn(y1, c_state, h_state,
                                                    Height, Width, num_filters,
                                                    scale=16, kernal=[k, k], act=act)
    with tf.variable_scope("CNN_output"):
        y2 = tf.expand_dims(y2, axis=1)
        y3 = functions.recurrent_cnn(y2, 1, layer=4, num_filters=num_filters, stride=1,
                                     out_filters=2 * num_filters, kernel=[k, k], act=tf.nn.relu, act_last=None)
        y3 = tf.squeeze(y3, axis=1)
    return y3, c_state_out, h_state_out
def bpp_est(x_target, sigma_mu, num_filters, tiny=1e-10):
    """Estimate the total bits needed to code ``x_target`` under a logistic model.

    ``sigma_mu`` packs log-scale and mean along the last axis; the probability
    of each quantization bin [x-0.5, x+0.5] is the difference of two logistic
    CDFs (sigmoids), and bits are summed as -log2(p).
    Returns (bits, sigma, mu).
    """
    sigma, mu = tf.split(sigma_mu, [num_filters, num_filters], axis=-1)
    half = tf.constant(.5, dtype=tf.float32)
    # Bin edges around each target value.
    upper = tf.math.add(x_target, half)
    lower = tf.math.add(x_target, -half)
    # Clamp the log-scale from below to avoid exp(-sig) blowing up.
    sig = tf.maximum(sigma, -7.0)
    upper_l = tf.sigmoid(tf.multiply((upper - mu), (tf.exp(-sig) + tiny)))
    lower_l = tf.sigmoid(tf.multiply((lower - mu), (tf.exp(-sig) + tiny)))
    p_element = upper_l - lower_l
    # Keep probabilities strictly inside (0, 1) for a finite log.
    p_element = tf.clip_by_value(p_element, tiny, 1 - tiny)
    # Convert natural log to log base 2.
    ent = -tf.log(p_element) / tf.log(2.0)
    bits = tf.math.reduce_sum(ent)
    return bits, sigma, mu
from office365.sharepoint.base_entity import BaseEntity
class EventReceiverDefinition(BaseEntity):
    """SharePoint event receiver definition entity.

    Marker class: all behavior is inherited from BaseEntity; no extra
    properties are defined here.
    """
    pass
|
import socket
def recv_msg(udp_socket_recv):
    """Receive one UDP datagram from ``udp_socket_recv`` and print it.

    Output format is "('addr', port):text" with the payload decoded as UTF-8.

    Bug fix: the original referenced an undefined global ``udp_socket``
    instead of the parameter, so every call raised NameError.
    """
    recv_data = udp_socket_recv.recvfrom(1024)
    print("%s:%s" % (str(recv_data[1]), recv_data[0].decode("utf-8")))
def main():
    """client_recv: keep listening on UDP port 7788 and print each message."""
    udp_socket_recv = socket.socket(socket.AF_INET,socket.SOCK_DGRAM)
    # Bind to all interfaces on port 7788.
    udp_socket_recv.bind(("",7788))
    # Loop forever so the client keeps receiving (blocks in recvfrom).
    while True:
        recv_msg(udp_socket_recv)

if __name__ == "__main__" :
    main()
# Scratch script: prints a greeting and binds one value.
print('hello world')
#import pandas as pd
a=1223567  # NOTE(review): unused scratch value; purpose unclear from this file
# Questão 2 - Lista Telefônica Econômica
# Reads n phone numbers (one per line, stored as character lists) and counts
# characters shared between consecutive entries ("savings").
n = int(input())
lista_tel = []
# Renamed from ``max`` to avoid shadowing the builtin of the same name.
economia = 0
# Read the list: each number becomes a list of its characters.
for i in range(n):
    num = list(input())
    lista_tel.append(num)
# Scan each row against the next one, column by column.
for i in range(n):
    for j in range(len(lista_tel[0])):
        # Last row has no successor to compare against.
        if (n - 1) <= i:
            break
        if lista_tel[i][j] == lista_tel[i + 1][j]:
            economia = economia + 1
        # NOTE(review): the intent of this correction step is unclear from
        # this file -- verify against the problem statement before changing.
        if j + 1 < economia:
            economia = economia - 1
print(economia)
from tkinter import *
from PIL import Image,ImageTk
class Window(Frame):
    """Main application frame with a File/Edit menu bar.

    File > Exit quits; Edit can show an image ('pp.png') or a text label.
    """
    def __init__(self,master=None):
        Frame.__init__(self,master)
        self.master = master
        self.init_window()

    def init_window(self):
        """Build the window title, layout and menu bar."""
        self.master.title("Hello Tkinter")
        self.pack(fill=BOTH,expand=1)
        #quitButton = Button(self,text="Quit",command=self.client_exit)
        #quitButton.place(x=0,y=0)
        menu = Menu(self.master)
        self.master.config(menu=menu)
        file = Menu(menu)
        file.add_command(label='Exit',command=self.client_exit)
        menu.add_cascade(label='File',menu=file)
        edit = Menu(menu)
        edit.add_command(label='Show Image',command=self.showImg)
        edit.add_command(label='Show Text',command=self.showTxt)
        menu.add_cascade(label='Edit',menu=edit)

    def showImg(self):
        """Load 'pp.png' from the working directory and display it at (0, 0)."""
        load = Image.open('pp.png')
        render = ImageTk.PhotoImage(load)
        img = Label(self,image=render)
        # Keep a reference so the image is not garbage-collected.
        img.image = render
        img.place(x=0,y=0)

    def showTxt(self):
        """Display a greeting label."""
        text = Label(self,text='Hey there!!')
        text.pack()

    def client_exit(self):
        # NOTE(review): bare exit() ends the interpreter; master.destroy()
        # would be the conventional Tk shutdown -- confirm before changing.
        exit()
# Create the root window, size it, attach the app frame and start the loop.
root = Tk()
root.geometry("400x300")
app = Window(root)
root.mainloop()
#app.mainloop()
|
#insertion, deletion or substitution
# NOTE(review): a ``global`` statement at module level is a no-op -- names
# here are already global; kept only for byte-compatibility.
global words
def edit_distance(s1,s2):
    """Return the Levenshtein distance between strings ``s1`` and ``s2``.

    Classic dynamic-programming table: tbl[i, j] is the distance between the
    first i characters of s1 and the first j characters of s2, built from
    insertion, deletion and substitution costs of 1.

    Bug fix: the original returned ``tbl[i, j]`` relying on loop variables
    leaking out of the for-loops; the final cell is now indexed explicitly,
    which is robust for empty strings and clearer to read.
    """
    m = len(s1) + 1
    n = len(s2) + 1
    tbl = {}
    # Base rows: transforming to/from the empty prefix costs its length.
    for i in range(m):
        tbl[i, 0] = i
    for j in range(n):
        tbl[0, j] = j
    for i in range(1, m):
        for j in range(1, n):
            cost = 0 if s1[i-1] == s2[j-1] else 1
            tbl[i, j] = min(tbl[i, j-1] + 1, tbl[i-1, j] + 1, tbl[i-1, j-1] + cost)
    return tbl[m-1, n-1]
def spelling_checker():
    """Print dictionary words close to the global ``mispelled_word``.

    Candidates are dictionary words whose length differs by at most 3 and
    whose edit distance is at most 3; they are printed ordered by
    (distance, word).  Reads module-level ``words`` and ``mispelled_word``.

    Fix: replaced Python-2-only constructs (``dict.iteritems()``,
    tuple-unpacking lambda parameters, print statement) with forms valid on
    both Python 2 and 3.
    """
    length = len(mispelled_word)
    potential_words = {}
    for word in words:
        # Cheap filter: edit distance is at least the length difference,
        # so words differing by more than 3 characters cannot qualify.
        if length - 3 <= len(word) <= length + 3:
            ED = edit_distance(mispelled_word, word)
            if ED <= 3:
                potential_words[word] = ED
    # Sort by distance first, then alphabetically.
    for key, value in sorted(potential_words.items(), key=lambda kv: (kv[1], kv[0])):
        print("%s: %s" % (key, value))
# Load the dictionary, one word per line (Python 2 script: uses raw_input).
words = open("dictionary.txt").readlines()
words = [word.strip() for word in words]
mispelled_word = raw_input("type a word: ")
# Keep checking words until the sentinel phrase is typed.
while(mispelled_word!="end spelling checker"):
    print("check for: " + mispelled_word)
    if(mispelled_word in words):
        print("Word is properly spelled.")
    else:
        spelling_checker()
    mispelled_word = raw_input("type a word: ")
|
#!/usr/bin/env python
from __future__ import print_function
import fastjet as fj
import fjcontrib
import fjext
import tqdm
import argparse
import os
import numpy as np
import array
import copy
import random
import uproot
import pandas as pd
from pyjetty.mputils import logbins
from pyjetty.mputils import MPBase
from pyjetty.mputils import BoltzmannEvent
from pyjetty.mputils import CEventSubtractor, CSubtractorJetByJet
from pyjetty.mputils import RTreeWriter
from pyjetty.mputils import fill_tree_matched, fill_tree_data, JetAnalysis, JetAnalysisWithRho, JetAnalysisPerJet
from pyjetty.mputils import DataBackgroundIO
from alice_efficiency import AliceChargedParticleEfficiency
from heppy.pythiautils import configuration as pyconf
import pythia8
import pythiafjext
import pythiaext
import ROOT
ROOT.gROOT.SetBatch(True)
def main():
    """Run PYTHIA8 + fastjet on the fly: embed signal jets into a background,
    optionally subtract it, and write matched/inclusive jet trees to ROOT.

    Command-line flags control embedding source, SoftDrop zcut, constituent
    subtraction (event-wise via --dRmax, jet-by-jet via --csjet), charged
    particle efficiency and output naming.
    """
    parser = argparse.ArgumentParser(description='pythia8 fastjet on the fly', prog=os.path.basename(__file__))
    pyconf.add_standard_pythia_args(parser)
    parser.add_argument('--ignore-mycfg', help="ignore some settings hardcoded here", default=False, action='store_true')
    parser.add_argument('--output', default="output.root", type=str)
    parser.add_argument('--alpha', default=0, type=float)
    parser.add_argument('--dRmax', default=0.0, type=float)
    parser.add_argument('--zcut', default=0.1, type=float)
    parser.add_argument('--overwrite', help="overwrite output", default=False, action='store_true')
    parser.add_argument('--embed', help='run embedding from a file list', default='', type=str)
    parser.add_argument('--SDsignal', help='embed only SD signal prongs', default=False, action='store_true')
    parser.add_argument('--SDsignal-single', help='embed only SD signal - only leading prong!', default=False, action='store_true')
    parser.add_argument('--efficiency', help='apply charged particle efficiency', default=False, action='store_true')
    parser.add_argument('--benchmark', help='benchmark pthat setting - 80 GeV', default=False, action='store_true')
    parser.add_argument('--csjet', help='constituent subtration jet-by-jet', default=False, action='store_true')
    args = parser.parse_args()

    # Encode the configuration into the output file name unless overridden.
    if args.output == 'output.root':
        args.output = 'output_alpha_{}_dRmax_{}_SDzcut_{}.root'.format(args.alpha, args.dRmax, args.zcut)
        if args.py_seed >= 0:
            args.output = 'output_alpha_{}_dRmax_{}_SDzcut_{}_seed_{}.root'.format(args.alpha, args.dRmax, args.zcut, args.py_seed)
        if args.embed:
            args.output = args.output.replace('.root', '_emb.root')
        if args.efficiency:
            args.output = args.output.replace('.root', '_effi.root')
        if args.SDsignal:
            args.output = args.output.replace('.root', '_SDsignal.root')
        if args.SDsignal_single:
            args.output = args.output.replace('.root', '_SDsignal_single.root')
        if args.csjet:
            args.output = args.output.replace('.root', '_csjet.root')

    # Refuse to clobber an existing file unless --overwrite is given.
    if os.path.isfile(args.output):
        if not args.overwrite:
            print('[i] output', args.output, 'exists - use --overwrite to do just that...')
            return
    print(args)

    # alice specific
    max_eta = 0.9

    # print the banner first
    fj.ClusterSequence.print_banner()
    print()
    # set up our jet definition and a jet selector
    jet_R0 = 0.4
    jet_def = fj.JetDefinition(fj.antikt_algorithm, jet_R0)
    print(jet_def)

    # Benchmark mode pins pthat at 80 GeV; otherwise use a biased selection.
    mycfg = []
    if args.benchmark:
        mycfg = ['PhaseSpace:pThatMin = 80', 'PhaseSpace:pThatMax = -1']
        jet_selector = fj.SelectorPtMin(80.0) & fj.SelectorPtMax(100.0) & fj.SelectorAbsEtaMax(max_eta - 1.05 * jet_R0)
        # jet_selector_cs = fj.SelectorPtMin(50.0) & fj.SelectorAbsEtaMax(max_eta - 1.05 * jet_R0)
    else:
        args.py_biaspow = 4
        args.py_biasref = 10
        jet_selector = fj.SelectorPtMin(20) & fj.SelectorAbsEtaMax(max_eta - 1.05 * jet_R0)
        # jet_selector_cs = fj.SelectorPtMin(50.0) & fj.SelectorAbsEtaMax(max_eta - 1.05 * jet_R0)
    if args.ignore_mycfg:
        mycfg = []
    pythia = pyconf.create_and_init_pythia_from_args(args, mycfg)
    if not pythia:
        print("[e] pythia initialization failed.")
        return

    # SoftDrop groomer and jet analyzers (with and without rho estimation).
    sd_zcut = args.zcut
    sd = fjcontrib.SoftDrop(0, sd_zcut, jet_R0)
    jarho = JetAnalysisWithRho(jet_R=jet_R0, jet_algorithm=fj.antikt_algorithm, particle_eta_max=max_eta)
    # NOTE(review): ``ja`` appears unused below -- confirm before removing.
    ja = JetAnalysis(jet_R=jet_R0, jet_algorithm=fj.antikt_algorithm, particle_eta_max=max_eta)

    # Background source: real data events when --embed is given, otherwise a
    # thermal Boltzmann event generator.
    be = None
    embd = None
    if len(args.embed) > 0:
        embd = DataBackgroundIO(file_list=args.embed)
        print(embd)
    else:
        be = BoltzmannEvent(mean_pt=0.6, multiplicity=2000 * max_eta * 2, max_eta=max_eta, max_pt=100)
        print(be)

    # Optional event-wise constituent subtraction.
    cs = None
    if args.dRmax > 0:
        cs = CEventSubtractor(alpha=args.alpha, max_distance=args.dRmax, max_eta=max_eta, bge_rho_grid_size=0.25, max_pt_correct=100)
        print(cs)
    # Optional jet-by-jet constituent subtraction.
    csjet = None
    if args.csjet:
        csjet = CSubtractorJetByJet(max_eta=max_eta, bge_rho_grid_size=0.25)
        print(csjet)

    parts_selector = fj.SelectorAbsEtaMax(max_eta)

    if args.nev < 1:
        args.nev = 1

    # Output trees: 't' for matched signal jets, 'te' for inclusive data jets.
    outf = ROOT.TFile(args.output, 'recreate')
    outf.cd()
    t = ROOT.TTree('t', 't')
    tw = RTreeWriter(tree=t)
    te = ROOT.TTree('te', 'te')
    twe = RTreeWriter(tree=te)

    # effi_pp = AliceChargedParticleEfficiency(csystem='pp')
    effi_PbPb = None
    if args.efficiency:
        effi_PbPb = AliceChargedParticleEfficiency(csystem='PbPb')
        print(effi_PbPb)

    ### EVENT LOOP STARTS HERE
    for iev in tqdm.tqdm(range(args.nev)):
        if not pythia.next():
            continue
        # Final-state charged particles within |eta| < max_eta.
        parts_pythia = pythiafjext.vectorize_select(pythia, [pythiafjext.kFinal, pythiafjext.kCharged])
        parts_gen = parts_selector(parts_pythia)
        if effi_PbPb:
            parts = effi_PbPb.apply_efficiency(parts_gen)
        else:
            parts = parts_gen
        signal_jets = fj.sorted_by_pt(jet_selector(jet_def(parts)))
        if len(signal_jets) < 1:
            continue

        for sjet in signal_jets:
            # Optionally replace the signal jet by its SoftDrop prong(s).
            if args.SDsignal or args.SDsignal_single:
                sd_sjet = sd.result(sjet)
                pe1 = fj.PseudoJet()
                pe2 = fj.PseudoJet()
                has_parents = sd_sjet.has_parents(pe1, pe2)
                if has_parents:
                    jparts = fj.vectorPJ()
                    pe1.set_user_index(0)
                    pe2.set_user_index(1)
                    if args.SDsignal_single:
                        # Keep only the harder prong.
                        if pe1.pt() > pe2.pt():
                            jparts.push_back(pe1)
                        else:
                            jparts.push_back(pe2)
                    else:
                        jparts.push_back(pe1)
                        jparts.push_back(pe2)
                    sjets = fj.sorted_by_pt(jet_selector(jet_def(jparts)))
                    if len(sjets) == 1:
                        sjet = sjets[0]
                    else:
                        continue
                else:
                    continue

            # Build the full event: background plus the signal constituents
            # (signal is tagged by the offset applied to user indices).
            if embd:
                bg_parts = embd.load_event(offset=10000)
                # for p in bg_parts:
                #     print(p.user_index())
            else:
                bg_parts = be.generate(offset=10000)
                # for p in bg_parts:
                #     print(p.user_index())
            full_event = bg_parts
            tmp = [full_event.push_back(psj) for psj in sjet.constituents()]

            if cs:
                # Event-wise constituent subtraction, then fill both trees.
                cs_parts = cs.process_event(full_event)
                rho = cs.bge_rho.rho()
                jarho.analyze_event(cs_parts)
                tmp = [fill_tree_data(ej, twe, sd, rho, iev, pythia.info.weight(), pythia.info.sigmaGen()) for ej in jarho.jets]
                tmp = [fill_tree_matched(sjet, ej, tw, sd, rho, iev, pythia.info.weight(), pythia.info.sigmaGen()) for ej in jarho.jets]
            else:
                jarho.analyze_event(full_event)
                rho = jarho.rho
                if csjet:
                    # Jet-by-jet constituent subtraction, then re-analyze the
                    # subtracted jets before filling the trees.
                    #_csjet = fjcontrib.ConstituentSubtractor(jarho.bg_estimator)
                    # subtr_jets = [_csjet.result(ej) for ej in jarho.jets]
                    csjet.set_event_particles(full_event)
                    #subtr_jets = [csjet.process_jet(ej) for ej in jarho.jets]
                    #print ('jbyj cs', len(subtr_jets), 'from', len(jarho.jets))
                    #subtr_jets_wconstits = [_j for _j in subtr_jets if _j.has_constituents()]
                    #for _j in subtr_jets_wconstits:
                    #     print(len(_j.constituents()))
                    subtr_jets_wconstits = csjet.process_jets(jarho.jets)
                    japerjet = JetAnalysisPerJet(jet_R=jet_R0, jet_algorithm=fj.antikt_algorithm, particle_eta_max=max_eta, input_jets=subtr_jets_wconstits)
                    # for _j in japerjet.jets:
                    #     for _c in _j.constituents():
                    #         if _c.user_index() >= 0:
                    #             print('user index kept?', _c.user_index())
                    #         # else:
                    #         #     print('user index kept?', _c.user_index(), _c.pt())
                    #     _sd_j = sd.result(_j)
                    # https://phab.hepforge.org/source/fastjetsvn/browse/contrib/contribs/RecursiveTools/trunk/Recluster.cc L 270
                    # tmp = [fill_tree_matched(sjet, ej, tw, sd, rho, iev, pythia.info.sigmaGen()) for ej in subtr_jets_wcs]
                    tmp = [fill_tree_data(ej, twe, sd, rho, iev, pythia.info.weight(), pythia.info.sigmaGen()) for ej in japerjet.jets]
                    tmp = [fill_tree_matched(sjet, ej, tw, sd, rho, iev, pythia.info.weight(), pythia.info.sigmaGen()) for ej in japerjet.jets]
                else:
                    tmp = [fill_tree_data(ej, twe, sd, rho, iev, pythia.info.weight(), pythia.info.sigmaGen()) for ej in jarho.jets]
                    tmp = [fill_tree_matched(sjet, ej, tw, sd, rho, iev, pythia.info.weight(), pythia.info.sigmaGen()) for ej in jarho.jets]

    # Print PYTHIA statistics and flush the output file.
    pythia.stat()
    outf.Write()
    outf.Close()
    print('[i] written', outf.GetName())
# Script entry point.
if __name__ == '__main__':
    main()
|
#coding=utf-8
__author__ = 'shifx'
from django.views.decorators.csrf import csrf_exempt #用于处理post请求出现的错误
from research_report.models import ReportUser
from django.shortcuts import render_to_response
from research_report.thread import ThreadControl
# Main page view.
def report_main(request):
    """Render the report page, refreshing each user's thread-alive status.

    For every ReportUser, probe its worker thread: mark report_status 1 when
    alive, 0 when not (or when probing raises).
    """
    # ProbTotals.objects.all().delete()
    # thread_list = ProbUser.objects.get(thread_name=th_name)
    report_user_list = ReportUser.objects.all()
    for prob_user in report_user_list:
        c = ThreadControl()
        try:
            # Check whether this user's thread is currently running.
            status = c.is_alive(prob_user.report_name)
            if status:
                # Running: mark status as 1.
                prob_user.report_status = 1
                prob_user.save()
            else:
                # Not running: mark status as 0.
                prob_user.report_status = 0
                prob_user.save()
        except:
            # Probe failed: assume the thread was never started.
            print prob_user.report_name, " not start"
            prob_user.report_status = 0
            prob_user.save()
    return render_to_response('report_main.html',{"report_user_list":report_user_list})
@csrf_exempt  # handle POST requests without a CSRF token
def control_report_thread(request):
    """Start or stop a report user's worker thread from POSTed form data.

    Expects POST fields 'user_name' and 'control' ('start' or 'stop');
    updates the user's report_status accordingly and re-renders the page.
    """
    report_name = request.POST['user_name']
    control = request.POST['control']
    info_dict = {}
    # Look up the targeted report user.
    report_user = ReportUser.objects.get(report_name=report_name)
    if control == 'start':
        driver = 3
        info_dict["driver"] = driver
        # Status check: an exception means the thread does not exist yet,
        # in which case it is started.
        c = ThreadControl()
        try:
            status = c.is_alive(report_name)
            print "thread is alive? ",status
            if status:
                print "thread is alive,caonot start twice!"
            else:
                print "start ..........thread1"
                c.start(report_name, info_dict)
        except:
            print "thread is not alive start!!!"
            c.start(report_name, info_dict)
        report_user.report_status = 1
        report_user.save()
    if control == 'stop':
        c = ThreadControl()
        try :
            c.stop(report_name)
            report_user.report_status = 0
            report_user.save()
        except:
            print "not thread alive"
    report_user_list = ReportUser.objects.all()
    return render_to_response('report_main.html',{"report_user_list":report_user_list})
def set_user(request):
    """Add (or delete) a hard-coded ReportUser record and re-render the page.

    NOTE(review): the credentials/control POST fields are commented out and
    the values are hard-coded -- this looks like scaffolding for testing.
    """
    try:
        # user_name = request.POST['in_user']
        # user_password = request.POST['in_pwd']
        # control = request.POST['control']
        report_name = 'research_report_1'
        report_desc = '研究报告'
        control = 'add'
        # Next id is simply count + 1.
        report_id = len(ReportUser.objects.all()) + 1
        report_status = False
        if(control == 'add'):
            obj_pro = ReportUser(report_id=report_id, report_name=report_name, report_desc=report_desc, report_status=report_status)
            obj_pro.save()
        if(control == 'delete'):
            ReportUser.objects.filter(report_name=report_name).delete()
        obj_pro = ReportUser.objects.all()
    except:
        # On any failure, fall back to listing whatever exists.
        obj_pro = ReportUser.objects.all()
        return render_to_response('report_main.html', {"report_user_list":obj_pro})
    return render_to_response('report_main.html', {"report_user_list":obj_pro})
class Solution(object):
    def cloneGraph(self, node):
        """Return a deep copy of the undirected graph reachable from ``node``.

        Depth-first traversal; ``clones`` maps each original node to its copy
        so cycles and shared neighbors are handled.

        Fixes: ``node == None`` -> ``is None``; local name ``map`` shadowed
        the builtin; ``defaultdict(Node)`` was misleading (a missing-key read
        would silently fabricate a node) -- a plain dict is correct since
        membership is always checked first.
        """
        if node is None:
            return None
        clones = {}  # original node -> its clone

        def dfs(orig):
            if orig in clones:
                return clones[orig]
            copy = Node(orig.val, [])
            # Register before recursing so cycles terminate.
            clones[orig] = copy
            for nei in orig.neighbors:
                copy.neighbors.append(dfs(nei))
            return copy

        return dfs(node)
#
# (C) Copyright 2012 Enthought, Inc., Austin, TX
# All right reserved.
#
# This file is open source software distributed according to the terms in
# LICENSE.txt
#
import threading
from traits.api import HasTraits, Bool, Int, Str, Enum, Tuple, Set, Instance, Property
from .animated_context import AbstractAnimatedContext
# Closed sets of recognized mouse buttons and keyboard modifier keys.
MouseButton = Enum('left', 'right', 'middle')
ModifierKeys = Enum('shift', 'alt', 'control')
class AbstractMouseState(HasTraits):
    """ Class which tracks the mouse state

    Where state is not dynamically queryable, these variables should store the
    values from the most recent mouse event.

    Bug fix: the right/middle button properties were copy-paste bugs that all
    tested for the 'left' button; they now test their own button.
    """

    #: the position of the mouse in context-local coordinates
    position = Tuple(Int, Int)

    #: the x-coordinate of the mouse in context-local coordinates
    x = Property(Int, depends_on='position')

    #: the y-coordinate of the mouse in context-local coordinates
    y = Property(Int, depends_on='position')

    #: the mouse buttons which are down
    buttons = Set(MouseButton)

    #: whether the left mouse button is down
    left_button = Property(Bool, depends_on='buttons')

    #: whether the right mouse button is down
    right_button = Property(Bool, depends_on='buttons')

    #: whether the middle mouse button is down
    middle_button = Property(Bool, depends_on='buttons')

    def _get_x(self):
        return self.position[0]

    def _get_y(self):
        return self.position[1]

    def _get_left_button(self):
        return 'left' in self.buttons

    def _get_right_button(self):
        # Was: 'left' in self.buttons (copy-paste bug).
        return 'right' in self.buttons

    def _get_middle_button(self):
        # Was: 'left' in self.buttons (copy-paste bug).
        return 'middle' in self.buttons
class AbstractKeyboardState(HasTraits):
    """ Class which tracks the keyboard state.

    Bug fix: the alt/control modifier properties were copy-paste bugs that
    all tested for 'shift'; they now test their own modifier.
    """

    #: the most recently emitted unicode character
    character = Str

    #: a buffer holding unicode characters, which is cleared when read
    buffer = Property(Str)

    #: the most recently pressed keycode
    key_code = Str

    #: the modifier keys pressed, if any
    modifiers = Set(ModifierKeys)

    #: whether the shift key is down
    shift_down = Property(Bool)

    #: whether the alt key is down
    alt_down = Property(Bool)

    #: whether the control key is down
    control_down = Property(Bool)

    #: a buffer holding unicode characters pressed
    _buffer = Str

    #: a lock to prevent trying to modify the buffer while we are reading it
    _buffer_lock = Instance(threading.RLock, ())

    def _get_buffer(self):
        # Read-and-clear under the lock so writers cannot interleave.
        with self._buffer_lock:
            buffer = self._buffer
            self._buffer = ''
        return buffer

    def _set_buffer(self, value):
        with self._buffer_lock:
            self._buffer += value

    def _get_shift_down(self):
        return 'shift' in self.modifiers

    def _get_alt_down(self):
        # Was: 'shift' in self.modifiers (copy-paste bug).
        return 'alt' in self.modifiers

    def _get_control_down(self):
        # Was: 'shift' in self.modifiers (copy-paste bug).
        return 'control' in self.modifiers
class InteractiveContext(AbstractAnimatedContext):
    """An animated context that also tracks mouse and keyboard state."""
    #: current mouse state (position, buttons)
    mouse = Instance(AbstractMouseState)
    #: current keyboard state (characters, modifiers)
    keyboard = Instance(AbstractKeyboardState)
|
from PyQt5 import QtCore, QtGui, QtWidgets#, QtWidgets.QFileDialog
#from PyQt5 import *#.QtWidgets import QFileDialog
#from PyQt5.QtWidgets import QMainWindow, QApplication, QWidget, QInputDialog, QLineEdit, QFileDialog
#from PyQt5 import QtCore, QtGui, QtWidgets
import pyodbc
import sys, os
import copy
import datetime
import time
import numpy as np
#import see as s
from Threader import StoppableThread
from dateutil import tz
from Settings import Settings
from aboutWindow import Ui_aboutDialog
from colorSettingsWindow import Ui_colorDialog
from connectionSettingsWindow import Ui_connectDialog
from fitSettingsWindow import Ui_fitSettingsDialog
from FitTab import FitTab
from fitTabFunctions import restoreFit
from FunctionsDrude import (
eq_eU, eq_eD, e_eq,
mag, phase, leastsq_function,
lorentzMag_function, lorentzPhase_function,
leastsq_functionNK, N_function, K_function)
from FunctionsFit import fit, optimizeDrudeParameters
from FunctionsTRA import (getWaveList, calculateTRA,
addMaterialInfoToStack, calculateRMS, calculateColorValues)
from generalSettingsWindow import Ui_GeneralSettingsDialog
from GUIObjects import GraphFrame, DragDropTableView, TableModel
from helperFunctions import (is_number, getThicknessAndUnit,getThicknessFromString)
from material_input import convert_drude_units, convert_to_inputunits,get_drude_param_range,get_test_param
##d = s.timeTool() #checking
#print('here we go again=====>setupUi')#checking
##d.datetimeConverter()#checking
class Ui_MainWindow(object):#big ass qt object class holding all the objects which will be randered to ui
    def setupUi(self, MainWindow, stack, settings):
        """Build every widget, layout, menu action and signal connection
        of the main window (Qt-Designer-generated code with hand edits).

        Args:
            MainWindow: the QMainWindow being populated.
            stack: layer-stack model object, stored on ``self`` for the
                rest of the UI code to use.
            settings: application settings object, stored on ``self``.
        """
        self.stack = stack
        self.settings = settings
        self.MainWindow = MainWindow
        MainWindow.setObjectName("MainWindow")
        MainWindow.resize(1230, 716)
        # --- central widget and top-level tab container ------------------
        self.centralwidget = QtWidgets.QWidget(MainWindow)
        self.centralwidget.setObjectName("centralwidget")
        self.verticalLayout = QtWidgets.QVBoxLayout(self.centralwidget)
        self.verticalLayout.setObjectName("verticalLayout")
        self.tabWidget = QtWidgets.QTabWidget(self.centralwidget)
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Expanding)
        sizePolicy.setHorizontalStretch(10)
        sizePolicy.setVerticalStretch(10)
        sizePolicy.setHeightForWidth(self.tabWidget.sizePolicy().hasHeightForWidth())
        self.tabWidget.setSizePolicy(sizePolicy)
        self.tabWidget.setSizeIncrement(QtCore.QSize(1, 1))
        self.tabWidget.setObjectName("tabWidget")
        # --- Build tab: stack/material action buttons --------------------
        self.tabBuild = QtWidgets.QWidget()
        self.tabBuild.setObjectName("tabBuild")
        self.horizontalLayout_4 = QtWidgets.QHBoxLayout(self.tabBuild)
        self.horizontalLayout_4.setContentsMargins(0, 0, 0, 0)
        self.horizontalLayout_4.setObjectName("horizontalLayout_4")
        self.verticalLayout_4 = QtWidgets.QVBoxLayout()
        self.verticalLayout_4.setContentsMargins(-1, -1, 0, -1)
        self.verticalLayout_4.setObjectName("verticalLayout_4")
        self.gridLayout_3 = QtWidgets.QGridLayout()
        self.gridLayout_3.setObjectName("gridLayout_3")
        self.saveStackPB = QtWidgets.QPushButton(self.tabBuild)
        self.saveStackPB.setObjectName("saveStackPB")
        self.gridLayout_3.addWidget(self.saveStackPB, 1, 0, 1, 1)
        self.deleteLayerPB = QtWidgets.QPushButton(self.tabBuild)
        self.deleteLayerPB.setObjectName("deleteLayerPB")
        self.gridLayout_3.addWidget(self.deleteLayerPB, 1, 1, 1, 1)
        self.loadStackPB = QtWidgets.QPushButton(self.tabBuild)
        self.loadStackPB.setObjectName("loadStackPB")
        self.gridLayout_3.addWidget(self.loadStackPB, 0, 0, 1, 1)
        self.reverseStackPB = QtWidgets.QPushButton(self.tabBuild)
        self.reverseStackPB.setObjectName("reverseStackPB")
        self.gridLayout_3.addWidget(self.reverseStackPB, 0, 2, 1, 1)
        self.addLayerPB = QtWidgets.QPushButton(self.tabBuild)
        self.addLayerPB.setObjectName("addLayerPB")
        self.gridLayout_3.addWidget(self.addLayerPB, 0, 1, 1, 1)
        self.verticalLayout_4.addLayout(self.gridLayout_3)
        # --- Stacks / Materials list tabs --------------------------------
        self.materialTabWidget = QtWidgets.QTabWidget(self.tabBuild)
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.materialTabWidget.sizePolicy().hasHeightForWidth())
        self.materialTabWidget.setSizePolicy(sizePolicy)
        self.materialTabWidget.setObjectName("materialTabWidget")
        self.tabStack = QtWidgets.QWidget()
        self.tabStack.setObjectName("tabStack")
        self.gridLayout_4 = QtWidgets.QGridLayout(self.tabStack)
        self.gridLayout_4.setContentsMargins(0, 0, 0, 0)
        self.gridLayout_4.setObjectName("gridLayout_4")
        self.stackListWidget = QtWidgets.QListWidget(self.tabStack)
        self.stackListWidget.setObjectName("stackListWidget")
        self.gridLayout_4.addWidget(self.stackListWidget, 0, 0, 1, 1)
        self.materialTabWidget.addTab(self.tabStack, "")
        self.tabMaterial = QtWidgets.QWidget()
        self.tabMaterial.setObjectName("tabMaterial")
        self.gridLayout_2 = QtWidgets.QGridLayout(self.tabMaterial)
        self.gridLayout_2.setContentsMargins(0, 0, 0, 0)
        self.gridLayout_2.setObjectName("gridLayout_2")
        self.materialListWidget = QtWidgets.QListWidget(self.tabMaterial)
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.materialListWidget.sizePolicy().hasHeightForWidth())
        self.materialListWidget.setSizePolicy(sizePolicy)
        self.materialListWidget.setObjectName("materialListWidget")
        self.gridLayout_2.addWidget(self.materialListWidget, 0, 0, 1, 1)
        self.materialTabWidget.addTab(self.tabMaterial, "")
        self.verticalLayout_4.addWidget(self.materialTabWidget)
        # --- Material detail table (6 rows x 1 column) -------------------
        self.materialDetailTable = QtWidgets.QTableWidget(self.tabBuild)
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.materialDetailTable.sizePolicy().hasHeightForWidth())
        self.materialDetailTable.setSizePolicy(sizePolicy)
        self.materialDetailTable.setMinimumSize(QtCore.QSize(200, 220))
        self.materialDetailTable.setMaximumSize(QtCore.QSize(16777215, 220))
        self.materialDetailTable.setAcceptDrops(False)
        self.materialDetailTable.setDragEnabled(True)
        self.materialDetailTable.setDragDropMode(QtWidgets.QAbstractItemView.InternalMove)
        self.materialDetailTable.setAlternatingRowColors(False)
        self.materialDetailTable.setSelectionMode(QtWidgets.QAbstractItemView.SingleSelection)
        self.materialDetailTable.setSelectionBehavior(QtWidgets.QAbstractItemView.SelectRows)
        self.materialDetailTable.setShowGrid(True)
        self.materialDetailTable.setObjectName("materialDetailTable")
        self.materialDetailTable.setColumnCount(1)
        self.materialDetailTable.setRowCount(6)
        # Placeholder header/cell items; their texts are set in retranslateUi.
        item = QtWidgets.QTableWidgetItem()
        self.materialDetailTable.setVerticalHeaderItem(0, item)
        item = QtWidgets.QTableWidgetItem()
        self.materialDetailTable.setVerticalHeaderItem(1, item)
        item = QtWidgets.QTableWidgetItem()
        self.materialDetailTable.setVerticalHeaderItem(2, item)
        item = QtWidgets.QTableWidgetItem()
        self.materialDetailTable.setVerticalHeaderItem(3, item)
        item = QtWidgets.QTableWidgetItem()
        self.materialDetailTable.setVerticalHeaderItem(4, item)
        item = QtWidgets.QTableWidgetItem()
        self.materialDetailTable.setVerticalHeaderItem(5, item)
        item = QtWidgets.QTableWidgetItem()
        self.materialDetailTable.setHorizontalHeaderItem(0, item)
        item = QtWidgets.QTableWidgetItem()
        self.materialDetailTable.setItem(0, 0, item)
        item = QtWidgets.QTableWidgetItem()
        self.materialDetailTable.setItem(1, 0, item)
        item = QtWidgets.QTableWidgetItem()
        self.materialDetailTable.setItem(2, 0, item)
        item = QtWidgets.QTableWidgetItem()
        self.materialDetailTable.setItem(3, 0, item)
        item = QtWidgets.QTableWidgetItem()
        self.materialDetailTable.setItem(4, 0, item)
        item = QtWidgets.QTableWidgetItem()
        self.materialDetailTable.setItem(5, 0, item)
        self.materialDetailTable.horizontalHeader().setVisible(False)
        self.materialDetailTable.horizontalHeader().setCascadingSectionResizes(False)
        self.materialDetailTable.horizontalHeader().setSortIndicatorShown(False)
        self.materialDetailTable.horizontalHeader().setStretchLastSection(True)
        self.materialDetailTable.verticalHeader().setDefaultSectionSize(33)
        self.materialDetailTable.verticalHeader().setStretchLastSection(True)
        self.verticalLayout_4.addWidget(self.materialDetailTable)
        self.horizontalLayout_4.addLayout(self.verticalLayout_4)
        # --- Spectrum plot frame (custom GraphFrame) ---------------------
        self.verticalLayout_7 = QtWidgets.QVBoxLayout()
        self.verticalLayout_7.setObjectName("verticalLayout_7")
        self.plotFrame = GraphFrame(self.tabBuild)
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Expanding)
        sizePolicy.setHorizontalStretch(1)
        sizePolicy.setVerticalStretch(1)
        sizePolicy.setHeightForWidth(self.plotFrame.sizePolicy().hasHeightForWidth())
        self.plotFrame.setSizePolicy(sizePolicy)
        self.plotFrame.setMinimumSize(QtCore.QSize(800, 361))
        self.plotFrame.setCursor(QtGui.QCursor(QtCore.Qt.CrossCursor))
        self.plotFrame.setAutoFillBackground(True)
        self.plotFrame.setFrameShape(QtWidgets.QFrame.StyledPanel)
        self.plotFrame.setFrameShadow(QtWidgets.QFrame.Plain)
        self.plotFrame.setLineWidth(2)
        self.plotFrame.setMidLineWidth(3)
        self.plotFrame.setObjectName("plotFrame")
        self.verticalLayout_7.addWidget(self.plotFrame)
        # --- Color values table (2 rows x 6 columns) ---------------------
        self.colorTableWidget = QtWidgets.QTableWidget(self.tabBuild)
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.colorTableWidget.sizePolicy().hasHeightForWidth())
        self.colorTableWidget.setSizePolicy(sizePolicy)
        self.colorTableWidget.setMaximumSize(QtCore.QSize(16777215, 61))
        self.colorTableWidget.setAutoFillBackground(True)
        self.colorTableWidget.setFrameShape(QtWidgets.QFrame.NoFrame)
        self.colorTableWidget.setFrameShadow(QtWidgets.QFrame.Plain)
        self.colorTableWidget.setAutoScroll(True)
        self.colorTableWidget.setTabKeyNavigation(False)
        self.colorTableWidget.setProperty("showDropIndicator", False)
        self.colorTableWidget.setDragDropOverwriteMode(False)
        self.colorTableWidget.setShowGrid(True)
        self.colorTableWidget.setGridStyle(QtCore.Qt.SolidLine)
        self.colorTableWidget.setColumnCount(6)
        self.colorTableWidget.setObjectName("colorTableWidget")
        self.colorTableWidget.setRowCount(2)
        item = QtWidgets.QTableWidgetItem()
        self.colorTableWidget.setVerticalHeaderItem(0, item)
        item = QtWidgets.QTableWidgetItem()
        self.colorTableWidget.setVerticalHeaderItem(1, item)
        item = QtWidgets.QTableWidgetItem()
        self.colorTableWidget.setHorizontalHeaderItem(0, item)
        item = QtWidgets.QTableWidgetItem()
        self.colorTableWidget.setHorizontalHeaderItem(1, item)
        item = QtWidgets.QTableWidgetItem()
        self.colorTableWidget.setHorizontalHeaderItem(2, item)
        item = QtWidgets.QTableWidgetItem()
        self.colorTableWidget.setHorizontalHeaderItem(3, item)
        item = QtWidgets.QTableWidgetItem()
        self.colorTableWidget.setHorizontalHeaderItem(4, item)
        item = QtWidgets.QTableWidgetItem()
        self.colorTableWidget.setHorizontalHeaderItem(5, item)
        self.colorTableWidget.horizontalHeader().setVisible(False)
        self.colorTableWidget.horizontalHeader().setHighlightSections(False)
        self.colorTableWidget.verticalHeader().setVisible(True)
        self.colorTableWidget.verticalHeader().setHighlightSections(False)
        self.verticalLayout_7.addWidget(self.colorTableWidget)
        # --- Bottom frame: drag/drop visual stack + stack detail table ---
        self.designTabBottomFrame = QtWidgets.QFrame(self.tabBuild)
        self.designTabBottomFrame.setMinimumSize(QtCore.QSize(300, 150))
        self.designTabBottomFrame.setAcceptDrops(True)
        self.designTabBottomFrame.setFrameShape(QtWidgets.QFrame.StyledPanel)
        self.designTabBottomFrame.setFrameShadow(QtWidgets.QFrame.Raised)
        self.designTabBottomFrame.setObjectName("designTabBottomFrame")
        self.horizontalLayout = QtWidgets.QHBoxLayout(self.designTabBottomFrame)
        self.horizontalLayout.setObjectName("horizontalLayout")
        self.verticalLayout_2 = QtWidgets.QVBoxLayout()
        self.verticalLayout_2.setSpacing(0)
        self.verticalLayout_2.setObjectName("verticalLayout_2")
        self.labelLightTop = QtWidgets.QLabel(self.designTabBottomFrame)
        self.labelLightTop.setTextFormat(QtCore.Qt.PlainText)
        self.labelLightTop.setAlignment(QtCore.Qt.AlignCenter)
        self.labelLightTop.setObjectName("labelLightTop")
        self.verticalLayout_2.addWidget(self.labelLightTop)
        # Custom table view supporting drag/drop reordering of layers.
        self.stackWidget = DragDropTableView(self.designTabBottomFrame, self)
        self.stackWidget.setMaximumSize(QtCore.QSize(380, 16777215))
        font = QtGui.QFont()
        font.setBold(True)
        font.setWeight(75)
        self.stackWidget.setFont(font)
        self.stackWidget.setHorizontalScrollBarPolicy(QtCore.Qt.ScrollBarAlwaysOff)
        self.stackWidget.setSizeAdjustPolicy(QtWidgets.QAbstractScrollArea.AdjustIgnored)
        self.stackWidget.setDragEnabled(True)
        self.stackWidget.setDragDropOverwriteMode(False)
        self.stackWidget.setDragDropMode(QtWidgets.QAbstractItemView.InternalMove)
        self.stackWidget.setDefaultDropAction(QtCore.Qt.MoveAction)
        self.stackWidget.setSelectionMode(QtWidgets.QAbstractItemView.SingleSelection)
        self.stackWidget.setSelectionBehavior(QtWidgets.QAbstractItemView.SelectRows)
        self.stackWidget.setObjectName("stackWidget")
        self.stackWidget.horizontalHeader().setDefaultSectionSize(0)
        self.stackWidget.horizontalHeader().setMinimumSectionSize(100)
        self.stackWidget.verticalHeader().setCascadingSectionResizes(True)
        self.stackWidget.verticalHeader().setMinimumSectionSize(16)
        self.verticalLayout_2.addWidget(self.stackWidget)
        self.labelLightBottom = QtWidgets.QLabel(self.designTabBottomFrame)
        self.labelLightBottom.setEnabled(True)
        self.labelLightBottom.setInputMethodHints(QtCore.Qt.ImhNone)
        self.labelLightBottom.setAlignment(QtCore.Qt.AlignCenter)
        self.labelLightBottom.setObjectName("labelLightBottom")
        self.verticalLayout_2.addWidget(self.labelLightBottom)
        self.horizontalLayout.addLayout(self.verticalLayout_2)
        # --- Stack detail table (7 rows x 1 column) ----------------------
        self.stackDetailTable = QtWidgets.QTableWidget(self.designTabBottomFrame)
        self.stackDetailTable.setAutoFillBackground(True)
        self.stackDetailTable.setHorizontalScrollBarPolicy(QtCore.Qt.ScrollBarAsNeeded)
        self.stackDetailTable.setObjectName("stackDetailTable")
        self.stackDetailTable.setColumnCount(1)
        self.stackDetailTable.setRowCount(7)
        item = QtWidgets.QTableWidgetItem()
        self.stackDetailTable.setVerticalHeaderItem(0, item)
        item = QtWidgets.QTableWidgetItem()
        self.stackDetailTable.setVerticalHeaderItem(1, item)
        item = QtWidgets.QTableWidgetItem()
        self.stackDetailTable.setVerticalHeaderItem(2, item)
        item = QtWidgets.QTableWidgetItem()
        self.stackDetailTable.setVerticalHeaderItem(3, item)
        item = QtWidgets.QTableWidgetItem()
        self.stackDetailTable.setVerticalHeaderItem(4, item)
        item = QtWidgets.QTableWidgetItem()
        self.stackDetailTable.setVerticalHeaderItem(5, item)
        item = QtWidgets.QTableWidgetItem()
        self.stackDetailTable.setVerticalHeaderItem(6, item)
        item = QtWidgets.QTableWidgetItem()
        self.stackDetailTable.setHorizontalHeaderItem(0, item)
        self.stackDetailTable.horizontalHeader().setVisible(False)
        self.stackDetailTable.horizontalHeader().setDefaultSectionSize(100)
        self.stackDetailTable.horizontalHeader().setStretchLastSection(True)
        self.stackDetailTable.verticalHeader().setCascadingSectionResizes(True)
        self.stackDetailTable.verticalHeader().setDefaultSectionSize(27)
        self.stackDetailTable.verticalHeader().setMinimumSectionSize(20)
        self.stackDetailTable.verticalHeader().setStretchLastSection(True)
        self.horizontalLayout.addWidget(self.stackDetailTable)
        self.verticalLayout_7.addWidget(self.designTabBottomFrame)
        self.horizontalLayout_4.addLayout(self.verticalLayout_7)
        self.tabWidget.addTab(self.tabBuild, "")
        # --- Fit tab -----------------------------------------------------
        self.tabFit = QtWidgets.QWidget()
        self.tabFit.setObjectName("tabFit")
        self.gridLayout = QtWidgets.QGridLayout(self.tabFit)
        self.gridLayout.setContentsMargins(0, 0, 0, 0)
        self.gridLayout.setObjectName("gridLayout")
        self.plotFitFrame = GraphFrame(self.tabFit)
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Expanding)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.plotFitFrame.sizePolicy().hasHeightForWidth())
        self.plotFitFrame.setSizePolicy(sizePolicy)
        self.plotFitFrame.setMinimumSize(QtCore.QSize(400, 300))
        self.plotFitFrame.setCursor(QtGui.QCursor(QtCore.Qt.CrossCursor))
        self.plotFitFrame.setAutoFillBackground(True)
        self.plotFitFrame.setFrameShape(QtWidgets.QFrame.StyledPanel)
        self.plotFitFrame.setFrameShadow(QtWidgets.QFrame.Plain)
        self.plotFitFrame.setLineWidth(2)
        self.plotFitFrame.setMidLineWidth(3)
        self.plotFitFrame.setObjectName("plotFitFrame")
        self.gridLayout.addWidget(self.plotFitFrame, 0, 1, 1, 1)
        #self.gridLayout.addWidget(self.plotFitFrame)
        # --- Fit parameter table (14 rows x 7 columns) -------------------
        self.horizontalLayout_2 = QtWidgets.QHBoxLayout()
        self.horizontalLayout_2.setObjectName("horizontalLayout_2")
        self.fitTableWidget = QtWidgets.QTableWidget(self.tabFit)
        self.fitTableWidget.setSelectionMode(QtWidgets.QAbstractItemView.SingleSelection)
        self.fitTableWidget.setObjectName("fitTableWidget")
        self.fitTableWidget.setColumnCount(7)
        self.fitTableWidget.setRowCount(14)
        self.fitTableWidget.setMaximumSize(QtCore.QSize(16777215, 360))
        item = QtWidgets.QTableWidgetItem()
        self.fitTableWidget.setVerticalHeaderItem(0, item)
        item = QtWidgets.QTableWidgetItem()
        self.fitTableWidget.setVerticalHeaderItem(1, item)
        item = QtWidgets.QTableWidgetItem()
        self.fitTableWidget.setVerticalHeaderItem(2, item)
        item = QtWidgets.QTableWidgetItem()
        self.fitTableWidget.setVerticalHeaderItem(3, item)
        item = QtWidgets.QTableWidgetItem()
        self.fitTableWidget.setVerticalHeaderItem(4, item)
        item = QtWidgets.QTableWidgetItem()
        self.fitTableWidget.setVerticalHeaderItem(5, item)
        item = QtWidgets.QTableWidgetItem()
        self.fitTableWidget.setVerticalHeaderItem(6, item)
        item = QtWidgets.QTableWidgetItem()
        self.fitTableWidget.setVerticalHeaderItem(7, item)
        item = QtWidgets.QTableWidgetItem()
        self.fitTableWidget.setVerticalHeaderItem(8, item)
        item = QtWidgets.QTableWidgetItem()
        self.fitTableWidget.setVerticalHeaderItem(9, item)
        item = QtWidgets.QTableWidgetItem()
        self.fitTableWidget.setVerticalHeaderItem(10, item)
        item = QtWidgets.QTableWidgetItem()
        self.fitTableWidget.setVerticalHeaderItem(11, item)
        item = QtWidgets.QTableWidgetItem()
        self.fitTableWidget.setVerticalHeaderItem(12, item)
        item = QtWidgets.QTableWidgetItem()
        self.fitTableWidget.setVerticalHeaderItem(13, item)
        item = QtWidgets.QTableWidgetItem()
        self.fitTableWidget.setHorizontalHeaderItem(0, item)
        item = QtWidgets.QTableWidgetItem()
        self.fitTableWidget.setHorizontalHeaderItem(1, item)
        item = QtWidgets.QTableWidgetItem()
        self.fitTableWidget.setHorizontalHeaderItem(2, item)
        item = QtWidgets.QTableWidgetItem()
        self.fitTableWidget.setHorizontalHeaderItem(3, item)
        item = QtWidgets.QTableWidgetItem()
        self.fitTableWidget.setHorizontalHeaderItem(4, item)
        item = QtWidgets.QTableWidgetItem()
        self.fitTableWidget.setHorizontalHeaderItem(5, item)
        item = QtWidgets.QTableWidgetItem()
        self.fitTableWidget.setHorizontalHeaderItem(6, item)
        self.fitTableWidget.horizontalHeader().setVisible(False)
        self.fitTableWidget.horizontalHeader().setDefaultSectionSize(100)
        self.fitTableWidget.horizontalHeader().setMinimumSectionSize(15)
        self.fitTableWidget.verticalHeader().setDefaultSectionSize(22)
        self.fitTableWidget.verticalHeader().setStretchLastSection(True)
        self.horizontalLayout_2.addWidget(self.fitTableWidget)
        # --- Fit color table (6 rows x 2 columns) ------------------------
        self.colorFitTableWidget = QtWidgets.QTableWidget(self.tabFit)
        self.colorFitTableWidget.setMinimumSize(QtCore.QSize(200, 0))
        self.colorFitTableWidget.setMaximumSize(QtCore.QSize(200, 360))
        # NOTE(review): objectName "tableWidget" looks like a generated-code
        # leftover (attribute is colorFitTableWidget) — confirm before renaming.
        self.colorFitTableWidget.setObjectName("tableWidget")
        self.colorFitTableWidget.setColumnCount(2)
        self.colorFitTableWidget.setRowCount(6)
        item = QtWidgets.QTableWidgetItem()
        self.colorFitTableWidget.setVerticalHeaderItem(0, item)
        item = QtWidgets.QTableWidgetItem()
        self.colorFitTableWidget.setVerticalHeaderItem(1, item)
        item = QtWidgets.QTableWidgetItem()
        self.colorFitTableWidget.setVerticalHeaderItem(2, item)
        item = QtWidgets.QTableWidgetItem()
        self.colorFitTableWidget.setVerticalHeaderItem(3, item)
        item = QtWidgets.QTableWidgetItem()
        self.colorFitTableWidget.setVerticalHeaderItem(4, item)
        item = QtWidgets.QTableWidgetItem()
        self.colorFitTableWidget.setVerticalHeaderItem(5, item)
        item = QtWidgets.QTableWidgetItem()
        self.colorFitTableWidget.setHorizontalHeaderItem(0, item)
        item = QtWidgets.QTableWidgetItem()
        self.colorFitTableWidget.setHorizontalHeaderItem(1, item)
        self.colorFitTableWidget.horizontalHeader().setVisible(True)
        self.colorFitTableWidget.verticalHeader().setVisible(False)
        self.horizontalLayout_2.addWidget(self.colorFitTableWidget)
        self.gridLayout.addLayout(self.horizontalLayout_2, 1, 0, 1, 2)
        # --- Fit action buttons ------------------------------------------
        self.verticalLayout_5 = QtWidgets.QVBoxLayout()
        self.verticalLayout_5.setObjectName("verticalLayout_5")
        self.deviceComboBox = QtWidgets.QComboBox(self.tabFit)
        self.deviceComboBox.setObjectName("deviceComboBox")
        self.verticalLayout_5.addWidget(self.deviceComboBox)
        self.onlinePB = QtWidgets.QPushButton(self.tabFit)
        self.onlinePB.setObjectName("onlinePB")
        self.verticalLayout_5.addWidget(self.onlinePB)
        self.selectParamPB = QtWidgets.QPushButton(self.tabFit)
        self.selectParamPB.setObjectName("selectParamPB")
        self.verticalLayout_5.addWidget(self.selectParamPB)
        self.fitPB = QtWidgets.QPushButton(self.tabFit)
        self.fitPB.setObjectName("fitPB")
        self.verticalLayout_5.addWidget(self.fitPB)
        self.restoreFitPB = QtWidgets.QPushButton(self.tabFit)
        self.restoreFitPB.setToolTip("")
        self.restoreFitPB.setObjectName("restoreFitPB")
        self.verticalLayout_5.addWidget(self.restoreFitPB)
        self.reportFitPB = QtWidgets.QPushButton(self.tabFit)
        self.reportFitPB.setObjectName("reportFitPB")
        self.verticalLayout_5.addWidget(self.reportFitPB)
        self.gridLayout.addLayout(self.verticalLayout_5, 0, 0, 1, 1)
        self.tabWidget.addTab(self.tabFit, "")
        self.verticalLayout.addWidget(self.tabWidget)
        MainWindow.setCentralWidget(self.centralwidget)
        # --- Menu bar, status bar and actions ----------------------------
        self.menubar = QtWidgets.QMenuBar(MainWindow)
        self.menubar.setGeometry(QtCore.QRect(0, 0, 1230, 22))
        self.menubar.setObjectName("menubar")
        self.menuFile = QtWidgets.QMenu(self.menubar)
        self.menuFile.setObjectName("menuFile")
        self.menuEdit = QtWidgets.QMenu(self.menubar)
        self.menuEdit.setObjectName("menuEdit")
        self.menuSettings = QtWidgets.QMenu(self.menubar)
        self.menuSettings.setObjectName("menuSettings")
        self.menuHelp = QtWidgets.QMenu(self.menubar)
        self.menuHelp.setObjectName("menuHelp")
        MainWindow.setMenuBar(self.menubar)
        self.statusbar = QtWidgets.QStatusBar(MainWindow)
        self.statusbar.setObjectName("statusbar")
        MainWindow.setStatusBar(self.statusbar)
        self.actionOpen_Stack = QtWidgets.QAction(MainWindow)
        self.actionOpen_Stack.setObjectName("actionOpen_Stack")
        self.actionOpen_Material = QtWidgets.QAction(MainWindow)
        self.actionOpen_Material.setObjectName("actionOpen_Material")
        self.actionSave_Stack = QtWidgets.QAction(MainWindow)
        self.actionSave_Stack.setObjectName("actionSave_Stack")
        self.actionConnections = QtWidgets.QAction(MainWindow)
        self.actionConnections.setObjectName("actionConnections")
        self.actionFit = QtWidgets.QAction(MainWindow)
        self.actionFit.setObjectName("actionFit")
        self.actionDeleteStack = QtWidgets.QAction(MainWindow)
        self.actionDeleteStack.setObjectName("actionDeleteStack")
        self.actionReload_DBs = QtWidgets.QAction(MainWindow)
        self.actionReload_DBs.setObjectName("actionReload_DBs")
        self.actionInstructions = QtWidgets.QAction(MainWindow)
        self.actionInstructions.setObjectName("actionInstructions")
        self.actionAbout = QtWidgets.QAction(MainWindow)
        self.actionAbout.setObjectName("actionAbout")
        self.actionGeneral = QtWidgets.QAction(MainWindow)
        self.actionGeneral.setObjectName("actionGeneral")
        self.actionFit_Turn_all_On_Off = QtWidgets.QAction(MainWindow)
        self.actionFit_Turn_all_On_Off.setObjectName("actionFit_Turn_all_On_Off")
        self.actionColor = QtWidgets.QAction(MainWindow)
        self.actionColor.setObjectName("actionColor")
        # --- Populate menus ----------------------------------------------
        self.menuFile.addAction(self.actionOpen_Stack)
        self.menuFile.addAction(self.actionOpen_Material)
        self.menuFile.addAction(self.actionSave_Stack)
        self.menuEdit.addAction(self.actionDeleteStack)
        self.menuEdit.addAction(self.actionReload_DBs)
        self.menuEdit.addSeparator()
        self.menuEdit.addAction(self.actionFit_Turn_all_On_Off)
        self.menuSettings.addAction(self.actionGeneral)
        self.menuSettings.addAction(self.actionColor)
        self.menuSettings.addAction(self.actionFit)
        self.menuSettings.addAction(self.actionConnections)
        self.menuHelp.addAction(self.actionInstructions)
        self.menuHelp.addAction(self.actionAbout)
        self.menubar.addAction(self.menuFile.menuAction())
        self.menubar.addAction(self.menuEdit.menuAction())
        self.menubar.addAction(self.menuSettings.menuAction())
        self.menubar.addAction(self.menuHelp.menuAction())
        # --- Final wiring: translations, margins and menu signals --------
        self.retranslateUi(MainWindow)
        self.tabWidget.setCurrentIndex(0)
        self.materialTabWidget.setCurrentIndex(0)
        QtCore.QMetaObject.connectSlotsByName(MainWindow)
        self.plotFrame.setContentsMargins(0,0,0,0)
        self.plotFitFrame.setContentsMargins(0,0,0,0)
        self.menubar.setNativeMenuBar(False) #FOR MAC
        #FILE
        self.actionSave_Stack.triggered.connect(self.saveStack)
        self.actionOpen_Stack.triggered.connect(self.addStacksToDB)
        self.actionOpen_Material.triggered.connect(self.addMaterialToDB)
        #EDIT
        self.actionDeleteStack.triggered.connect(self.removeCompleteStack)
        self.actionReload_DBs.triggered.connect(self.reload_DBs)
        self.actionFit_Turn_all_On_Off.triggered.connect(self.fit_Turn_all_On_Off)
        #SETTINGS
        self.actionGeneral.triggered.connect(self.openGeneralSettingsWindow)
        self.actionFit.triggered.connect(self.openFitSettingsWindow)
        self.actionColor.triggered.connect(self.openColorSettingsWindow)
        self.actionConnections.triggered.connect(self.openConnectWindow)
        self.actionAbout.triggered.connect(self.openAboutWindow)
        ##d.processingTime()#checking
def openGeneralSettingsWindow(self):
##print('here we go again=====>openGeneralSettingsWindow')#checking
###d.datetimeConverter()#checking
self.generalDialog = QtWidgets.QDialog()
self.uiGeneralDialog = Ui_GeneralSettingsDialog()
self.uiGeneralDialog.setupUi(self.generalDialog, self)
self.generalDialog.show()
###d.processingTime()#checking
def openColorSettingsWindow(self):
##print('here we go again=====>openColorSettingsWindow')#checking
###d.datetimeConverter()#checking
self.colorDialog = QtWidgets.QDialog()
self.uiColorDialog = Ui_colorDialog()
self.uiColorDialog.setupUi(self.colorDialog, self)
self.colorDialog.show()
###d.processingTime()#checking
def openFitSettingsWindow(self):
##print('here we go again=====>openFitSettingsWindow')#checking
###d.datetimeConverter()#checking
self.fitDialog = QtWidgets.QDialog()
self.uiFitDialog = Ui_fitSettingsDialog()
self.uiFitDialog.setupUi(self.fitDialog, self)
self.fitDialog.show()
###d.processingTime()#checking
def openConnectWindow(self):
##print('here we go again=====>openConnectWindow')#checking
###d.datetimeConverter()#checking
self.connectDialog = QtWidgets.QDialog()
self.uiConnectDialog = Ui_connectDialog()
self.uiConnectDialog.setupUi(self.connectDialog, self)
self.connectDialog.show()
###d.processingTime()#checking
def openAboutWindow(self):
##print('here we go again=====>openAboutWindow')#checking
###d.datetimeConverter()#checking
self.aboutDialog = QtWidgets.QDialog()
self.uiAboutDialog = Ui_aboutDialog()
self.uiAboutDialog.setupUi(self.aboutDialog)
self.aboutDialog.show()
###d.processingTime()#checking
def retranslateUi(self, MainWindow):
##print('here we go again=====>retranslateUi')#checking
###d.datetimeConverter()#checking
_translate = QtCore.QCoreApplication.translate
MainWindow.setWindowTitle(_translate("MainWindow", "TOMware"))
self.saveStackPB.setText(_translate("MainWindow", "Save Stack"))
self.deleteLayerPB.setText(_translate("MainWindow", "Delete Layer"))
self.loadStackPB.setText(_translate("MainWindow", "Load Stack"))
self.reverseStackPB.setText(_translate("MainWindow", "Reverse Stack"))
self.addLayerPB.setText(_translate("MainWindow", "Add Material"))
self.materialTabWidget.setTabText(self.materialTabWidget.indexOf(self.tabStack), _translate("MainWindow", "Stacks"))
self.materialTabWidget.setTabText(self.materialTabWidget.indexOf(self.tabMaterial), _translate("MainWindow", "Materials"))
item = self.materialDetailTable.verticalHeaderItem(0)
item.setText(_translate("MainWindow", "Name"))
item = self.materialDetailTable.verticalHeaderItem(1)
item.setText(_translate("MainWindow", "Details"))
item = self.materialDetailTable.verticalHeaderItem(2)
item.setText(_translate("MainWindow", "Material"))
item = self.materialDetailTable.verticalHeaderItem(3)
item.setText(_translate("MainWindow", "Thickness"))
item = self.materialDetailTable.verticalHeaderItem(4)
item.setText(_translate("MainWindow", "Model"))
item = self.materialDetailTable.verticalHeaderItem(5)
item.setText(_translate("MainWindow", "Comments"))
item = self.materialDetailTable.horizontalHeaderItem(0)
item.setText(_translate("MainWindow", "Value"))
__sortingEnabled = self.materialDetailTable.isSortingEnabled()
self.materialDetailTable.setSortingEnabled(False)
item = self.materialDetailTable.item(0, 0)
item.setText(_translate("MainWindow", "Ag-SiO2-TiO2-HB3"))
item = self.materialDetailTable.item(1, 0)
item.setText(_translate("MainWindow", "5"))
item = self.materialDetailTable.item(2, 0)
item.setText(_translate("MainWindow", "Ag (version 112017)"))
item = self.materialDetailTable.item(3, 0)
item.setText(_translate("MainWindow", "20 nm"))
item = self.materialDetailTable.item(4, 0)
item.setText(_translate("MainWindow", "Drude"))
item = self.materialDetailTable.item(5, 0)
item.setText(_translate("MainWindow", "N: 1.3, K: -1"))
self.materialDetailTable.setSortingEnabled(__sortingEnabled)
item = self.colorTableWidget.verticalHeaderItem(0)
item.setText(_translate("MainWindow", "Transmission"))
item = self.colorTableWidget.verticalHeaderItem(1)
item.setText(_translate("MainWindow", "Reflection"))
item = self.colorTableWidget.horizontalHeaderItem(0)
item.setText(_translate("MainWindow", "New Column"))
item = self.colorTableWidget.horizontalHeaderItem(1)
item.setText(_translate("MainWindow", "New Column"))
item = self.colorTableWidget.horizontalHeaderItem(2)
item.setText(_translate("MainWindow", "New Column"))
item = self.colorTableWidget.horizontalHeaderItem(3)
item.setText(_translate("MainWindow", "New Column"))
item = self.colorTableWidget.horizontalHeaderItem(4)
item.setText(_translate("MainWindow", "New Column"))
item = self.colorTableWidget.horizontalHeaderItem(5)
item.setText(_translate("MainWindow", "New Column"))
self.labelLightTop.setText(_translate("MainWindow", "Light enters here at 2˚"))
self.labelLightBottom.setText(_translate("MainWindow", "Light enters here at 2˚"))
item = self.stackDetailTable.verticalHeaderItem(0)
item.setText(_translate("MainWindow", "Name"))
item = self.stackDetailTable.verticalHeaderItem(1)
item.setText(_translate("MainWindow", "Stack"))
item = self.stackDetailTable.verticalHeaderItem(2)
item.setText(_translate("MainWindow", "Stack Count"))
item = self.stackDetailTable.verticalHeaderItem(3)
item.setText(_translate("MainWindow", "Layer #"))
item = self.stackDetailTable.verticalHeaderItem(4)
item.setText(_translate("MainWindow", "Material"))
item = self.stackDetailTable.verticalHeaderItem(5)
item.setText(_translate("MainWindow", "Thickness"))
item = self.stackDetailTable.verticalHeaderItem(6)
item.setText(_translate("MainWindow", "Index @ 550 nm"))
item = self.stackDetailTable.horizontalHeaderItem(0)
item.setText(_translate("MainWindow", "Value"))
self.tabWidget.setTabText(self.tabWidget.indexOf(self.tabBuild), _translate("MainWindow", "Build"))
item = self.fitTableWidget.verticalHeaderItem(0)
item.setText(_translate("MainWindow", "Material"))
item = self.fitTableWidget.verticalHeaderItem(1)
item.setText(_translate("MainWindow", "Layer #"))
item = self.fitTableWidget.verticalHeaderItem(2)
item.setText(_translate("MainWindow", "Status"))
item = self.fitTableWidget.verticalHeaderItem(3)
item.setText(_translate("MainWindow", "Target Height"))
item = self.fitTableWidget.verticalHeaderItem(4)
item.setText(_translate("MainWindow", "Actual Height"))
item = self.fitTableWidget.verticalHeaderItem(5)
item.setText(_translate("MainWindow", "de"))
item = self.fitTableWidget.verticalHeaderItem(6)
item.setText(_translate("MainWindow", "UV oscil. freq. ω0^2"))
item = self.fitTableWidget.verticalHeaderItem(7)
item.setText(_translate("MainWindow", "UV oscil. strength ωp^2"))
item = self.fitTableWidget.verticalHeaderItem(8)
item.setText(_translate("MainWindow", "UV line width γ"))
item = self.fitTableWidget.verticalHeaderItem(9)
item.setText(_translate("MainWindow", "Cond. oscil. freq. ω0^2"))
item = self.fitTableWidget.verticalHeaderItem(10)
item.setText(_translate("MainWindow", "Cond. oscil. strength ωp^2"))
item = self.fitTableWidget.verticalHeaderItem(11)
item.setText(_translate("MainWindow", "Cond. line width γ"))
item = self.fitTableWidget.verticalHeaderItem(12)
item.setText(_translate("MainWindow", "N,K @ 550 nm"))
item = self.fitTableWidget.verticalHeaderItem(13)
item.setText(_translate("MainWindow", "Edit"))
item = self.fitTableWidget.horizontalHeaderItem(0)
item.setText(_translate("MainWindow", "New Column"))
item = self.fitTableWidget.horizontalHeaderItem(1)
item.setText(_translate("MainWindow", "New Column"))
item = self.fitTableWidget.horizontalHeaderItem(2)
item.setText(_translate("MainWindow", "New Column"))
item = self.fitTableWidget.horizontalHeaderItem(3)
item.setText(_translate("MainWindow", "New Column"))
item = self.fitTableWidget.horizontalHeaderItem(4)
item.setText(_translate("MainWindow", "New Column"))
item = self.fitTableWidget.horizontalHeaderItem(5)
item.setText(_translate("MainWindow", "New Column"))
item = self.fitTableWidget.horizontalHeaderItem(6)
item.setText(_translate("MainWindow", "New Column"))
item = self.colorFitTableWidget.verticalHeaderItem(0)
item.setText(_translate("MainWindow", "New Row"))
item = self.colorFitTableWidget.verticalHeaderItem(1)
item.setText(_translate("MainWindow", "New Row"))
item = self.colorFitTableWidget.verticalHeaderItem(2)
item.setText(_translate("MainWindow", "New Row"))
item = self.colorFitTableWidget.verticalHeaderItem(3)
item.setText(_translate("MainWindow", "New Row"))
item = self.colorFitTableWidget.verticalHeaderItem(4)
item.setText(_translate("MainWindow", "New Row"))
item = self.colorFitTableWidget.verticalHeaderItem(5)
item.setText(_translate("MainWindow", "New Row"))
item = self.colorFitTableWidget.horizontalHeaderItem(0)
item.setText(_translate("MainWindow", "Tranmission"))
item = self.colorFitTableWidget.horizontalHeaderItem(1)
item.setText(_translate("MainWindow", "Reflection"))
self.onlinePB.setText(_translate("MainWindow", "Go Online"))
self.selectParamPB.setText(_translate("MainWindow", "Select Fit Param"))
self.fitPB.setText(_translate("MainWindow", "Fit"))
self.restoreFitPB.setText(_translate("MainWindow", "Restore Fit"))
self.reportFitPB.setText(_translate("MainWindow", "Report of Run"))
self.tabWidget.setTabText(self.tabWidget.indexOf(self.tabFit), _translate("MainWindow", "Fit"))
self.menuFile.setTitle(_translate("MainWindow", "File"))
self.menuEdit.setTitle(_translate("MainWindow", "Edit"))
self.menuSettings.setTitle(_translate("MainWindow", "Settings"))
self.menuHelp.setTitle(_translate("MainWindow", "Help"))
self.actionOpen_Stack.setText(_translate("MainWindow", "Add Stack File"))
self.actionOpen_Material.setText(_translate("MainWindow", "Add Material File"))
self.actionSave_Stack.setText(_translate("MainWindow", "Save Stack"))
self.actionConnections.setText(_translate("MainWindow", "Connections"))
self.actionFit.setText(_translate("MainWindow", "Fit"))
self.actionDeleteStack.setText(_translate("MainWindow", "Delete Entire Stack"))
self.actionDeleteStack.setIconText(_translate("MainWindow", "Delete Entire Stack"))
self.actionDeleteStack.setToolTip(_translate("MainWindow", "Delete Entire Stack. <Shift + Del> over visual stack performs same action."))
self.actionReload_DBs.setText(_translate("MainWindow", "Reload DBs"))
self.actionInstructions.setText(_translate("MainWindow", "Instructions"))
self.actionAbout.setText(_translate("MainWindow", "About..."))
self.actionGeneral.setText(_translate("MainWindow", "General"))
self.actionFit_Turn_all_On_Off.setText(_translate("MainWindow", "Fit - Turn all \'On\' / \'Off\'"))
self.actionFit_Turn_all_On_Off.setToolTip(_translate("MainWindow", "Turns status all layers to \'On\' or \'Off\'"))
self.actionColor.setText(_translate("MainWindow", "Color"))
#***************************************
#CHANGE QtWidgets.QFrame to GraphFrame
#CHANGE self.plotFitFrame = GraphFrame(self.tabFit)
#CHANGE self.stackWidget = DragDropTableView(self.designTabBottomFrame, self) FROM self.stackWidget = QtWidgets.QTableView(self.frame)
#REMOVE lableLightTop and Bottom labels from original QT Builder file.
#ADD self.plotFrame.setContentsMargins(0,0,0,0)
#ADD self.menubar.setNativeMenuBar(False) #FOR MAC
#ADD stack to def setupUi(self, MainWindow, stack):
#ADD self.stack = stack in SetupUi()
#ADD self.MainWindow = MainWindow in SetupUi()
MainWindow.setWindowIcon(QtGui.QIcon('icon.ico'))
self.labelLightTop.setText(_translate("MainWindow", "Light enters here at {}˚".format(self.settings.incident_angle)))
self.labelLightBottom.setText(_translate("MainWindow", "Light enters here at {}˚".format(self.settings.incident_angle)))
self.activeLayer = None
#Design tab
self.labelLightTop.setVisible(not self.stack.REVERSE_STACK)
self.labelLightBottom.setVisible(self.stack.REVERSE_STACK)
self.loadStackPB.clicked.connect(self.loadStack)
self.saveStackPB.clicked.connect(self.saveStack)
self.addLayerPB.clicked.connect(self.addMaterialToStack)
self.deleteLayerPB.clicked.connect(self.removeMaterialFromStack)
self.reverseStackPB.clicked.connect(self.reverseStack)
self.stackDetailTable.itemChanged.connect(self.changeInStackDetails)
self.materialListWidget.currentRowChanged.connect(self.updateMaterialDBDetailTable)
self.stackListWidget.currentRowChanged.connect(self.updateStackDBDetailTable)
self.materialTabWidget.currentChanged.connect(self.enableStackMaterialPB)
self.tabWidget.currentChanged.connect(self.updateScreens)
header = self.colorTableWidget.horizontalHeader()
for i in range(self.colorTableWidget.columnCount()):
header.setSectionResizeMode(i, QtWidgets.QHeaderView.Stretch)
header = self.stackDetailTable.verticalHeader()
for i in range(self.stackDetailTable.rowCount()):
header.setSectionResizeMode(i, QtWidgets.QHeaderView.Stretch)
header = self.materialDetailTable.verticalHeader()
for i in range(self.materialDetailTable.rowCount()):
header.setSectionResizeMode(i, QtWidgets.QHeaderView.Stretch)
self.fitTableWidget.setSelectionMode(QtWidgets.QAbstractItemView.NoSelection)
#Fit tab
self.fitTab = FitTab(self)
self.restoreFitPB.clicked.connect(self.fitTab.restoreFit)
self.onlinePB.clicked.connect(self.checkOnlineStatus)
self.parameterMode = False
self.selectParamPB.clicked.connect(self.fitTab.selectFitParametersMode)
self.fitPB.clicked.connect(self.initiateFit)
for device in self.settings.device_list:
self.deviceComboBox.addItem(device)
self.deviceComboBox.setCurrentIndex(0)
self.deviceComboBox.activated[str].connect(self.deviceSelect)
self.enableStackMaterialPB(0)
#self.buttonGroup = QtWidgets.QButtonGroup()
#self.buttonGroup.buttonClicked[int].connect(self.lockEditModeForFit)
self.updateDesignGraph(self.stack)
###d.processingTime()#checking
def deviceSelect(self, txt):
self.settings.device_select = txt
    def initiateFit(self):
        '''Run the fit on the current stack, after validating that at least one
        layer is enabled for fit and has fit parameters selected, then refresh
        the Fit tab widgets with the result.'''
        ##print('here we go again=====>openGeneralSettingsWindow')#checking
        ###d.datetimeConverter()#checking
        if any(mat.fitStatus == True for mat in self.stack.material):
            if any(any(mat.fit_param.values()) and mat.fitStatus for mat in self.stack.material):
                # Detach the item-changed handler while the fit rewrites the
                # table, to avoid re-entrant storeFitParameters calls.
                if any(mat.editMode for mat in self.stack.material):
                    self.fitTableWidget.itemChanged.disconnect(self.fitTab.storeFitParameters)
                self.stack = fit(self.stack, self.settings)
                self.fitTab.loadFitParameters(self.stack)
                self.fitTab.updateFitGraph(self.stack)
                self.fitTab.updateFitColorDataToTable(self.stack)
                # Re-attach only if a layer is still in edit mode.
                if any(mat.editMode for mat in self.stack.material):
                    self.fitTableWidget.itemChanged.connect(self.fitTab.storeFitParameters)
            else:
                self.raiseWarningMessage('No fit parameters selected.', 'Please select 1 to 7 fit parameters in an enabled layer.')
        else:
            self.raiseWarningMessage('No layer is enabled for fit.', 'Please enable at least one layer for fit.')
        ###d.processingTime()#checking
    def updateScreens(self, e):
        '''Synchronise the Design/Fit screens when the main tab changes.

        e -- new tab index: 1 == Fit tab, 0 == Design tab.
        '''
        ##print('here we go again=====>updateScreens')#checking
        ###d.datetimeConverter()#checking
        if e == 1: #when main tab is changed to 'Fit'
            self.fitTab.loadFitTableWidget(self.stack)
            self.fitTab.updateFitGraph(self.stack)
            self.fitTab.updateFitColorDataToTable(self.stack)
        if e == 0:
            # Returning to the Design tab: detach the fit-parameter handler
            # (TypeError means it was never connected) and drop edit mode on
            # every layer before redrawing the design widgets.
            try:
                self.fitTableWidget.itemChanged.disconnect(self.fitTab.storeFitParameters)
            except TypeError:
                pass
            for mat in self.stack.material:
                mat.editMode = False
            self.updateDesignGraph(self.stack)
            self.updateStackDetailTable(0, self.stack)
        ###d.processingTime()#checking
    def updateMaterialDBDetailTable(self):
        ##print('here we go again=====>updateMaterialDBDetailTable')#checking
        ###d.datetimeConverter()#checking
        '''Function to update detail table when a material from DB is selected'''
        def writeNonEditableInfo(row, col, info):
            # Write a read-only cell; non-string payloads render as '-'.
            if not isinstance(info, str):
                info = '-'
            item = QtWidgets.QTableWidgetItem(info)
            item.setFlags(item.flags() ^ QtCore.Qt.ItemIsEditable)
            self.materialDetailTable.setItem(row, col, item)
        row = self.materialListWidget.currentRow()
        mat = self.material_db[row]
        # Row 0: name with version ('-' when the material has no version) and date.
        self.materialDetailTable.verticalHeaderItem(0).setText('Name')
        if mat.version == '':
            version = '-'
        else:
            version = mat.version
        writeNonEditableInfo(0,0, '{} (version: {}, date: {})'.format(mat.name, version, mat.date))
        # Row 1: standard thickness formatted with its unit.
        self.materialDetailTable.verticalHeaderItem(1).setText('Thickness (std.)')
        thickness_text = getThicknessAndUnit(mat.standard_thickness)
        writeNonEditableInfo(1,0, thickness_text)
        # Row 2: optical model; Drude models get a tooltip listing the oscillator parameters.
        self.materialDetailTable.verticalHeaderItem(2).setText('Model')
        item = QtWidgets.QTableWidgetItem('{}'.format(mat.model))
        if mat.model == 'drude':
            item.setToolTip('''Dielectric constant de: {:5.3f}\n\nUV band:\nOscil. freq ω0^2: {:6.2f}\nOscil. strength ωp^2: {:6.2f}\nLinewidth gamma: {:6.4f}\n\nConduction band:\nOscil. freq ω0^2: {:6.2f}\nOscil. strength ωp^2: {:6.2f}\nLinewidth gamma: {:6.4f}'''.format(*mat.getDrudeParamsForPrint()))
        item.setFlags(item.flags() ^ QtCore.Qt.ItemIsEditable)
        self.materialDetailTable.setItem(2, 0, item)
        self.materialDetailTable.verticalHeaderItem(3).setText('Source')
        writeNonEditableInfo(3,0, mat.source)
        # Row 4: n/k sampled from the material splines at 550 nm; ValueError
        # (no spline data) renders placeholders instead.
        try:
            n = float(mat.get_NKspline_value('N', 550))
            k = float(mat.get_NKspline_value('K', 550))
            infoNK = 'n: {:5.3f}, k: {:6.3f}'.format(n,k)
        except ValueError:
            infoNK = 'n: -, k: -'
        self.materialDetailTable.verticalHeaderItem(4).setText('Index @ 550 nm')
        writeNonEditableInfo(4,0, infoNK)
        # Row 5: free-text comments, left editable on purpose.
        self.materialDetailTable.verticalHeaderItem(5).setText('Comments')
        self.materialDetailTable.setItem(5, 0, QtWidgets.QTableWidgetItem('{}'.format(mat.comment)))
        # Stretch all rows evenly over the available height.
        columnCount = self.materialDetailTable.columnCount()
        header = self.materialDetailTable.verticalHeader()
        for i in range(columnCount):
            header.setSectionResizeMode(i, QtWidgets.QHeaderView.Stretch)
        ###d.processingTime()#checking
    def updateStackDBDetailTable(self):
        ##print('here we go again=====>updateStackDBDetailTable')#checking
        ###d.datetimeConverter()#checking
        '''Function to update detail table when a stack from DB is selected'''
        def writeNonEditableInfo(row, col, info):
            # Read-only cell; non-string payloads render as '-'.
            if not isinstance(info, str):
                info = '-'
            item = QtWidgets.QTableWidgetItem(info)
            item.setFlags(item.flags() ^ QtCore.Qt.ItemIsEditable)
            self.materialDetailTable.setItem(row, col, item)
        row = self.stackListWidget.currentRow()
        stack = self.stack_db[row]
        # NOTE: stack details are shown in the shared materialDetailTable
        # widget; only the row captions change.
        self.materialDetailTable.verticalHeaderItem(0).setText('Name')
        writeNonEditableInfo(0,0, stack.name)
        self.materialDetailTable.verticalHeaderItem(1).setText('Layers')
        writeNonEditableInfo(1,0,'-'.join(stack.layers))
        self.materialDetailTable.verticalHeaderItem(2).setText('Thickness')
        t_list = '-'.join(str(x) for x in stack.thickness)
        writeNonEditableInfo(2,0,t_list)
        self.materialDetailTable.verticalHeaderItem(3).setText('Source')
        writeNonEditableInfo(3,0,stack.source)
        self.materialDetailTable.verticalHeaderItem(4).setText('Date')
        writeNonEditableInfo(4,0,stack.date)
        self.materialDetailTable.verticalHeaderItem(5).setText('Comments')
        writeNonEditableInfo(5,0,stack.comment)
        # Stretch all rows evenly over the available height.
        columnCount = self.materialDetailTable.columnCount()
        header = self.materialDetailTable.verticalHeader()
        for i in range(columnCount):
            header.setSectionResizeMode(i, QtWidgets.QHeaderView.Stretch)
        ##d.processingTime()#checking
def enableStackMaterialPB(self, e):
#print('here we go again=====> enableStackMaterialPB')#checking
###d.datetimeConverter()#checking
if e == 0:
self.loadStackPB.setEnabled(True)
self.addLayerPB.setEnabled(False)
elif e == 1:
self.loadStackPB.setEnabled(False)
self.addLayerPB.setEnabled(True)
##d.processingTime()#checking
    def changeInStackDetails(self, e):
        '''Slot for stackDetailTable.itemChanged: apply an edit in the stack
        detail table (stack name, selected-layer number, or thickness) back to
        the working stack and refresh the dependent widgets.'''
        #print('here we go again=====>changeInStackDetails')#checking
        ##d.datetimeConverter()#checking
        row = self.stackDetailTable.currentItem().row()
        t = None
        text = self.stackDetailTable.currentItem().text()
        #Change name of stack
        if row == 0:
            self.stack.name = text
        #Change layer number
        if row == 3:
            data = text.split()
            # A plain in-range number selects that layer (1-based in the UI).
            if is_number(text) and self.stack.layer_count() > (int(text) - 1):
                self.updateStackDetailTable(int(text) - 1, self.stack)
            #enable function to write layer to other location in stack by 'X to Y'
            elif len(data) == 3 and data[1] == 'to' and is_number(data[0]) and is_number(data[2]):
                self.rearrangeStackTable(int(data[0])-1, int(data[2])-1, self.stack)
            else: #restore original values in table.
                self.updateStackDetailTable(self.activeLayer, self.stack)
        #Change thickness
        elif row == 5:
            # Layer-by-layer materials carry an 'l' suffix on the thickness string.
            if self.stack.material[self.activeLayer].type == 'lbl':
                text += 'l'
            t = getThicknessFromString(text)
            # None means the text could not be parsed; leave the stack untouched.
            if not t == None:
                # update stack
                self.stack.thickness[self.activeLayer] = t
                #REMOVE AFTER TESTING COMPLETE
                self.stack.material[self.activeLayer].actual_thickness = t
                #update stackDetailTable
                self.updateStackDetailTable(self.activeLayer, self.stack)
                #Update graph and stack
                self.populateStackWidget(self.stack)
                self.updateDesignGraph(self.stack)
        ##d.processingTime()#checking
def reverseStack(self):
#print('here we go again=====>reverseStack')#checking
##d.datetimeConverter()#checking
if self.stack.REVERSE_STACK:
self.stack.REVERSE_STACK = False
else:
self.stack.REVERSE_STACK = True
self.labelLightTop.setVisible(not self.stack.REVERSE_STACK)
self.labelLightBottom.setVisible(self.stack.REVERSE_STACK)
self.updateDesignGraph(self.stack)
self.updateStackDetailTable(0, self.stack)
##d.processingTime()#checking
def populateStackDBList(self, stackList):
#print('here we go again=====>populateStackDBList')#checking
##d.datetimeConverter()#checking
self.stack_db = stackList
self.stackListWidget.clear()
for i in stackList:
self.stackListWidget.addItem(i.name)
##d.processingTime()#checking
def populateMaterialDBList(self, materialList):
#print('here we go again=====>populateMaterialDBList')#checking
##d.datetimeConverter()#checking
self.material_db = materialList
self.materialListWidget.clear()
for i in materialList:
self.materialListWidget.addItem('{} ({})'.format(i.name, i.model))
##d.processingTime()#checking
    def populateStackWidget(self, stack):
        '''Rebuild the visual stack view: one non-editable row per layer,
        coloured by its material, with a row height that scales with the
        layer thickness.'''
        #print('here we go again=====> populateStackWidget')#checking
        ##d.datetimeConverter()#checking
        #self.stackWidget.setRowCount(0)
        # A fresh model drops all previous rows in one go.
        self.stackWidget.model = TableModel()
        self.stackWidget.setModel(self.stackWidget.model)
        #widgetItem = QtWidgets.QTableWidgetItem(str(stack.layer_count()))
        for idx in range(len(stack.layers)):
            thickness_text = getThicknessAndUnit(stack.thickness[idx])
            model = stack.material[idx].model
            item = QtGui.QStandardItem('{} - {} ({})'.format(stack.layers[idx], thickness_text, model))
            item.setEditable(False)
            item.setDropEnabled(False)  # rows may be dragged but not dropped onto
            item.setBackground(stack.material[idx].color)
            item.setTextAlignment(QtCore.Qt.AlignHCenter | QtCore.Qt.AlignVCenter)
            # Row height: thickness/3, capped at 50 for thickness >= 150;
            # string thicknesses (e.g. layer-by-layer) get a fixed 10.
            if not isinstance(stack.thickness[idx], str):
                height = stack.thickness[idx]/3 if stack.thickness[idx] < 150 else 50
            else:
                height = 10
            item.setSizeHint(QtCore.QSize(1,height))
            self.stackWidget.model.appendRow([item])
        ##d.processingTime()#checking
def addMaterialToStack(self):
#print('here we go again=====>addMaterialToStack')#checking
##d.datetimeConverter()#checking
currentRow = self.materialListWidget.currentRow()
if currentRow > -1:
selectedMaterial = self.material_db[currentRow]
self.stack.addMaterialToStack(selectedMaterial)
self.populateStackWidget(self.stack)
self.updateDesignGraph(self.stack)
self.updateStackDetailTable(0, self.stack)
##d.processingTime()#checking
def removeMaterialFromStack(self):
#print('here we go again=====>removeMaterialFromStack')#checking
##d.datetimeConverter()#checking
idx = self.activeLayer
if not idx == None and idx > -1:
self.stack.removeMaterialFromStack(idx)
self.populateStackWidget(self.stack)
idx = idx - 1
self.populateStackWidget(self.stack)
self.updateDesignGraph(self.stack)
self.updateStackDetailTable(idx, self.stack)
##d.processingTime()#checking
def removeCompleteStack(self):
self.stack = Stack()
self.populateStackWidget(self.stack)
self.updateDesignGraph(self.stack)
self.updateStackDetailTable(0, self.stack)
self.updateScreens(1) #Quick way to update Fit Screen.
    def updateStackDetailTable(self, layer_idx, stack = None):
        '''Refresh the stack detail table for the layer at *layer_idx*
        (0-based) and remember it as the active layer.

        stack -- stack to display; defaults to the working stack.
        '''
        #print('here we go again=====>updateStackDetailTable')#checking
        ##d.datetimeConverter()#checking
        def writeNonEditableInfo(info, row):
            # Grey, read-only cell in column 0 of the given row.
            item = QtWidgets.QTableWidgetItem(info)
            item.setFlags(item.flags() ^ QtCore.Qt.ItemIsEditable)
            item.setBackground(QtGui.QColor(211,211,211,70))
            self.stackDetailTable.setItem(row, 0, item)
        #temporarily remove connection to stackDetailTable to prevent looping
        self.stackDetailTable.itemChanged.disconnect(self.changeInStackDetails)
        if stack == None:
            stack = self.stack
        self.activeLayer = layer_idx
        #Name
        self.stackDetailTable.setItem(0, 0, QtWidgets.QTableWidgetItem(stack.name))
        #Layer info
        info = '{}'.format('-'.join(stack.layers))
        writeNonEditableInfo(info, 1)
        #Set Layer count
        info = '' if stack.isEmpty() else str(stack.layer_count())
        writeNonEditableInfo(info, 2)
        #Set selected layer (shown 1-based, left editable)
        info = '' if stack.isEmpty() else "{}".format(layer_idx + 1)
        self.stackDetailTable.setItem(3, 0, QtWidgets.QTableWidgetItem(info))
        #Set material
        if not stack.isEmpty():
            mat = stack.material[layer_idx]
            if mat.version == '':
                version = '-'
            else:
                version = mat.version
            info = "{} ({}, version: {})".format(mat.name, mat.model, version)
            # n/k at 550 nm; ValueError (no spline data) renders placeholders.
            try:
                n = float(mat.get_NKspline_value('N', 550))
                k = float(mat.get_NKspline_value('K', 550))
                infoNK = 'n: {:5.3f}, k: {:6.3f}'.format(n,k)
            except ValueError:
                infoNK = 'n: -, k: -'
        else:
            info = ''
            infoNK = 'n: -, k: -'
        writeNonEditableInfo(info, 4)
        #Write thickness (left editable)
        if not stack.isEmpty():
            info = getThicknessAndUnit(stack.thickness[layer_idx])
        else:
            info = ''
        self.stackDetailTable.setItem(5, 0, QtWidgets.QTableWidgetItem(info))
        writeNonEditableInfo(infoNK, 6)
        # Re-attach the handler removed above.
        self.stackDetailTable.itemChanged.connect(self.changeInStackDetails)
        ##d.processingTime()#checking
def rearrangeStackTable(self, dragLocation, dropLocation, stack):
#print('here we go again=====>rearrangeStackTable')#checking
##d.datetimeConverter()#checking
'''Function rearranges stack based upon drag and drop location in widget'''
lastRowIdx = len(stack.layers) - 1
if dropLocation > lastRowIdx:
dropLocation = lastRowIdx
stack.material.insert(dropLocation, stack.material.pop(dragLocation))
stack.thickness.insert(dropLocation, stack.thickness.pop(dragLocation))
stack.layers.insert(dropLocation, stack.layers.pop(dragLocation))
self.updateDesignGraph(self.stack)
self.updateStackDetailTable(0, self.stack)
self.populateStackWidget(self.stack)
##d.processingTime()#checking
    #print('here we go again=====>updateDesignGraph')#checking
    #d.datetimeConverter()#checking
    def updateDesignGraph(self, stack):
        '''Function updates TRA plot in Design tab'''
        # A fresh calculation invalidates any previous RMS-vs-measurement error.
        stack.RMSerror = ''
        if stack.isEmpty():
            stack.fit_wvl = self.settings.standard_wave_list
        else:
            #Calculate fitted splines
            stack.fit_wvl = getWaveList(stack, self.settings.standard_wave_list_mod)
            #This parameters determines if curve is calculated based on theoretical thickness or estimated actual thickness
            ActualThicknessCurve = False
            stack.designT, stack.designR, stack.designA = calculateTRA(stack, 'design', stack.fitting_layer, self.settings.incident_angle, self.settings.incoherence_factor, ActualThicknessCurve, stack.REVERSE_STACK)
            stack.setTRAsplines(stack.fit_wvl, type = 'design')
            #Calculate original splines
            # Only compute the RMS error when measured (Excel) data is loaded.
            if not len(stack.excelT) == 0:
                stack.setTRAsplines(stack.excel_wvl, type = 'original')
                stack.RMSerror = calculateRMS(stack.spline_excelT(stack.fit_wvl), stack.spline_excelR(stack.fit_wvl), stack.spline_designT(stack.fit_wvl), stack.spline_designR(stack.fit_wvl))
        self.plotFrame.graph_view.plot_designGraph(stack.fit_wvl, stack, self.settings)
        self.updateColorDataToTable(stack)
        ##d.processingTime()#checking
    def updateColorDataToTable(self, stack):
        #print('here we go again=====>updateColorDataToTable')#checking
        ###d.datetimeConverter()#checking
        '''Function calculates color parameters and writes to table colorTableWidget in Design Tab'''
        def writeNonEditableInfo(info, row, col):
            # Helper: place a read-only text cell at (row, col).
            item = QtWidgets.QTableWidgetItem(info)
            item.setFlags(item.flags() ^ QtCore.Qt.ItemIsEditable)
            #item.setBackground(QtGui.QColor(211,211,211,70))
            self.colorTableWidget.setItem(row, col, item)
        rowCount = self.colorTableWidget.rowCount()
        columnCount = self.colorTableWidget.columnCount()
        if not stack.isEmpty():
            header = self.colorTableWidget.horizontalHeader()
            for i in range(columnCount):
                header.setSectionResizeMode(i, QtWidgets.QHeaderView.Stretch)
            # Colour values for transmission (row 0) and reflection (row 1)
            # from the design splines; presumably CIE XYZ / xy / a*b* / RGB —
            # confirm against calculateColorValues.
            T_XYZ, T_xy, T_ab, T_rgb, R_XYZ, R_xy, R_ab, R_rgb = calculateColorValues(stack.spline_designT, stack.spline_designR, self.settings)
            Tv = T_XYZ[1]
            writeNonEditableInfo('v: {:.3f}%'.format(Tv), 0,0)
            Tx = T_xy[0]
            writeNonEditableInfo('x: {:.3f}'.format(Tx), 0,1)
            Ty = T_xy[1]
            writeNonEditableInfo('y: {:.3f}'.format(Ty), 0,2)
            Ta = T_ab[1]
            writeNonEditableInfo('a*: {:.3f}'.format(Ta), 0,3)
            Tb = T_ab[2]
            writeNonEditableInfo('b*: {:.3f}'.format(Tb), 0,4)
            # Last column: swatch cell filled with the computed colour (0.5 alpha).
            item = QtWidgets.QTableWidgetItem()
            item.setFlags(item.flags() ^ QtCore.Qt.ItemIsEditable)
            item.setBackground(QtGui.QColor.fromRgbF(*T_rgb, 0.5))
            self.colorTableWidget.setItem(0,5, item)
            #ab = colour.XYZ_to_Hunter_Lab(XYZ)
            #ab = colour.XYZ_to_Hunter_Rdab(XYZ)
            #ab = colour.XYZ_to_K_ab_HunterLab1966(XYZ)
            Rv = R_XYZ[1]
            writeNonEditableInfo('v: {:.3f}%'.format(Rv), 1,0)
            Rx = R_xy[0]
            writeNonEditableInfo('x: {:.3f}'.format(Rx), 1,1)
            Ry = R_xy[1]
            writeNonEditableInfo('y: {:.3f}'.format(Ry), 1,2)
            Ra = R_ab[1]
            writeNonEditableInfo('a*: {:.3f}'.format(Ra), 1,3)
            Rb = R_ab[2]
            writeNonEditableInfo('b*: {:.3f}'.format(Rb), 1,4)
            item = QtWidgets.QTableWidgetItem()
            item.setFlags(item.flags() ^ QtCore.Qt.ItemIsEditable)
            item.setBackground(QtGui.QColor.fromRgbF(*R_rgb, 0.5))
            self.colorTableWidget.setItem(1,5, item)
        else:
            '''Reset table to blank.'''
            for i in range(rowCount):
                for j in range(columnCount-1):
                    writeNonEditableInfo('', i, j)
            # Clear both swatch cells to fully transparent.
            item = QtWidgets.QTableWidgetItem()
            item.setFlags(item.flags() ^ QtCore.Qt.ItemIsEditable)
            item.setBackground(QtGui.QColor(255,255,255,0))
            self.colorTableWidget.setItem(0,columnCount-1, item)
            item = QtWidgets.QTableWidgetItem()
            item.setFlags(item.flags() ^ QtCore.Qt.ItemIsEditable)
            item.setBackground(QtGui.QColor(255,255,255,0))
            self.colorTableWidget.setItem(1,columnCount-1, item)
        #d.processingTime()#checking
def loadStack(self):
#print('here we go again=====>loadStack')#checking
##d.datetimeConverter()#checking
currentRow = self.stackListWidget.currentRow()
if currentRow > -1:
self.stack = copy.deepcopy(self.stack_db[currentRow])
self.stack, error, error_text, title = addMaterialInfoToStack(self.material_db, self.stack, False)
if error:
self.raiseWarningMessage(title, error_text)
else:
self.populateStackWidget(self.stack)
self.updateDesignGraph(self.stack)
self.updateStackDetailTable(0, self.stack)
##d.processingTime()#checking
def raiseWarningMessage(self, title, error_text):
#print('here we go again=====>raiseWarningMessage')#checking
##d.datetimeConverter()#checking
self.choice = QtWidgets.QMessageBox.warning(self.MainWindow, title, error_text, QtWidgets.QMessageBox.Ok)
#DL
##d.processingTime()#checking
def saveStack(self):
#print('here we go again=====>saveStack')#checking
##d.datetimeConverter()#checking
for stack in self.stack_db:
if stack.name.lower() == self.stack.name.lower():
self.raiseWarningMessage('Error stack name', 'Stack name already exists. Please modify the stack name.')
return
if len(stack.name) == 0:
self.raiseWarningMessage('Error stack name', 'No stack name. Please add unique stack name.')
return
try:
self.stack.saveStack()
except ValueError:
self.raiseWarningMessage('Error', 'Stack not saved.')
return
self.reload_DBs()
self.raiseWarningMessage('Stack saved.', 'Stack has been added to DB file. Please reload excel file in case file was open.')
##d.processingTime()#checking
def checkOnlineStatus(self):
#print('here we go again=====> checkOnlineStatus')#checking
##d.datetimeConverter()#checking
if self.stack.online:
self.stack.online = False
self.onlinePB.setStyleSheet('QPushButton {color: black;}')
self.onlinePB.setText('Go Online')
self.deviceComboBox.setEnabled(True)
self.goOffline()
else:
self.stack.online = True
self.onlinePB.setStyleSheet('QPushButton {color: green;}')
self.onlinePB.setText('Online')
self.deviceComboBox.setEnabled(False)
self.goOnline()
    def goOnline(self):
        '''Connect to the measurement SQL database and start a background
        thread that keeps polling the latest transmission/reflection spectra
        (see getMeasurementTRA).'''
        #print('here we go again=====>goOnline')#checking
        ##d.datetimeConverter()#checking
        import pyodbc
        #ESTABLISH CONNECTION
        connect_string = "Driver={" + self.settings.SQL_driver + "};\
                        Server=" + self.settings.SQL_server + ";\
                        Database=" + self.settings.SQL_DB + ";\
                        Trusted_Connection=yes;"
        cnxn = pyodbc.connect(connect_string)
        # Two result names per selected device: transmission then reflection.
        devices = [self.settings.device_select + ' Transmission', self.settings.device_select + ' Reflection']
        #Get spectra with wave info
        # Fetch the wavelength axis once up-front; assumes wavelengths are a
        # ';'-separated string in WavelengtsArrays — confirm against the DB schema.
        query_string_wave = 'SELECT TOP 1 WavelengtsArrays.Wavelengths FROM Spectra \
                            JOIN WavelengtsArrays ON Spectra.ResultId = WavelengtsArrays.ResultId \
                            WHERE Spectra.ResultName = \'{}\' ORDER BY Spectra.Id DESC'.format(devices[0])
        cursor = cnxn.cursor()
        cursor.execute(query_string_wave)
        data = cursor.fetchone()
        self.stack.measure_wvl = list(map(float, data[0].split(';')))
        start_time = datetime.datetime.now()
        # Background poller; stopped again by goOffline().
        self.thread = StoppableThread(self.getMeasurementTRA, (devices, cnxn, start_time))
        self.thread.start()
        #timer.add_operation(self.getMeasurementTRA, self.settings.refresh_time, args=[devices, cnxn, start_time])
        ##d.processingTime()#checking
def goOffline(self):
#print('here we go again=====>goOffline')#checking
##d.datetimeConverter()#checking
if self.thread.isAlive():
self.thread.stop()
self.thread.join()
del(self.thread)
##d.processingTime()#checking
    def getMeasurementTRA(self, devices, cnxn, start_time):
        #print('here we go again=====>getMeasurementTRA')#checking
        ##d.datetimeConverter()#checking
        '''Get new data from Zeiss DB and update fit graph'''
        stack = self.stack
        #end_of_cycle_time = start_time#DL
        cursor = cnxn.cursor()
        # Poll loop: runs on the StoppableThread started by goOnline until
        # goOffline requests a stop.
        while not self.thread.stopped():
            #QUERY FIRST DEVICE FOR TRANSMISSION
            query_string_spectrum = 'SELECT TOP 1 Spectra.Id, Spectra.Run_Id, Spectra.[values], Spectra.[Timestamp] \
            FROM Spectra WHERE Spectra.ResultName = \'{}\' ORDER BY Spectra.Id DESC' .format(devices[0])
            cursor.execute(query_string_spectrum)
            data = cursor.fetchone()
            #run_id = data[1],DL
            # Spectra are stored as ';'-separated percent values; scale to 0-1.
            spectrum = list(map(float, data[2].split(';')))
            stack.measureT = np.array([x / 100.0 for x in spectrum])
            #local_time = self.getLocalTime(data[3])
            #QUERY SECOND DEVICE FOR REFLECTION
            query_string_spectrum = 'SELECT TOP 1 Spectra.Id, Spectra.Run_Id, Spectra.[values], Spectra.[Timestamp] \
            FROM Spectra WHERE Spectra.ResultName = \'{}\' ORDER BY Spectra.Id DESC' .format(devices[1])
            # find number of rows in table from spec table SELECT * "index?row count?"
            cursor.execute(query_string_spectrum)
            data = cursor.fetchone()
            spectrum = list(map(float, data[2].split(';')))
            stack.measureR = np.array([x / 100.0 for x in spectrum])
            # DB timestamps are stored as UTC; convert to the local timezone.
            time_UTC = data[3]
            from_zone = tz.tzutc()
            to_zone = tz.tzlocal()
            time_UTC = time_UTC.replace(tzinfo=from_zone)
            local_time = time_UTC.astimezone(to_zone)
            stack.measuredTime = local_time
            # Absorption from energy conservation: A = 1 - T - R.
            stack.measureA = 1 - np.array(stack.measureT) - np.array(stack.measureR)
            #UNUSED AT THE MOMENT
            '''#DL
            if (datetime.datetime.now() - start_time).total_seconds() > (self.settings.refresh_time + 0.5):
                refresh = False
            else:
                refresh = True'''
            self.updateGraphOnline()
            '''#DL
            seconds = (datetime.datetime.now() - end_of_cycle_time).total_seconds()
            if seconds < self.settings.refresh_time:
                time.sleep(self.settings.refresh_time - seconds)
            print('T_delta: ' + str(seconds) + ' Refresh_delta: ' + str((datetime.datetime.now() - end_of_cycle_time).total_seconds()))
            end_of_cycle_time = datetime.datetime.now()
            '''
        ##d.processingTime()#checking
def updateGraphOnline(self):
#print('here we go again=====>updateGraphOnline')#checking
##d.datetimeConverter()#checking
self.stack.setTRAsplines(stack.measure_wvl, type = 'measured')
self.fitTab.updateFitGraph(stack, refresh = True)
self.fitTab.updateFitColorDataToTable(self.stack) #ADD BACK IF SLOWDOWN IS LIMITED
##d.processingTime()#checking
def reload_DBs(self):
#print('here we go again=====>reload_DBs')#checking
##d.datetimeConverter()#checking
'''Reloads Stack and Materials databases from Excel file in case user wants to modify.'''
self.stack_db = Stack.get_stacks(self.settings.defaultFile)
self.material_db = Material.get_materials(self.settings.standard_wave_list_mod, self.settings.defaultFile)
self.populateStackDBList(self.stack_db)
self.populateMaterialDBList(self.material_db)
##d.processingTime()#checking
def fit_Turn_all_On_Off(self):
#print('here we go again=====>fit_Turn_all_On_Off')#checking
##d.datetimeConverter()#checking
'''Function called in menubar turns fitStatus of all layers On (True) or Off (False) in the Fit screen, and updates
fitscreen.'''
if not any(mat.editMode for mat in self.stack.material):
if any(mat.fitStatus == False for mat in self.stack.material):
for mat in self.stack.material:
mat.fitStatus = True
else:
for mat in self.stack.material:
mat.fitStatus = False
self.updateScreens(1) #Updates fit screen.
else:
self.raiseWarningMessage('Exit Edit Mode.', 'Please exit mode to turn all layers on or off.')
##d.processingTime()#checking
def addStacksToDB(self):
#print('here we go again=====>addStacksToDB')#checking
##d.datetimeConverter()#checking
fileName = self.openFileNameDialog('Add Stack File')
if fileName:
try:
new_stacks = Stack.get_stacks(fileName)
self.stack_db.extend(new_stacks)
self.populateStackDBList(self.stack_db)
self.raiseWarningMessage('Stacks added.', 'Stack(s) have been added to DB.')
except:
self.raiseWarningMessage('Error', 'Input file not compatible. Check file format and sheet name.')
##d.processingTime()#checking
def addMaterialToDB(self):
#print('here we go again=====>addMaterialToDB')#checking
##d.datetimeConverter()#checking
fileName = self.openFileNameDialog('Add Material File')
if fileName:
try:
new_mats = Material.get_materials(self.settings.standard_wave_list_mod, fileName)
self.material_db.extend(new_mats)
self.populateMaterialDBList(self.material_db)
self.raiseWarningMessage('Materials added.', 'Material(s) have been added to DB.')
except:
self.raiseWarningMessage('Error', 'Input file not compatible. Check file format and sheet name.')
##d.processingTime()#checking
##d.datetimeConverter()#checking
def openFileNameDialog(self, title):
#print('here we go again=====>openFileNameDialog')#checking
from PyQt5.QtWidgets import QFileDialog
options = QFileDialog.Options()
options |= QFileDialog.DontUseNativeDialog
fileName, _ = QFileDialog.getOpenFileName(None, title, "","All Files (*);;Excel Files (*.xlsx)", options=options)
return fileName
##d.processingTime()#checking
'''
def openGeneralPropertiesButton(self):
#print('here we go again=====>openGeneralPropertiesButton')#checking
##d.datetimeConverter()#checking
window = PropertiesGeneralWindow(self)
window.show()
##d.processingTime()#checking
'''
if __name__ == "__main__":
import sys
from Stack import Stack, Material
app = QtWidgets.QApplication(sys.argv)
stack = Stack()
settings = Settings()
import ctypes
import platform
if platform.system() == 'Windows':
myappid = 'TomWare.1_0' # arbitrary string
ctypes.windll.shell32.SetCurrentProcessExplicitAppUserModelID(myappid)
# Create and display the splash screen
splash_pix = QtGui.QPixmap('icon.png')
splash = QtWidgets.QSplashScreen(splash_pix, QtCore.Qt.WindowStaysOnTopHint)
splash.setMask(splash_pix.mask())
splash.show()
app.processEvents()
stack_db = Stack.get_stacks(settings.defaultFile)
material_db = Material.get_materials(settings.standard_wave_list, settings.defaultFile)
MainWindow = QtWidgets.QMainWindow()
ui = Ui_MainWindow()
ui.setupUi(MainWindow, stack, settings)
ui.populateStackDBList(stack_db)
ui.populateMaterialDBList(material_db)
MainWindow.show()
splash.finish(MainWindow)
ui.updateDesignGraph(stack)
sys.exit(app.exec_())
|
# Drinking-contest tally: for each battle round, read a number of universities
# with their drink consumption and print the round's winner (highest consumption).
#
# Bug fixes: `count` was never incremented, so the outer loop never terminated;
# `round` shadowed the builtin and is renamed.
battle_count = int(input("대결 횟수 : "))
count = 0
round_no = 1
while count < battle_count:
    names = []   # e.g. ["Busan","Seoul","Ewha"]
    drinks = []  # e.g. [100,400,300] => max(drinks) => drinks.index(400)
    uni_input = int(input("%s회차 비교 학교 수 : " % round_no))
    while uni_input > 0:
        name = input("대학이름 : ")
        drink = int(input("술 소비량 : "))
        names.append(name)
        drinks.append(drink)
        print("%s %s" % (name, drink))
        uni_input -= 1
    # Index of the largest consumption decides the round winner.
    winner_idx = drinks.index(max(drinks))
    print("%s회차 승리 : %s" % (round_no, names[winner_idx]))
    round_no += 1
    count += 1  # was missing: without it the while-loop never ends
    # del drinks[:]  # would clear only the list contents
    # del names[:]
|
from script.base_api.service_identity.versionInfo import *
|
from __future__ import print_function
import torch
import torch.nn as nn
import torch.nn.functional as F
import numpy as np
from utils.AverageMeter import AverageMeter
from utils.criterion import *
import warnings
warnings.filterwarnings('ignore')
#############################################################################################
######################## Cross-entropy losses and train functions ###########################
def CE_loss(preds, labels, device, args, criterion):
    """Per-sample cross-entropy loss.

    Returns (softmax probabilities, mean loss, per-sample loss vector).
    `device` and `args` are accepted for interface compatibility but unused.
    """
    prob = F.softmax(preds, dim=1)
    per_sample = criterion(preds, labels)
    return prob, per_sample.mean(), per_sample
def mixup_criterion(pred, y_a, y_b, lam, criterion):
    """Mixup loss: lam-weighted combination of the losses against both label sets.

    Returns (softmax probabilities, combined loss).
    """
    prob = F.softmax(pred, dim=1)
    loss_a = criterion(pred, y_a)
    loss_b = criterion(pred, y_b)
    return prob, lam * loss_a + (1 - lam) * loss_b
def mixup_data(x, y, alpha=1.0, device='cuda'):
    '''Returns mixed inputs, pairs of targets, and lambda'''
    # lam ~ Beta(alpha, alpha); degenerates to 1 (no mixing) when alpha <= 0.
    lam = np.random.beta(alpha, alpha) if alpha > 0 else 1
    batch_size = x.size()[0]
    # Random pairing of samples within the batch.
    index = torch.randperm(batch_size)
    if device == 'cuda':
        index = index.cuda()
    mixed_x = lam * x + (1 - lam) * x[index, :]
    return mixed_x, y, y[index], lam
def ricap_data_original(args, input, targets, train_loader, index, epoch, device):
    '''RICAP DA'''
    # RICAP (Random Image Cropping And Patching): each training image is
    # assembled from four random crops of four independently shuffled copies
    # of the batch; labels are weighted by each patch's area fraction.
    I_x, I_y = input.size()[2:]
    # Patch boundary position drawn from Beta(alpha, alpha) in both dimensions.
    w = int(np.round(I_x * np.random.beta(args.alpha, args.alpha)))
    h = int(np.round(I_y * np.random.beta(args.alpha, args.alpha)))
    w_ = [w, I_x - w, w, I_x - w]
    h_ = [h, h, I_y - h, I_y - h]
    cropped_images = {}
    c_ = {}  # labels of the shuffled batch each patch was cropped from
    W_ = {}  # area fraction of each patch (its label weight)
    i_ = {}  # original sample indices after the per-patch shuffle
    for k in range(4):
        idx = torch.randperm(input.size(0))
        # Random top-left corner so the k-th crop fits inside the image.
        x_k = np.random.randint(0, I_x - w_[k] + 1)
        y_k = np.random.randint(0, I_y - h_[k] + 1)
        cropped_images[k] = input[idx][:, :, x_k:x_k + w_[k], y_k:y_k + h_[k]]
        c_[k] = targets[idx]
        W_[k] = w_[k] * h_[k] / (I_x * I_y)
        i_[k] = index[idx]
    # Stitch the four crops into full-size images: patches 0|1 along one
    # axis, then 2|3 joined along the other.
    patched_images = torch.cat(
        (torch.cat((cropped_images[0], cropped_images[1]), 2),
         torch.cat((cropped_images[2], cropped_images[3]), 2)), 3)
    patched_images = patched_images.to(device)
    return patched_images, c_, W_, i_
def ricap_criterion(criterion, pred, c_, W_):
    """RICAP loss: area-weighted sum of the losses against the four patch labels.

    Returns (softmax probs, mean loss, per-sample loss, and the four raw losses).
    """
    prob = F.softmax(pred, dim=1)
    losses = [criterion(pred, c_[k]) for k in range(4)]
    loss_all = sum(W_[k] * losses[k] for k in range(4))
    loss = torch.mean(loss_all)
    return prob, loss, loss_all, losses[0], losses[1], losses[2], losses[3]
#############################################################################################
######################## Training function ###########################
def train_CrossEntropy(args, model, device, train_loader, optimizer, epoch, lemniscate = 0, criterion = 0):
    """Train `model` for one epoch with cross-entropy plus optional mixup/RICAP.

    Returns (avg_train_loss, avg_top5_acc, avg_top1_acc) over the epoch.
    NOTE(review): the `criterion` parameter is immediately overwritten below,
    and `lemniscate` is never used — both are dead parameters kept for the
    caller's signature.
    """
    train_loss = AverageMeter()
    top1 = AverageMeter()
    top5 = AverageMeter()
    # Per-sample losses are needed for the sampling metrics, hence 'none'.
    criterion = nn.CrossEntropyLoss(reduction = 'none')
    # switch to train mode
    model.train()
    # ricap variables (placeholders; populated only on the ricap path)
    l_1 = 0
    i_ = 0
    c_ = 0
    counter = 1
    for images, labels, index in train_loader:
        # Apply the configured data augmentation before the forward pass.
        if args.augmentation == "ricap":
            labels = labels.to(device)
            images, c_, W_, i_ = ricap_data_original(args, images, labels, train_loader, index, epoch, device)
        elif args.augmentation == "mixup":
            images, labels, index = images.to(device), labels.to(device), index.to(device)
            images, targets_a, targets_b, lam = mixup_data(images, labels, args.alpha, device)
        else:
            images, labels, index = images.to(device), labels.to(device), index.to(device)
        outputs = model(images)
        # Matching loss for the chosen augmentation.
        if args.augmentation == "mixup":
            prob, loss_all = mixup_criterion(outputs, targets_a, targets_b, lam, criterion)
            loss = torch.mean(loss_all)
        elif args.augmentation == 'ricap':
            prob, loss, loss_all, l_1, _, _, _ = ricap_criterion(criterion, outputs, c_, W_)
        else:
            prob, loss, loss_all = CE_loss(outputs, labels, device, args, criterion)
        loss.backward()
        optimizer.step()
        optimizer.zero_grad()
        # NOTE(review): on the ricap path `labels` are the pre-patching labels,
        # so this accuracy is approximate there — confirm intent.
        prec1, prec5 = accuracy_v2(outputs, labels, top=[1, 5])
        train_loss.update(loss.item(), images.size(0))
        top1.update(prec1.item(), images.size(0))
        top5.update(prec5.item(), images.size(0))
        # Importance-sampling methods track per-sample statistics.
        if not args.method == "SGD":
            update_sampling_metrics(args, train_loader, loss_all, prob, index, labels, l_1)
        num_samples = len(train_loader.sampler)
        if counter % 15 == 0:
            print('Train Epoch: {} [{}/{} ({:.0f}%)]\tLoss: {:.6f}, Accuracy: {:.0f}%, Learning rate: {:.6f}'.format(
                epoch, counter * len(images), num_samples, 100. * counter / len(train_loader), loss.item(),
                prec1, optimizer.param_groups[0]['lr']))
        # Per-iteration linear LR decay (when configured).
        if args.scheduler_type == "linear":
            max_it = int(len(train_loader.dataset.labels)*args.budget/args.batch_size)
            optimizer = linearLR_per_it(args, optimizer, (epoch-1)*max_it + counter, max_it)
        counter = counter + 1
    return train_loss.avg, top5.avg, top1.avg
def update_sampling_metrics(args, train_loader, loss_all, prob, index, labels, l_1):
    """Update the dataset's running per-sample statistics in place.

    Maintains `train_loader.dataset.avg_probs` (running average of
    1 - p(true class)) and `times_seen` (per-sample visit counter),
    both indexed by the dataset indices of the current batch.
    `l_1` replaces `loss_all` on the RICAP path (only the first patch's
    per-sample loss is well defined there).
    NOTE(review): `args`, `loss_all` (after the copy to CPU) are not used
    beyond this point — the average is built from `prob`, not the loss.
    """
    index = index.cpu()
    if args.augmentation == 'ricap':
        loss_all = l_1
    loss_all = loss_all.cpu().detach().numpy()
    labels = labels.cpu()
    prob = prob.cpu().detach().numpy()
    count = train_loader.dataset.times_seen[index].copy()
    # Unseen samples are reset to a zero count before averaging.
    if count.max() < 1:
        count = np.zeros(len(count))
    # Updating probs: running mean of (1 - prob of the true class).
    avg_probs = train_loader.dataset.avg_probs[index].copy()
    avg_probs[avg_probs == -1] = 0.0  # -1 marks "never seen"
    accumulated_probs = count*avg_probs
    avg_probs = (accumulated_probs + (1-prob[range(len(labels)), labels]))/(count+1)
    train_loader.dataset.avg_probs[index] = avg_probs
    times_seen = train_loader.dataset.times_seen
    times_seen[index] += 1
    # NOTE(review): exact float equality with 1+1e-6 — this only matches if
    # times_seen was initialised to 1e-6 elsewhere; confirm against the dataset.
    times_seen[times_seen == 1+1e-6] = 1
###############################################################################
################################ Testing #####################################
def testing(args, model, device, test_loader):
model.eval()
loss_per_batch = []
acc_val_per_batch =[]
test_loss = 0
correct = 0
with torch.no_grad():
for batch_idx, (data, target, *_) in enumerate(test_loader):
data, target = data.to(device), target.to(device)
output = model(data)
output = F.log_softmax(output, dim=1)
test_loss += F.nll_loss(output, target, reduction='sum').item()
loss_per_batch.append(F.nll_loss(output, target).item())
pred = output.max(1, keepdim=True)[1] # get the index of the max log-probability
correct += pred.eq(target.view_as(pred)).sum().item()
acc_val_per_batch.append(100. * correct / ((batch_idx+1)*args.test_batch_size))
test_loss /= len(test_loader.dataset)
print('\nTest set: Average loss: {:.4f}, Accuracy: {}/{} ({:.0f}%)\n'.format(
test_loss, correct, len(test_loader.dataset),
100. * correct / len(test_loader.dataset)))
loss_per_epoch = [np.average(loss_per_batch)]
acc_val_per_epoch = [np.array(100. * correct / len(test_loader.dataset))]
return (loss_per_epoch, acc_val_per_epoch)
##############################################################################
##################### Selecting data and preparing loaders ###################
def select_samples(args, train_loader, epoch):
    """Return the dataset indexes of the samples to be used for training.

    The sampling distribution depends on args.method:
      * "unif-SGD": uniform over the whole dataset.
      * "p-SGD":    proportional to the tracked average probability.
      * "c-SGD":    proportional to the confusion p*(1-p), with a uniform
                    warm-up during the first args.c_sgd_warmup epochs.
    A fraction args.budget of the dataset is drawn without replacement.
    """
    if args.method == "unif-SGD":
        curr_prob = np.ones(len(train_loader.dataset.labels))
    elif args.method == "p-SGD":
        curr_prob = train_loader.dataset.avg_probs.copy()
        # Initial epochs: unseen samples are marked -1; force them to the
        # maximum probability so the model gets to see all the samples.
        if curr_prob.max() == -1:
            curr_prob *= -1
        max_prob = curr_prob.max()
        curr_prob[curr_prob==-1] = max_prob
    elif args.method == "c-SGD":
        curr_prob = train_loader.dataset.avg_probs.copy()
        # Initial epochs: same forcing of unseen samples as in p-SGD.
        if curr_prob.max() == -1:
            curr_prob *= -1
        max_prob = curr_prob.max()
        curr_prob[curr_prob==-1] = max_prob
        # Use the confusion instead of the probability:
        curr_prob = curr_prob * (1 - curr_prob)
        # Random sampling warmup for baselines without budget restrictions.
        if epoch < args.c_sgd_warmup:
            curr_prob = np.ones(len(curr_prob))
    # Smoothness constant keeps every sample at a non-zero weight.
    c = curr_prob.mean()
    curr_prob = curr_prob + c
    # Probability normalization (guard against an all-zero vector).
    y = curr_prob
    if y.sum() == 0:
        y = y + 1e-10
    curr_prob = y / y.sum()
    # Select the samples to be used:
    samples_to_keep = int(len(curr_prob) * args.budget)
    try:
        curr_samples_idx = np.random.choice(len(curr_prob), samples_to_keep,
                                            p=curr_prob, replace=False)
    except ValueError:
        # np.random.choice rejects distributions with fewer non-zero entries
        # than requested draws; nudge zeros up, renormalize and retry.
        curr_prob[curr_prob == 0] = 1e-10
        curr_samples_idx = np.random.choice(len(curr_prob), samples_to_keep,
                                            p=curr_prob / curr_prob.sum(),
                                            replace=False)
    return curr_samples_idx
def prepare_loader(args, train_loader, epoch):
    """Rebuild the train DataLoader restricted to this epoch's sample subset.

    Draws the subset via select_samples, records it on the dataset as
    `train_samples_idx`, and returns a fresh DataLoader over that subset.
    """
    chosen_idx = select_samples(args, train_loader, epoch)
    train_loader.dataset.train_samples_idx = chosen_idx
    subset_sampler = torch.utils.data.SubsetRandomSampler(chosen_idx)
    return torch.utils.data.DataLoader(train_loader.dataset,
                                       sampler=subset_sampler,
                                       batch_size=args.batch_size,
                                       num_workers=args.num_workers,
                                       pin_memory=True,
                                       drop_last=True)
##############################################################################
################################ Other functions ##############################
def linearLR_per_it(args, optimizer, iteration, max_count):
    """Linearly anneal the learning rate from args.lr down to 1e-6.

    The schedule has args.epoch * max_count steps; iterations past the end of
    the schedule are clamped to the terminal value (1e-6).  Mutates and
    returns `optimizer`.
    """
    schedule = np.linspace(args.lr, 1e-6, args.epoch * max_count)
    try:
        lr = schedule[iteration]
    except IndexError:
        # Past the end of the schedule: stay at the terminal learning rate.
        lr = 1e-6
    for param_group in optimizer.param_groups:
        param_group['lr'] = lr
    return optimizer
def test_sb(loader, epoch, sb, cnn):
    """Evaluate `cnn` on `loader` and print selective-backprop statistics.

    Returns ([accuracy in %], [mean per-batch test loss]).  Inputs are moved
    to the GPU; `sb` must expose the SB logger counters used below.
    """
    cnn.eval()  # Change model to 'eval' mode (BN uses moving mean/var).
    correct = 0.
    total = 0.
    test_loss = 0.
    criterion = nn.CrossEntropyLoss()
    for images, labels in loader:
        images, labels = images.cuda(), labels.cuda()
        with torch.no_grad():
            logits = cnn(images)
        test_loss += criterion(logits, labels).item()
        predicted = torch.max(logits.data, 1)[1]
        total += labels.size(0)
        correct += (predicted == labels).sum().item()
    # test_loss /= total
    test_loss /= len(loader)  # average over batches, not samples
    val_acc = correct / total
    print('============ EPOCH {} ============'.format(epoch))
    print('FPs: {} / {}\nBPs: {} / {}\nTest loss: {:.6f}\nTest acc: {:.3f}'.format(
        sb.logger.global_num_forwards,
        sb.logger.global_num_skipped_fp + sb.logger.global_num_forwards,
        sb.logger.global_num_backpropped,
        sb.logger.global_num_skipped + sb.logger.global_num_backpropped,
        test_loss,
        100.*val_acc))
    cnn.train()
    return [100. * val_acc], [test_loss]
|
import pygame
import sys
import random
from pygame.locals import *
# Window / font / clock setup.  The game renders to a half-resolution surface
# (`display`) that is scaled up to the real window each frame for a pixel look.
clock = pygame.time.Clock()
pygame.init()
pygame.font.init()
myfont = pygame.font.SysFont('Comic Sans MS', 14)
WINDOW_SIZE = (896, 560)
pygame.display.set_caption('Platformer')
screen = pygame.display.set_mode(WINDOW_SIZE, 0, 32)
display = pygame.Surface((448, 280))
# 14 tiles fit across the screen and 8.75 vertically (translated from Russian)
class Player():
    """The controllable character: movement state, physics and animation."""
    def __init__(self, rect):
        # `rect` doubles as position and collision box; its size is kept in
        # sync with the current animation frame (see activityChange).
        self.isMovingLeft = False
        self.isMovingRight = False
        self.rect = rect
        self.activity = 'idle'  # current animation name ('idle'/'run'/'fly')
        self.flip = False  # True when facing left (sprite is mirrored)
        self.frame = 0  # index into the animation timeline
        self.speed_x = 2  # horizontal speed in pixels per frame
        self.momentumY = 0  # vertical velocity; gravity accumulates into it
        self.momentum_change_rate = 0.4  # gravity per frame; 0 while gliding
        self.air_timer = 0  # frames spent airborne
        self.air_count = 0  # jumps used since leaving the ground (double jump)
        self.movement = [0, 0]
        self.isGrounded = True
    def move(self, tiles):
        """Advance one physics frame against `tiles`; return which sides hit."""
        # First move one frame along the x axis and resolve collisions, then
        # do the same along the y axis (translated from the Russian original).
        self.movement = [0, 0]
        if self.isMovingRight:
            self.movement[0] += self.speed_x
            self.flip = False
        elif self.isMovingLeft:
            self.movement[0] -= self.speed_x
            self.flip = True
        self.movement[1] += self.momentumY / 1.2 # tweak this divisor to change the jump speed (translated)
        self.momentumY += self.momentum_change_rate
        # Terminal fall velocity.
        if self.momentumY > 7:
            self.momentumY = 7
        # Zero gravity means the player is gliding (umbrella).
        if self.momentum_change_rate == 0:
            self.activityChange('fly')
        else:
            if self.movement[0] > 0:
                self.activityChange('run')
            elif self.movement[0] < 0:
                self.activityChange('run')
            elif self.movement[0] == 0:
                self.activityChange('idle')
        # Collision checking starts here (translated from the Russian original)
        collision_types = {'top': False, 'bottom': False, 'right': False, 'left': False}
        self.rect.x += self.movement[0]
        hit_list = collision_test(self.rect, tiles)
        for tile in hit_list:
            if self.movement[0] > 0:
                self.rect.right = tile.left
                collision_types['right'] = True
            elif self.movement[0] < 0:
                self.rect.left = tile.right
                collision_types['left'] = True
        self.rect.y += self.movement[1]
        hit_list = collision_test(self.rect, tiles)
        for tile in hit_list:
            if self.movement[1] > 0:
                self.rect.bottom = tile.top
                collision_types['bottom'] = True
            elif self.movement[1] < 0:
                self.rect.top = tile.bottom
                collision_types['top'] = True
        return collision_types
    def activityChange(self, new_activity_value):
        """Switch animation and resize the hitbox to the new frame's size."""
        if self.activity != new_activity_value:
            self.activity = new_activity_value
            animation_key = self.activity + '_' + '0'
            self.rect.width = animation_frames[animation_key].get_width()
            self.rect.height = animation_frames[animation_key].get_height()
            self.frame = 0
# tile size 32 x 32
TILE_SIZE = 32
CHUNK_SIZE = 4  # chunks are 4x4 tiles
# 32 x 4 = 128 px per chunk side
# 3.5 chunks fit across the display and 2.188 vertically (translated)
grass = pygame.image.load('tiles/grass_block.png') # tile id 1
ground = pygame.image.load('tiles/ground.png') # tile id 2
air_block_left = pygame.image.load('tiles/air_block_left.png')
air_block_right = pygame.image.load('tiles/air_block_right.png')
um = pygame.image.load('pix_umbrella.png')
# Maps a numeric tile id (as stored in chunk data) to its surface.
tile_index = {1: grass, 2: ground, 3: air_block_left, 4: air_block_right}
true_scroll = [0, 0]  # sub-pixel camera position; a rounded copy is used per frame
# NOTE(review): `global` at module level is a no-op; kept as-is.
global animation_frames
animation_frames = {}
def load_animation(path, frames_duration):
    """Load numbered frames '<name>_<i>.png' from `path` into the frame cache.

    frames_duration[i] is how many game ticks frame i stays on screen; its id
    is repeated that many times in the returned timeline so the game loop can
    simply index it by tick counter.  Surfaces go into global animation_frames.
    """
    global animation_frames
    animation_name = path.split('/')[-1]
    timeline = []
    for frame_no, duration in enumerate(frames_duration):
        frame_id = animation_name + '_' + str(frame_no)
        surface = pygame.image.load(path + '/' + frame_id + '.png')
        animation_frames[frame_id] = surface.copy()
        timeline.extend([frame_id] * duration)
    return timeline
# Per-activity animation timelines; the numbers are per-frame tick durations.
animation_database = {'idle': load_animation('animation/idle', [70, 70]), 'run': load_animation('animation/run', [10]),
                      'fly': load_animation('animation/fly', [1])}
def load_map(path):
    """Read '<path>.txt' and return it as rows of comma-separated cells."""
    with open(path + '.txt', 'r') as handle:
        raw = handle.read()
    return [row.split(',') for row in raw.split('\n')]
def generate_chunk(x, y):
    """Generate tile data for the chunk at chunk coordinates (x, y).

    Returns a list of [[tile_x, tile_y], tile_type] entries.  Ground level is
    tile row 3: row 3 gets grass (1) and deeper rows dirt (2), each with 2/3
    probability.  High-enough chunks may also contain a two-tile floating
    platform (types 3 and 4).
    """
    tiles = []
    # Position of the optional floating platform; (-1, -1) means "none".
    air_x, air_y = -1, -1
    if y * CHUNK_SIZE < 4:
        # NOTE(review): randrange(0, 1) always returns 0, so the platform is
        # spawned in every eligible chunk — confirm whether a probability
        # roll was intended here.
        if random.randrange(0, 1) == 0:
            air_x = random.randrange(0, CHUNK_SIZE - 1)
            air_y = random.randrange(0, CHUNK_SIZE)
    for row in range(CHUNK_SIZE):
        for col in range(CHUNK_SIZE):
            world_x = x * CHUNK_SIZE + col
            world_y = y * CHUNK_SIZE + row
            if col == air_x and row == air_y:
                tiles.append([[world_x, world_y], 3])
            elif col == air_x + 1 and row == air_y:
                tiles.append([[world_x, world_y], 4])
            else:
                tile_type = 0
                if random.randrange(0, 3) != 0:
                    if world_y > 3:
                        tile_type = 2
                    elif world_y == 3:
                        tile_type = 1
                if tile_type != 0:
                    tiles.append([[world_x, world_y], tile_type])
    return tiles
# game_map = load_map('game_map')
game_map = {}
# background_objects = [[0.25, [120, 10, 70, 400]], [0.25, [280, 30, 40, 400]], [0.5, [30, 40, 70, 350]]]
'''
background_hills = []
for i in range(4):
scalar = (i + 1)*0.2
if i < 2:
background_hills.append((scalar, pygame.image.load('background/bg' + str(i+1) + '.png')))
else:
background_hills.append((scalar, pygame.image.load('background/b' + str(i+1) + '.png')))
'''
bg = pygame.image.load('background/b1.png')
bg_tree = pygame.image.load('background/b4.png')
pygame.mixer.music.load('music/Netherplace.mp3')
pygame.mixer.music.set_volume(0.36)
pygame.mixer.music.play(-1)
# функция, которая принимает прямоугольник и список тайлов, а выводит список тайлов, с которыми
# есть столкновение.
def collision_test(rect, tiles):
    """Return the subset of `tiles` that overlap `rect` (via colliderect)."""
    return [tile for tile in tiles if rect.colliderect(tile)]
# Game state: best height reached, and the player spawned at (100, 100) with
# a hitbox sized to the first idle frame.
record = -100
rect = pygame.Rect(100, 100, animation_frames['idle_0'].get_width(),
                   animation_frames['idle_0'].get_height())
player = Player(rect)
# Main loop: one iteration per frame, capped at 50 FPS by clock.tick below.
while True:
    display.fill((241, 242, 215))  # clear the frame with the background colour (translated)
    # -camera position: ease towards the player with a 1/18 smoothing factor--#
    true_scroll[0] += (player.rect.x - true_scroll[0] - 150) / 18
    true_scroll[1] += (player.rect.y - true_scroll[1] - 150) / 18
    scroll = true_scroll[:]
    scroll[0] = round(scroll[0])
    scroll[1] = round(scroll[1])
    '''
    if scroll[0] < 0:
        scroll[0] = 0
    elif scroll[0] > len(game_map[0])*TILE_SIZE - 448:
        scroll[0] = len(game_map[0])*TILE_SIZE - 448
    if scroll[1] < 0:
        scroll[1] = 0
    elif scroll[1] > len(game_map)*TILE_SIZE - 280:
        scroll[1] = len(game_map)*TILE_SIZE - 280
    '''
    # -------------------------------------------------------------------#
    # scroll = [1, 1]
    '''
    pygame.draw.rect(display, 0x666666, pygame.Rect(0, 170, 300, 80))
    for ob in background_objects:
        ob_rect = pygame.Rect(ob[1][0] - scroll[0]*ob[0], ob[1][1] - scroll[1]*ob[0],
                              ob[1][2], ob[1][3])
        if ob[0] == 0.5:
            pygame.draw.rect(display, 0xF2DFB4, ob_rect)
        else:
            pygame.draw.rect(display, 0x9B634C, ob_rect)
    '''
    '''
    # -рисуем паралакс фон-----------------------------------------------------------#
    for ob in background_hills:
        display.blit(ob[1], (-scroll[0]*ob[0], -50-scroll[1]*ob[0]))
    # -------------------------------------------------------------------------------#
    '''
    # Parallax background: each layer scrolls at a fraction of the camera speed.
    bg_y = len(game_map)*TILE_SIZE - bg.get_height()
    bg_tree_y = len(game_map)*TILE_SIZE - bg.get_height()
    display.blit(bg, (-scroll[0]*0.1, bg_y-scroll[1]))
    display.blit(bg_tree, (-scroll[0]*0.5, bg_tree_y-scroll[1]))
    # --draw the tiles, and if a tile should be collidable, add its-------------#
    # --coordinates to the list as a pygame.Rect (translated)
    tile_rects = []
    for y in range(4):
        for x in range(5):
            # 4x5 chunks around the camera; missing chunks are generated lazily.
            target_x = x - 1 + round(scroll[0]/(TILE_SIZE*CHUNK_SIZE))
            target_y = y - 1 + round(scroll[1]/(TILE_SIZE*CHUNK_SIZE))
            target_chunk = str(target_x) + ':' + str(target_y)
            if target_chunk not in game_map:
                game_map[target_chunk] = generate_chunk(target_x, target_y)
            #pygame.draw.rect(display, (0, 0, 200), (target_x * CHUNK_SIZE * TILE_SIZE - scroll[0],
            #                                        target_y * CHUNK_SIZE * TILE_SIZE - scroll[1],
            #                                        TILE_SIZE*CHUNK_SIZE, TILE_SIZE*CHUNK_SIZE), 1)
            for tile in game_map[target_chunk]:
                display.blit(tile_index[tile[1]], (tile[0][0]*TILE_SIZE-scroll[0],
                                                   tile[0][1]*TILE_SIZE-scroll[1]))
                #pygame.draw.rect(display, (0 , 0, 0), (tile[0][0]*TILE_SIZE-scroll[0],
                #                                       tile[0][1]*TILE_SIZE-scroll[1],
                #                                       TILE_SIZE, TILE_SIZE),1)
                if tile[1] in [1, 2, 3, 4]:
                    tile_rects.append(pygame.Rect(tile[0][0]*TILE_SIZE, tile[0][1]*TILE_SIZE,
                                                  TILE_SIZE, TILE_SIZE))
    collisions = player.move(tile_rects)
    if collisions['bottom']: # landed: stop gliding, reset jump/air counters (translated)
        if player.momentum_change_rate == 0:
            player.momentum_change_rate = 0.4
        player.air_timer = 0
        player.air_count = 0
        player.isGrounded = True
    else:
        player.air_timer += 1
    if collisions['top']:
        player.momentumY = 0  # bumped a ceiling: kill upward momentum
    # Advance the animation and draw the player (flipped when facing left).
    player.frame += 1
    if player.frame >= len(animation_database[player.activity]):
        player.frame = 0
    player_img_id = animation_database[player.activity][player.frame]
    player_img = animation_frames[player_img_id]
    display.blit(pygame.transform.flip(player_img, player.flip, False),
                 (player.rect.x - scroll[0], player.rect.y - scroll[1]))
    # pygame.draw.rect(display, (255, 0 , 0), (player.rect.x - scroll[0], player.rect.y - scroll[1],
    #                                          player.rect.width, player.rect.height), 2)
    # Input handling: arrows to run, Up/Space to (double-)jump, X to glide.
    for event in pygame.event.get():
        if event.type == QUIT:
            pygame.quit()
            sys.exit()
        if event.type == KEYDOWN:
            if event.key == K_RIGHT:
                player.isMovingRight = True
            elif event.key == K_LEFT:
                player.isMovingLeft = True
            # NOTE(review): `or` binds looser than `and`, so this reads as
            # K_UP or (K_SPACE and not fly) — confirm the intended grouping.
            if event.key == K_UP or event.key == K_SPACE and not player.activity == 'fly' :
                if not player.isGrounded:
                    if player.air_count < 1:  # allow one extra mid-air jump
                        player.air_count += 1
                        player.momentumY = -8
                        player.isGrounded = False
                else:
                    player.momentumY = -9
                    player.isGrounded = False
            if event.key == K_x and not player.isGrounded:
                # Start gliding: slow descent, no gravity accumulation.
                player.momentumY = 2
                player.momentum_change_rate = 0
        if event.type == KEYUP:
            if event.key == K_RIGHT:
                player.isMovingRight = False
            if event.key == K_LEFT:
                player.isMovingLeft = False
            if event.key == K_x:
                player.momentum_change_rate = 0.4  # stop gliding
    # HUD: FPS, current activity, height and best height (labels are Russian).
    text = str(int(clock.get_fps()))
    # text = f'X: {player.rect.x} Y: {player.rect.y} momentum: {player.momentumY}'
    textsurface = myfont.render(text, False, (0, 0, 0))
    display.blit(textsurface,(0,0))
    textsurface = myfont.render(player.activity, False, (0, 0, 0))
    display.blit(textsurface, (0, 14))
    textsurface = myfont.render('Высота: ' + str(-player.rect.y), False, (100, 0, 0))
    display.blit(textsurface, (340, 0))
    if -player.rect.y > record:
        record = -player.rect.y
    textsurface = myfont.render('Рекорд: ' + str(record), False, (100, 0, 0))
    display.blit(textsurface, (340, 20))
    # Scale the low-res surface up to the window and present the frame.
    surf = pygame.transform.scale(display, WINDOW_SIZE)
    screen.blit(surf, (0, 0))
    pygame.display.update()
    clock.tick(50)
|
class Vehicle:
    """Base vehicle with an immutable name and a mutable color."""

    def __init__(self, name, color):
        self.__name = name
        self.__color = color

    def getName(self):
        """Return the vehicle's name (set once at construction)."""
        return self.__name

    def getColor(self):
        """Return the current color."""
        return self.__color

    def setColor(self, color):
        """Replace the current color."""
        self.__color = color
class Car(Vehicle):
    """A Vehicle specialised with a model designation."""

    def __init__(self, name, color, model):
        super().__init__(name, color)
        self.__model = model

    def getDescription(self):
        """Return e.g. 'Ford Mustang GT350 in red color'."""
        return ' '.join((self.getName(), self.__model)) + ' in ' + self.getColor() + ' color'
# Demo: build a car, print it, recolor it, and print it again.
c1=Car('Ford Mustang','red','GT350')
print(c1.getDescription())
print(c1.getName())
c1.setColor('white')
print(c1.getDescription())
#!/usr/bin/env /data/mta/Script/Python3.8/envs/ska3-shiny/bin/python
#####################################################################################
# #
# print_html_page.py update/create html page related acis does plots #
# #
# author: t. isobe (tisobe@cfa.harvard.edu) #
# #
# Last Update: Mar 03, 2021 #
# #
#####################################################################################
import os
import sys
import re
import string
import random
import operator
import math
import time
#
#--- read the house-keeping directory list; each line is "<value> : <name>"
#--- and is bound as a module-level variable via exec
#
path = '/data/mta/Script/ACIS/Count_rate/house_keeping/dir_list_py'
with open(path, 'r') as f:
    data = [line.strip() for line in f.readlines()]
for ent in data:
    atemp = re.split(':', ent)
    var = atemp[1].strip()
    line = atemp[0].strip()
    # NOTE(review): exec on file contents — the house_keeping file is assumed
    # trusted; this would run arbitrary code if that file were writable.
    exec("%s = %s" %(var, line))
#
#--- append pathes to private folders to a python directory
#
sys.path.append(bin_dir)
#
#--- temp writing file name (randomized to avoid collisions in /tmp)
#
rtail = int(time.time() * random.random())
zspace = '/tmp/zspace' + str(rtail)
#--------------------------------------------------------------------------------------
#--- print_html_page: print all html pages for ACIS Dose Plots ---
#--------------------------------------------------------------------------------------
def print_html_page(year=''):
    """
    driving function to print all html pages for ACIS Dose Plots
    input:  year    --- year to build; if '' (default) the current year is used
    output: html pages in <web_dir> (main_acis_dose_plot.html) and
            <web_dir>/Htmls (plot_page_<year>.html)
    """
#
#--- find today's date (ldate is day-month-year, used as the "last updated" stamp)
#
    ldate = time.strftime("%d-%m-%Y", time.gmtime())
    lyear = int(float(time.strftime("%Y", time.gmtime())))
    # (bug fix: a dead `yday = ...time.strfimte(...)` line was removed here —
    #  the misspelled attribute raised AttributeError on every call and the
    #  value was never used in this function)
#
#--- if year is not given, create a page for the current year
#
    if year == '':
        year = lyear
#
#--- build the year-link table for the main page, 10 columns per row
#
    chk = 0
    line = '<tr>\n'
    for syear in range(1999, lyear+1):
        line = line + '<td><a href="./Htmls/plot_page_' + str(syear) + '.html">'
        line = line+ str(syear) + '</a></td>\n'
        if chk >= 9:
            line = line + '</tr>\n'
            chk = 0
        else:
            chk += 1
#
#--- pad the last row with empty cells so the table stays rectangular
#
    if chk < 9:
        for k in range(chk, 10):
            line = line + '<td> </td>\n'
        line = line + '</tr>\n'
#
#--- update main page
#
    ifile = house_keeping + 'main_page_template'
    with open(ifile, 'r') as f:
        out = f.read()
    out = out.replace('#TABLE#', line)
    out = out.replace('#UPDATE#', ldate)
    outfile = web_dir + 'main_acis_dose_plot.html'
    with open(outfile, 'w') as fo:
        fo.write(out)
#
#--- update this year's page
#
    ifile = house_keeping + 'yearly_template'
    with open(ifile, 'r') as f:
        out = f.read()
    out = out.replace('#YEAR#', str(year))
    out = out.replace('#UPDATE#', ldate)
#
#--- the Ephin instrument plot exists only for years before 2019
#
    if year < 2019:
        out = out.replace('#EPHINTITLE#', 'Ephin Rate')
        ephline = "<a href=\"javascript:WindowOpener('" + str(year) + "/ephin_rate.png')\">"
        ephline = ephline + "<img src=\"../Plots/" + str(year) + "/ephin_rate.png\" "
        ephline = ephline + "style=\"text-align:center; width: 95%\"></a>"
        out = out.replace('#EPHIN#', ephline)
    else:
        out = out.replace('#EPHINTITLE#', ' ')
        out = out.replace('#EPHIN#', ' ')
#
#--- prev/next navigation links (first year has no prev, current year no next)
#
    if year == 1999:
        dline = '<a href="https://cxc.cfa.harvard.edu/mta_days/mta_dose_count/Htmls/plot_page_2000.html"> Next Year</a>'
    elif year == lyear:
        prev = str(year - 1)
        dline = '<a href="https://cxc.cfa.harvard.edu/mta_days/mta_dose_count/Htmls/plot_page_'+prev+'.html">Prev Year </a>'
    else:
        prev = str(year - 1)
        after = str(year + 1)
        dline = '<a href="https://cxc.cfa.harvard.edu/mta_days/mta_dose_count/Htmls/plot_page_'+prev+'.html">Prev Year </a>'
        dline = dline + '--'
        dline = dline + '<a href="https://cxc.cfa.harvard.edu/mta_days/mta_dose_count/Htmls/plot_page_'+after+'.html"> Next Year</a>'
    out = out.replace("#DIRECT#", dline)
    outfile = web_dir + 'Htmls/plot_page_' + str(year) + '.html'
    with open(outfile, 'w') as fo:
        fo.write(out)
#--------------------------------------------------------------------------------------
if __name__ == "__main__":
    # Optional CLI argument: the year to (re)build; defaults to current year.
    if len(sys.argv) > 1:
        year = int(float(sys.argv[1]))
    else:
        year = ''
    print_html_page(year)
#
#--- if this is the first several days of the year, update the last year's html page
#
    yday = int(float(time.strftime('%j', time.gmtime())))
    if yday < 10:
        lyear = int(float(time.strftime('%Y', time.gmtime()))) -1
        print_html_page(lyear)
|
# Copyright 2022 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import annotations
import logging
from dataclasses import dataclass
from typing import Iterable
from pants.backend.python.goals import lockfile
from pants.backend.python.subsystems.setup import PythonSetup
from pants.backend.python.target_types import (
InterpreterConstraintsField,
PythonResolveField,
PythonSourceField,
)
from pants.backend.python.typecheck.pytype.skip_field import SkipPytypeField
from pants.backend.python.typecheck.pytype.subsystem import Pytype
from pants.backend.python.util_rules import pex_from_targets
from pants.backend.python.util_rules.interpreter_constraints import InterpreterConstraints
from pants.backend.python.util_rules.partition import (
_partition_by_interpreter_constraints_and_resolve,
)
from pants.backend.python.util_rules.pex import (
Pex,
PexRequest,
VenvPex,
VenvPexProcess,
VenvPexRequest,
)
from pants.backend.python.util_rules.pex_environment import PexEnvironment
from pants.backend.python.util_rules.pex_from_targets import RequirementsPexRequest
from pants.core.goals.check import CheckRequest, CheckResult, CheckResults
from pants.core.util_rules import config_files
from pants.core.util_rules.config_files import ConfigFiles, ConfigFilesRequest
from pants.core.util_rules.source_files import SourceFiles, SourceFilesRequest
from pants.engine.collection import Collection
from pants.engine.fs import Digest
from pants.engine.internals.native_engine import MergeDigests
from pants.engine.internals.selectors import MultiGet
from pants.engine.process import FallibleProcessResult
from pants.engine.rules import Get, Rule, collect_rules, rule
from pants.engine.target import CoarsenedTargets, CoarsenedTargetsRequest, FieldSet, Target
from pants.engine.unions import UnionRule
from pants.util.logging import LogLevel
from pants.util.ordered_set import FrozenOrderedSet, OrderedSet
from pants.util.strutil import pluralize
logger = logging.getLogger(__name__)
@dataclass(frozen=True)
class PytypeFieldSet(FieldSet):
    """The fields Pytype reads from each `python_source` target it checks."""

    required_fields = (PythonSourceField,)

    sources: PythonSourceField
    resolve: PythonResolveField
    interpreter_constraints: InterpreterConstraintsField

    @classmethod
    def opt_out(cls, tgt: Target) -> bool:
        # Targets marked `skip_pytype=True` are excluded from checking.
        return tgt.get(SkipPytypeField).value
class PytypeRequest(CheckRequest):
    """`check` goal request type that routes matching targets to Pytype."""

    field_set_type = PytypeFieldSet
    tool_name = Pytype.options_scope
@dataclass(frozen=True)
class PytypePartition:
    """A batch of field sets sharing one resolve and one set of constraints."""

    field_sets: FrozenOrderedSet[PytypeFieldSet]
    root_targets: CoarsenedTargets
    # None when the repo has a single resolve (no need to disambiguate).
    resolve_description: str | None
    interpreter_constraints: InterpreterConstraints

    def description(self) -> str:
        """Human-readable partition label used in check results."""
        ics = str(sorted(str(c) for c in self.interpreter_constraints))
        return f"{self.resolve_description}, {ics}" if self.resolve_description else ics
class PytypePartitions(Collection[PytypePartition]):
    """All partitions to be checked, one CheckResult each."""
    pass
@rule(
    desc="Pytype typecheck each partition based on its interpreter_constraints",
    level=LogLevel.DEBUG,
)
async def pytype_typecheck_partition(
    partition: PytypePartition,
    pytype: Pytype,
    pex_environment: PexEnvironment,
) -> CheckResult:
    """Run Pytype on one partition and convert the process result.

    Gathers the partition's sources, its third-party requirements PEX, the
    Pytype tool PEX and any config file, builds a venv PEX that layers the
    tool on top of the requirements, and runs it over the source files.
    """
    # Fetch sources, requirements PEX, tool PEX and config concurrently.
    roots_sources, requirements_pex, pytype_pex, config_files = await MultiGet(
        Get(
            SourceFiles,
            SourceFilesRequest(fs.sources for fs in partition.field_sets),
        ),
        Get(
            Pex,
            RequirementsPexRequest(
                (fs.address for fs in partition.field_sets),
                hardcoded_interpreter_constraints=partition.interpreter_constraints,
            ),
        ),
        Get(
            Pex,
            PexRequest,
            pytype.to_pex_request(interpreter_constraints=partition.interpreter_constraints),
        ),
        Get(
            ConfigFiles,
            ConfigFilesRequest,
            pytype.config_request(),
        ),
    )
    # The sandbox needs both the sources and the config file(s).
    input_digest = await Get(
        Digest, MergeDigests((roots_sources.snapshot.digest, config_files.snapshot.digest))
    )
    # Compose the tool and the user requirements into one runnable venv PEX.
    runner = await Get(
        VenvPex,
        VenvPexRequest(
            PexRequest(
                output_filename="pytype_runner.pex",
                interpreter_constraints=partition.interpreter_constraints,
                main=pytype.main,
                internal_only=True,
                pex_path=[pytype_pex, requirements_pex],
            ),
            pex_environment.in_sandbox(working_directory=None),
        ),
    )
    result = await Get(
        FallibleProcessResult,
        VenvPexProcess(
            runner,
            argv=(
                # Only pass --config when the user set one.
                *(("--config", pytype.config) if pytype.config else ()),
                # Placeholder substituted by the engine with the slot count.
                "{pants_concurrency}",
                *pytype.args,
                *roots_sources.files,
            ),
            # This adds the venv/bin folder to PATH
            extra_env={
                "PEX_VENV_BIN_PATH": "prepend",
            },
            input_digest=input_digest,
            output_files=roots_sources.files,
            concurrency_available=len(roots_sources.files),
            description=f"Run Pytype on {pluralize(len(roots_sources.files), 'file')}.",
            level=LogLevel.DEBUG,
        ),
    )
    return CheckResult.from_fallible_process_result(
        result,
        partition_description=partition.description(),
    )
@rule(
    desc="Determine if it is necessary to partition Pytype's input (interpreter_constraints and resolves)",
    level=LogLevel.DEBUG,
)
async def pytype_determine_partitions(
    request: PytypeRequest,
    pytype: Pytype,
    python_setup: PythonSetup,
) -> PytypePartitions:
    """Group the requested field sets by (resolve, interpreter constraints).

    Each distinct combination becomes one PytypePartition so that Pytype is
    invoked with a consistent dependency resolve and interpreter.
    """
    resolve_and_interpreter_constraints_to_field_sets = (
        _partition_by_interpreter_constraints_and_resolve(request.field_sets, python_setup)
    )
    coarsened_targets = await Get(
        CoarsenedTargets,
        CoarsenedTargetsRequest(field_set.address for field_set in request.field_sets),
    )
    coarsened_targets_by_address = coarsened_targets.by_address()
    # sorted() keeps partition ordering deterministic across runs.
    return PytypePartitions(
        PytypePartition(
            FrozenOrderedSet(field_sets),
            CoarsenedTargets(
                OrderedSet(
                    coarsened_targets_by_address[field_set.address] for field_set in field_sets
                )
            ),
            # Only surface the resolve name when there is more than one.
            resolve if len(python_setup.resolves) > 1 else None,
            interpreter_constraints or pytype.interpreter_constraints,
        )
        for (resolve, interpreter_constraints), field_sets in sorted(
            resolve_and_interpreter_constraints_to_field_sets.items()
        )
    )
@rule(desc="Typecheck using Pytype", level=LogLevel.DEBUG)
async def pytype_typecheck(
    request: PytypeRequest,
    pytype: Pytype,
) -> CheckResults:
    """Entry point for `pants check`: partition the input, check each partition."""
    if pytype.skip:
        return CheckResults([], checker_name=request.tool_name)
    partitions = await Get(PytypePartitions, PytypeRequest, request)
    # Partitions are checked concurrently; one CheckResult per partition.
    partitioned_results = await MultiGet(
        Get(CheckResult, PytypePartition, partition) for partition in partitions
    )
    return CheckResults(
        partitioned_results,
        checker_name=request.tool_name,
    )
def rules() -> Iterable[Rule | UnionRule]:
    """Register this backend's rules and hook PytypeRequest into `check`."""
    return (
        *collect_rules(),
        *config_files.rules(),
        *lockfile.rules(),
        *pex_from_targets.rules(),
        UnionRule(CheckRequest, PytypeRequest),
    )
|
# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
import dateutil
import datetime
from cement.utils.misc import minimal_logger
from ..core import io
from ..objects.solutionstack import SolutionStack
from ..objects.exceptions import NotFoundError, InvalidStateError, \
AlreadyExistsError
from ..objects.tier import Tier
from ..lib import utils
from ..lib import aws
from ..lib.aws import InvalidParameterValueError
from ..objects.event import Event
from ..objects.environment import Environment
from ..objects.application import Application
from ..resources.strings import strings, responses
from ..core import globals
LOG = minimal_logger(__name__)
DEFAULT_ROLE_NAME = 'aws-elasticbeanstalk-ec2-role'
def _make_api_call(operation_name, region=None, **operation_options):
    """Dispatch `operation_name` to the Elastic Beanstalk service.

    Honors the --endpoint-url CLI override when one was parsed; otherwise
    the default service endpoint is used.
    """
    try:
        endpoint_url = globals.app.pargs.endpoint_url
    except AttributeError:
        # No parsed args (or no override given): use the default endpoint.
        endpoint_url = None
    return aws.make_api_call(
        'elasticbeanstalk',
        operation_name,
        region=region,
        endpoint_url=endpoint_url,
        **operation_options,
    )
def create_application(app_name, descrip, region=None):
    """Create an Elastic Beanstalk application.

    Raises AlreadyExistsError when the service reports the name is taken;
    any other InvalidParameterValueError is re-raised unchanged.
    """
    LOG.debug('Inside create_application api wrapper')
    try:
        return _make_api_call('create-application',
                              application_name=app_name,
                              description=descrip,
                              region=region)
    except InvalidParameterValueError as e:
        # The service signals "application already exists" through a generic
        # InvalidParameterValue error; match on the message to translate it.
        if e.message == responses['app.exists'].replace('{app-name}', app_name):
            raise AlreadyExistsError(e)
        raise
def create_application_version(app_name, vers_label, descrip, s3_bucket,
                               s3_key, region=None):
    """Create an application version pointing at a source bundle in S3."""
    LOG.debug('Inside create_application_version api wrapper')
    source_bundle = {'S3Bucket': s3_bucket, 'S3Key': s3_key}
    return _make_api_call('create-application-version',
                          application_name=app_name,
                          version_label=vers_label,
                          description=descrip,
                          source_bundle=source_bundle,
                          region=region)
def create_environment(app_name, env_name, cname, description, solution_stck,
                       tier, itype, label, single, key_name, profile, tags,
                       region=None, database=False, vpc=False, size=None):
    """
    Creates an Elastic Beanstalk environment
    :param app_name: Name of application where environment will live
    :param env_name: Desired name of environment
    :param cname: cname prefix, if None, a cname will be auto-generated
    :param description: a string description (optional)
    :param solution_stck: a solution_stack object
    :param tier: a tier object
    :param itype: instance type string
    :param label: version label of app version to deploy. If None, a
        sample app will be launched
    :param single: True if you would like environment to be a SingleInstance.
        If False, the environment will be launched as LoadBalanced
    :param key_name: EC2 SSH Keypair name
    :param profile: IAM Instance profile name
    :param tags: a list of tags as {'Key': 'foo', 'Value':'bar'}
    :param region: region in which to create the environment
    :param database: database object dictionary
    :param vpc: dict of VPC option values, or False to skip VPC options
    :param size: number of instances to spawn at create
    :return: environment_object, request_id
    """
    LOG.debug('Inside create_environment api wrapper')
    assert app_name is not None, 'App name can not be empty'
    assert env_name is not None, 'Environment name can not be empty'
    assert solution_stck is not None, 'Solution stack can not be empty'
    if size:
        assert isinstance(size, int), 'Size must be of type int'
        # The API expects option values as strings.
        size = str(size)
    if region is None:
        region = aws.get_default_region()
    # `settings` is shared by reference with kwargs['option_settings'],
    # so every append() below ends up in the API request.
    settings = []
    kwargs = {
        'application_name': app_name,
        'environment_name': env_name,
        'solution_stack_name': solution_stck.name,
        'option_settings': settings,
    }
    # Optional top-level request fields.
    if description:
        kwargs['description'] = description
    if cname:
        kwargs['cname_prefix'] = cname
    if tier:
        kwargs['tier'] = tier.to_struct()
    if label:
        kwargs['version_label'] = label
    if tags:
        kwargs['tags'] = tags
    # Launch-configuration options.
    if profile:
        settings.append(
            {'Namespace': 'aws:autoscaling:launchconfiguration',
             'OptionName': 'IamInstanceProfile',
             'Value': profile}
        )
    if itype:
        settings.append(
            {'Namespace': 'aws:autoscaling:launchconfiguration',
             'OptionName': 'InstanceType',
             'Value': itype}
        )
    if single:
        settings.append(
            {'Namespace': 'aws:elasticbeanstalk:environment',
             'OptionName': 'EnvironmentType',
             'Value': 'SingleInstance'}
        )
    if key_name:
        settings.append(
            {'Namespace': 'aws:autoscaling:launchconfiguration',
             'OptionName': 'EC2KeyName',
             'Value': key_name},
        )
    # A fixed size pins both MinSize and MaxSize of the auto-scaling group.
    if size:
        settings.append(
            {'Namespace': 'aws:autoscaling:asg',
             'OptionName': 'MaxSize',
             'Value': size},
        )
        settings.append(
            {'Namespace': 'aws:autoscaling:asg',
             'OptionName': 'MinSize',
             'Value': size},
        )
    # add client defaults
    settings.append(
        {'Namespace': 'aws:elasticbeanstalk:command',
         'OptionName': 'BatchSize',
         'Value': '30'}
    )
    settings.append(
        {'Namespace': 'aws:elasticbeanstalk:command',
         'OptionName': 'BatchSizeType',
         'Value': 'Percentage'}
    )
    # Load-balancer defaults only make sense for non-worker tiers.
    if not tier or tier.name.lower() != 'worker':
        settings.append(
            {'Namespace': 'aws:elb:policies',
             'OptionName': 'ConnectionDrainingEnabled',
             'Value': 'true'}
        )
        settings.append(
            {'Namespace': 'aws:elb:healthcheck',
             'OptionName': 'Interval',
             'Value': '30'}
        )
        settings.append(
            {'Namespace': 'aws:elb:loadbalancer',
             'OptionName': 'CrossZone',
             'Value': 'true'}
        )
    if database:
        #Database is a dictionary
        # The RDS template snippet is pulled from a per-region S3 bucket.
        kwargs['template_specification'] = {
            'TemplateSnippets': [
                {'SnippetName': 'RdsExtensionEB',
                 'Order': 10000,
                 'SourceUrl': 'https://s3.amazonaws.com/'
                              'elasticbeanstalk-env-resources-' + region +
                              '/eb_snippets/rds/rds.json'}
            ]
        }
        # Add to option settings
        settings.append(
            {'Namespace': 'aws:rds:dbinstance',
             'OptionName': 'DBPassword',
             'Value': database['password']}
        )
        settings.append(
            {'Namespace': 'aws:rds:dbinstance',
             'OptionName': 'DBUser',
             'Value': database['username']}
        )
        if database['instance']:
            settings.append(
                {'Namespace': 'aws:rds:dbinstance',
                 'OptionName': 'DBInstanceClass',
                 'Value': database['instance']}
            )
        if database['size']:
            settings.append(
                {'Namespace': 'aws:rds:dbinstance',
                 'OptionName': 'DBAllocatedStorage',
                 'Value': database['size']}
            )
        if database['engine']:
            settings.append(
                {'Namespace': 'aws:rds:dbinstance',
                 'OptionName': 'DBEngine',
                 'Value': database['engine']}
            )
        # Keep a final snapshot when the database is deleted.
        settings.append(
            {'Namespace': 'aws:rds:dbinstance',
             'OptionName': 'DBDeletionPolicy',
             'Value': 'Snapshot'}
        )
    if vpc:
        # `vpc` is expected to be a dict with keys: id, publicip, elbscheme,
        # elbsubnets, ec2subnets, securitygroups, dbsubnets.
        namespace = 'aws:ec2:vpc'
        settings.append(
            {'Namespace': namespace,
             'OptionName': 'VPCId',
             'Value': vpc['id']}
        )
        settings.append(
            {'Namespace': namespace,
             'OptionName': 'AssociatePublicIpAddress',
             'Value': vpc['publicip']}
        )
        settings.append(
            {'Namespace': namespace,
             'OptionName': 'ELBScheme',
             'Value': vpc['elbscheme']}
        )
        if vpc['elbsubnets']:
            settings.append(
                {'Namespace': namespace,
                 'OptionName': 'ELBSubnets',
                 'Value': vpc['elbsubnets']}
            )
        if vpc['ec2subnets']:
            settings.append(
                {'Namespace': namespace,
                 'OptionName': 'Subnets',
                 'Value': vpc['ec2subnets']}
            )
        if vpc['securitygroups']:
            # Security groups live in the launch-configuration namespace.
            settings.append(
                {'Namespace': 'aws:autoscaling:launchconfiguration',
                 'OptionName': 'SecurityGroups',
                 'Value': vpc['securitygroups']}
            )
        if vpc['dbsubnets']:
            settings.append(
                {'Namespace': namespace,
                 'OptionName': 'DBSubnets',
                 'Value': vpc['dbsubnets']}
            )
    result = _make_api_call('create-environment', region=region, **kwargs)
    # convert to object
    env = _api_to_environment(result)
    request_id = result['ResponseMetadata']['RequestId']
    return env, request_id
def clone_environment(app_name, env_name, clone_name, cname,
                      description, label, scale, tags, region=None):
    """Create a new environment cloned from an existing one.

    :param app_name: application that owns both environments
    :param env_name: environment to clone from
    :param clone_name: name of the new environment
    :param cname: optional cname prefix for the clone
    :param description: optional description
    :param label: optional version label to deploy
    :param scale: optional fixed instance count (int)
    :param tags: optional list of tag dicts
    :param region: region of the environments
    :return: (environment_object, request_id)
    """
    LOG.debug('Inside clone_environment api wrapper')
    assert app_name is not None, 'App name can not be empty'
    assert env_name is not None, 'Environment name can not be empty'
    assert clone_name is not None, 'Clone name can not be empty'
    if scale:
        assert isinstance(scale, int), 'Size must be of type int'
        scale = str(scale)  # the API wants option values as strings
    settings = []
    kwargs = {
        'application_name': app_name,
        'environment_name': clone_name,
        'template_specification': {'TemplateSource': {'EnvironmentName': env_name,}},
        'option_settings': settings,
    }
    # Only include the optional request fields that were actually given.
    for key, value in (('description', description),
                       ('cname_prefix', cname),
                       ('version_label', label),
                       ('tags', tags)):
        if value:
            kwargs[key] = value
    if scale:
        # A fixed scale pins both bounds of the auto-scaling group.
        for option_name in ('MaxSize', 'MinSize'):
            settings.append(
                {'Namespace': 'aws:autoscaling:asg',
                 'OptionName': option_name,
                 'Value': scale},
            )
    result = _make_api_call('create-environment', region=region, **kwargs)
    # convert to object
    return _api_to_environment(result), result['ResponseMetadata']['RequestId']
def _api_to_environment(api_dict):
    """Convert one raw DescribeEnvironments record into an Environment."""
    # Optional fields: dict.get replaces the original try/except KeyError.
    cname = api_dict.get('CNAME', 'UNKNOWN')
    version_label = api_dict.get('VersionLabel')
    description = api_dict.get('Description')
    # Convert solution_stack and tier to objects
    solution_stack = SolutionStack(api_dict['SolutionStackName'])
    tier_dict = api_dict['Tier']
    tier = Tier(tier_dict['Name'], tier_dict['Type'], tier_dict['Version'])
    return Environment(
        version_label=version_label,
        status=api_dict['Status'],
        app_name=api_dict['ApplicationName'],
        health=api_dict['Health'],
        id=api_dict['EnvironmentId'],
        date_updated=api_dict['DateUpdated'],
        platform=solution_stack,
        description=description,
        name=api_dict['EnvironmentName'],
        date_created=api_dict['DateCreated'],
        tier=tier,
        cname=cname,
    )
def delete_application(app_name, region=None):
    """Delete an Elastic Beanstalk application; returns the request id."""
    LOG.debug('Inside delete_application api wrapper')
    response = _make_api_call('delete-application',
                              application_name=app_name,
                              region=region)
    return response['ResponseMetadata']['RequestId']
def delete_application_and_envs(app_name, region=None):
    """Delete an application, force-terminating all of its environments."""
    LOG.debug('Inside delete_application_and_envs')
    response = _make_api_call('delete-application',
                              application_name=app_name,
                              terminate_env_by_force=True,
                              region=region)
    return response['ResponseMetadata']['RequestId']
def describe_application(app_name, region=None):
    """Return the single application record for *app_name*.

    :raises NotFoundError: when the service returns no unique match
    """
    LOG.debug('Inside describe_application api wrapper')
    result = _make_api_call('describe-applications',
                            application_names=[app_name],
                            region=region)
    apps = result['Applications']
    if len(apps) == 1:
        return apps[0]
    raise NotFoundError('Application "' + app_name + '" not found.')
def is_cname_available(cname, region=None):
    """Return True when the given CNAME prefix is still free."""
    LOG.debug('Inside is_cname_available api wrapper')
    response = _make_api_call('check-dns-availability',
                              cname_prefix=cname,
                              region=region)
    return response['Available']
def describe_applications(region=None):
    """Return the raw list of every application record in the account."""
    LOG.debug('Inside describe_applications api wrapper')
    return _make_api_call('describe-applications', region=region)['Applications']
def describe_configuration_settings(app_name, env_name, region=None):
    """Return the first configuration-settings block for an environment."""
    LOG.debug('Inside describe_configuration_settings api wrapper')
    response = _make_api_call('describe-configuration-settings',
                              application_name=app_name,
                              environment_name=env_name,
                              region=region)
    return response['ConfigurationSettings'][0]
def get_specific_configuration(env_config, namespace, option):
    """Look up one option value in a configuration-settings description.

    :param env_config: dict with an 'OptionSettings' list
    :param namespace: option namespace to match
    :param option: option name to match
    :return: the first matching setting's 'Value', or None when the option
        is absent or the matching setting carries no 'Value'
    """
    matches = (s for s in env_config['OptionSettings']
               if s['Namespace'] == namespace and s['OptionName'] == option)
    for setting in matches:
        return setting.get('Value')
    return None
def get_specific_configuration_for_env(app_name, env_name, namespace, option, region=None):
    """Describe an environment's configuration and pull out one option value."""
    config = describe_configuration_settings(app_name, env_name,
                                             region=region)
    return get_specific_configuration(config, namespace, option)
def get_available_solution_stacks(region=None):
    """Return all available solution stacks as SolutionStack objects.

    :raises NotFoundError: when the service reports no stacks at all
    """
    LOG.debug('Inside get_available_solution_stacks api wrapper')
    result = _make_api_call('list-available-solution-stacks', region=region)
    stack_strings = result['SolutionStacks']
    LOG.debug('Solution Stack result size = ' + str(len(stack_strings)))
    if not stack_strings:
        raise NotFoundError(strings['sstacks.notfound'])
    return [SolutionStack(s) for s in stack_strings]
def get_application_versions(app_name, region=None):
    """Return the raw application-version records for *app_name*."""
    LOG.debug('Inside get_application_versions api wrapper')
    response = _make_api_call('describe-application-versions',
                              application_name=app_name,
                              region=region)
    return response['ApplicationVersions']
def get_all_applications(region=None):
    """Return every application in the account as Application objects."""
    LOG.debug('Inside get_all_applications api wrapper')
    result = _make_api_call('describe-applications',
                            region=region)
    # Description and Versions are optional fields in the API response.
    return [
        Application(
            name=app['ApplicationName'],
            date_created=app['DateCreated'],
            date_updated=app['DateUpdated'],
            description=app.get('Description'),
            versions=app.get('Versions'),
            templates=app['ConfigurationTemplates'],
        )
        for app in result['Applications']
    ]
def get_app_environments(app_name, region=None):
    """Return all non-deleted environments of one application as objects."""
    LOG.debug('Inside get_app_environments api wrapper')
    result = _make_api_call('describe-environments',
                            application_name=app_name,
                            include_deleted=False,
                            region=region)
    return [_api_to_environment(env) for env in result['Environments']]
def get_all_environments(region=None):
    """Return every non-deleted environment in the account as objects."""
    LOG.debug('Inside get_all_environments api wrapper')
    result = _make_api_call('describe-environments',
                            include_deleted=False,
                            region=region)
    return [_api_to_environment(env) for env in result['Environments']]
def get_environment(app_name, env_name, region=None):
    """Return one environment as an Environment object.

    :raises NotFoundError: when no such environment exists
    """
    LOG.debug('Inside get_environment api wrapper')
    result = _make_api_call('describe-environments',
                            application_name=app_name,
                            environment_names=[env_name],
                            include_deleted=False,
                            region=region)
    envs = result['Environments']
    if not envs:
        raise NotFoundError('Environment "' + env_name + '" not Found.')
    return _api_to_environment(envs[0])
def get_environment_resources(env_name, region=None):
    """Return the raw resource description for an environment."""
    LOG.debug('Inside get_environment_resources api wrapper')
    return _make_api_call('describe-environment-resources',
                          environment_name=env_name,
                          region=region)
def get_new_events(app_name, env_name, request_id,
                   last_event_time=None, region=None):
    """Fetch events newer than *last_event_time* as Event objects.

    Any of app_name/env_name/request_id may be falsy, in which case that
    filter is simply omitted from the request.
    """
    LOG.debug('Inside get_new_events api wrapper')
    new_time = None
    if last_event_time is not None:
        # Nudge the cutoff forward by 1000 microseconds so the last event we
        # already saw is not returned again.
        new_time = last_event_time + datetime.timedelta(0, 0, 1000)
    kwargs = {}
    if app_name:
        kwargs['application_name'] = app_name
    if env_name:
        kwargs['environment_name'] = env_name
    if request_id:
        kwargs['request_id'] = request_id
    if new_time:
        kwargs['start_time'] = str(new_time)
    result = _make_api_call('describe-events',
                            region=region,
                            **kwargs)
    # VersionLabel and EnvironmentName are optional fields on an event.
    return [
        Event(message=event['Message'],
              event_date=event['EventDate'],
              version_label=event.get('VersionLabel'),
              app_name=event['ApplicationName'],
              environment_name=event.get('EnvironmentName'),
              severity=event['Severity'],
              )
        for event in result['Events']
    ]
def get_storage_location(region=None):
    """Return the account's Elastic Beanstalk S3 bucket name."""
    LOG.debug('Inside get_storage_location api wrapper')
    result = _make_api_call('create-storage-location', region=region)
    return result['S3Bucket']
def update_environment(env_name, options, region=None, remove=None):
    """Apply option settings to an environment.

    :param env_name: name of the environment to update
    :param options: list of option-setting dicts to apply
    :param region: region of the environment
    :param remove: list of option settings to remove (default: none)
    :return: the request id of the update operation
    :raises InvalidStateError: when the environment is not ready for updates
    """
    LOG.debug('Inside update_environment api wrapper')
    if remove is None:
        # Fix: the original used a mutable default (`remove=[]`), which is
        # shared across calls and can leak removals between invocations.
        remove = []
    try:
        response = _make_api_call('update-environment',
                                  environment_name=env_name,
                                  option_settings=options,
                                  options_to_remove=remove,
                                  region=region)
    except aws.InvalidParameterValueError as e:
        if e.message == responses['env.invalidstate'].replace('{env-name}',
                                                              env_name):
            raise InvalidStateError(e)
        # Fix: previously any other InvalidParameterValueError was swallowed,
        # leaving `response` unbound and raising UnboundLocalError below.
        raise
    return response['ResponseMetadata']['RequestId']
def update_env_application_version(env_name,
                                   version_label, region=None):
    """Deploy an existing application version to an environment."""
    LOG.debug('Inside update_env_application_version api wrapper')
    response = _make_api_call('update-environment',
                              environment_name=env_name,
                              version_label=version_label,
                              region=region)
    return response['ResponseMetadata']['RequestId']
def request_environment_info(env_name, info_type, region=None):
    """Ask the service to gather info/logs of *info_type* for an environment."""
    return _make_api_call('request-environment-info',
                          environment_name=env_name,
                          info_type=info_type,
                          region=region)
def retrieve_environment_info(env_name, info_type, region=None):
    """Retrieve previously requested info/logs for an environment."""
    return _make_api_call('retrieve-environment-info',
                          environment_name=env_name,
                          info_type=info_type,
                          region=region)
def terminate_environment(env_name, region=None):
    """Begin terminating an environment; returns the request id."""
    response = _make_api_call('terminate-environment',
                              environment_name=env_name,
                              region=region)
    return response['ResponseMetadata']['RequestId']
"""Script that extracts,crops and stores images in black and white in a new folder using OpenCVs Deep Neural
Network (Pre-trained for face Detection).
Script is adapted from several sources, with parts taken from each of the following:
1. van Gent, P. (2016). Emotion Recognition With Python, OpenCV and a Face Dataset.
A tech blog about fun things with Python and embedded electronics. Retrieved from:
http://www.paulvangent.com/2016/04/01/emotion-recognition-with-python-opencv-and-a-face-dataset/
2. https://www.pyimagesearch.com/2018/02/26/face-detection-with-opencv-and-deep-learning/
3. https://towardsdatascience.com/extracting-faces-using-opencv-face-detection-neural-network-475c5cd0c260
"""
import cv2
import glob
import numpy as np
# Define paths
# NOTE(review): hard-coded user-specific paths — consider taking these from
# CLI arguments or environment variables.
prototxt_path = "/Users/yenji/opencv/samples/dnn/face_detector/deploy.prototxt"
caffemodel_path = "/Users/yenji/opencv/samples/dnn/face_detector/res10_300x300_ssd_iter_140000_fp16.caffemodel"
# Read the model
# Loads OpenCV's pre-trained SSD face detector (Caffe format).
net = cv2.dnn.readNetFromCaffe(prototxt_path, caffemodel_path)
emotions = ["neutral", "anger", "disgust", "fear", "happy", "sadness", "surprise"] #Define emotions
def detect_faces(emotion, min_confidence=0.5):
    """Detect faces on every image of *emotion*, crop each face to a
    350x350 grayscale image and write it to the output dataset folder.

    :param emotion: emotion label; selects the input and output folders
    :param min_confidence: minimum detection confidence to keep a face
        (new keyword parameter; default matches the previous hard-coded 0.5 —
        the old comment claiming 0.95 did not match the code)
    """
    print(emotion)
    files = glob.glob("/Users/yenji/Desktop/Emotion-Detection/sorted_set_DNN/%s/*" %emotion) #Get list of all images with emotion
    filenumber = 0
    for f in files:
        print(str(f))
        frame = cv2.imread(f) #Open image
        (h, w) = frame.shape[:2]
        # The SSD network expects a 300x300 mean-subtracted BGR blob.
        blob = cv2.dnn.blobFromImage(cv2.resize(frame, (300, 300)), 1.0, (300, 300), (104.0, 177.0, 123.0))
        net.setInput(blob)
        detections = net.forward()
        # loop over the detections
        for i in range(0, detections.shape[2]):
            # extract the confidence (i.e., probability) associated with the
            # prediction
            confidence = detections[0, 0, i, 2]
            # filter out weak detections
            if confidence < min_confidence:
                continue
            # compute the (x, y)-coordinates of the bounding box for the
            # object, scaled back to the original image size
            box = detections[0, 0, i, 3:7] * np.array([w, h, w, h])
            (startX, startY, endX, endY) = box.astype("int")
            # Fix: clip the box to the frame and skip degenerate boxes —
            # the detector can report coordinates outside the image, which
            # previously produced an empty crop and made cv2.resize throw.
            startX, startY = max(startX, 0), max(startY, 0)
            endX, endY = min(endX, w), min(endY, h)
            if endX <= startX or endY <= startY:
                continue
            # cut, resize, convert to gray and save
            out = frame[startY:endY, startX:endX]
            out = cv2.resize(out, (350, 350))
            out = cv2.cvtColor(out, cv2.COLOR_BGR2GRAY) #Convert to gray so that it can be used with fisherface
            cv2.imwrite("/Users/yenji/Desktop/Emotion-Detection/datasetDNN/%s/%s.jpg" % (emotion, filenumber), out) # Write image
            filenumber += 1 # Increment image number
# Process each emotion folder in sequence.
for current_emotion in emotions:
    print("Start")
    detect_faces(current_emotion)  # crop and save faces for this emotion
    print("Done")
import numpy as np
from scipy.stats import norm
def simple_co_model(hi, fir, w_co,
                    offset_centre=0., offset_scale=0.5,
                    x_co_centre=2., x_co_scale=0.5):
    """Build log-posterior pieces for a simple HI/FIR/CO linear model.

    The parameter vector is p = (theta, offset, x_co, stddev).

    :param hi: HI data array
    :param fir: FIR data array
    :param w_co: CO line width factor
    :param offset_centre, offset_scale: Gaussian prior on the offset
    :param x_co_centre, x_co_scale: Gaussian prior on x_co
    :return: (lnwrap, lnprior) — log-posterior and log-prior callables
    """
    def lnlike(p):
        """Gaussian log-likelihood of the linear model."""
        theta, offset, x_co, stddev = p
        model = np.tan(theta) * (hi + 2. * w_co * x_co)
        resid = (fir - model) / stddev
        return np.sum(-np.log(stddev) - 0.5 * resid ** 2)
    def lnprior(p):
        """Log-prior: hard bounds plus Gaussian priors on offset and x_co."""
        theta, offset, x_co, stddev = p
        out_of_bounds = (theta < 0.) or (theta > np.pi / 2.) \
            or (x_co < 0.) or (stddev < 0.)
        if out_of_bounds:
            return -np.inf
        return (np.log(norm.pdf(offset, offset_centre, offset_scale))
                + np.log(norm.pdf(x_co, x_co_centre, x_co_scale)))
    def lnwrap(p):
        """Log-posterior: prior + likelihood when the prior is finite."""
        prior = lnprior(p)
        return prior + lnlike(p) if np.isfinite(prior) else -np.inf
    return lnwrap, lnprior
|
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import seaborn as sns
from scipy import stats
from ast import literal_eval
from sklearn.feature_extraction.text import TfidfVectorizer, CountVectorizer
from sklearn.metrics.pairwise import linear_kernel, cosine_similarity
from nltk.stem.snowball import SnowballStemmer
from nltk.stem.wordnet import WordNetLemmatizer
from nltk.corpus import wordnet
from surprise import Reader, Dataset, SVD
from surprise.model_selection import cross_validate
import logging
import warnings; warnings.simplefilter('ignore')
# Append run results to the shared content-based-recommender log file.
logging.basicConfig(filename="../log/result_contentBased.txt",
                    filemode='a',
                    format='%(asctime)s,%(msecs)d %(name)s %(levelname)s %(message)s',
                    datefmt='%H:%M:%S',
                    level=logging.DEBUG)
def load_data(dir_links_small='../input/links_small.csv', dir_metadata='../input/movies_metadata.csv'):
    """Load and preprocess the datasets.

    :param dir_links_small: path to the links_small csv file
    :param dir_metadata: path to the movies metadata csv file
    :return: (links_small, md) — the tmdb id series and the metadata frame
    """
    links_small = pd.read_csv(dir_links_small)
    links_small = links_small.loc[links_small['tmdbId'].notnull(), 'tmdbId'].astype('int')
    md = pd.read_csv(dir_metadata)
    # 'genres' holds a stringified list of {'id', 'name'} dicts; keep names.
    extract_names = lambda x: [i['name'] for i in x] if isinstance(x, list) else []
    md['genres'] = md['genres'].fillna('[]').apply(literal_eval).apply(extract_names)
    return links_small, md
def get_vote_counts(md):
    """Compute the minimum vote count required to be listed in the chart.

    :param md: metadata pandas frame
    :return: m, the 95th percentile of the (non-null, int-cast) vote counts
    """
    counts = md.loc[md['vote_count'].notnull(), 'vote_count'].astype('int')
    return counts.quantile(0.95)
def get_mean_vote(md):
    """Compute the mean vote across the whole report.

    :param md: metadata pandas frame
    :return: C, the mean of the (non-null, int-truncated) vote averages
    """
    averages = md.loc[md['vote_average'].notnull(), 'vote_average'].astype('int')
    return averages.mean()
def get_small_movies_metatdata(md, links_small):
    """Build the small movies metadata frame.

    Adds a string 'year' column to *md* (mutated in place) and returns the
    rows whose id appears in *links_small*.

    :param md: full metadata frame
    :param links_small: series of tmdb ids to keep
    :return: smd, the filtered small metadata frame
    """
    # Fix: the original guard `x != np.nan` is always True (NaN never
    # compares equal to anything), so unparseable dates became the literal
    # string 'NaT'; pd.notnull keeps them as NaN as intended.
    md['year'] = pd.to_datetime(md['release_date'], errors='coerce').apply(
        lambda x: str(x).split('-')[0] if pd.notnull(x) else np.nan)
    # These row indices contain badly formatted data in the original dump
    # (e.g. an id of '1975-xx-xx' that cannot be cast to int).
    # errors='ignore' makes the drop a no-op on other datasets instead of
    # raising KeyError.
    md = md.drop([19730, 29503, 35587], errors='ignore')
    md['id'] = md['id'].astype('int')
    smd = md[md['id'].isin(links_small)]
    return smd
# Movie Description Based Recommender
def get_quantitative_matrix(smd):
    """Build the TF-IDF matrix over each movie's combined description.

    :param smd: small metadata frame (gets 'tagline'/'description' columns)
    :return: (smd, tfidf_matrix)
    """
    # Description = overview + tagline, with missing pieces blanked out.
    smd['tagline'] = smd['tagline'].fillna('')
    smd['description'] = (smd['overview'] + smd['tagline']).fillna('')
    vectorizer = TfidfVectorizer(analyzer='word', ngram_range=(1, 2),
                                 min_df=0, stop_words='english')
    tfidf_matrix = vectorizer.fit_transform(smd['description'])
    logging.info(f"tfidf_matrix shape: {tfidf_matrix.shape}")
    return smd, tfidf_matrix
def get_similarity_between2movies(tfidf_matrix):
    """Compute the pairwise similarity matrix between all movies.

    :param tfidf_matrix: TF-IDF document matrix
    :return: cosine_sim, where cosine_sim[i][j] is the similarity of movies i and j
    """
    # linear_kernel on L2-normalised TF-IDF rows equals cosine similarity
    # and is faster than cosine_similarity here.
    cosine_sim = linear_kernel(tfidf_matrix, tfidf_matrix)
    # Fix: removed the no-op statement `cosine_sim[0]` (dead code).
    return cosine_sim
def get_recommendations(title, cosine_sim, titles):
    """Return the 30 most similar movies to *title*, best first.

    :param title: title of the movie to query
    :param cosine_sim: matrix of pairwise movie similarities
    :param titles: series of all titles, positionally aligned with cosine_sim
    :return: the 30 most similar titles by cosine similarity score

    NOTE(review): relies on the module-level ``indices`` mapping
    (title -> row position) being defined before this is called — it is only
    created inside the ``__main__`` block; confirm before reusing elsewhere.
    """
    idx = indices[title]
    sim_scores = list(enumerate(cosine_sim[idx]))
    # Sort by similarity, then skip position 0 (the movie itself).
    sim_scores = sorted(sim_scores, key=lambda x: x[1], reverse=True)
    sim_scores = sim_scores[1:31]
    movie_indices = [i[0] for i in sim_scores]
    return titles.iloc[movie_indices]
def convert_int(x):
    """Convert *x* to int, returning NaN when it is not convertible."""
    try:
        return int(x)
    except (TypeError, ValueError):
        # Fix: the bare `except` also swallowed KeyboardInterrupt/SystemExit
        # and hid real bugs; only conversion failures should yield NaN.
        return np.nan
if __name__ == "__main__":
    # Load all dataset
    links_small, md = load_data()
    # Get total number of votes
    # NOTE(review): m and C are computed but never used by this pipeline.
    m = get_vote_counts(md)
    # Get the mean of votes
    C = get_mean_vote(md)
    # Load small dataset
    smd = get_small_movies_metatdata(md, links_small)
    # Calculate quantitative matrix
    smd, tfidf_matrix = get_quantitative_matrix(smd)
    # Calculate tfidf matrix - similarity between 2 movies
    cosine_sim = get_similarity_between2movies(tfidf_matrix)
    # Re-number rows 0..n-1 so positions line up with cosine_sim rows.
    smd = smd.reset_index()
    # Get titles of movies
    titles = smd['title']
    # Get indices of movies (title -> row position; used by get_recommendations)
    indices = pd.Series(smd.index, index=smd['title'])
    logging.info(f"Top 10 recommendations for the movie: The Godfather:\n{get_recommendations('The Godfather', cosine_sim, titles).head(10)}")
    logging.info(f"Top 10 recommendations for the movie: The Dark Knight:\n{get_recommendations('The Dark Knight', cosine_sim, titles).head(10)}")
|
#!/usr/bin/env python3
from sys import argv
"""
Version 3: as fast as version 2
(it now uses list comprehensions to store
all palindromes in a tuple and count occurrences).
Finds palindromes greater than X characters.
It also prints:
- the size of the longest palindrome
- the size of the shortest palindrome
PEP8 compliant
“Readability counts."
“Beautiful is better than ugly.”
— The Zen of Python
"""
if len(argv) > 2:
    str_size = argv[1]   # number of required characters (kept as given for output)
    filename = argv[2]   # file to parse
    size = int(str_size)
    with open(filename, 'r') as file:
        words = file.read().split()
    # Every whitespace-separated token that reads the same in reverse.
    all_palindromes = [word for word in words if word == word[::-1]]
    if not all_palindromes:
        # Fix: max()/min() on an empty sequence raised ValueError when the
        # file contained no palindromes at all.
        print(f'No palindromes found in {filename}.')
    else:
        # extracts the longest and the shortest palindromes (string)
        longest_str = max(all_palindromes, key=len)
        shortest_str = min(all_palindromes, key=len)
        # Fix: the original forced both counters to 0 whenever str_size fell
        # outside [len(shortest), len(longest)], which was wrong for the
        # ">= size" counter (a size below the shortest palindrome means ALL
        # palindromes qualify, not none). sum() over a generator also
        # replaces the walrus-in-list-comprehension trick, which built a
        # throwaway list purely for its side effect.
        counter_plus = sum(1 for p in all_palindromes if len(p) >= size)
        counter_exact = sum(1 for p in all_palindromes if len(p) == size)
        print(f'There are:\n'
              f'{counter_plus} palindromes with {str_size} or more characters\n'
              f'{counter_exact} palindromes with exact {str_size} characters\n'
              f'{len(all_palindromes)} palindromes total in file (unknown)\n')
        print(f'---> Longest palindrome: "{longest_str}" > '
              f'{len(longest_str)} characters.\n')
        print(f'---> Shortest palindrome: "{shortest_str}" > '
              f'{len(shortest_str)} characters.\n')
else:
    print('Usage: palindrome.py numberofchars filename\n'
          'Example: ./palindrome.py 15 filewithstrings.txt')
|
import os
import optparse
import time
from threading import Timer
import requests
import chardet
from BeautifulSoup import BeautifulSoup
class Crawler(object):
    """Fetch a page and mirror it locally, rewriting resource URLs.

    NOTE(review): Python 2 era code — `from BeautifulSoup import
    BeautifulSoup` is BeautifulSoup 3, and save() writes raw response
    content through a text-mode file handle; porting to Python 3 would
    need bs4 and binary-mode writes. Confirm the target interpreter.
    """
    def __init__(self, url, outfile):
        """
        :param url: url to Crawling
        :param outfile: Place output
        """
        self.url = url
        self.outfile = outfile
    def refresh(self):
        """
        Re-download the page, rebuild the soup, and pick a fresh
        timestamped output directory for this crawl.
        :return:
        """
        response = requests.get(self.url)
        # chardet guesses the encoding from the raw bytes; decode leniently.
        self.encoding = chardet.detect(response.content).get('encoding')
        html = response.content.decode(self.encoding, 'ignore')
        self.soup = BeautifulSoup(html)
        # One output directory per crawl, named by timestamp.
        self.directory = '%s/%s' % (self.outfile, time.strftime('%Y%m%d%H%M%S'))
    def do_request(tag):
        """
        Request source and change url to local.
        :param tag: tag to change, like 'img'.
        :return:

        Decorator factory: the decorated method's original body is ignored —
        the returned wrapper (select, attr, path) becomes the method.
        """
        def decorator(fn):
            def wrapper(self, select, attr, path, *args, **kwargs):
                """
                :param select: attribute filter to tag, like {'src': True}.
                :param attr: attribute to change, this should be a URL.
                :param path: relative path to save source.
                :return:
                """
                tags = self.soup.findAll(tag, attrs=select)
                for tag_ in tags:
                    url = tag_[attr]
                    # Resource file name: last path segment, query stripped.
                    name = url.split('/')[-1].split('?')[0]
                    if not url.startswith('http'): # some incomplete URL
                        url = 'http://%s' % url.split('//', 1)[-1]
                    source = requests.get(url).content
                    # Point the tag at the local copy before saving the page.
                    tag_[attr] = '%s/%s' % (path, name)
                    self.save(path=path, name=name, source=source)
            return wrapper
        return decorator
    # Bodies below are placeholders; do_request supplies the real behavior.
    @do_request('link')
    def css_parse(self):
        pass
    @do_request('img')
    def image_parse(self):
        pass
    @do_request('script')
    def js_parse(self):
        pass
    @do_request('iframe')
    def iframe_parse(self):
        pass
    def html_parse(self):
        # Save the rewritten page itself as index.html in the crawl root.
        self.save(path='', name='index.html', source=str(self.soup))
    def save(self, path, name, source):
        # Create the subdirectory on demand and write the resource.
        path_ = '%s/%s' % (self.directory, path)
        if not os.path.exists(path_):
            os.makedirs(path_)
        fp = open('%s/%s' % (path_, name), 'w')
        fp.write(source)
        fp.close()
def option_parser(parser):
    """Attach the crawler's command-line options to *parser*."""
    crawler_options = (
        ("-d", "--interval", "interval", "Interval of crawling the web"),
        ("-u", "--url", "url", "Url to crawler"),
        ("-o", "--outfile", "outfile", "Place output in file"),
    )
    for short_flag, long_flag, dest, help_text in crawler_options:
        parser.add_option(short_flag, long_flag, dest=dest, help=help_text)
def timer(crawler, interval):
    """Crawl once, then reschedule this function every *interval* seconds.

    :param crawler: Crawler instance to run
    :param interval: seconds between crawls (string or int)
    :return: None
    """
    # Schedule the next run before crawling so a slow or failing crawl does
    # not stop the cycle. NOTE(review): the Timer thread is never cancelled,
    # so the process only stops when killed.
    t = Timer(int(interval), timer, (crawler, interval))
    t.start()
    crawler.refresh()
    # Mirror each resource type into its own subdirectory.
    crawler.css_parse({'type': 'text/css', 'href': True}, 'href', 'css')
    crawler.js_parse({'src': True}, 'src', 'js')
    crawler.image_parse({'src': True}, 'src', 'images')
    # Some sites lazy-load images through a '_src' attribute.
    crawler.image_parse({'_src': True}, '_src', 'images')
    crawler.iframe_parse({'src': True}, 'src', 'iframe')
    crawler.html_parse()
def main():
    """Parse command-line options and start the periodic crawl loop."""
    parser = optparse.OptionParser()
    option_parser(parser)
    (options, args) = parser.parse_args()
    crawler = Crawler(options.url, options.outfile)
    timer(crawler, options.interval)
if __name__ == '__main__':
    main()
|
from .pages.product_page import ProductPage
from .pages.login_page import LoginPage
import pytest
from .pages.basket_page import BasketPage
import time
@pytest.mark.user_add_to_basket
class TestUserAddToBasketFromProductPage:
    """Basket tests that run as a freshly registered, logged-in user."""
    @pytest.fixture(scope="function", autouse=True)
    def setup(self, browser):
        """Register and log in a brand-new user before every test."""
        login_link = 'http://selenium1py.pythonanywhere.com/ru/accounts/login/'
        page = LoginPage(browser, login_link)
        page.open()
        page.should_be_login_page()
        # Timestamp keeps the registration e-mail/password unique per run.
        email = str(time.time()) + "@fakemail.org"
        password = str(time.time())
        page.register_new_user(email, password)
        page.should_be_authorized_user()
    def test_user_cant_see_success_message(self, browser):
        """A logged-in user sees no success message before adding anything."""
        link = 'http://selenium1py.pythonanywhere.com/ru/catalogue/coders-at-work_207/'
        page = ProductPage(browser, link)
        page.open()
        page.should_not_be_success_message()
    @pytest.mark.need_review
    def test_user_can_add_product_to_basket(self, browser):
        """A logged-in user can add the product and sees its name and price."""
        link = 'http://selenium1py.pythonanywhere.com/ru/catalogue/coders-at-work_207/'
        page = ProductPage(browser, link)
        page.open()
        page.should_be_adding_basket_button()
        page.add_object_to_basket()
        page.should_be_name_in_basket()
        page.check_name_in_basket()
        page.should_be_price_in_basket()
        page.check_price_in_basket()
@pytest.mark.need_review
@pytest.mark.parametrize('promo_code', [0, 1, 2, 3, 4, 5, 6, pytest.param(7, marks=pytest.mark.xfail), 8, 9])
def test_guest_can_add_product_to_basket(browser, promo_code):
    """A guest can add the product to the basket under every promo offer."""
    url = f'http://selenium1py.pythonanywhere.com/catalogue/coders-at-work_207/?promo=offer{promo_code}'
    product_page = ProductPage(browser, url)
    product_page.open()
    product_page.should_be_adding_basket_button()
    product_page.add_object_to_basket_solve()
    product_page.should_be_name_in_basket()
    product_page.check_name_in_basket()
    product_page.should_be_price_in_basket()
    product_page.check_price_in_basket()
@pytest.mark.xfail
def test_guest_cant_see_success_message_after_adding_product_to_basket(browser):
    """Expected to fail: the success message does appear after adding."""
    url = 'http://selenium1py.pythonanywhere.com/ru/catalogue/coders-at-work_207/'
    product_page = ProductPage(browser, url)
    product_page.open()
    product_page.add_object_to_basket()
    product_page.should_not_be_success_message()
def test_guest_cant_see_success_message(browser):
    """A guest sees no success message on a freshly opened product page."""
    url = 'http://selenium1py.pythonanywhere.com/ru/catalogue/coders-at-work_207/'
    product_page = ProductPage(browser, url)
    product_page.open()
    product_page.should_not_be_success_message()
@pytest.mark.xfail
def test_message_disappeared_after_adding_product_to_basket(browser):
    """Expected to fail: the success message does not disappear on its own."""
    url = 'http://selenium1py.pythonanywhere.com/ru/catalogue/coders-at-work_207/'
    product_page = ProductPage(browser, url)
    product_page.open()
    product_page.add_object_to_basket()
    product_page.should_disappear()
def test_guest_should_see_login_link_on_product_page(browser):
    """The product page exposes a login link for guests."""
    url = "http://selenium1py.pythonanywhere.com/en-gb/catalogue/the-city-and-the-stars_95/"
    product_page = ProductPage(browser, url)
    product_page.open()
    product_page.should_be_login_link()
@pytest.mark.need_review
def test_guest_can_go_to_login_page_from_product_page(browser):
    """A guest can navigate from the product page to the login page."""
    url = "http://selenium1py.pythonanywhere.com/en-gb/catalogue/the-city-and-the-stars_95/"
    product_page = ProductPage(browser, url)
    product_page.open()
    product_page.go_to_login_page()
    login_page = LoginPage(browser, browser.current_url)
    login_page.should_be_login_page()
@pytest.mark.need_review
def test_guest_cant_see_product_in_basket_opened_from_product_page(browser):
    """The basket opened from a product page is empty for a guest."""
    url = "http://selenium1py.pythonanywhere.com/en-gb/catalogue/the-city-and-the-stars_95/"
    product_page = ProductPage(browser, url)
    product_page.open()
    product_page.should_be_basket_link()
    product_page.go_to_basket_page()
    basket_page = BasketPage(browser, browser.current_url)
    basket_page.should_be_no_products_in_basket()
    basket_page.should_be_text_about_empty_basket()
|
from __future__ import print_function
import torch.utils.data as data
import os
import glob
from PIL import Image
from utils import preprocess
class OurDataset(data.Dataset):
    """Segmentation dataset pairing each *.jpg image with a same-named *.png mask.

    The last 20 images in sorted filename order form the validation split;
    everything before them is the training split.
    """
    # Class index -> semantic label.
    CLASSES = [
        "background",
        "road",
        "side-walk",
        "people",
        "car",
        "building",
        "bridge",
        "median",
        "sky",
        "plant",
        "inner-car",
    ]
    def __init__(
        self, root, train=True, transform=None, target_transform=None, crop_size=None,
    ):
        """
        :param root: directory containing the .jpg images and .png masks
        :param train: True for the training split, False for validation
        :param transform: optional transform applied to the image
        :param target_transform: optional transform applied to the mask
        :param crop_size: square crop size passed to preprocess()
        """
        self.root = root
        self.transform = transform
        self.target_transform = target_transform
        self.train = train
        self.crop_size = crop_size
        # Fix: glob's ordering is filesystem-dependent, so the last-20
        # train/val split was nondeterministic across runs and machines;
        # sorting makes the split stable.
        jpgs = sorted(glob.glob("{}/*.jpg".format(self.root)))
        print("total has {} images in ourdataset({}).".format(len(jpgs), self.root))
        if self.train:
            jpgs = jpgs[:-20]
        else:
            jpgs = jpgs[-20:]
        self.images = list(jpgs)
        # Each mask shares its image's path, with a .png extension.
        self.masks = [jpg.replace(".jpg", ".png") for jpg in jpgs]
    def __getitem__(self, index):
        """Load, jointly preprocess and (optionally) transform one pair."""
        _img = Image.open(self.images[index]).convert("RGB")
        _target = Image.open(self.masks[index])
        # Joint augmentation: flip/scale only during training.
        _img, _target = preprocess(
            _img,
            _target,
            flip=True if self.train else False,
            scale=(0.5, 2.0) if self.train else None,
            crop=(self.crop_size, self.crop_size),
            is_train=self.train,
        )
        if self.transform is not None:
            _img = self.transform(_img)
        if self.target_transform is not None:
            _target = self.target_transform(_target)
        return _img, _target
    def __len__(self):
        """Number of image/mask pairs in this split."""
        return len(self.images)
|
# -*- coding: utf-8 -*-
"""
Created on Fri Mar 18 23:34:37 2016
A 0MQ Client to dispatch messages to other services to get required information
@author: alex
"""
import zmq
import json
class ServiceConnector(object):
    """0MQ REQ client that registers with a discovery service and opens
    further REQ sockets to the services that the discovery service reports.
    """

    def __init__(self, discovery_conn, inbound_connection_info, logging):
        # Establish a connection to the discovery service, which is necessary
        # to connect to the other modules in the application.
        self.context = zmq.Context()
        self.disc_socket = self.context.socket(zmq.REQ)
        self.disc_socket.connect(discovery_conn)
        # A dictionary of open sockets keyed by service category name.
        self.connections_dict = {}
        # Inbound connection information to present on registration (string).
        # BUG FIX: the original statement read `self.inb_connect` without ever
        # assigning it, which raised AttributeError on construction and left
        # register() with nothing to send.
        self.inb_connect = inbound_connection_info
        # Logger instance supplied by the caller.
        self.log = logging
        self.log.debug('SerivceConnector: Service Connector Initialized')

    def register(self):
        """Send a registration message to the discovery service and log the
        success/failure response."""
        msg = {'header': {'msg_type': 'Registration'}, 'msg_body': {'service_type': 'Object', 'inbound_port': self.inb_connect}}
        self.disc_socket.send(json.dumps(msg))
        self.log.debug('SerivceConnector: Registration Message sent to discovery service')
        # Receive the registration response.
        resp = self.disc_socket.recv()
        self.log.debug('ServiceConnector: Registration response recieved: %s' % (resp))
        dict_resp = json.loads(resp)
        resp_body = dict_resp['msg_body']
        if resp_body['register_success'] == 'success':
            self.log.debug('SerivceConnector: Registration with Discovery Service successful')
        elif resp_body['register_success'] == 'failure':
            self.log.debug('SerivceConnector: Registration with Discovery Service unsuccessful')
            self.log.debug('SerivceConnector: Error encountered: %s' % (resp_body['register_details']))

    def request_service(self, service_type):
        """Ask the discovery service for a service of `service_type`.

        Returns a connected REQ socket (also cached in connections_dict),
        or None when the discovery service knows no such service."""
        self.log.debug('SerivceConnector: Requesting New Service Type: %s' % (service_type))
        msg = {
            'header': {
                'msg_type': 'Service Request'
            },
            'msg_body': {
                # BUG FIX: the requested type was hard-coded to 'Mesh';
                # use the caller-supplied service_type instead.
                'requested_service_type': service_type
            }
        }
        self.disc_socket.send(json.dumps(msg))
        resp = self.disc_socket.recv()
        dict_resp = json.loads(resp)
        resp_body = dict_resp['msg_body']
        if resp_body['service_address'] == 'None':
            # BUG FIX: this log line was unreachable in the original (it was
            # placed after the `return None`).
            self.log.debug('ServiceConnector: No Service type found from Discovery Service')
            return None
        else:
            new_socket = self.context.socket(zmq.REQ)
            new_socket.connect(resp_body['service_address'])
            self.connections_dict[service_type] = new_socket
            self.log.debug('ServiceConnector: Service found from Discovery Service at address %s' % (resp_body['service_address']))
            return new_socket

    def send_request(self, service_type, msg):
        """Send `msg` (JSON-serialisable) to a service of `service_type` and
        return the decoded reply dict, or None when no service is available."""
        # BUG FIX: dict.has_key() does not exist in Python 3, and the lookup
        # used the literal string 'service_type' instead of the variable.
        if service_type in self.connections_dict:
            con = self.connections_dict[service_type]
        else:
            con = self.request_service(service_type)
        if con is not None:
            con.send(json.dumps(msg))
            resp = con.recv()
            dict_resp = json.loads(resp)
            # BUG FIX: the original format expression `'...' %s (resp)` parsed
            # as a call to an undefined name `s` and raised NameError.
            self.log.debug('ServiceConnector: Message sent and response recieved: %s' % (resp))
        else:
            dict_resp = None
            self.log.error('ServiceConnector: No Service Found')
        return dict_resp
# -*- coding: utf-8 -*-
from typing import List
class Solution:
    def destCity(self, paths: List[List[str]]) -> str:
        """Return the terminal city: the destination that never appears as
        a departure city in any path."""
        sources = set()
        destinations = set()
        for source, destination in paths:
            sources.add(source)
            destinations.add(destination)
        return (destinations - sources).pop()
if __name__ == "__main__":
    # Quick self-test cases: (paths, expected terminal city).
    cases = [
        ([["London", "New York"], ["New York", "Lima"], ["Lima", "Sao Paulo"]], "Sao Paulo"),
        ([["B", "C"], ["D", "B"], ["C", "A"]], "A"),
        ([["A", "Z"]], "Z"),
    ]
    solution = Solution()
    for paths, expected in cases:
        assert expected == solution.destCity(paths)
|
import csv

# Map raw sentiment ids to fastText-style labels.
# NOTE(review): this mapping is currently unused below — presumably the
# intent was to write d[row[0]] instead of the raw id; confirm before use.
d = {'1': '__label__NEUTRAL', '0': '__label__NEGATIVE', '2': '__label__POSITIVE'}

# Load every row of the training CSV into memory.
rows = []
with open('train.csv', 'rt') as f:
    for record in csv.reader(f):
        rows.append(record)

# Append "<label> <text>" lines to the fastText training file.
with open('actual_train.txt', 'a') as f:
    for record in rows:
        f.write(record[0] + ' ' + record[1] + '\n')

# l1=[]
# with open('test.csv','rt')as f:
#     data = csv.reader(f)
#     for row in data:
#         l1.append(row)
# with open('actual_test.txt','a') as f:
#     for i in range(len(l1)):
#         s=l1[i][1]+'\n'
#         f.write(s)
|
# -*- coding: utf-8 -*-
from at import interfaces
from zope.schema.vocabulary import SimpleVocabulary
from zope.app.component.hooks import getSite
from zope.app.pagetemplate import ViewPageTemplateFile
# Vendor name -> CMS media path of the vendor's logo image.
vendorImages={"Alfa Romeo":"/media/237459/1.jpg",
              "Audi":"/media/237462/2.jpg",
              "BMW":"/media/237465/3.jpg",
              "Citroen":"/media/237468/4.jpg",
              "Daewoo":"/media/237471/5.jpg",
              "Fiat":"/media/237474/6.jpg",
              "Ford":"/media/237477/6-5.jpg",
              "Honda":"/media/237480/7.jpg",
              "Hyundai":"/media/237483/7-5.jpg",
              "Iveco":"/media/237489/9.jpg",
              "Jaguar":"/media/1576190/33.jpg",
              "Jeep":"/media/237492/10.jpg",
              "Kia":"/media/237495/11.jpg",
              "Lancia":"/media/237498/12.jpg",
              "Land Rover":"/media/237501/13.jpg",
              "Lexus":"/media/237504/14.jpg",
              "Mazda":"/media/237507/15.jpg",
              "Mercedes":"/media/237510/16.jpg",
              "Mitsubishi":"/media/237513/17.jpg",
              "Nissan":"/media/237516/18.jpg",
              "Opel":"/media/237519/19.jpg",
              "Peugeot":"/media/237522/20.jpg",
              "Renault":"/media/237525/21.jpg",
              "Rover":"/media/237528/22.jpg",
              "Saab":"/media/237531/23.jpg",
              "Seat":"/media/237534/24.jpg",
              "Skoda":"/media/237537/25.jpg",
              "Subaru":"/media/237540/26.jpg",
              "Suzuki":"/media/237543/27.jpg",
              "Toyota":"/media/237546/28.jpg",
              "Volvo":"/media/237549/29.jpg",
              "Volkswagen":"/media/237552/30.jpg",
              }
def vendorsVoc(context):
    """Zope vocabulary listing every vendor registered in the site root."""
    site = getSite()
    vendor_names = [obj.__name__ for obj in site.values()
                    if interfaces.IVendor.providedBy(obj)]
    return SimpleVocabulary(
        [SimpleVocabulary.createTerm(name, name, name) for name in vendor_names])
colors = [(1,u"белый"), (2,u"красный")]

def colorsVoc(context):
    """Vocabulary of colour options: value and token are the numeric id,
    the title is the human-readable name."""
    terms = []
    for value, title in colors:
        terms.append(SimpleVocabulary.createTerm(value, value, title))
    return SimpleVocabulary(terms)
models = [u"x3", u"x5"]

def modelsVoc(context):
    """Vocabulary of every model of every vendor; term tokens are prefixed
    with the vendor name to keep them unique across vendors."""
    terms = []
    site = getSite()
    for vendor in site.values():
        if not interfaces.IVendor.providedBy(vendor):
            continue
        for model in vendor.models:
            terms.append(SimpleVocabulary.createTerm(model, vendor.__name__ + model, model))
    return SimpleVocabulary(terms)
class VendorListViewlet(object):
    """Viewlet that renders the vendor logo strip from vendorlist.pt."""

    def update(self):
        site = getSite()
        self.vendors = []
        for obj in site.values():
            if interfaces.IVendor.providedBy(obj):
                self.vendors.append({'counter': obj.counter,
                                     'image': '++resource++at/' + obj.logoResource})

    def render(self):
        template = ViewPageTemplateFile("vendorlist.pt")
        return template.__call__(self)
|
'''Write a function bestSum(targetSum, numbers) that takes in a targetSum and an array of numbers as arguments
The function should return an array containing the shortest combination of numbers that add up to exactly the targetSum
If there is any tie for the shortest combination, you may return any one of the shortest'''
def bestSum(targetSum, numbers):
    """Return the shortest list of numbers (with repetition allowed) that
    sums exactly to targetSum, or None when no combination exists.

    Naive exponential recursion — see bestSum2/bestSumt for fast variants."""
    if targetSum == 0:
        return []
    if targetSum < 0:
        return None
    best = None
    for num in numbers:
        sub = bestSum(targetSum - num, numbers)
        if sub is not None:
            candidate = sub + [num]
            if best is None or len(candidate) < len(best):
                best = candidate
    return best

print(bestSum(7, [2,3,4,7]))
print(bestSum(8, [2,3,5]))
print(bestSum(8, [1,4,5]))
# m = targetSum ; n = len(numbers)
# time complexity: O(n^m * m)
# space complexity: O(m*m) = O(m^2)
# Memoized solution
def bestSum2(targetSum, numbers, memo=None):
    """Like bestSum, but memoises the shortest combination per target sum
    (memo maps target -> shortest combination or None)."""
    if memo is None:
        memo = {}
    if targetSum in memo:
        return memo[targetSum]
    if targetSum == 0:
        return []
    if targetSum < 0:
        return None
    best = None
    for num in numbers:
        sub = bestSum2(targetSum - num, numbers, memo)
        if sub is None:
            continue
        candidate = sub + [num]
        if best is None or len(candidate) < len(best):
            best = candidate
    memo[targetSum] = best
    return best

print(bestSum2(7, [2,3,4,7]))
print(bestSum2(8, [2,3,5]))
print(bestSum2(8, [1,4,5]))
#print(bestSum2(100, [1,2,5,25]))
# Tabulation
def bestSumt(targetSum, numbers):
    """Bottom-up variant: table[i] holds a shortest combination summing to i
    (None while unreachable); the answer is table[targetSum]."""
    table = [None] * (targetSum + 1)
    table[0] = []
    for amount in range(targetSum + 1):
        reachable = table[amount]
        if reachable is None:
            continue
        for num in numbers:
            total = amount + num
            if total > targetSum:
                continue
            candidate = reachable + [num]
            # Keep the shorter of the existing and the new combination.
            if not table[total] or len(table[total]) > len(candidate):
                table[total] = candidate
    return table[targetSum]

print(bestSumt(8, [2,3,5]))
print(bestSumt(8, [1,4,5]))
print(bestSumt(100, [25,1,2,4]))
# m = targetSum ; n = len(numbers)
# Time Complexity : O(m^2 * n)
# Space Complexity: O(m^2)
"""
Django settings for pympm project.
Generated by 'django-admin startproject' using Django 1.10.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.10/ref/settings/
"""
import os

# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))

# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.10/howto/deployment/checklist/

# SECURITY WARNING: keep the secret key used in production secret!
# The literal here is only a development fallback; set SECRET_KEY in the
# environment for real deployments.
SECRET_KEY = os.getenv('SECRET_KEY', '*3@=r_c6xobh80utpje=$&-v-u@)ofb56#cg(8^@l-jd-q0-u#')

# SECURITY WARNING: don't run with debug turned on in production!
STATIC_ROOT = os.path.join(BASE_DIR,'static/')
DEBUG = False
# Static assets are served from a dedicated static domain.
STATIC_URL = "https://static.musicasparamissa.com.br/"
# STATIC_URL = "/static/"
ALLOWED_HOSTS = ['*']

# Application definition
INSTALLED_APPS = (
    'django.contrib.contenttypes',
    'django.contrib.staticfiles',
    'django.contrib.sitemaps',
    'storages',
    'apps.mpm',
    'corsheaders',
)
MIDDLEWARE_CLASSES = (
    'django.middleware.gzip.GZipMiddleware',
    'htmlmin.middleware.HtmlMinifyMiddleware',
    'htmlmin.middleware.MarkRequestMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'corsheaders.middleware.CorsMiddleware',
    # 'django.middleware.security.SecurityMiddleware',
)
ROOT_URLCONF = 'pympm.urls'
TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [os.path.join(BASE_DIR, 'templates')],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
            ],
        },
    }
]
# SECURE_SSL_REDIRECT = True
# SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')

# Origins allowed by django-cors-headers (site, blog, and local dev servers).
CORS_ORIGIN_WHITELIST = (
    'musicasparamissa.com.br',
    'http://musicasparamissa.com.br',
    'https://musicasparamissa.com.br',
    'blog.musicasparamissa.com.br',
    'http://blog.musicasparamissa.com.br',
    'https://blog.musicasparamissa.com.br',
    'localhost:8000',
    'http://localhost:8000',
    'localhost:1337',
    'http://localhost:1337',
)
WSGI_APPLICATION = 'pympm.wsgi.application'

# Database
# https://docs.djangoproject.com/en/1.10/ref/settings/#databases
# Connection parameters come from the environment with local-dev defaults.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.mysql',
        'NAME': os.getenv('DB_NAME', 'pympm'),
        'USER': os.getenv('DB_USER', 'pympm'),
        'PASSWORD': os.getenv('DB_PASSWORD', 'pympm'),
        'HOST': os.getenv('DB_HOST', 'localhost'),
        'PORT': os.getenv('DB_PORT', '3306'),
    }
}

# Internationalization
# https://docs.djangoproject.com/en/1.8/topics/i18n/
LANGUAGE_CODE = 'pt-br'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
SITE_ID = 1
# LOGGING = {
#     'version': 1,
#     'disable_existing_loggers': False,
#     'handlers': {
#         'console': {
#             'class': 'logging.StreamHandler',
#         },
#     },
#     'loggers': {
#         'django': {
#             'handlers': ['console'],
#             'level': 'DEBUG',
#             'propagate': True,
#         },
#     },
# }
|
from flask import Flask, json, jsonify ,render_template
from flask import request
import requests
from flask_sqlalchemy import SQLAlchemy
from flask_marshmallow import Marshmallow
# Round-robin indices over the order-server and catalog-server replica pools.
order_counter=1
catalog_counter=1
#front end tier will send requests to order server and catalog server
# Number of free slots left in the local book cache.
cache_size = 5
# Per-book access counts, used for least-frequently-used eviction.
id_count = {}
#initial app
app = Flask(__name__)
#Database
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///frontEndCache.sqlite'
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
#init database
db = SQLAlchemy(app)
#init marshmallow
ma = Marshmallow(app)
#catalog Class/Model
class Catalog(db.Model):
    # Locally cached copy of a catalog entry; mirrors the catalog server's
    # book schema (id, title, quantity, price, topic).
    id = db.Column(db.Integer,primary_key=True)
    title = db.Column(db.String(200))
    quantity = db.Column(db.Integer)
    price = db.Column(db.Float)
    topic = db.Column(db.String(200))
    def __init__(self,id,title,quantity,price,topic):
        # Field-by-field constructor; the id comes from the catalog server,
        # not from the local autoincrement.
        self.id=id
        self.title=title
        self.quantity=quantity
        self.price=price
        self.topic=topic
#Catalog schema
class CatalogSchema(ma.Schema):
    # Marshmallow serialisation schema for Catalog rows.
    class Meta:
        fields = ('id', 'title' , 'quantity' , 'price' , 'topic')
# Create the cache table on startup.
db.create_all()
#init schema
book_schema = CatalogSchema()
books_schema = CatalogSchema(many=True)
# Proxy for the full catalog listing; forwarded to a catalog server.
@app.route('/bazar/info/all', methods=['GET'])
def info():
    """Forward GET /bazar/info/all to one of the three catalog replicas,
    rotating through ports 2000/3000/4000 on successive calls."""
    global catalog_counter
    replica_urls = {
        1: "http://192.168.1.202:2000/bazar/info/all",
        2: "http://192.168.1.202:3000/bazar/info/all",
        3: "http://192.168.1.202:4000/bazar/info/all",
    }
    r = requests.get(replica_urls[catalog_counter])
    catalog_counter = 1 if catalog_counter == 3 else catalog_counter + 1
    return (r.content)
#request to catalog to get info about book with the id book_id
@app.route('/bazar/info/<int:book_id>', methods=['GET'])
def get_info(book_id):
    """Serve a single book's info through a small LFU cache.

    Cache hit: bump the book's access count and serve from the local DB.
    Cache miss: fetch from a catalog replica (round robin over ports
    2000/3000/4000), evicting the least-frequently-used cached book when
    the cache is full.
    """
    global catalog_counter
    global cache_size
    global id_count
    book = Catalog.query.with_entities(Catalog.title, Catalog.quantity,
                                       Catalog.topic, Catalog.price).filter_by(id=book_id).first()
    if book:
        # Cache hit: count the access for the LFU policy.
        id_count[book_id] = id_count.get(book_id, 0) + 1
        return book_schema.jsonify(book)
    # Cache miss: pick the next catalog replica (round robin).
    port = {1: 2000, 2: 3000, 3: 4000}[catalog_counter]
    catalog_counter = 1 if catalog_counter == 3 else catalog_counter + 1
    r = requests.get("http://192.168.1.202:" + str(port) + "/bazar/info/" + str(book_id))
    res = r.json()
    entry = Catalog(book_id, res['title'], res['quantity'], res['price'], res['topic'])
    if cache_size <= 0 and id_count:
        # Cache full: evict the least-frequently-used book. (The original
        # scan `break`-ed after the first key, so it evicted an arbitrary
        # entry instead of the LFU one, and shadowed the builtin `min`.)
        victim = min(id_count, key=id_count.get)
        del id_count[victim]
        Catalog.query.filter_by(id=victim).delete()
        print("delete from db")
        cache_size += 1
    db.session.add(entry)
    db.session.commit()
    id_count[book_id] = 1
    cache_size -= 1
    return (r.content)
#getting the books info which have the topic s_topic #request to catalogServer
def _catalog_search_url(s_topic):
    """Build the search URL for the next catalog replica (round robin over
    ports 2000/3000/4000 on host 192.168.1.202)."""
    global catalog_counter
    port = {1: 2000, 2: 3000, 3: 4000}[catalog_counter]
    catalog_counter = 1 if catalog_counter == 3 else catalog_counter + 1
    return "http://192.168.1.202:" + str(port) + "/bazar/search/" + str(s_topic)


def _evict_lfu(count):
    """Evict up to `count` least-frequently-used books from the local cache.

    Fixes the original eviction scans, which `break`-ed after inspecting the
    first dictionary key and therefore evicted an arbitrary entry rather
    than the least-used one (and shadowed the builtin `min`).
    """
    global cache_size
    for _ in range(count):
        if not id_count:
            return
        victim = min(id_count, key=id_count.get)
        del id_count[victim]
        Catalog.query.filter_by(id=victim).delete()
        print("delete from db")
        db.session.commit()
        cache_size += 1


def _cache_search_results(res):
    """Insert every fetched book dict into the cache, evicting LFU entries
    first when there is not enough room."""
    global cache_size
    if cache_size < len(res):
        _evict_lfu(len(res) - cache_size)
    for entry in res:
        db.session.add(Catalog(entry['id'], entry['title'], entry['quantity'],
                               entry['price'], entry['topic']))
        db.session.commit()
        id_count[entry['id']] = 1
        cache_size -= 1


# Number of books each known topic is expected to return; a cached result
# set smaller than this is treated as stale and refetched from the catalog.
_TOPIC_SIZES = {
    "distributed systems": 2,
    "undergraduate school": 2,
    "new": 3,
}


@app.route('/bazar/search/<s_topic>', methods=['GET'])
def search(s_topic):
    """Search books by topic through the local LFU cache.

    A completely cached topic is served locally (bumping access counters);
    otherwise any partial cached rows are dropped (freeing their slots —
    the original leaked them), the query is forwarded to a catalog replica,
    and the fresh rows are cached. The original also fell through with an
    implicit `return None` for an unknown topic that had cached rows; this
    version handles any topic uniformly.
    """
    global cache_size
    global id_count
    books = Catalog.query.with_entities(Catalog.id, Catalog.title, Catalog.quantity,
                                        Catalog.topic, Catalog.price).filter_by(
                                        topic=s_topic.replace("%20", " ")).all()
    num = len(books)
    print(num)
    expected = _TOPIC_SIZES.get(s_topic)
    if books and expected is not None and num == expected:
        # Full cache hit: count the accesses and serve the cached rows.
        for b in books:
            id_count[b.id] = id_count.get(b.id, 0) + 1
        return jsonify(books_schema.dump(books))
    # Partial (or unknown-topic) hit: drop the stale rows, free their slots.
    for b in books:
        Catalog.query.filter_by(id=b.id).delete()
        print("delete from db")
        db.session.commit()
        id_count.pop(b.id, None)
        cache_size += 1
    # Refetch from a catalog replica and refill the cache.
    r = requests.get(_catalog_search_url(s_topic))
    _cache_search_results(r.json())
    return r.content
# Purchase endpoint: forwarded to an order server. The request body may carry
# an 'amount'; it defaults to 1 when no body is sent with the request.
@app.route('/bazar/purchase/<int:book_id>', methods=['POST'])
def purchase(book_id):
    """Forward a purchase for `book_id` to one of the order servers,
    rotating through ports 2000/3000/6000 on host 192.168.1.121."""
    if request.data:
        # An explicit amount was sent with the request body.
        amount = request.json['amount']
    else:
        # The default value is one.
        amount = 1
    global catalog_counter
    targets = {
        1: "http://192.168.1.121:2000/bazar/purchase/",
        2: "http://192.168.1.121:3000/bazar/purchase/",
        3: "http://192.168.1.121:6000/bazar/purchase/",
    }
    url = targets[catalog_counter] + str(book_id)
    catalog_counter = 1 if catalog_counter == 3 else catalog_counter + 1
    r = requests.post(url, data={'amount': amount})
    return (r.content)
############################## admin endpoints ##############################
# The following requests are sent by the book-store administrator.

# Update the price of a book.
@app.route('/bazar/update_price/<int:book_id>', methods=['PUT'])
def update_book_price(book_id):
    """Admin: forward a price update for `book_id` to a catalog replica
    (round robin over ports 2000/3000/4000)."""
    price = request.json['price']
    global catalog_counter
    replica_ports = {1: 2000, 2: 3000, 3: 4000}
    url = ("http://192.168.1.202:" + str(replica_ports[catalog_counter]) +
           "/bazar/update_price/" + str(book_id))
    catalog_counter = 1 if catalog_counter == 3 else catalog_counter + 1
    r = requests.put(url, data={'price': price})
    return (r.content)
# Increase stock quantity.
@app.route('/bazar/increase_quantity/<int:book_id>', methods=['PUT'])
def increase_book_quantity(book_id):
    """Admin: forward a stock increase for `book_id` to a catalog replica
    (round robin over ports 2000/3000/4000)."""
    amount = request.json['amount']
    global catalog_counter
    replica_ports = {1: 2000, 2: 3000, 3: 4000}
    url = ("http://192.168.1.202:" + str(replica_ports[catalog_counter]) +
           "/bazar/increase_quantity/" + str(book_id))
    catalog_counter = 1 if catalog_counter == 3 else catalog_counter + 1
    r = requests.put(url, data={'amount': amount})
    return (r.content)
# Decrease stock quantity.
@app.route('/bazar/decrease_quantity/<int:book_id>', methods=['PUT'])
def decrease_book_quantity(book_id):
    """Admin: forward a stock decrease for `book_id` to a catalog replica
    (round robin over ports 2000/3000/4000)."""
    amount = request.json['amount']
    global catalog_counter
    replica_ports = {1: 2000, 2: 3000, 3: 4000}
    url = ("http://192.168.1.202:" + str(replica_ports[catalog_counter]) +
           "/bazar/decrease_quantity/" + str(book_id))
    catalog_counter = 1 if catalog_counter == 3 else catalog_counter + 1
    r = requests.put(url, data={'amount': amount})
    return (r.content)
@app.route('/bazar/delete/<int:book_id>', methods=['DELETE'])
def delete(book_id):
    """Remove a book from the local cache only (the catalog servers are
    not contacted)."""
    global cache_size
    dc = Catalog.query.get(book_id)
    if dc:
        db.session.delete(dc)
        db.session.commit()
        # Free the cache slot and forget the usage counter. The original
        # kept a stale zero-count id_count entry and never returned the
        # slot to cache_size, permanently shrinking the cache; it also
        # declared an unused `global order_counter`.
        id_count.pop(book_id, None)
        cache_size += 1
        return jsonify({'msg':"done"})
    else:
        return jsonify({'msg':"not found"})
# Show the orders list.
@app.route('/bazar/order/show', methods=['GET'])
def show():
    """Admin: fetch the order log from one of the order servers, rotating
    through ports 2000/3000/6000 on host 192.168.1.121."""
    global order_counter
    order_urls = {
        1: "http://192.168.1.121:2000/show",
        2: "http://192.168.1.121:3000/show",
        3: "http://192.168.1.121:6000/show",
    }
    r = requests.get(order_urls[order_counter])
    order_counter = 1 if order_counter == 3 else order_counter + 1
    return (r.content)
#catalog= 202
#order = 203
#run
# Development entry point; debug mode must be disabled in production.
if __name__=="__main__":
    app.run(debug=True)
# coding: UTF-8
__author__ = 'Steeve'
__version__ = '1.0.0'
import os
import re
import argparse
from beautifultable import BeautifulTable as btft
class GenericAplication():
    def __init__(self, arquivo, namearq, display ,binaryfile ,textfile):
        # arquivo: path of the .asm source file to assemble.
        # namearq: output file name.
        # display/binaryfile/textfile: presumably CLI flags selecting console
        # display, binary output, and text output — confirm against argparse setup.
        self.arquivo = arquivo
        self.namearq = namearq
        self.display = display
        self.binaryfile = binaryfile
        self.textfile = textfile
# Funcao para pegar tudo que for depois dos dois pontos oseja os rotulos
def __splitGetData(self, text, by, index):
if not type(text) == str and hasattr(text, 'text'):
text = text.text
if type(text) == str:
if type(by) == list:
for b in by[1:]:
text = text.replace(b, by[0])
by = by[0]
if index < 0:
index = len(text.split(by))-1
return text.split(by)[index]
elif type(text) == list:
lst = []
for x in text:
lst.append(self.__splitGetData(x, by, index))
return lst
return text
# Função para retirar os rotulos
def __getRotulo(self, name, objRot):
for rot in objRot:
if name == rot['rotulo']:
return rot
return 'No Existe este rotulo'
# Função para tirar os byte
def __getByte(self, end, obj):
for b in obj:
if b['end'] == end:
return True
return False
def __conjInstrucoes(self, name):
conjinstru = [{
'opcode' : '00',
'name': 'NOP',
'tamanho': 1
},{
'opcode' : '10',
'name': 'LDR',
'tamanho': 2
},{
'opcode' : '20',
'name': 'STR',
'tamanho': 2
},{
'opcode' : '30',
'name': 'ADD',
'tamanho': 2
},{
'opcode' : '40',
'name': 'SUB',
'tamanho': 2
},{
'opcode' : '50',
'name': 'MUL',
'tamanho': 2
},{
'opcode' : '60',
'name': 'DIV',
'tamanho': 2
},{
'opcode' : '70',
'name': 'NOT',
'tamanho': 1
},{
'opcode' : '80',
'name': 'AND',
'tamanho': 2
},{
'opcode' : '90',
'name': 'OR',
'tamanho': 2
},{
'opcode' : 'A0',
'name': 'XOR',
'tamanho': 2
},{
'opcode' : 'B0',
'name': 'JMP',
'tamanho': 2
},{
'opcode' : 'C0',
'name': 'JEQ',
'tamanho': 2
},{
'opcode' : 'D0',
'name': 'JG',
'tamanho': 2
},{
'opcode' : 'E0',
'name': 'JL',
'tamanho': 2
},{
'opcode' : 'F0',
'name': 'HLT',
'tamanho': 1
},{
'name': 'byte',
'tamanho': 1
}]
for inst in conjinstru:
if name == inst['name']:
return inst
return None
def __verificaRotuloExist(self, obj, name):
for rot in obj:
if rot['rotulo'] == name:
return True
return False
def __verificaInstrucoes(self, obj, name):
'''
procura as intruções no objeto da tabela de instruções
'''
for rot in obj:
if rot['name'] == name:
return True
return False
def __getJustInstructions(self, lines):
'''
Funcoa para pegar soamente as instruções para seu usa da saida em un arquivo binario
'''
progComple = []
for l in lines:
for g in l.split():
result = re.search(':', g)
if (result == None) and (g != 'text' and g != 'byte' and g != 'data'):
#indica que nao e un rotulo e se nao for nem un campo de texto deixando asim
# soamente as instrucoes e os valores das variaveis ou operando
progComple.append(g)
return progComple
    def programaBinaria(self):
        '''
        Main assembler driver.

        Reads the .asm file, runs a first pass building the symbol table
        (label -> address), then a second pass that validates the source and
        translates it, producing -- depending on the constructor flags -- a
        table on the terminal, a .txt file and/or a raw .bin memory image.
        '''
        simbolData = []  # symbol table entries: {'rotulo': label, 'end': address}
        programBinary = []  # translated program: {'end': address, 'conteudo': 8-bit string}
        endAtual = 0  # current memory address while assembling
        endAtualFinal = 0
        rotuloRedefinido = False  # set when the same label is declared twice
        # Read the .asm source file.
        # NOTE(review): the handle is never closed -- consider `with open(...)`.
        fileData = open(self.arquivo, 'r')
        lines = fileData.readlines()
        for x in range(2):
            if x==0: # First pass: collect the labels and their memory addresses
                for l in lines:
                    for g in l.split():
                        # A token containing ':' declares a label (the text before the colon).
                        result = re.search(':', g)
                        if result != None:
                            rotu = self.__splitGetData(l,':',0)
                            if len(simbolData) > 0:
                                if self.__verificaRotuloExist(simbolData, rotu):
                                    rotuloRedefinido = self.__verificaRotuloExist(simbolData, rotu)
                            simbolData.append({
                                "rotulo" : rotu,
                                "end" : endAtual
                            })
                        if self.__conjInstrucoes(g.strip()) != None: # known mnemonic: advance by its size
                            if g != 'text':
                                endAtual += self.__conjInstrucoes(g.strip())['tamanho']
                        if g == 'data':
                            endAtual = 128  # the data section starts at fixed address 128
            else: # Second pass: validate the source and translate it
                if rotuloRedefinido:
                    print('*****************************')
                    print(' ERROR!')
                    print(' Rótulo "'+rotu+'" redefinido')
                    print('*****************************')
                else:
                    endAtual = 0
                    # First verify that instructions and their operands are consistent.
                    instrucoesvalida = True
                    achouText = False
                    achouData = False
                    achouByte = False
                    for l in lines: # a label other than text/data, or instructions before them, is an error
                        for g in l.split(): # an instruction that does not exist should be flagged as an error
                            # NOTE(review): `l.split() != 'text'` compares a list with a string and is
                            # therefore always True -- probably meant `g != 'text'`; the lookup result
                            # on the next line is also discarded.
                            if l.split() != 'text' and g != 'byte' and g != 'data':
                                self.__conjInstrucoes(g.strip())
                            if l.split()[0] == 'text':
                                achouText = True
                                if len(l.split()) > 1:
                                    instrucoesvalida = False
                                    print('*****************************')
                                    print(' ERROR!')
                                    print(' Operando "'+l.split()[1]+'" inválido nesta instrução ')
                                    print('*****************************')
                            elif l.split()[0] == 'data':
                                achouData = True
                                if len(l.split()) > 1:
                                    instrucoesvalida = False
                                    print('*****************************')
                                    print(' ERROR!')
                                    print(' Operando "'+l.split()[1]+'" inválido nesta instrução')
                                    print('*****************************')
                            # When the line carries more than 2 tokens, a 'byte'
                            # declaration is expected to be present.
                            elif len(l.split()) > 2:
                                if l.split()[1] == 'byte' and achouByte == False:
                                    achouByte = True
                    if achouText == False:
                        instrucoesvalida = False
                        print('*****************************')
                        print(' ERROR!')
                        print(' Nome "text" não foi definido ')
                        print('*****************************')
                    if achouData == False:
                        instrucoesvalida = False
                        print('*****************************')
                        print(' ERROR!')
                        print(' Nome "data" não foi definido ')
                        print('*****************************')
                    if achouByte == False:
                        instrucoesvalida = False
                        print('*****************************')
                        print(' ERROR!')
                        print(' Nome "byte" não foi definido ')
                        print('*****************************')
                    # Complete output binary file
                    if (self.binaryfile) and self.namearq:
                        programBinaryCompl = [] # full zero-padded memory image used for the binary output
                        afterHlt = [] # tokens found after HLT (HLT marks the end of the program)
                        count = 0
                        hltprogram = None
                        if len(self.__getJustInstructions(lines)) > 0:
                            # NOTE(review): the original comment says 256 output bytes, but
                            # range(0, 258) iterates 258 times -- confirm the intended image size.
                            for i in range(0, 258):
                                if i <= (len(self.__getJustInstructions(lines))-1): # still inside the token list
                                    justInstruc = self.__getJustInstructions(lines)[i]
                                    if (self.__conjInstrucoes(justInstruc) != None) and (hltprogram == None): # mnemonic: emit its opcode
                                        binary = '{:0>8}'.format(bin(int(self.__conjInstrucoes(justInstruc)['opcode'], 16))[2:])
                                        decimalB = int(str(binary), 2)
                                        programBinaryCompl.append(decimalB)
                                    elif (i < 128 and self.__conjInstrucoes(justInstruc) == None) and (hltprogram == None):
                                        # Label operand: emit the address recorded in the symbol table.
                                        endRot = self.__getRotulo(justInstruc, simbolData)['end']
                                        binary = '{:0>8}'.format(bin(endRot)[2:])
                                        decimalB = int(str(binary), 2)
                                        programBinaryCompl.append(decimalB)
                                        # NOTE(review): 'HLT' is a mnemonic, so this branch (operands
                                        # only) can never see it -- hltprogram seemingly stays None.
                                        if justInstruc == 'HLT':
                                            hltprogram = justInstruc
                                    elif hltprogram != None:
                                        afterHlt.append(justInstruc)
                                    else:
                                        if i >= 128 and len(afterHlt)-1 >= count:
                                            binary = '{:0>8}'.format(bin(int(afterHlt[count]))[2:])
                                            decimalB = int(str(binary), 2)
                                            programBinaryCompl.append(decimalB)
                                            count += 1
                                else:
                                    # Past the last token: pad the image with zero bytes.
                                    binary = '{:0>8}'.format(bin(int(0))[2:])
                                    decimalB = int(str(binary), 2)
                                    programBinaryCompl.append(decimalB)
                    else:
                        if instrucoesvalida:
                            for l in lines:
                                # Build the instruction table shown on the terminal (as worked in class).
                                for g in l.split():
                                    result = re.search(':', g)
                                    if result == None:
                                        if g != 'text' and g != 'byte' and g != 'data':
                                            if self.__conjInstrucoes(g.strip()) != None:
                                                programBinary.append({
                                                    'end' : endAtual,
                                                    'conteudo' : '{:0>8}'.format(bin(int(self.__conjInstrucoes(g.strip())['opcode'], 16))[2:])
                                                })
                                            elif endAtual < 128 and self.__conjInstrucoes(g.strip()) == None:
                                                endRot = self.__getRotulo(g.strip(),simbolData)['end']
                                                # print(endAtual,' Operando ',endRot)
                                                programBinary.append({
                                                    'end' : endAtual,
                                                    'conteudo' : '{:0>8}'.format(bin(endRot)[2:])
                                                })
                                            else:
                                                # print(endAtual,' Intrucoes ',g)
                                                programBinary.append({
                                                    'end' : endAtual,
                                                    'conteudo' : '{:0>8}'.format(bin(int(g))[2:])
                                                })
                                        endAtualFinal = endAtual;
                                        if g != 'text' and g != 'byte':
                                            endAtual += 1
                                    if g == 'data':
                                        endAtual = 128
                    # Terminal output
                    if self.display and instrucoesvalida:
                        # Build the symbol and translated-program tables for display.
                        # (btft is the table renderer imported at module level --
                        # presumably BeautifulTable; verify against the file header.)
                        tableSimbolData = btft()
                        tableProgramBinary = btft()
                        contentTable = btft()
                        tableSimbolData.column_headers = ["Rotulo", "Endereço"]
                        for sd in simbolData:
                            tableSimbolData.append_row([sd['rotulo'], sd['end']])
                        tableProgramBinary.column_headers = ['Endereço', 'Conteudo']
                        for pb in programBinary:
                            content = '- '+pb['conteudo']+' -'
                            tableProgramBinary.append_row([pb['end'],content])
                        contentTable.append_row(["Tabela de Simbolo", "Programa Traduzido"])
                        contentTable.append_row([tableSimbolData, tableProgramBinary])
                        contentTable.set_style(btft.STYLE_BOX_DOUBLED)
                        tableProgramBinary.set_style(btft.STYLE_BOX_DOUBLED)
                        tableSimbolData.set_style(btft.STYLE_BOX_DOUBLED)
                        print(contentTable)
                    # Text-file output
                    if self.textfile and self.namearq and instrucoesvalida:
                        tableSimbolData = btft()
                        tableProgramBinary = btft()
                        contentTable = btft()
                        tableSimbolData.column_headers = ["Rotulo", "Endereço"]
                        for sd in simbolData:
                            tableSimbolData.append_row([sd['rotulo'], sd['end']])
                        tableProgramBinary.column_headers = ['Endereço', 'Conteudo']
                        for pb in programBinary:
                            content = '- '+pb['conteudo']+' -'
                            tableProgramBinary.append_row([pb['end'],content])
                        contentTable.append_row(["Tabela de Simbolo", "Programa Traduzido"])
                        contentTable.append_row([tableSimbolData, tableProgramBinary])
                        contentTable.set_style(btft.STYLE_BOX_DOUBLED)
                        tableProgramBinary.set_style(btft.STYLE_BOX_DOUBLED)
                        tableSimbolData.set_style(btft.STYLE_BOX_DOUBLED)
                        file = open(self.namearq+'.txt', "w")
                        file.write(str(contentTable))
                        file.close()
                    # Binary-file output
                    if self.binaryfile and self.namearq and instrucoesvalida:
                        # make file
                        # NOTE(review): the file is never closed/flushed explicitly.
                        newFile = open(self.namearq+'.bin', "wb")
                        newFileByteArray = bytearray(programBinaryCompl)
                        # write to file
                        newFile.write(newFileByteArray)
if __name__=='__main__':
    # CLI entry point: collect the flags and run the assembler once.
    arg_parser = argparse.ArgumentParser(prog='PROG', usage='%(prog)s [options]')
    arg_parser.add_argument('-a', required=True, help='Caminho ou nome do arquivo .asm')
    arg_parser.add_argument('-o', required=True, help='Nome de Saída para o arquivo .bin ou txt')
    # The three boolean switches share the same shape; add them data-driven.
    for flag, texto in (
            ('-v', 'Visualizar o resultado no terminal'),
            ('-b', 'Tipo de saída en un arquivo binario'),
            ('-t', 'Tipo de saída en un arquivo txt')):
        arg_parser.add_argument(flag, required=False, action='store_true', help=texto)
    opts = arg_parser.parse_args()
    app = GenericAplication(arquivo=opts.a, namearq=opts.o, display=opts.v,
                            binaryfile=opts.b, textfile=opts.t)
    app.programaBinaria()
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
import os
import shutil
DOC_DIR = "."
AMAO_DIR = "../AMAO"
def generate_rst_for_app(app, excludes=None):
    """Create the .rst files for the given app via sphinx-apidoc.

    Args:
        app: path of the app relative to AMAO_DIR (e.g. 'apps/Aluno').
        excludes: optional list of paths (relative to AMAO_DIR) to skip.
    """
    # Bug fix: the original iterated `excludes` directly, raising TypeError
    # whenever the default (None) was used.
    if excludes is None:
        excludes = []
    command_template = "sphinx-apidoc -f -o %(APP_DOC_DIR)s %(APP)s %(EXCLUDES)s"
    app_doc_dir = os.path.abspath(os.path.join(DOC_DIR, app))
    abs_app_dir = os.path.abspath(os.path.join(AMAO_DIR, app))
    abs_exclude = [os.path.abspath(os.path.join(AMAO_DIR, d)) for d in excludes]
    excludes_str = " ".join(abs_exclude)
    command = command_template % {'APP_DOC_DIR': app_doc_dir, 'APP': abs_app_dir, 'EXCLUDES': excludes_str}
    # print(...) form works under both Python 2 and 3; the original
    # `print command` statement is a syntax error on Python 3.
    print(command)
    os.system(command)
def clean_rsts():
    """Remove every generated .rst tree, keeping only index.rst."""
    # ignore_errors=True makes this a no-op when a directory is absent.
    for generated_dir in ('apps', 'libs', 'settings'):
        shutil.rmtree(generated_dir, ignore_errors=True)
def generate_rsts():
    """Generate all the .rst files the documentation needs."""
    # Every app currently ships with an empty exclude list, so drive the
    # eleven identical calls from one tuple of app paths.
    app_paths = (
        'apps/Aluno',
        'apps/Avaliacao',
        'apps/Core',
        'apps/Corretor',
        'apps/Materia',
        'apps/Professor',
        'libs/abs_models',
        'libs/context_processors',
        'libs/htmlt_boilerplate',
        'libs/test_utils',
        'libs/user_backends',
    )
    for app_path in app_paths:
        generate_rst_for_app(app_path, excludes=[])
def clean_docs():
    """Remove previous Sphinx build output by delegating to `make clean`."""
    command = "make clean"
    # print(...) form works under both Python 2 and 3; the original
    # `print command` statement is a syntax error on Python 3.
    print(command)
    os.system(command)
def generate_html():
    """Build the HTML documentation from scratch (clean, then `make html`)."""
    # print(...) form works under both Python 2 and 3; the original
    # `print x` statements are syntax errors on Python 3.
    print("Generate HTML")
    clean_docs()
    command = "make html"
    print(command)
    os.system(command)
if __name__ == '__main__':
    # Default to the HTML build when no target is given on the command line.
    target = sys.argv[1] if len(sys.argv) > 1 else "html"
    # Dispatch to generate_<target>() by name, e.g. generate_html.
    builder = globals()['generate_' + target]
    clean_rsts()
    generate_rsts()
    builder()
|
# Generated by Django 2.2.2 on 2019-07-06 09:32
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated Django migration (see header comment): alters the
    # Task.contribution column to a FloatField defaulting to 0.0.

    # Must be applied after migration 0003 of the 'task' app.
    dependencies = [
        ('task', '0003_auto_20190619_1247'),
    ]

    operations = [
        migrations.AlterField(
            model_name='task',
            name='contribution',
            field=models.FloatField(default=0.0),
        ),
    ]
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.