content
stringlengths 5
1.05M
|
|---|
#
# @lc app=leetcode id=355 lang=python
#
# [355] Design Twitter
#
# https://leetcode.com/problems/design-twitter/description/
#
# algorithms
# Medium (26.90%)
# Total Accepted: 32.4K
# Total Submissions: 120.3K
# Testcase Example: '["Twitter","postTweet","getNewsFeed","follow","postTweet","getNewsFeed","unfollow","getNewsFeed"]\n[[],[1,5],[1],[1,2],[2,6],[1],[1,2],[1]]'
#
# Design a simplified version of Twitter where users can post tweets,
# follow/unfollow another user and is able to see the 10 most recent tweets in
# the user's news feed. Your design should support the following methods:
#
#
#
# postTweet(userId, tweetId): Compose a new tweet.
# getNewsFeed(userId): Retrieve the 10 most recent tweet ids in the user's news
# feed. Each item in the news feed must be posted by users who the user
# followed or by the user herself. Tweets must be ordered from most recent to
# least recent.
# follow(followerId, followeeId): Follower follows a followee.
# unfollow(followerId, followeeId): Follower unfollows a followee.
#
#
#
# Example:
#
# Twitter twitter = new Twitter();
#
# // User 1 posts a new tweet (id = 5).
# twitter.postTweet(1, 5);
#
# // User 1's news feed should return a list with 1 tweet id -> [5].
# twitter.getNewsFeed(1);
#
# // User 1 follows user 2.
# twitter.follow(1, 2);
#
# // User 2 posts a new tweet (id = 6).
# twitter.postTweet(2, 6);
#
# // User 1's news feed should return a list with 2 tweet ids -> [6, 5].
# // Tweet id 6 should precede tweet id 5 because it is posted after tweet id
# 5.
# twitter.getNewsFeed(1);
#
# // User 1 unfollows user 2.
# twitter.unfollow(1, 2);
#
# // User 1's news feed should return a list with 1 tweet id -> [5],
# // since user 1 is no longer following user 2.
# twitter.getNewsFeed(1);
#
#
#
import time
from collections import defaultdict
class Poster(object):
    """A single posted tweet: the author's id together with the tweet's id."""

    def __init__(self, user_id, poster_id):
        # Keep author and tweet identifiers together so a feed can
        # filter by author while reporting the tweet id.
        self.user_id = user_id
        self.poster_id = poster_id
class Twitter(object):
    """Simplified Twitter: post tweets, follow/unfollow, read a 10-item feed.

    Tweets are stored globally in posting order as (user_id, tweet_id)
    tuples. Followees are kept in sets: with the original list-based
    storage, calling follow() twice and unfollow() once left a stale
    duplicate behind, so the feed still showed the followee's tweets.
    """

    FEED_SIZE = 10  # number of most-recent tweets returned by getNewsFeed

    def __init__(self):
        """
        Initialize your data structure here.
        """
        # follower id -> set of followee ids (no duplicates, O(1) membership)
        self.follower_map = defaultdict(set)
        # global timeline, oldest first: list of (user_id, tweet_id)
        self.twitter_map = []

    def postTweet(self, userId, tweetId):
        """
        Compose a new tweet.
        :type userId: int
        :type tweetId: int
        :rtype: None
        """
        self.twitter_map.append((userId, tweetId))

    def getNewsFeed(self, userId):
        """
        Retrieve the 10 most recent tweet ids posted by the user herself or
        by anyone she follows, ordered from most recent to least recent.
        :type userId: int
        :rtype: List[int]
        """
        visible = set(self.follower_map[userId])
        visible.add(userId)
        feed = []
        # Walk the global timeline newest-to-oldest, stopping as soon as
        # the feed is full instead of scanning every tweet.
        for user_id, tweet_id in reversed(self.twitter_map):
            if user_id in visible:
                feed.append(tweet_id)
                if len(feed) == self.FEED_SIZE:
                    break
        return feed

    def follow(self, followerId, followeeId):
        """
        Follower follows a followee. Following twice is a no-op.
        :type followerId: int
        :type followeeId: int
        :rtype: None
        """
        self.follower_map[followerId].add(followeeId)

    def unfollow(self, followerId, followeeId):
        """
        Follower unfollows a followee. Unfollowing someone who is not
        followed is a no-op.
        :type followerId: int
        :type followeeId: int
        :rtype: None
        """
        self.follower_map[followerId].discard(followeeId)
# Your Twitter object will be instantiated and called as such:
# obj = Twitter()
# obj.postTweet(userId,tweetId)
# param_2 = obj.getNewsFeed(userId)
# obj.follow(followerId,followeeId)
# obj.unfollow(followerId,followeeId)
|
#!/usr/tce/bin/python
import sys
import random
# The script consumes three arguments (base name, number of lists,
# overlap percent), i.e. it needs len(sys.argv) >= 4. The original
# tested "< 2", so a partially specified command line crashed later
# on sys.argv[2]/sys.argv[3] instead of printing the usage text.
if len(sys.argv) < 4:
    usage = '''
usage: add_overlap.py list_base_name number_of_lists overlap_percent
example: if your lists are t0_list.txt, t1_list.txt and t2_list.txt
you want 30 percent overlap you would run as:
add_overlap.py list.txt 2 30
The output lists names in this example would be:
t0_list.txt.overlap=30 (etc)
The output list will contain 30% more samples;
specifically, t0 will receive 15% of randomly selected
samples from t1 and t2.
The input lists are unchanged
The "excluded" counts in the output files are all set to -1,
because I haven't taken the time to get them correct.
I don't think these are used anyplace in lbann, so this should
be OK.
'''
    # print() instead of the Python-2-only "print usage" statement
    print(usage)
    exit(9)
#============================================================================
class List :
    """Parses and encapsulates a CONDUIT_HDF5_INCLUSION sample list."""

    def __init__(self, filename) :
        """Parse *filename*.

        Expected layout: a 'CONDUIT_HDF5_INCLUSION' header line, a counts
        line (valid invalid num_files), the base-directory line, then one
        line per file: dir included excluded sample_id...
        """
        self.filename = filename
        # 'with' guarantees the handle is closed (the original leaked it).
        with open(filename) as fh :
            self.first_line = fh.readline()
            assert self.first_line.find('CONDUIT_HDF5_INCLUSION') != -1
            t = fh.readline().split()
            self.valid_samples = int(t[0])
            self.invalid_samples = int(t[1])
            self.num_files = int(t[2])
            # keeps the trailing newline on purpose; write() relies on it
            self.base_dir = fh.readline()
            self.samples = []   # list of (file_dir, sample_id)
            self.counts = {}    # file_dir -> included + excluded
            for line in fh :
                if len(line) > 2 :
                    t = line.split()
                    # renamed from 'dir', which shadowed the builtin
                    sample_dir = t[0]
                    included = int(t[1])
                    excluded = int(t[2])
                    self.counts[sample_dir] = included + excluded
                    for sample_id in t[3:] :
                        self.samples.append((sample_dir, sample_id))

    # returns a list that contains n distinct random samples
    def get_random_samples(self, n) :
        """random.sample replaces the original rejection loop, which spun
        forever when n exceeded the number of samples (now: ValueError)."""
        return random.sample(self.samples, n)

    def num_samples(self) :
        """Total number of samples parsed from the list."""
        return len(self.samples)

    # add random samples from some other List to this List
    def add_samples(self, samples) :
        self.samples.extend(samples)

    # write final output (sample list file)
    def write(self, overlap) :
        """Write '<filename>.overlap=<overlap>'; 'excluded' counts are -1."""
        # build map: filename -> set of included sample ids
        by_file = {}
        for sample_dir, sample_id in self.samples :
            by_file.setdefault(sample_dir, set()).add(sample_id)
        with open(self.filename + '.overlap=' + str(overlap), 'w') as out :
            out.write(self.first_line)
            # write included_samples excluded_samples, num_files
            out.write(str(len(self.samples)) + ' -1 ' + str(len(by_file)) + '\n')
            out.write(self.base_dir)
            # write the samples
            for fn in by_file :
                out.write(fn + ' ' + str(len(by_file[fn])) + ' -1 ')
                for sample_id in by_file[fn] :
                    out.write(sample_id + ' ')
                out.write('\n')
#============================================================================
# parse cmd line
base = sys.argv[1]
count = int(sys.argv[2])
overlap = int(sys.argv[3])

the_lists = []
random_samples = []
for j in range(count) :
    # Parse one sample list; the List object holds everything from it.
    current = List('t' + str(j) + '_' + base)
    the_lists.append(current)
    # Draw the random samples (the overlap) that this list will
    # contribute to every other list.
    n = current.num_samples()
    p = int( (overlap / (count-1))* n / 100)
    random_samples.append(current.get_random_samples(p))

# Every list receives the overlap drawn from every other list.
for j in range(count) :
    for k in range(count) :
        if j != k :
            the_lists[j].add_samples(random_samples[k])

# Emit the augmented sample lists.
for lst in the_lists :
    lst.write(overlap)
|
import config
class Snapp:
    """Holder for a snap's metadata template and the project id."""

    # Metadata template for one photo. The original ended this literal
    # with a trailing comma, which silently turned `info` into a
    # one-element tuple containing the dict.
    info = {
        "genus": "",
        "family": "",
        "region": "",
        "source": "",
        "country": "",
        "binomial": "",
        "province": "",
        "difficulty": "",
        "photographer": "",
        "path": ""
    }

    # The original wrote `project_id: ''` — a bare annotation, which never
    # creates the class attribute. `=` actually assigns it.
    project_id = ''

    @staticmethod
    def create_tasks():
        # Not implemented yet.
        pass
|
# -*- coding: utf-8 -*-
"""
tasks.email
~~~~~~~~~~~
:author: Dave Caraway
:copyright: © 2014-2015, Fog Mine LLC
:license: Proprietary, see LICENSE for more details.
"""
from flask.ext.mail import Message
from ..framework.extensions import mail, celery
@celery.task()
def send_email(message):
    """Celery task: deliver *message* through the Flask-Mail extension."""
    mail.send(message)
@celery.task()
def send_message(subject, sender, recipients, text_body, html_body):
    """Build a Message with text and HTML bodies and queue it for delivery."""
    msg = Message(subject, sender=sender, recipients=recipients)
    msg.body = text_body
    msg.html = html_body
    # Hand off to the send_email task so delivery happens asynchronously.
    send_email.delay(msg)
|
# -*- coding:utf-8 -*-
from functools import reduce
from pyfunctor.functor import Functor
class Maybe(Functor):
    '''Maybe(value) -> new Functor object
    Example:
    >>> f = lift(lambda x, y: x + y)
    >>> run(f(Just('a'), Just('b')))
    Just('ab')
    >>> run(f(Just(0), Nothing))
    Nothing
    '''

    def run(self):
        # Thread self through the queued functions, left to right.
        return reduce(lambda x, f: f(x), self.fs, self)

    @classmethod
    def fmap(cls, f):
        def _f(*args):
            try:
                if all(isinstance(x, Just) for x in args):
                    return Just(f(*(x.value for x in args)))
                else:
                    return Nothing
            # Any failure inside f collapses to Nothing. The original bare
            # `except:` also swallowed SystemExit/KeyboardInterrupt;
            # `except Exception` does not.
            except Exception:
                return Nothing
        return _f
class Just(Maybe):
    """Maybe variant that carries a value."""

    def __str__(self):
        # With functions still queued, the settled value is unknown.
        if self.fs != []:
            return 'Maybe(?)'
        value = self.value
        shown = "'%s'" % value if isinstance(value, str) else value
        return 'Just(%s)' % shown

    def __repr__(self):
        rendered = self.__str__()
        return rendered if self.fs == [] else '<%s>' % rendered
class Nothing(Maybe):
    """Maybe variant that carries no value."""

    def __str__(self):
        return 'Nothing'

    def __repr__(self):
        return str(self)


# Replace the class with its singleton instance, so the module exposes a
# single Nothing value.
Nothing = Nothing(None)
|
#!/usr/bin/env python3
"""
Copyright (c) 2015 Jacob Martin
A command for sending TauNet Messages.
"""
import sys
import os
import socket
from subprocess import call
from taunet import *
args = sys.argv

# Helpful usage hints.
if len(args) != 3:
    print("usage:")
    print("\tpython3 client.py <username> <message>")
    exit()

to_username = args[1]
message = args[2]

# Fetch the target host from the client table using the username argument.
try:
    target_host = client_table.clients[to_username]
except KeyError:
    print("That client does not exist in the table.")
    exit()

# Construct the full message complete with headers.
full_message = "version: " + version + "\r\n"
full_message += "from: " + client_table.username + "\r\n"
full_message += "to: " + to_username + "\r\n\r\n"
full_message += message

# Write the message to a file ('with' guarantees the handle is closed).
with open("message", "w") as messageFile:
    messageFile.write(full_message)

# Encrypt the message file then delete the original file.
call(["./cs2", "encrypt", client_table.key, "message", "encrypted"])
os.remove("message")

# Connect to the target.
target = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
target.connect((target_host, port))

# Send the encrypted file. The original did send(read(1024)), which
# truncated anything past the first 1024 bytes, and socket.send() may
# also transmit fewer bytes than asked; sendall() pushes everything.
with open("encrypted", "rb") as encryptedFile:
    target.sendall(encryptedFile.read())

# Close the connection and delete the encrypted file.
target.close()
os.remove("encrypted")
|
"""
zhihu.items
~~~~~~~~~~~
This module implements the items for zhihu scraping.
:copyright: (c) 2017 by Jiale Xu.
:date: 2017/11/04.
:license: MIT License, see LICENSE.txt for more details.
"""
from lib.basis import SocialMediaItem, Sex
class ZhihuItem(SocialMediaItem):
    """Marker base class for all Zhihu scraping items."""
def _str_prop(name):
    """Return a property over '_<name>' that accepts only str values."""
    attr = '_' + name

    def getter(self):
        return getattr(self, attr)

    def setter(self, value):
        if not isinstance(value, str):
            raise TypeError("Attribute '{0}' should be an instance of type "
                            "'str'. Found: {1}.".format(name, type(value)))
        setattr(self, attr, value)

    return property(getter, setter)


def _int_prop(name, value_msg=None):
    """Return a property over '_<name>' that accepts only ints >= 0.

    *value_msg* overrides the ValueError text; the default matches the
    "should be a positive integer." wording used by the count fields.
    """
    attr = '_' + name
    if value_msg is None:
        value_msg = "Attribute '{0}' should be a positive integer.".format(name)

    def getter(self):
        return getattr(self, attr)

    def setter(self, value):
        if not isinstance(value, int):
            raise TypeError("Attribute '{0}' should be an instance of type "
                            "'int'. Found: {1}.".format(name, type(value)))
        if value < 0:
            raise ValueError(value_msg)
        setattr(self, attr, value)

    return property(getter, setter)


class ZhihuUserItem(ZhihuItem):
    """A Zhihu user profile: identity fields, counters and history lists.

    The original spelled out an identical validated getter/setter pair
    for every attribute (~250 lines); the _str_prop/_int_prop factories
    generate the same properties with the same error messages.
    """

    def __init__(self):
        self._id = ''                       # user id
        self._name = ''                     # user name
        self._sex = Sex.UNKNOWN             # sex
        self._avatar = ''                   # avatar URL
        self._business = ''                 # industry
        self._headline = ''                 # one-line description
        self._description = ''              # personal introduction
        self._question_count = 0            # questions asked
        self._answer_count = 0              # answers given
        self._article_count = 0             # articles written
        self._voteup_count = 0              # up-votes received
        self._thanked_count = 0             # thanks received
        self._favorited_count = 0           # favorites received
        self._following_count = 0           # users followed
        self._follower_count = 0            # followers
        self._following_topic_count = 0     # topics followed
        self._following_column_count = 0    # columns followed
        self._following_question_count = 0  # questions followed
        self._following_favlist_count = 0   # favlists followed
        self.educations = []                # education history
        self.employments = []               # employment history
        self.locations = []                 # places lived

    def __str__(self):
        parts = [
            'ID: ' + self._id,
            'Name: ' + self._name,
            'Sex: ' + self._sex.name,
            'Avatar: ' + self._avatar,
            'Business: ' + self._business,
            'Headline: ' + self._headline,
            'Description: ' + self._description,
            'Question Count: ' + str(self._question_count),
            'Answer Count: ' + str(self._answer_count),
            'Article Count: ' + str(self._article_count),
            'Vote-up Count: ' + str(self._voteup_count),
            'Thanked Count: ' + str(self._thanked_count),
            'Favorited Count: ' + str(self._favorited_count),
            'Following Count: ' + str(self._following_count),
            'Follower Count: ' + str(self._follower_count),
            'Following Topic Count: ' + str(self._following_topic_count),
            'Following Column Count: ' + str(self._following_column_count),
            'Following Question Count: ' + str(self._following_question_count),
            'Following Favlist Count: ' + str(self._following_favlist_count),
            'Educations: ' + '; '.join(str(edu) for edu in self.educations),
            'Employments: ' + '; '.join(str(emp) for emp in self.employments),
            'Locations: ' + '; '.join(str(loc) for loc in self.locations),
        ]
        return '\n'.join(parts) + '\n'

    def __eq__(self, other):
        # NOTE: defining __eq__ without __hash__ leaves instances
        # unhashable on Python 3 — unchanged from the original.
        if not isinstance(other, ZhihuUserItem):
            return False
        return self._id == other.id

    # String-valued attributes share one validated property implementation.
    id = _str_prop('id')
    name = _str_prop('name')
    avatar = _str_prop('avatar')
    business = _str_prop('business')
    headline = _str_prop('headline')
    description = _str_prop('description')

    # `sex` is the only attribute validated against the Sex enum.
    @property
    def sex(self):
        return self._sex

    @sex.setter
    def sex(self, value):
        if not isinstance(value, Sex):
            raise TypeError("Attribute 'sex' should be an instance of type "
                            "'Sex'. Found: %s." % type(value))
        self._sex = value

    # Counter attributes: int-typed and must not be negative.
    question_count = _int_prop('question_count')
    answer_count = _int_prop('answer_count')
    article_count = _int_prop('article_count')
    voteup_count = _int_prop('voteup_count')
    thanked_count = _int_prop('thanked_count')
    favorited_count = _int_prop('favorited_count')
    following_count = _int_prop('following_count')
    follower_count = _int_prop('follower_count')
    following_topic_count = _int_prop('following_topic_count')
    following_column_count = _int_prop('following_column_count')
    following_question_count = _int_prop('following_question_count')
    following_favlist_count = _int_prop('following_favlist_count')
class ZhihuEducationItem(ZhihuItem):
    """One education entry (school + major) of a Zhihu user."""

    def __init__(self):
        self._school = ''  # school
        self._major = ''   # major

    def __str__(self):
        # ', Major:' (no space) kept byte-for-byte from the original.
        return 'School: ' + self._school + ', Major:' + self._major

    @property
    def school(self):
        return self._school

    @school.setter
    def school(self, value):
        if isinstance(value, str):
            self._school = value
        else:
            raise TypeError("Attribute 'school' should be an instance of type "
                            "'str'. Found: %s." % type(value))

    @property
    def major(self):
        return self._major

    @major.setter
    def major(self, value):
        if isinstance(value, str):
            self._major = value
        else:
            raise TypeError("Attribute 'major' should be an instance of type "
                            "'str'. Found: %s." % type(value))
class ZhihuEmploymentItem(ZhihuItem):
    """One employment entry (company + job) of a Zhihu user."""

    def __init__(self):
        self._company = ''  # company
        self._job = ''      # job title

    def __str__(self):
        return 'Company: ' + self._company + ', Job: ' + self._job

    @property
    def company(self):
        return self._company

    @company.setter
    def company(self, value):
        if isinstance(value, str):
            self._company = value
        else:
            raise TypeError("Attribute 'company' should be an instance of type "
                            "'str'. Found: %s." % type(value))

    @property
    def job(self):
        return self._job

    @job.setter
    def job(self, value):
        if isinstance(value, str):
            self._job = value
        else:
            raise TypeError("Attribute 'job' should be an instance of type "
                            "'str'. Found: %s." % type(value))
def _str_prop(name):
    """Return a property over '_<name>' that accepts only str values."""
    attr = '_' + name

    def getter(self):
        return getattr(self, attr)

    def setter(self, value):
        if not isinstance(value, str):
            raise TypeError("Attribute '{0}' should be an instance of type "
                            "'str'. Found: {1}.".format(name, type(value)))
        setattr(self, attr, value)

    return property(getter, setter)


def _int_prop(name, value_msg=None):
    """Return a property over '_<name>' that accepts only ints >= 0.

    *value_msg* overrides the ValueError text; the default matches the
    "should be a positive integer." wording used by the count fields.
    """
    attr = '_' + name
    if value_msg is None:
        value_msg = "Attribute '{0}' should be a positive integer.".format(name)

    def getter(self):
        return getattr(self, attr)

    def setter(self, value):
        if not isinstance(value, int):
            raise TypeError("Attribute '{0}' should be an instance of type "
                            "'int'. Found: {1}.".format(name, type(value)))
        if value < 0:
            raise ValueError(value_msg)
        setattr(self, attr, value)

    return property(getter, setter)


class ZhihuQuestionItem(ZhihuItem):
    """A Zhihu question: metadata, counters and topic tags.

    Validated properties are generated by _str_prop/_int_prop with the
    original error messages (timestamps use the 'should be positive.'
    wording, counters 'should be a positive integer.').
    """

    def __init__(self):
        self._id = 0              # question id
        self._title = ''          # title
        self._content = ''        # body content
        self._create_time = 0     # creation timestamp
        self._update_time = 0     # last-update timestamp
        self._follower_count = 0  # followers
        self._visit_count = 0     # views
        self._comment_count = 0   # comments
        self.topics = []          # topic tags

    def __str__(self):
        parts = [
            'ID: ' + str(self._id),
            'Title: ' + self._title,
            'Content: ' + self._content,
            'Create Time: ' + str(self._create_time),
            'Update Time: ' + str(self._update_time),
            'Follower Count: ' + str(self._follower_count),
            'Visit Count: ' + str(self._visit_count),
            'Comment Count: ' + str(self._comment_count),
            'Topics: ' + '; '.join(str(top) for top in self.topics),
        ]
        return '\n'.join(parts) + '\n'

    def __eq__(self, other):
        if not isinstance(other, ZhihuQuestionItem):
            return False
        return self._id == other.id

    id = _int_prop('id')
    title = _str_prop('title')
    content = _str_prop('content')
    create_time = _int_prop('create_time',
                            "Attribute 'create_time' should be positive.")
    update_time = _int_prop('update_time',
                            "Attribute 'update_time' should be positive.")
    follower_count = _int_prop('follower_count')
    visit_count = _int_prop('visit_count')
    comment_count = _int_prop('comment_count')
def _str_prop(name):
    """Return a property over '_<name>' that accepts only str values."""
    attr = '_' + name

    def getter(self):
        return getattr(self, attr)

    def setter(self, value):
        if not isinstance(value, str):
            raise TypeError("Attribute '{0}' should be an instance of type "
                            "'str'. Found: {1}.".format(name, type(value)))
        setattr(self, attr, value)

    return property(getter, setter)


def _int_prop(name, value_msg=None):
    """Return a property over '_<name>' that accepts only ints >= 0.

    *value_msg* overrides the ValueError text; the default matches the
    "should be a positive integer." wording used by the count fields.
    """
    attr = '_' + name
    if value_msg is None:
        value_msg = "Attribute '{0}' should be a positive integer.".format(name)

    def getter(self):
        return getattr(self, attr)

    def setter(self, value):
        if not isinstance(value, int):
            raise TypeError("Attribute '{0}' should be an instance of type "
                            "'int'. Found: {1}.".format(name, type(value)))
        if value < 0:
            raise ValueError(value_msg)
        setattr(self, attr, value)

    return property(getter, setter)


class ZhihuAnswerItem(ZhihuItem):
    """A Zhihu answer: identity, author, content and counters.

    Validated properties are generated by _str_prop/_int_prop with the
    original error messages.
    """

    def __init__(self):
        self._id = 0             # answer id
        self._question_id = 0    # id of the answered question
        self._author = ''        # author name
        self._content = ''       # body content
        self._create_time = 0    # creation timestamp
        self._update_time = 0    # last-update timestamp
        self._voteup_count = 0   # up-votes
        self._comment_count = 0  # comments

    def __str__(self):
        parts = [
            'ID: ' + str(self._id),
            'Question ID: ' + str(self._question_id),
            'Author: ' + self._author,
            'Content: ' + self._content,
            'Create Time: ' + str(self._create_time),
            'Update Time: ' + str(self._update_time),
            'Vote-up Count: ' + str(self._voteup_count),
            'Comment Count: ' + str(self._comment_count),
        ]
        return '\n'.join(parts) + '\n'

    def __eq__(self, other):
        if not isinstance(other, ZhihuAnswerItem):
            return False
        return self._id == other.id

    id = _int_prop('id')
    question_id = _int_prop('question_id')
    author = _str_prop('author')
    content = _str_prop('content')
    create_time = _int_prop('create_time',
                            "Attribute 'create_time' should be positive.")
    update_time = _int_prop('update_time',
                            "Attribute 'update_time' should be positive.")
    voteup_count = _int_prop('voteup_count')
    comment_count = _int_prop('comment_count')
def _str_prop(name):
    """Return a property over '_<name>' that accepts only str values."""
    attr = '_' + name

    def getter(self):
        return getattr(self, attr)

    def setter(self, value):
        if not isinstance(value, str):
            raise TypeError("Attribute '{0}' should be an instance of type "
                            "'str'. Found: {1}.".format(name, type(value)))
        setattr(self, attr, value)

    return property(getter, setter)


def _int_prop(name, value_msg=None):
    """Return a property over '_<name>' that accepts only ints >= 0.

    *value_msg* overrides the ValueError text; the default matches the
    "should be a positive integer." wording used by the count fields.
    """
    attr = '_' + name
    if value_msg is None:
        value_msg = "Attribute '{0}' should be a positive integer.".format(name)

    def getter(self):
        return getattr(self, attr)

    def setter(self, value):
        if not isinstance(value, int):
            raise TypeError("Attribute '{0}' should be an instance of type "
                            "'int'. Found: {1}.".format(name, type(value)))
        if value < 0:
            raise ValueError(value_msg)
        setattr(self, attr, value)

    return property(getter, setter)


class ZhihuActivityItem(ZhihuItem):
    """One entry from a user's activity feed.

    Validated properties are generated by _str_prop/_int_prop with the
    original error messages.
    """

    def __init__(self):
        self._time = 0             # timestamp
        self._actor = ''           # acting user
        self._action = ''          # activity type
        self._target_user = ''     # target user name
        self._target_title = ''    # target title
        self._target_content = ''  # target content
        self._target_link = ''     # target link

    def __str__(self):
        parts = [
            'Time: ' + str(self._time),
            'Actor: ' + self._actor,
            'Action: ' + self._action,
            'Target User: ' + self._target_user,
            'Target Title: ' + self._target_title,
            'Target Content: ' + self._target_content,
            'Target Link: ' + self._target_link,
        ]
        return '\n'.join(parts) + '\n'

    def __eq__(self, other):
        if not isinstance(other, ZhihuActivityItem):
            return False
        return self._time == other.time and self._actor == other.actor

    time = _int_prop('time', "Attribute 'time' should be positive.")
    actor = _str_prop('actor')
    action = _str_prop('action')
    target_user = _str_prop('target_user')
    target_title = _str_prop('target_title')
    target_content = _str_prop('target_content')
    target_link = _str_prop('target_link')
|
import csv
import random
import math
import operator
import numpy as np
from sklearn.discriminant_analysis import LinearDiscriminantAnalysis as LDA
random.seed(123)
def load_data(filename, split, trainingSet, testSet):
    """Read a CSV dataset and randomly split its rows between the two sets.

    Each row's first nine fields are converted to float in place; a row
    goes to trainingSet with probability *split*, otherwise to testSet.

    Fixes: the original opened the file in 'rb' (a Python 2 habit that
    breaks csv.reader on Python 3 — it requires a text-mode file opened
    with newline=''), and iterated range(len(dataset) - 1), silently
    dropping the final row.
    """
    with open(filename, 'r', newline='') as csvfile:
        dataset = list(csv.reader(csvfile))
    for row in dataset:
        if not row:
            continue  # tolerate blank lines instead of crashing
        for y in range(9):
            row[y] = float(row[y])
        if random.random() < split:
            trainingSet.append(row)
        else:
            testSet.append(row)
def separate(dataset):
    """Group rows by their class label, which is the last field of each row."""
    separated = {}
    for vector in dataset:
        separated.setdefault(vector[-1], []).append(vector)
    return separated
def get_distance(instance1, instance2, length):
    """Euclidean distance over the first *length* attributes of two rows."""
    squares = ((instance1[i] - instance2[i]) ** 2 for i in range(length))
    return math.sqrt(sum(squares))
def priorProb(class_map, dataset):
    """Return the prior probability of each class label.

    *class_map* maps a class label to its list of rows (see separate());
    *dataset* supplies the total row count. Each prior is printed as a
    side effect, as in the original debug-heavy version.

    Fixes: the parameter shadowed the builtin `dict`, and the if/else on
    the label duplicated an identical computation in both branches; one
    loop does the same work.
    """
    total = len(dataset)
    prior = {}
    for label in class_map:
        prior[label] = float(len(class_map[label])) / float(total)
        print(prior[label])
    return prior
def get_neighbours(train, test, width):
    """Return training rows closer than *width* to *test*, nearest first.

    The last field of *test* is the class label, so only the first
    len(test) - 1 attributes enter the Euclidean distance.
    """
    length = len(test) - 1
    scored = []
    for row in train:
        dist = math.sqrt(sum((row[i] - test[i]) ** 2 for i in range(length)))
        scored.append((row, dist))
    scored.sort(key=operator.itemgetter(1))
    return [row for row, dist in scored if dist < width]
# Gaussian product kernel
def kernel(neighborsDic, testInstance, width):
    """Gaussian product-kernel density estimate per class.

    Parameters
    ----------
    neighborsDic : dict
        Class label -> list of neighbouring training rows (within *width*).
    testInstance : list
        Test row; its last element is the class label and is excluded
        from the distance computation.
    width : number
        Kernel bandwidth.

    Returns
    -------
    dict with keys "1" and "0" mapping to estimated densities
    (0 when a class has no neighbours).
    """
    difference1 = []
    difference0 = []
    length = len(testInstance) - 1
    n1 = 0
    n0 = 0
    # Collect (neighbour, distance) pairs separately per class.
    for k in neighborsDic:
        if (k == "1"):
            for neighbor in neighborsDic[k]:
                diff1 = get_distance(neighbor, testInstance, length)
                difference1.append((neighbor, diff1))
                n1 += 1
            print("difference is 1" + "\n")
            print(difference1)
            print(n1)
        else:
            for neighbor in neighborsDic[k]:
                diff0 = get_distance(neighbor, testInstance, length)
                difference0.append((neighbor, diff0))
                n0 += 1
            print("difference is 0" + "\n")
            print(difference0)
            print(n0)
    # Sum of Gaussian kernel values for each class.
    sum1 = 0
    sum0 = 0
    for d in difference1:
        sum1 += math.exp(-(math.pow(d[1] / width, 2)) / 2)
    print("sum1 is: " + "\n")
    print(sum1)
    for d in difference0:
        sum0 += math.exp(-(math.pow(d[1] / width, 2)) / 2)
    print("sum0 is: " + "\n")
    print(sum0)
    # Normalisation n * (2 * width^2 * pi)^(length/2).
    # NOTE(review): under Python 2, `length / 2` is integer division here;
    # confirm whether `length / 2.0` was intended.
    width2 = math.pow(width, 2)
    print("width2 is: ", width2)
    nValue1 = n1 * math.pow(2 * width2 * math.pi, length / 2)
    print("nValue1 is: ", nValue1)
    if (nValue1 == 0):
        product1 = 0
    else:
        product1 = 1 / nValue1 * sum1
    nValue0 = n0 * math.pow(2 * width2 * math.pi, length / 2)
    print("nValue0 is: ", nValue0)
    if (nValue0 == 0):
        product0 = 0
    else:
        product0 = 1 / nValue0 * sum0
    print(product1)
    print(product0)
    prob = {}
    prob["1"] = product1
    prob["0"] = product0
    return prob
# make prediction for the test set
def get_response(prior, prob, testInstance):
    """Classify *testInstance* from class priors and kernel likelihoods.

    Returns a one-element list [(testInstance, label)] where label is
    "1" or "0" depending on which normalised posterior is larger
    (ties go to "0").
    """
    posterior1 = prior["1"] * prob["1"]
    posterior0 = prior["0"] * prob["0"]
    total = posterior1 + posterior0
    if total == 0:
        # Nothing to normalise by; compare the raw (zero) masses.
        freq1, freq0 = posterior1, posterior0
    else:
        freq1, freq0 = posterior1 / total, posterior0 / total
    print(freq1)
    print(freq0)
    response = "0" if freq1 <= freq0 else "1"
    r = [(testInstance, response)]
    print(r)
    return r
if __name__ == "__main__":
    # --- load and randomly split the data 50/50 ---
    trainingSet = []
    testSet = []
    split = 0.5
    # data.csv is the dataset "South African Heart Disease";
    # however, it doesn't contain a header line and non-continous value such as famhist
    load_data('data.csv', split, trainingSet, testSet)
    print 'Train set: ' + repr(len(trainingSet))
    print 'Test set: ' + repr(len(testSet))
    # print trainingSet
    # --- estimate class priors from the training split ---
    separatedClass = separate(trainingSet)
    # print('separatedClass instances: {0}').format(separatedClass)
    prior = priorProb(separatedClass, trainingSet)
    # Kernel bandwidth for the Parzen-window estimate.
    width = 50
    # make prediction for each test set
    responses = []
    correct = 0
    print(testSet)
    for x in range(len(testSet)):
        neighbors = get_neighbours(trainingSet, testSet[x], width)
        print("neighbors" + "\n")
        print(neighbors)
        neighborsClass = separate(neighbors)
        print("neighborsClass" + "\n")
        print(neighborsClass)
        prob = kernel(neighborsClass, testSet[x], width)
        response = get_response(prior, prob, testSet[x])
        print("testSet[x] is:")
        print(testSet[x])
        # Last column of the row is the true class label.
        if (testSet[x][-1] == response[0][1]):
            correct += 1
        responses.append(response)
        print("responses is " + "\n")
        print(responses)
    accuracy = (correct / float(len(testSet))) * 100.0
    # LDA analysis: strip the class label off each row and fit sklearn's
    # LDA for comparison.  NOTE(review): pop() mutates the rows of
    # trainingSet/testSet in place.
    trainingSetX = []
    trainingSetY = []
    testSetX = []
    testSetY = []
    for trainx in trainingSet:
        trainy = trainx.pop()
        trainingSetX.append(trainx)
        trainingSetY.append(trainy)
    for testx in testSet:
        testy = testx.pop()
        testSetX.append(testx)
        testSetY.append(testy)
    X = np.array(trainingSetX)
    Y = np.array(trainingSetY)
    lda = LDA()
    lda.fit(X, Y)
    predict = lda.predict(testSetX)
    print(predict)
    testY = np.array(testSetY)
    print(testY)
    accuracyLDA = np.sum(predict == testY) / float(len(testSet)) * 100.0
    print "Naive Bayes Accuracy: " + str(accuracy)
    print "LDA Accuracy: " + str(accuracyLDA)
|
from src.utils.box_utils import iou, area, intersection, encode, batch_decode
from src.utils.nms import batch_non_max_suppression
|
import logging
import discord
from core import MemberDataController
from discord.ext import commands
from discord.ext.commands.errors import BadArgument
from main import SEBot
from utils.custom_errors import AlreadyLiked
from utils.utils import DefaultEmbed
from ..utils.converters import Reputation, InteractedMember
# Module-wide logger.  The misspelled name "loger" is kept as-is: other
# modules may reference it -- TODO confirm before renaming.
loger = logging.getLogger('Arctic')
class reputation(commands.Cog):
    """Commands for raising, lowering and resetting member reputation."""

    def __init__(self, bot: SEBot):
        self.bot = bot
        self.emoji = self.bot.config["additional_emoji"]["reputation"]

    async def cog_command_error(self, ctx, error):
        """Translate converter/argument errors into user-facing embeds."""
        if isinstance(error, commands.BadArgument):
            await ctx.message.delete()
            embed = DefaultEmbed(title="Reputation not changed",
                                 description=f"**Error**: {error}")
            await ctx.send(embed=embed)
        elif isinstance(error, commands.MissingRequiredArgument):
            await ctx.message.delete()
            if error.param.name == 'to_member':
                embed = DefaultEmbed(
                    title="Reputation not changed",
                    description=f"**Error**: user not specified")
            elif error.param.name == 'type':
                reason = ('Please enter the correct type'\
                          +'\n`+` — increase reputation'\
                          +'\n`-` — reduce reputation'\
                          +"\n`?` — reset")
                embed = DefaultEmbed(title="Reputation not changed",
                                     description=f"**Error**: {reason}")
            else:
                # Bug fix: previously any other missing argument left
                # `embed` unbound and ctx.send below raised NameError.
                embed = DefaultEmbed(title="Reputation not changed",
                                     description=f"**Error**: {error}")
            await ctx.send(embed=embed)

    @commands.command(aliases=['rep'])
    async def reputation(self, ctx, to_member: InteractedMember,
                         type: Reputation):
        """Change *to_member*'s reputation: 1 increase, -1 decrease, 0 reset."""
        await ctx.message.delete()
        member = MemberDataController(ctx.author.id)
        try:
            if type == 1:
                member.like(to_member)
                embed = discord.Embed(
                    title=f"{self.emoji['increased']} Reputation increased",
                    description=
                    f"{ctx.author.mention} increased {to_member.mention}'s reputation ",
                    color=discord.Colour.green())
            elif type == -1:
                member.dislike(to_member)
                embed = discord.Embed(
                    title=f"{self.emoji['decreased']} Reputation decreased",
                    description=
                    f"{ctx.author.mention} decreased {to_member.mention}'s reputation ",
                    color=discord.Colour.red())
            else:
                # Bug fix: the Reputation converter yields 1, -1 or 0;
                # treating the remaining case as reset avoids leaving
                # `embed` unbound (previously any value outside {1, -1, 0}
                # caused a NameError at ctx.send below).
                member.reset_like(to_member)
                embed = DefaultEmbed(
                    title=f"{self.emoji['reset']} Reset", description='Reputation successfully reset')
                embed.set_thumbnail(url=ctx.author.avatar_url)
        except AlreadyLiked:
            embed = DefaultEmbed(description="You can't do it twice")
        await ctx.send(embed=embed)
def setup(bot):
    # discord.py extension hook: register the cog when the extension loads.
    bot.add_cog(reputation(bot))
|
import re
import pytest
import treedb
@pytest.mark.parametrize('section, option, expected', [
    pytest.param('core', 'name', True,
                 id='section=core, option=name'),
    pytest.param('core', 'spam', False,
                 id='section=core, option=unknown'),
    pytest.param('spam', 'core', False,
                 id='section=unknown, option=unknown'),
])
def test_is_known(section, option, expected):
    # is_known() is True only for a recognised (section, option) pair.
    assert treedb.languoids.fields.is_known(section, option) == expected
@pytest.mark.parametrize('section, option, kwargs, expected', [
    pytest.param('core', 'name', {}, False,
                 id='section=core, option=name'),
    pytest.param('core', 'links', {}, True,
                 id='section=core, option=links'),
    pytest.param('core', 'WARNS_SCALAR', {}, (None, UserWarning, r'unknown'),
                 id='section=core, option=unknown'),
    pytest.param('core', 'RAISES_KEYERROR', {'unknown_as_scalar': False}, (KeyError, r'.+'),
                 id='section=core, option=unknown, strict'),
])
def test_is_lines(recwarn, section, option, kwargs, expected):
    """Check is_lines() result, warning, or exception per option.

    A tuple *expected* encodes either (result, warning_class, msg_regex)
    for the warn-and-return path, or (exception_class, msg_regex) for the
    strict raising path; a plain bool is simply the expected return value.
    """
    if isinstance(expected, tuple):
        if len(expected) == 3:
            expected, warning, match = expected
            assert treedb.languoids.fields.is_lines(section, option, **kwargs) == expected
            w = recwarn.pop(warning)
            assert re.search(match, str(w.message))
        else:
            exception, match = expected
            with pytest.raises(exception, match=match):
                treedb.languoids.fields.is_lines(section, option, **kwargs)
            recwarn.pop(UserWarning)
    else:
        assert treedb.languoids.fields.is_lines(section, option, **kwargs) == expected
    # No unexplained warnings may remain.
    assert not recwarn
@pytest.mark.parametrize('section, options, expected', [
    pytest.param('core',
                 ['eggs', 'iso639-3', 'name'],
                 ['name', 'iso639-3', 'eggs'],
                 id='section=core, options=eggs-iso639-3-name'),
])
def test_sorted_options(section, options, expected):
    # Known options appear to be ordered canonically with unknown ones
    # ('eggs') last -- inferred from the fixture, TODO confirm.
    assert treedb.languoids.fields.sorted_options(section, options) == expected
|
from os import environ
from unittest.mock import call, patch
import pytest
from pytest import raises
from vang.nexus3.publish import get_pom_publish_name
from vang.nexus3.publish import get_publish_data
from vang.nexus3.publish import main
from vang.nexus3.publish import parse_args
from vang.nexus3.publish import publish_maven_artifact
@pytest.mark.parametrize("params, expected",
                         [(
                             (
                                 '/foo/bar/business.baz-1.0.0-SNAPSHOT.pom',
                                 'business.baz',
                                 '1.0.0-SNAPSHOT',
                             ),
                             'business.baz-1.0.0-SNAPSHOT.pom',
                         ),
                          (
                              (
                                  '/foo/bar/pom.xml',
                                  'business.baz',
                                  '1.0.0-SNAPSHOT',
                              ),
                              'business.baz-1.0.0-SNAPSHOT.pom',
                          )])
def test_get_pom_publish_name(params, expected):
    # Both an already-named pom and a generic pom.xml map to
    # "<artifact_id>-<version>.pom".
    assert expected == get_pom_publish_name(*params)
def test_get_publish_data():
    # The publish data pairs the local file path with its repository path
    # (base uri joined with the publish name).
    assert {
        'file_path': '/foo/bar/foo.pom',
        'repository_path': '/repo/com/foo/bar/business.baz/1.0.0-SNAPSHOT/business.baz-1.0.0-SNAPSHOT.pom',
    } == get_publish_data(
        '/repo/com/foo/bar/business.baz/1.0.0-SNAPSHOT',
        '/foo/bar/foo.pom',
        'business.baz-1.0.0-SNAPSHOT.pom',
    )
@patch('vang.nexus3.publish.upload')
@patch('vang.nexus3.publish.glob')
@patch('vang.nexus3.publish.get_pom_publish_name')
@patch('vang.nexus3.publish.get_publish_data')
@patch('vang.nexus3.publish.get_artifact_base_uri')
@patch('vang.nexus3.publish.get_pom_info')
@patch('vang.nexus3.publish.get_pom_path')
def test_publish_maven_artifact(
    # Mock arguments are in reverse decorator order (bottom-most first).
    mock_get_pom_path,
    mock_get_pom_info,
    mock_get_artifact_base_uri,
    mock_get_publish_data,
    mock_get_pom_publish_name,
    mock_glob,
    mock_upload,
):
    """publish_maven_artifact uploads pom, jar and war per directory.

    Two dirs x (pom + one jar + one war) with upload always returning
    201 yields two [201, 201, 201] result lists.
    """
    mock_get_pom_path.return_value = 'pom_path'
    mock_get_pom_info.return_value = {
        'pom_path': 'pom_path',
        'artifact_id': 'artifact_id',
        'group_id': 'parent_group_id',
        'version': 'parent_version',
        'packaging': 'packaging',
    }
    mock_get_artifact_base_uri.return_value = 'base_uri'
    mock_get_publish_data.return_value = {
        'file_path': 'file_path',
        'repository_path': 'repository_path'
    }
    mock_get_pom_publish_name.return_value = 'pom_publish_name'
    # Two glob calls per directory: one for jars, one for wars.
    mock_glob.side_effect = [['foo.jar'], ['bar.war']] * 2
    mock_upload.return_value = 201
    assert [[201, 201, 201],
            [201, 201, 201]] == list(publish_maven_artifact('repository', ['d1', 'd2'], 'url', 'username', 'password'))
@patch('vang.nexus3.publish.print')
@patch('vang.nexus3.publish.publish_maven_artifact')
def test_main(mock_publish_maven_artifact, mock_print):
    # main() forwards its arguments verbatim and prints each status code.
    mock_publish_maven_artifact.return_value = [201]
    main('repository', ['d1', 'd2'], 'url', 'username', 'password')
    assert [call('repository', ['d1', 'd2'], 'url', 'username', 'password')] == mock_publish_maven_artifact.mock_calls
    assert [call(201)] == mock_print.mock_calls
@pytest.mark.parametrize("args", [
    '',
    '1 2',
])
def test_parse_args_raises(args):
    # Missing or surplus positional arguments make argparse exit.
    with raises(SystemExit):
        parse_args(args.split(' ') if args else args)
@pytest.mark.parametrize("args, expected", [
    ['repository', {
        'repository': 'repository',
        'dirs': ['.'],
        'url': 'url',
        'username': 'username',
        'password': 'password',
    }],
    [
        'repository -d d1 d2',
        {
            'repository': 'repository',
            'dirs': ['d1', 'd2'],
            'url': 'url',
            'username': 'username',
            'password': 'password',
        }
    ],
])
def test_parse_args_valid(args, expected):
    # url/username/password default from NEXUS3_* environment variables;
    # dirs defaults to ['.'].
    with patch.dict(environ, {
        'NEXUS3_REST_URL': 'url',
        'NEXUS3_USERNAME': 'username',
        'NEXUS3_PASSWORD': 'password',
    }, clear=True):
        assert expected == parse_args(args.split(' ') if args else '').__dict__
|
#!/usr/bin/env python2
# -- coding: utf-8 --
import requests
import json
import utils
# MuckRock API endpoint and auth headers.
url = utils.API_URL
token = utils.get_api_key()
headers = utils.get_headers(token)
user = "NYWFOIL" #<--- Put username here. Case sensitive.
page = 1
next_ = url+"foia/?user="+user
# Walk this user's paginated FOIA request list; the API returns 20
# results per page and a 'next' URL (None on the last page).
while next_ is not None:
    print "URL I'm using is " + next_
    r = requests.get(next_, headers=headers)
    print "hey rob. check out " + str(r)
    try:
        print "Getting that sweet JSON"
        json_data = r.json()
        print 'Page %d of %d' % (page, json_data['count'] / 20 + 1)
        next_ = json_data['next']
        for request in json_data['results']:
            reqNumber = request["id"]
            # NOTE(review): this GET's result is immediately overwritten
            # by the PATCH below -- the fetch appears to be dead code.
            editedRequest = requests.get(url + 'foia/%s/' % str(reqNumber), headers=headers)
            print "Embargoing request number " + str(reqNumber) + " for " + request["title"] + " filed by " + request["username"]
            # NOTE(review): 'embargo': False actually REMOVES the embargo,
            # despite the "Embargoing" messages -- confirm intent.
            data = json.dumps({
                'embargo': False,
                'date_embargo': None, #Removes the embargo date to make sure it actually embargos.
            })
            editedRequest = requests.patch(url + 'foia/%d/' % reqNumber, headers=headers, data=data)
            print "Request Embargoed."
            if editedRequest.status_code != 200:
                print '*In Ron Burgendy Voice:* Error? ', editedRequest.status_code, r.text
        page += 1
    except Exception as e:
        print e
        # NOTE(review): if the failure happens before editedRequest is
        # first assigned, this line itself raises NameError.
        print 'Error! ', editedRequest.status_code, ": ", editedRequest.text
|
import requests
from requests import ConnectionError
import pika
import re
import sys
import time
import json
import HFRequests
import os
import math
import pandas as pd
import docker
# Marker text (unused here) signalling a finished job -- TODO confirm use.
ready_response_text = 'Done!'
controller = 'chainfaas.com'
# controller = 'chainfaas.sara-dev.com'
# controller_temp = 'chainfaas.sara-dev.com'
# controller = 'localhost'
# controller_temp = 'localhost:8080'
# controller_short = '127.0.0.1'
# CLI arguments: credentials plus the resources this provider offers.
username = sys.argv[1]
password = sys.argv[2]
CPU = sys.argv[3]
RAM = sys.argv[4]
# REST endpoints on the controller.
LOGIN_URL = 'https://' + controller + "/profiles/user_login/"
PROVIDER_URL = 'https://' + controller + "/provider/"
READY_URL = 'https://' + controller + "/provider/ready"
NOT_READY_URL = 'https://' + controller + "/provider/not_ready"
ACK_URL = 'https://' + controller + "/provider/job_ack?job="
# RabbitMQ password convention "<username>_mqtt" -- TODO confirm server side.
rabbitmq_password = username + '_mqtt'
# Hyperledger Fabric channel/chaincode used for time reporting.
channelName = "mychannel"
chaincodeName = "monitoring"
token = ""
# Docker client bound to the local daemon; all jobs run under one fixed
# container name so it can be found and removed afterwards.
client = docker.from_env()
container_name = 'provider_container_1'
def run_docker(body):
    """Pull and run the docker image named by *body*.

    Returns (container stdout as str, pull time in ms, run time in ms).
    """
    pull_started = time.time()
    client.images.pull(body)
    print("Pull done!")
    pull_time = int((time.time() - pull_started) * 1000)
    run_started = time.time()
    output = client.containers.run(body, name=container_name).decode("utf-8")
    print("Run done!")
    print(output)
    run_time = int((time.time() - run_started) * 1000)
    return output, pull_time, run_time
def delete_container_and_image(body):
    """Remove the finished job container and its image to free disk space."""
    matches = client.containers.list(all=True, filters={'name': container_name})
    matches[0].remove()
    client.images.remove(body)
def HF_set_time(job_code, t_time):
    """Record a job's total time on the Fabric chaincode.

    Re-registers the user and retries once if the response indicates an
    expired/invalid token.
    """
    global token
    response = HFRequests.invoke_set_time(token, channelName, chaincodeName, 'org2', job_code, t_time)
    auth_errors = ('jwt expired', 'jwt malformed', 'User was not found', 'UnauthorizedError')
    if any(err in response.text for err in auth_errors):
        token = HFRequests.register_user(username, 'Org2')
        response = HFRequests.invoke_set_time(token, channelName, chaincodeName, 'org2', job_code, t_time)
    return response
def on_request(ch, method, props, body):
    """RabbitMQ consumer callback: run the requested docker job and reply.

    *body* is either the JSON string '"Stop"' (shut this worker down) or
    a JSON object with keys 'job' (id used for the controller ack) and
    'task' (docker image name).  Publishes a JSON result message to
    props.reply_to when done.
    """
    global token
    print(body)
    if body.decode("utf-8") == '"Stop"':
        # Controller asked us to stop: ack, drop queued work, close channel.
        ch.basic_ack(delivery_tag=method.delivery_tag)
        ch.queue_purge(username)
        ch.close()
        return
    print("Received a request, running docker file.")
    body_dict = json.loads(body.decode("utf-8"))
    print((ACK_URL + str(body_dict['job'])))
    # Tell the controller we accepted the job and are now busy.
    s.get(ACK_URL + str(body_dict['job']))
    s.get(NOT_READY_URL)
    print("Before run docker")
    r, pull_time, run_time = run_docker(body_dict['task'])
    # Round the total up to the next 100 ms.
    total_time = math.ceil(((pull_time + run_time)/100.0))*100
    print(pull_time, run_time, total_time)
    temp = {'Result': r, 'pull_time': pull_time, 'run_time': run_time, 'total_time': total_time}
    mqtt_response = json.dumps(temp)
    ch.basic_publish(exchange='',
                     routing_key=props.reply_to,
                     properties=pika.BasicProperties(correlation_id=props.correlation_id),
                     body=str(mqtt_response))
    ch.basic_ack(delivery_tag=method.delivery_tag)
    print('sent back the results')
    time.sleep(3)
    # HF_resp = HF_set_time(str(body_dict['job']), str(total_time))
    # while (json.loads(HF_resp.text)['success'] == False):
    # HF_resp = HF_set_time(str(body_dict['job']), str(total_time))
    # if "The time for this job is already set and can't be changed." in json.loads(HF_resp.text)['message']:
    # break
    delete_container_and_image(body_dict['task'])
# --- module-level startup: login, advertise resources, consume job queue ---
s = requests.Session()
print(username)
data = {'username': username,
        'password': password}
response = s.post(url=LOGIN_URL, data=data)
print('Login response status', response.status_code)
response = s.get(PROVIDER_URL)
# Only (re-)post resources when the provider page does not already show a
# "stop" control -- presumably meaning we are not yet registered, TODO confirm.
if re.search('stop', response.text) is None:
    data = {'ram': RAM, 'cpu': CPU}
    response = s.post(url=PROVIDER_URL, data=data)
    print('Ready response status', response.status_code)
credentials = pika.PlainCredentials(username, rabbitmq_password)
connection = pika.BlockingConnection(
    pika.ConnectionParameters(controller, 5672, credentials=credentials))
print("You logged in to RabbitMQ!")
channel = connection.channel()
channel.queue_declare(queue=username)
# Process one job at a time.
channel.basic_qos(prefetch_count=1)
channel.basic_consume(queue=username, on_message_callback=on_request)
while channel.is_open:
    # with requests.Session() as session:
    try:
        print(" [x] Awaiting RPC requests")
        # NOTE(review): channel._impl and _process_data_events are pika
        # private APIs and may break across pika versions.
        channel._impl._raise_if_not_open()
        while channel._consumer_infos:
            # This will raise ChannelClosed if channel is closed by broker
            # Heartbeat the controller's "ready" endpoint between polls.
            r = s.get(READY_URL)
            channel._process_data_events(time_limit=30)
            print(" [x] Awaiting RPC requests")
    except ConnectionError:
        continue
|
# Package public API: re-export the user-facing classes from their
# implementation modules so callers can import them from the package root.
from .main import Auth
from .jwt import JWT
from .settings import Alerts
from .settings import BaseConfig
from .settings import TemplateConfig
from .schemas import User
__all__ = ["Auth", "JWT", "Alerts", "BaseConfig", "TemplateConfig", "User"]
|
import os.path as path
import os
import sqlite3
import argparse
from datetime import datetime, timedelta
def int_to_bytes(x: int) -> bytes:
    """Encode a non-negative int as minimal big-endian bytes (0 -> b'')."""
    nbytes = (x.bit_length() + 7) // 8
    return x.to_bytes(nbytes, 'big')
def int_from_bytes(xbytes: bytes) -> int:
    """Decode big-endian bytes into an int (b'' decodes to 0)."""
    return int.from_bytes(xbytes, byteorder='big')
# Data directory: one file per day holding total seconds worked (as text),
# plus a "current_time" marker file while a session is being tracked.
folder_path = path.join(os.environ["HOME"], ".pteco_time_keeping")
if not path.exists(folder_path):
    os.mkdir(folder_path)
def start():
    """Begin a tracking session by writing the current unix timestamp.

    Refuses if a session marker already exists.  NOTE(review): exits
    with status 0 even on that refusal -- confirm if non-zero intended.
    """
    if path.exists(path.join(folder_path, "current_time")):
        print("\033[33mAlready started tracking!")
        exit(0)
    with open(path.join(folder_path, "current_time"), "wb") as fp:
        fp.write(int_to_bytes(int(datetime.now().timestamp())))
    print("\033[33mStarted tracking time!")
def current():
    """Print how long the currently running session has lasted."""
    if path.exists(path.join(folder_path, "current_time")):
        with open(path.join(folder_path, "current_time"), "rb") as fp:
            ts = int_from_bytes(fp.read())
        # Elapsed time since the stored session-start timestamp.
        current_dt = datetime.fromtimestamp(float(ts))
        currently_worked: timedelta = datetime.now() - current_dt
        print(
            "\033[33mCurrently worked time: \033[34m{}\033[33m.".format(currently_worked))
    else:
        print("\033[33mNo time keep started. Run \033[35mptcime start\033[33m.")
        exit(0)
def stop():
    """End the running session and add its duration to today's total.

    Daily totals live in one file per date containing total seconds as
    text; the session marker file is removed afterwards.
    """
    if path.exists(path.join(folder_path, "current_time")):
        with open(path.join(folder_path, "current_time"), "rb") as fp:
            ts = int_from_bytes(fp.read())
        current_dt = datetime.fromtimestamp(float(ts))
        currently_worked: timedelta = datetime.now() - current_dt
        print(
            "\033[33mCurrently worked time: \033[34m{}\033[33m.".format(currently_worked))
        # Merge this session into today's stored total (if any).
        if path.exists(path.join(folder_path, str(datetime.now().date()))):
            with open(path.join(folder_path, str(datetime.now().date())), "r") as fp:
                day_dt = timedelta(seconds=float(fp.read()))
        else:
            day_dt = timedelta()
        day_dt += currently_worked
        print(
            "\033[33mDaily worked time: \033[34m{}\033[33m.".format(day_dt))
        with open(path.join(folder_path, str(datetime.now().date())), "w") as fp:
            fp.write(str(day_dt.total_seconds()))
        os.remove(path.join(folder_path, "current_time"))
    else:
        print("\033[33mNo time keep started. Run \033[35mptcime start\033[33m.")
        exit(0)
def week():
    """Print per-day totals for the last 7 days plus the grand total.

    The still-running session, if any, is included in the total line.
    """
    total_time = timedelta()
    for i in range(7):
        current_date = (datetime.now() - timedelta(days=i)).date()
        if path.exists(path.join(folder_path, str(current_date))):
            with open(path.join(folder_path, str(current_date)), "r") as fp:
                day_dt = timedelta(seconds=float(fp.read()))
            print("\033[35m{}\033[33m: \033[34m{}".format(
                current_date, day_dt))
            total_time += day_dt
    currently_worked = timedelta()
    if path.exists(path.join(folder_path, "current_time")):
        with open(path.join(folder_path, "current_time"), "rb") as fp:
            ts = int_from_bytes(fp.read())
        current_dt = datetime.fromtimestamp(float(ts))
        currently_worked: timedelta = datetime.now() - current_dt
    print("\033[35mTotal time\033[33m: \033[34m{}".format(
        total_time + currently_worked))
def day():
    """Print today's total worked time, including any running session."""
    if path.exists(path.join(folder_path, str(datetime.now().date()))):
        with open(path.join(folder_path, str(datetime.now().date())), "r") as fp:
            day_dt = timedelta(seconds=float(fp.read()))
    else:
        day_dt = timedelta()
    currently_worked = timedelta()
    if path.exists(path.join(folder_path, "current_time")):
        with open(path.join(folder_path, "current_time"), "rb") as fp:
            ts = int_from_bytes(fp.read())
        current_dt = datetime.fromtimestamp(float(ts))
        currently_worked: timedelta = datetime.now() - current_dt
    print(
        "\033[33mDaily worked time: \033[34m{}\033[33m.".format(day_dt + currently_worked))
def toggle():
    """Stop tracking if a session is running, otherwise start one."""
    marker = path.join(folder_path, "current_time")
    if path.exists(marker):
        stop()
    else:
        start()
|
"""Generate gif from physics in Spriteworld config.
This script runs the environment on a config and writes a video of the image
observations.
To run this script a config, run this on the config:
```bash
python generate_gif.py --config=$path_to_task_config$
```
If the config's colors are defined in HSV space, add the flag
`--hsv_colors=True`.
If you would like to use a mode other than "train", add the flag
`--mode=$mode$`.
"""
# pylint: disable=import-error
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl import app
from absl import flags
from absl import logging
import imageio
import importlib
import os
from spriteworld import renderers
from spriteworld_physics import physics_environment
FLAGS = flags.FLAGS
flags.DEFINE_string('config', 'spriteworld_physics.configs.collisions',
                    'Module name of task config to use.')
# Fixed a stray ']' that was left at the end of this help string.
flags.DEFINE_string('mode', 'train', 'Mode, "train" or "test".')
flags.DEFINE_boolean('hsv_colors', True,
                     'Whether the config uses HSV as color factors.')
flags.DEFINE_integer('render_size', 256,
                     'Height and width of the output image.')
flags.DEFINE_integer('anti_aliasing', 10, 'Renderer anti-aliasing factor.')
flags.DEFINE_integer(
    'num_episodes', 5, 'Number of episodes to run for the gif.')
flags.DEFINE_float('fps', 8, 'Number of frames per second for the gif.')
flags.DEFINE_string(
    'gif_path_head',
    '~/Desktop/grad_school_research/shaul_spriteworld/gifs',
    'Head of the file path to write the gif to.')
flags.DEFINE_string(
    'gif_path_tail',
    'collisions.gif',
    'Tail of the file path to write the gif to.')
def main(_):
    """Run the configured environment and write its frames out as a gif."""
    logging.info('Generating gif for config {}'.format(FLAGS.config))
    gif_path = os.path.join(FLAGS.gif_path_head, FLAGS.gif_path_tail)
    if gif_path[0] == '~':
        gif_path = os.path.join(os.path.expanduser('~'), gif_path[2:])
    if os.path.isfile(gif_path):
        # Bug fix: the prompt previously printed a literal '{}' because the
        # format argument was never supplied.
        should_continue = input('Path {} to write gif to already exists. '
                                'Overwrite the existing file? (y/n)'.format(gif_path))
        if should_continue != 'y':
            logging.info('You pressed {}, not "y", so terminating '
                         'program.'.format(should_continue))
            return
        else:
            logging.info('You pressed "y". Overwriting existing file.')

    # Load and adjust environment config
    config = importlib.import_module(FLAGS.config)
    config = config.get_config(FLAGS.mode)
    config['renderers'] = {
        'image':
            renderers.PILRenderer(
                image_size=(FLAGS.render_size, FLAGS.render_size),
                color_to_rgb=renderers.color_maps.hsv_to_rgb
                if FLAGS.hsv_colors else None,
                anti_aliasing=FLAGS.anti_aliasing),
    }
    env = physics_environment.PhysicsEnvironment(**config)

    # Run the environment in a loop, collecting one image per step until
    # the requested number of episodes has finished.
    duration_per_frame = 1. / FLAGS.fps
    timestep = env.reset()
    images = []
    episodes_generated = 0
    while episodes_generated < FLAGS.num_episodes:
        images.append(timestep.observation['image'])
        if timestep.last():
            episodes_generated += 1
            logging.info('Generated {} of {} episodes'.format(
                episodes_generated, FLAGS.num_episodes))
        timestep = env.step()
    logging.info('Writing gif to file {}'.format(gif_path))
    imageio.mimsave(gif_path, images, duration=duration_per_frame)
# absl entry point: parses the flags above, then calls main.
if __name__ == '__main__':
    app.run(main)
|
from setuptools import setup, find_packages
# setuptools packaging metadata; the "tamade" console script maps to
# tamade.command_line:main.
setup(
    name="tamade",
    version="0.1.0",
    description="Get PHP settings from c source code.",
    long_description="""
tamade is a simple tool that will grab all the php ini settings from source code.
Source code: https://github.com/mike820324/tamade
Documentation: https://github.com/mike820324/tamade/blob/master/README.md
""",
    url="https://github.com/mike820324/tamade",
    author="MicroMike",
    author_email="mike820324@gmail.com",
    license="MIT",
    classifiers=[
        "License :: OSI Approved :: MIT License",
        "Development Status :: 3 - Alpha",
        "Intended Audience :: Developers",
        "Operating System :: POSIX",
        "Operating System :: MacOS",
        "Programming Language :: PHP",
        "Programming Language :: Python :: Implementation :: CPython",
        "Programming Language :: Python :: Implementation :: PyPy",
        "Programming Language :: Python",
        "Programming Language :: Python :: 2",
        "Programming Language :: Python :: 2.7",
        "Topic :: Utilities"
    ],
    keywords=["php", "parsing", "ini"],
    # NOTE(review): include=["tamade"] matches only the top-level package,
    # not subpackages ("tamade.*") -- confirm none are needed.
    packages=find_packages(include=["tamade"]),
    include_package_data=True,
    entry_points={
        "console_scripts": [
            "tamade=tamade.command_line:main"
        ]
    }
)
|
from schema import And
class DATAGRABMODE:
    """Enum-like constants selecting how demo data is grabbed.

    NONE: no data grabbing; EVENTS: from the events file (see EVENT_FILE
    below); JSON: from json data -- exact source not visible here,
    TODO confirm.
    """
    NONE = 0
    EVENTS = 1
    JSON = 2
# Folder and file name where the program's configuration is stored.
CFG_FOLDER = ".demomgr"
CFG_NAME = "config.cfg"
# Configuration written on first run; keys mirror DEFAULT_CFG_SCHEMA below.
DEFAULT_CFG = {
    "datagrabmode": 0,
    "date_format": "%d.%m.%Y %H:%M:%S",
    "demopaths": [],
    "evtblocksz": 65536,
    "__comment": "By messing with the firstrun parameter you acknowledge "
        "the disclaimer :P",
    "firstrun": True,
    "hlaepath": "",
    "lastpath": "",
    "lazyreload": False,
    "previewdemos": True,
    "rcon_port": 27015,
    "rcon_pwd": "",
    "steampath": "",
    "ui_remember": {
        "bookmark_setter": [],
        "launch_tf2": [],
        "settings": [],
    },
    "ui_theme": "Dark",
}
# schema-library validation spec for the config; demopaths entries must be
# non-empty strings.
DEFAULT_CFG_SCHEMA = {
    "datagrabmode": int,
    "date_format": str,
    "demopaths": [And(str, lambda x: x != "")],
    "evtblocksz": int,
    "__comment": str,
    "firstrun": bool,
    "hlaepath": str,
    "lastpath": str,
    "lazyreload": bool,
    "previewdemos": bool,
    "rcon_port": int,
    "rcon_pwd": str,
    "steampath": str,
    "ui_remember": {
        "bookmark_setter": [object], "launch_tf2": [object],
        "settings": [object],
    },
    "ui_theme": str,
}
# First-run greeting / disclaimer text.
WELCOME = (
    "Hi and Thank You for using Demomgr!\n\nA config file has been "
    "created.\n\nThis program is able to delete files if you tell it to.\n"
    "I can't guarantee that Demomgr is 100% safe or reliable and will not take any "
    "responsibility for lost data, damaged drives and/or destroyed hopes and dreams.\n"
    "This program is licensed via the MIT License."
)
EVENT_FILE = "_events.txt"
# Selectable strftime formats (see "date_format" in the config).
DATE_FORMATS = (
    "%d.%m.%Y %H:%M:%S",
    "%d.%m.%Y",
    "%m/%d/%Y %H:%M:%S",
    "%m/%d/%Y",
)
# Extracts the demo file name from an events-file line: text between
# ` ("` and `" at`.
EVENTFILE_FILENAMEFORMAT = "(?<= \\(\").+(?=\" at)" #regex
EVENTFILE_BOOKMARK = "Bookmark"
EVENTFILE_KILLSTREAK = "Killstreak"
STATUSBARDEFAULT = "Ready."
# Paths (relative to the Steam dir) and arguments for launching TF2.
TF2_EXE_PATH = "steamapps/common/team fortress 2/hl2.exe"
TF2_HEAD_PATH = "steamapps/common/team fortress 2/tf/"
TF2_LAUNCHARGS = ["-steam", "-game", "tf"]
# HLAE launcher executable, hook dll and argument groups.
HLAE_EXE = "hlae.exe"
HLAE_HOOK_DLL = "AfxHookSource.dll"
HLAE_LAUNCHARGS0 = ["-customLoader", "-noGui", "-autoStart", "-hookDllPath"]
HLAE_LAUNCHARGS1 = ["-programPath"]
HLAE_LAUNCHARGS2 = ["-cmdLine"]
HLAE_ADD_TF2_ARGS = ["-insecure", "+sv_lan", "1"]
STEAM_CFG_PATH0 = "userdata/"
STEAM_CFG_PATH1 = "config/localconfig.vdf"
# Unicode replacement character, U+FFFD.
REPLACEMENT_CHAR = "\uFFFD"
# Key chain into Steam's localconfig.vdf leading to TF2's (appid 440)
# launch options.
LAUNCHOPTIONSKEYS = (
    "UserLocalConfigStore", "Software", "Valve", "Steam", "Apps", "440", "LaunchOptions",
)
STEAM_CFG_USER_NAME = '["UserLocalConfigStore"]["friends"]["PersonaName"]'
GUI_UPDATE_WAIT = 30 # Setting this to lower values might lock the UI, use with care.
THEME_SUBDIR = "ui_themes"
# 0: tcl file, 1: theme name, 2: resource dir name
THEME_PACKAGES = {
    "Dark": ("dark.tcl", "demomgr_dark", "dark"),
}
# Demo header values used as a fallback when a header cannot be parsed.
FALLBACK_HEADER = {
    "dem_prot": 3, "net_prot": 24, "hostname": "",
    "clientid": "", "map_name": "", "game_dir": "", "playtime": 0,
    "tick_num": 0, "framenum": 0, "tickrate": 0,
}
# Human-readable labels for demo header fields.
HEADER_HUMAN_NAMES = {
    "hostname": "Hostname", "clientid": "Playername",
    "map_name": "Map", "playtime": "Playtime",
    "tick_num": "No. of ticks", "game_dir": "Game directory",
}
|
from comvest.extract_courses import extrair_cursos
from comvest.extract_courses import dict_cursos
def main():
    # Scrape the course list first, then build the course dictionary
    # from the extracted data.
    extrair_cursos.extraction()
    dict_cursos.get()
# Script entry point.
if __name__ == '__main__':
    main()
|
# -*- coding: utf-8 -*-
import tensorflow.compat.v1 as tf
import numpy as np
# Module-wide float dtype for all tensors.
T = tf.float64
# Constant log(2*pi) cast to T, reused by the likelihood routines below.
log2pi = tf.cast(tf.math.log(2.0 * np.pi), T)
def int_log2pi(k):
    """Return k * log(2*pi) as a tensor of dtype T."""
    factor = tf.cast(tf.math.log(2.0 * np.pi), T)
    return factor * tf.cast(k, T)
def loggaussian(x, mean=tf.cast(0.0, T), std=tf.cast(1.0, T)):
    """Sum of independent Gaussian log-densities over the last axis."""
    z = (x - mean) / std
    per_dim = tf.square(z) + log2pi + 2 * tf.math.log(std)
    return -0.5 * tf.reduce_sum(per_dim, axis=-1)
def lnlike_ellflatpriormarginalized_multiple_withprior(
    y,  # (..., dy)
    yvar,  # (..., dy)
    mods_T,  # (..., dt, dy),
    mods_mean,  # (..., dt), mu
    mods_var,  # (..., dt), lambda
    expand_dims,  # (..., dt, dt), lambda
    compute_marglike=True,
):
    """Marginal log-likelihood of a linear model with a Gaussian prior.

    Like ``lnlike_ellflatpriormarginalized_multiple`` but with a Gaussian
    prior N(mods_mean, diag(mods_var)) on the linear coefficients.

    NOTE(review): ``compute_marglike`` is currently unused -- the marginal
    likelihood is always computed.

    Returns
    -------
    LnMarglike : tensor (...)
        log marginal likelihood values
    mu : tensor (..., dt)
        posterior mean of the linear coefficients
    """
    nmod = tf.shape(mods_mean)[-1]
    ndata = tf.shape(y)[-1]
    mods = tf.transpose(mods_T, [0, 1, 3, 2])  # tf.einsum("...ij->...ji", mods_T)
    # Diagonal prior precision; expand_dims selects/scales diagonal entries.
    t = expand_dims * tf.eye(nmod, dtype=T)[None, None, :, :]
    mods_covar_inv = t * tf.expand_dims(1 / mods_var, -1)
    # Posterior precision: prior precision + inverse-variance-weighted X^T X.
    A = mods_covar_inv + tf.matmul(mods_T, mods / yvar[..., :, None])  # (..., dt, dt)
    # Residual of the data against the prior-mean prediction.
    ymodprior = tf.reduce_sum(mods_T * tf.expand_dims(mods_mean, -1), -2)
    d = y - ymodprior
    e = tf.reduce_sum(mods_T * (d / yvar)[..., None, :], axis=-1)
    Ainve = tf.linalg.solve(A, e[..., None])[..., 0]  # (..., dt)
    chi2 = tf.reduce_sum(d * d / yvar, axis=-1) - tf.reduce_sum(e * Ainve, axis=-1)
    # slogdet sign `s` is unused; A is expected positive definite.
    s, logdetA = tf.linalg.slogdet(A)
    ldets = (
        logdetA
        + tf.reduce_sum(tf.math.log(mods_var), axis=-1)
        + tf.reduce_sum(tf.math.log(yvar), axis=-1)
    )
    scalar = tf.cast(ndata, T) * log2pi
    LnMarglike = -0.5 * (scalar + ldets + chi2)
    # Posterior mean mu = A^{-1} (mean/var + X^T y / yvar).
    eta = mods_mean / mods_var + tf.reduce_sum(
        mods_T * (y / yvar)[..., None, :], axis=-1
    )  # (..., dt)
    mu = tf.linalg.solve(A, eta[..., None])[..., 0]  # (..., dt)
    return LnMarglike, mu
def lnlike_ellflatpriormarginalized(
    F_obs,  # (nobj, ..., numBands)
    F_obs_var,  # (nobj, ..., numBands)
    F_mod,  # (nobj, ..., numBands)
):
    """
    Fit linear model to one Gaussian data set (formulation 3)
    Parameters
    ----------
    F_obs, F_obs_var : ndarray (nobj, ..., n_pix_y)
        data and data variances
    F_mod : ndarray (..., n_components, n_pix_y)
        design matrix of linear model
    Returns
    -------
    logfml : ndarray (nobj, )
        log likelihood values with parameters marginalised and at best fit
    ellML : ndarray (nobj, ndim)
        Best fit MAP parameters
    """
    # Inverse-variance-weighted inner products: model*data, data*data,
    # model*model.
    FOT = tf.reduce_sum(F_mod * F_obs / F_obs_var, axis=-1)  # (nobj, ..., )
    FOO = tf.reduce_sum(tf.square(F_obs) / F_obs_var, axis=-1)  # (nobj, ..., )
    FTT = tf.reduce_sum(tf.square(F_mod) / F_obs_var, axis=-1)  # (nobj, ..., )
    LogSigma_det = tf.reduce_sum(tf.math.log(F_obs_var), axis=-1)  # (nobj, ..., )
    # Chi^2 with the scalar amplitude profiled out analytically.
    Chi2 = FOO - tf.multiply(tf.divide(FOT, FTT), FOT)  # (nobj, ..., )
    LogDenom = LogSigma_det + tf.math.log(FTT)
    LnMarglike = -0.5 * Chi2 - 0.5 * LogDenom  # (nobj, ..., )
    # Maximum-likelihood amplitude.
    ellML = FOT / FTT
    return LnMarglike, ellML
def lnlike_ellflatpriormarginalized_multiple(
    y, yinvvar, mods  # (..., dy) # (..., dy) # (..., dt, dy)
):
    """
    Fit linear model to one Gaussian data set (formulation 1)
    Parameters
    ----------
    y, yinvvar : ndarray (nobj, ..., n_pix_y)
        data and data inverse variances
    M_T : ndarray (..., n_components, n_pix_y)
        design matrix of linear model
    Returns
    -------
    logfml : ndarray (nobj, )
        log likelihood values with parameters marginalised and at best fit
    theta_map : ndarray (nobj, ndim)
        Best fit MAP parameters
    theta_cov : ndarray (nobj, ndim, ndim)
        Parameter covariance
    """
    # eta = X^T W y ; H = X^T W X with W = diag(yinvvar).
    eta = tf.reduce_sum(mods * (y * yinvvar)[..., None, :], axis=-1)  # (..., dt)
    H = tf.matmul(
        mods, tf.transpose(mods * yinvvar[..., None, :], [0, 1, 3, 2])
    )  # (..., dt, dt)
    mu = tf.linalg.solve(H, eta[..., None])[..., 0]  # (..., dt)
    etaHinveta = tf.reduce_sum(eta * mu, axis=-1)  # (..., )
    yyvarinvy = tf.reduce_sum(y * y * yinvvar, axis=-1)  # (..., )
    # Zero inverse variances mark missing pixels: excluded from the
    # log-determinant via the tf.where guard.
    dets = tf.linalg.logdet(H) - tf.reduce_sum(
        tf.where(yinvvar > 0, tf.math.log(yinvvar), yinvvar * 0), axis=-1
    )
    scalar = tf.cast(tf.shape(mods)[-1] - tf.shape(mods)[-2], T) * log2pi
    LnMarglike = -0.5 * (scalar + dets + yyvarinvy - etaHinveta)
    covar = tf.linalg.inv(H)
    return LnMarglike, mu, covar
def logmarglike_onetransfergaussian(y, yinvvar, M_T):  # (..., dy) # (..., dy)
    """
    Fit linear model to one Gaussian data set (formulation 2).

    Parameters
    ----------
    y, yinvvar : ndarray (nobj, ..., n_pix_y)
        data and data inverse variances; pixels with yinvvar == 0 are ignored
    M_T : ndarray (..., n_components, n_pix_y)
        design matrix of linear model

    Returns
    -------
    logfml : ndarray (nobj, )
        log likelihood values with parameters marginalised and at best fit
    theta_map : ndarray (nobj, ndim)
        Best fit MAP parameters
    theta_cov : ndarray (nobj, ndim, ndim)
        Parameter covariance
    """
    nt = tf.cast(tf.shape(M_T)[-2], T)
    # BUG FIX: count valid pixels as the number of positive yinvvar entries.
    # The previous tf.math.count_nonzero(tf.where(yinvvar > 0)) counted the
    # nonzero *index values* returned by tf.where, which undercounts whenever
    # pixel index 0 is valid.
    ny = tf.cast(tf.math.count_nonzero(yinvvar > 0), T)
    M = tf.transpose(M_T, [0, 2, 1])  # tf.einsum("...ij->...ji", M_T)
    # Posterior precision and precision-weighted data projection.
    Hbar = tf.matmul(M_T, M * yinvvar[..., :, None])  # (..., dt, dt)
    etabar = tf.reduce_sum(M_T * (y * yinvvar)[..., None, :], axis=-1)  # (..., dt)
    theta_map = tf.linalg.solve(Hbar, etabar[..., None])[..., 0]  # (..., dt)
    theta_cov = tf.linalg.inv(Hbar)
    # Gaussian data normalisation; invalid pixels (yinvvar == 0) contribute 0.
    xi1 = -0.5 * (
        ny * log2pi
        + tf.reduce_sum(y * y * yinvvar, axis=-1)
        - tf.reduce_sum(
            tf.where(yinvvar > 0, tf.math.log(yinvvar), yinvvar * 0), axis=-1
        )
    )
    logdetHbar = tf.linalg.logdet(Hbar)
    xi2 = -0.5 * (nt * log2pi - logdetHbar + tf.reduce_sum(etabar * theta_map, axis=-1))
    logfml = xi1 - xi2
    return logfml, theta_map, theta_cov
def logmarglike_twotransfergaussians(
    ells,
    y,  # (..., dy)
    yinvvar,  # (..., dy)
    M_T,  # (..., dt, dy),
    z,  # (..., dz)
    zinvvar,  # (..., dz)
    R_T,  # (..., dt, dz),
    perm=[0, 2, 1],
):
    """
    Fit linear model to two Gaussian data sets.

    Parameters
    ----------
    ells : ndarray (nobj, )
        scaling between the data: y = ell * z
    y, yinvvar : ndarray (nobj, ..., n_pix_y)
        data and data inverse variances; pixels with yinvvar == 0 are ignored
    M_T : ndarray (..., n_components, n_pix_y)
        design matrix of linear model
    z, zinvvar : ndarray (nobj, ..., n_pix_z)
        data and data inverse variances for z
    R_T : ndarray (..., n_components, n_pix_z)
        design matrix of linear model for z
    perm : list
        transpose permutation used to obtain M and R from M_T and R_T
        (read-only; never mutated despite being a mutable default)

    Returns
    -------
    logfml : ndarray (nobj, )
        log likelihood values with parameters marginalised and at best fit
    theta_map : ndarray (nobj, ndim)
        Best fit MAP parameters
    theta_cov : ndarray (nobj, ndim, ndim)
        Parameter covariance
    """
    log2pi = tf.cast(tf.math.log(2.0 * np.pi), T)
    nt = tf.cast(tf.shape(M_T)[-2], T)
    # BUG FIX: count valid pixels as the number of positive entries.
    # The previous tf.math.count_nonzero(tf.where(...)) counted the nonzero
    # *index values* returned by tf.where, undercounting whenever index 0
    # was among the valid pixels.
    ny = tf.cast(tf.math.count_nonzero(yinvvar > 0), T)
    nz = tf.cast(tf.math.count_nonzero(zinvvar > 0), T)
    M = tf.transpose(M_T, perm)  # tf.einsum("...ij->...ji", M_T)
    R = tf.transpose(R_T, perm)  # tf.einsum("...ij->...ji", R_T)
    # Posterior precision: ell^2 * R Lambda_z R^T + M Lambda_y M^T
    Hbar = ells[..., None, None] ** 2 * tf.matmul(
        R_T, R * zinvvar[..., :, None]
    ) + tf.matmul(
        M_T, M * yinvvar[..., :, None]
    )  # (..., dt, dt)
    etabar = ells[..., None] * tf.reduce_sum(
        R_T * (z * zinvvar)[..., None, :], axis=-1
    ) + tf.reduce_sum(
        M_T * (y * yinvvar)[..., None, :], axis=-1
    )  # (..., dt)
    theta_map = tf.linalg.solve(Hbar, etabar[..., None])[..., 0]  # (..., dt)
    theta_cov = tf.linalg.inv(Hbar)
    # Sum of log inverse variances over valid pixels only.
    logdetH = tf.reduce_sum(
        tf.where(zinvvar > 0, tf.math.log(zinvvar), zinvvar * 0), axis=-1
    ) + tf.reduce_sum(tf.where(yinvvar > 0, tf.math.log(yinvvar), yinvvar * 0), axis=-1)
    xi1 = -0.5 * (
        (ny + nz) * log2pi
        - logdetH
        + tf.reduce_sum(y * y * yinvvar, axis=-1)
        + tf.reduce_sum(z * z * zinvvar, axis=-1)
    )
    logdetHbar = tf.linalg.logdet(Hbar)
    xi2 = -0.5 * (nt * log2pi - logdetHbar + tf.reduce_sum(etabar * theta_map, axis=-1))
    logfml = xi1 - xi2
    return logfml, theta_map, theta_cov
def logmarglike_threetransfergaussians(
    ells,  # (..., )
    y,  # (..., dy)
    yinvvar,  # (..., dy)
    M_T,  # (..., dt, dy),
    z,  # (..., dz),
    zinvvar,  # (..., dz),
    R_T,  # (..., dt, dz),
    mu,  # (..., dt),
    muinvvar,  # (..., dt),
):
    """
    Fit linear model to three Gaussian data sets (two data vectors plus a
    Gaussian prior on the linear parameters).

    Parameters
    ----------
    ells : ndarray (nobj, )
        scaling between the data: y = ell * z
    y, yinvvar : ndarray (nobj, ..., n_pix_y)
        data and data inverse variances; entries with yinvvar == 0 are ignored
    M_T : ndarray (..., n_components, n_pix_y)
        design matrix of linear model
    z, zinvvar : ndarray (nobj, ..., n_pix_z)
        data and data inverse variances for z
    R_T : ndarray (..., n_components, n_pix_z)
        design matrix of linear model for z
    mu, muinvvar : ndarray (..., n_components)
        Gaussian prior mean and inverse variances on the linear parameters

    Returns
    -------
    logfml : ndarray (nobj, )
        log likelihood values with parameters marginalised and at best fit
    theta_map : ndarray (nobj, ndim)
        Best fit MAP parameters
    theta_cov : ndarray (nobj, ndim, ndim)
        Parameter covariance
    """
    log2pi = tf.cast(tf.math.log(2.0 * np.pi), T)
    nt = tf.cast(tf.shape(M_T)[-2], T)
    nobj = tf.cast(tf.shape(y)[0], T)
    # BUG FIX: count valid entries as the number of positive values.
    # The previous tf.math.count_nonzero(tf.where(...)) counted the nonzero
    # *index values* returned by tf.where, undercounting whenever index 0
    # was among the valid entries.
    ny = tf.cast(tf.math.count_nonzero(yinvvar > 0), T)
    nz = tf.cast(tf.math.count_nonzero(zinvvar > 0), T)
    nm = tf.cast(tf.math.count_nonzero(muinvvar > 0), T)
    M = tf.transpose(M_T, [0, 2, 1])  # tf.einsum("...ij->...ji", M_T)
    R = tf.transpose(R_T, [0, 2, 1])  # tf.einsum("...ij->...ji", R_T)
    # Posterior precision: data terms plus the diagonal prior precision.
    Hbar = (
        ells[:, None, None] ** 2 * tf.matmul(R_T, R * zinvvar[..., :, None])
        + tf.matmul(M_T, M * yinvvar[..., :, None])
        + tf.eye(nt, dtype=T)[None, :, :]
        * tf.ones((nobj, 1, 1), dtype=T)
        * muinvvar[..., :, None]
    )  # (..., dt, dt)
    etabar = (
        ells[:, None] * tf.reduce_sum(R_T * (z * zinvvar)[..., None, :], axis=-1)
        + tf.reduce_sum(M_T * (y * yinvvar)[..., None, :], axis=-1)
        # BUG FIX: the prior contributes mu * muinvvar elementwise, shape
        # (..., dt). The previous reduce_sum over the last axis collapsed it
        # to a single broadcast scalar, corrupting every component of etabar.
        + mu * muinvvar
    )  # (..., dt)
    theta_map = tf.linalg.solve(Hbar, etabar[..., None])[..., 0]  # (..., dt)
    theta_cov = tf.linalg.inv(Hbar)
    # Sum of log inverse variances over valid entries only.
    logdetH = (
        tf.reduce_sum(tf.where(zinvvar > 0, tf.math.log(zinvvar), zinvvar * 0), axis=-1)
        + tf.reduce_sum(
            tf.where(yinvvar > 0, tf.math.log(yinvvar), yinvvar * 0), axis=-1
        )
        + tf.reduce_sum(
            tf.where(muinvvar > 0, tf.math.log(muinvvar), muinvvar * 0), axis=-1
        )
    )
    xi1 = -0.5 * (
        (ny + nz + nm) * log2pi
        - logdetH
        + tf.reduce_sum(y * y * yinvvar, axis=-1)
        + tf.reduce_sum(z * z * zinvvar, axis=-1)
        + tf.reduce_sum(mu * mu * muinvvar, axis=-1)
    )
    logdetHbar = tf.linalg.logdet(Hbar)
    xi2 = -0.5 * (nt * log2pi - logdetHbar + tf.reduce_sum(etabar * theta_map, axis=-1))
    logfml = xi1 - xi2
    return logfml, theta_map, theta_cov
def logmarglike_threetransfergaussians_secondfull(
    ells,  # (..., )
    y,  # (..., dy)
    yvar,  # (..., dy)
    M_T,  # (..., dt, dy),
    z,  # (..., dz),
    zcovar,  # (..., dz, dz),
    R_T,  # (..., dt, dz),
    mu,  # (..., dt),
    muvar,  # (..., dt),
):
    """
    Variant of logmarglike_threetransfergaussians where the second data set z
    carries a full (dense) covariance matrix zcovar; y and the prior mu still
    carry diagonal *variances* (yvar, muvar), not inverse variances.

    Returns (logfml, theta_map, theta_cov) with the same meaning as the
    diagonal-precision variants.

    NOTE(review): here ny/nz/nm are plain dimensions — there is no masking of
    invalid pixels, so zero variances would produce Infs/NaNs. Also, the
    third etabar term collapses (mu / muvar) to a broadcast scalar via
    reduce_sum; the prior contribution to eta is usually the elementwise
    product with shape (..., dt) — verify this is intended.
    """
    nt = tf.cast(tf.shape(M_T)[-2], T)
    nobj = tf.cast(tf.shape(y)[0], T)
    ny = tf.cast(tf.shape(y)[-1], T)
    nz = tf.cast(tf.shape(z)[-1], T)
    nm = tf.cast(tf.shape(mu)[-1], T)
    M = tf.transpose(M_T, [0, 2, 1])  # tf.einsum("...ij->...ji", M_T)
    R = tf.transpose(R_T, [0, 2, 1])  # tf.einsum("...ij->...ji", R_T)
    # zcovar^-1 z, computed via a solve rather than an explicit inverse.
    zvarinvz = tf.linalg.solve(zcovar, z[:, :, None])
    # Posterior precision: dense z term, diagonal y term, diagonal prior term.
    Hbar = (
        ells[:, None, None] ** 2 * tf.matmul(R_T, tf.linalg.solve(zcovar, R))
        + tf.matmul(M_T, M / yvar[..., :, None])
        + tf.eye(nt, dtype=T)[None, :, :]
        * tf.ones((nobj, 1, 1), dtype=T)
        / muvar[..., :, None]
    )  # (..., dt, dt)
    etabar = (
        ells[:, None]
        * tf.reduce_sum(R_T[..., None] * zvarinvz[..., None, :, :], axis=(-2, -1))
        + tf.reduce_sum(M_T * (y / yvar)[..., None, :], axis=-1)
        + tf.reduce_sum((mu / muvar)[..., None, :], axis=-1)
    )  # (..., dt)
    theta_map = tf.linalg.solve(Hbar, etabar[..., None])[..., 0]  # (..., dt)
    theta_cov = tf.linalg.inv(Hbar)
    # log |precision| = -log |covariance| for each Gaussian factor.
    logdetH = (
        -tf.linalg.logdet(zcovar)
        - tf.reduce_sum(tf.math.log(yvar), axis=-1)
        - tf.reduce_sum(tf.math.log(muvar), axis=-1)
    )
    xi1 = -0.5 * (
        (ny + nz + nm) * log2pi
        - logdetH
        + tf.reduce_sum(y * y / yvar, axis=-1)
        + tf.reduce_sum(z[:, None] * zvarinvz, axis=(-1, -2))
        + tf.reduce_sum(mu * mu / muvar, axis=-1)
    )
    logdetHbar = tf.linalg.logdet(Hbar)
    xi2 = -0.5 * (nt * log2pi - logdetHbar + tf.reduce_sum(etabar * theta_map, axis=-1))
    logfml = xi1 - xi2
    return logfml, theta_map, theta_cov
def solve_woodbury_sum_onefull_oneinv(A_full, Binv_diag, target):
    """Solve (A + B) x = target, given A dense and the diagonal inverse of B.

    Uses the Woodbury-style identity
        (A + B)^-1 t = Binv t - Binv (Ainv + Binv)^-1 Binv t
    so only the smaller (Ainv + Binv) system needs a dense solve.

    Parameters
    ----------
    A_full : tensor (..., n, n)
        dense matrix A
    Binv_diag : tensor (..., n, k)-broadcastable
        diagonal entries of B^-1
    target : tensor (..., n, k)
        right-hand side(s)

    Returns
    -------
    tensor (..., n, k)
        (A + B)^-1 @ target
    """
    n_B = tf.shape(Binv_diag)[1]
    Binv_target = Binv_diag * target
    # (A+B)inv * target = (Binv * target) - Binv * (Ainv + Binv)inv * (Binv * target)
    Ainv = tf.linalg.inv(A_full)
    Binv_full = tf.eye(n_B, dtype=T)[None, ...] * Binv_diag
    Ainv_plus_Binv = Ainv + Binv_full
    temp = tf.linalg.solve(Ainv_plus_Binv, Binv_target)
    # BUG FIX: removed leftover debug print of tf.shape(...) that polluted
    # stdout (and created extra ops) on every call.
    corr = tf.matmul(Binv_full, temp)
    return Binv_target - corr
def logmarglike_twotransfergaussians_fullrank(
    y,  # (..., dy)
    ycovar,  # (..., dy)
    yinvvar,
    M_T,  # (..., dt, dy),
    z,  # (..., dz), mu
    zinvvar,  # (..., dz), lambda,
    R_T,  # (..., dt, dz),
    perm=[0, 2, 1],
):
    """
    Two-data-set marginal likelihood where solves against the y covariance go
    through the Woodbury helper: this appears to treat the effective y
    covariance as ycovar plus a diagonal part derived from yinvvar —
    NOTE(review): confirm this decomposition against the callers.

    Returns logfml, theta_map, theta_cov, plus the intermediate solves XinvM
    and Xinvy so callers can reuse them.

    NOTE(review): ny is taken from tf.shape (all pixels) while nz masks on
    zinvvar > 0 — confirm the asymmetry is intended.
    """
    log2pi = tf.cast(tf.math.log(2.0 * np.pi), T)
    nt = tf.cast(tf.shape(M_T)[-2], T)
    ny = tf.cast(tf.shape(y)[-1], T)
    nz = tf.cast(
        tf.math.count_nonzero(tf.where(zinvvar > 0)), T
    )  # tf.cast(tf.shape(z)[-1], T)
    M = tf.transpose(M_T, perm)  # tf.einsum("...ij->...ji", M_T)
    R = tf.transpose(R_T, perm)  # tf.einsum("...ij->...ji", R_T)
    # Direct dense solves kept for reference:
    # XinvM = tf.linalg.solve(ycovar, M)
    # Xinvy = tf.linalg.solve(ycovar, y[..., None])
    XinvM = solve_woodbury_sum_onefull_oneinv(ycovar, yinvvar[..., None], M)
    Xinvy = solve_woodbury_sum_onefull_oneinv(ycovar, yinvvar[..., None], y[..., None])
    # Posterior precision and precision-weighted projections.
    Hbar = tf.matmul(R_T, R * zinvvar[..., :, None]) + tf.matmul(
        M_T, XinvM
    )  # (..., dt, dt)
    etabar = tf.reduce_sum(R_T * (z * zinvvar)[..., None, :], axis=-1) + tf.reduce_sum(
        M_T[..., None] * Xinvy[:, None, :, :], axis=(-2, -1)
    )  # (..., dt)
    theta_map = tf.linalg.solve(Hbar, etabar[..., None])[..., 0]  # (..., dt)
    theta_cov = tf.linalg.inv(Hbar)
    logdetH = tf.reduce_sum(
        tf.where(zinvvar > 0, tf.math.log(zinvvar), zinvvar * 0), axis=-1
    ) - tf.linalg.logdet(ycovar)
    xi1 = -0.5 * (
        (ny + nz) * log2pi
        - logdetH
        + tf.reduce_sum(y[:, None] * Xinvy, axis=(-1, -2))
        + tf.reduce_sum(z * z * zinvvar, axis=-1)
    )
    logdetHbar = tf.linalg.logdet(Hbar)
    xi2 = -0.5 * (nt * log2pi - logdetHbar + tf.reduce_sum(etabar * theta_map, axis=-1))
    logfml = xi1 - xi2
    return logfml, theta_map, theta_cov, XinvM, Xinvy
|
from sudoku_grid_detector import SudokuGridDetector
from sudoku_digit_recognizer import SudokuDigitRecognizer_CV
from sudoku_solver import SudokuSolver
from matplotlib import pyplot as plt
import numpy as np
import cv2
import glob
import os
import time
def processImage(img_filepath : str, tilesize_px : int) -> tuple:
    """Run the full pipeline on one photo: grid detection, digit recognition,
    and solving.

    Note: the return annotation was corrected from np.ndarray — this returns
    a 3-tuple (sudoku_grid, predicted_sudoku_grid, solution), where solution
    is None when the solver raised.
    """
    t_start = time.time()
    sudoku_grid = SudokuGridDetector(img_filepath=img_filepath, tilesize_px=tilesize_px)
    t_end = time.time() - t_start
    print(f'Took {t_end:.3f} seconds to load image and find the sudoku grid')
    t_start = time.time()
    # 'CV_digits' is the template set the recognizer loads for matching.
    digit_recognizer = SudokuDigitRecognizer_CV('CV_digits', tilesize_px)
    predicted_sudoku_grid = digit_recognizer.recognizeDigits(sudoku_grid.sudoku_cells)
    t_end = time.time() - t_start
    print(f'Took {t_end:.3f} seconds to detect the digits in the sudoku grid')
    t_start = time.time()
    solver = SudokuSolver()
    try:
        solution = solver.solve(predicted_sudoku_grid)
    except Exception as e:
        # Best-effort: an unsolvable/mis-recognized grid should not abort the
        # batch; report and return solution=None.
        print(e)
        print('Skipping...')
        solution = None
    t_end = time.time() - t_start
    print(f'Took {t_end:.3f} seconds to try and solve the sudoku grid')
    return sudoku_grid, predicted_sudoku_grid, solution
def prettyPrintGrid(grid : np.ndarray):
    """Render a 9x9 sudoku grid as text: 3x3 boxes separated by '|' and
    '---+---+---' rules, with zeros shown as blanks."""
    pieces = []
    chars = grid.astype('str')
    for r, row in enumerate(chars):
        pieces.append('|'.join(''.join(row[c:c + 3]) for c in (0, 3, 6)) + '\n')
        # Horizontal rule after the 3rd and 6th rows.
        if r in (2, 5):
            pieces.append('---+---+---\n')
    # Empty cells are stored as 0; show them as spaces.
    return ''.join(pieces).replace('0', ' ')
def plotResults(sudoku_grid : SudokuGridDetector, prediction: np.ndarray, tilesize_px : int):
    """Build a 4-panel matplotlib figure: original image, binary threshold,
    contour overlay, and the warped grid with predicted digits.

    Does not call plt.show()/savefig — the caller decides how to display/save.
    """
    # Generate intermediate images for plotting
    overlay = sudoku_grid.resized_img.copy()
    cv2.drawContours(overlay, [sudoku_grid.contour], -1, (00, 255, 0), 10)
    # Raw full-size image
    plt.figure(figsize=(20, 10))
    plt.subplot(1, 4, 1), plt.imshow(sudoku_grid.raw_img, vmin=0, vmax=255)
    plt.title('Original Image')
    # plt.xticks([]), plt.yticks([])
    # Binary image with green dots for corners
    plt.subplot(1, 4, 2), plt.imshow(sudoku_grid.binary_img, 'gray', vmin=0, vmax=255)
    plt.title('ADAPTIVE_THRESH_GAUSSIAN_C')
    plt.xticks([]), plt.yticks([])
    if sudoku_grid.corners is None:
        # Detection failed: show what we have and skip the remaining panels.
        plt.tight_layout()
        plt.show()
        return
    plt.plot(sudoku_grid.corners[:, 0], sudoku_grid.corners[:, 1], 'go', markersize=15)
    # Overlaid resized image with sudoku_grid grid rectangle
    plt.subplot(1, 4, 3), plt.imshow(overlay, vmin=0, vmax=255)
    plt.title('OVERLAY')
    plt.xticks([]), plt.yticks([])
    msg = prettyPrintGrid(prediction)
    plt.text(10,10,msg,fontname='Consolas',horizontalalignment='left',verticalalignment='top', fontsize=12, color='yellow')
    # Warped, binarized and line-removed sudoku_grid grid
    # plt.imsave(fname='suduko.png', arr=sudoku_grid.sudoku_img, cmap='gray', vmin=0, vmax=255)
    plt.subplot(1, 4, 4), plt.imshow(sudoku_grid.sudoku_img, 'gray', vmin=0, vmax=255)
    for r in range(9):
        for c in range(9):
            if (prediction[r,c] > 0):
                # 'c=' here is matplotlib's color kwarg, not the column index.
                plt.text(c*tilesize_px+tilesize_px*.8, r*tilesize_px+tilesize_px*.3, f'{prediction[r,c]}',c='yellow')
    plt.title('SUDOKU GRID (Yellow is predicted digit)')
    plt.xticks([]), plt.yticks([])
    plt.tight_layout()
def main():
    """Process every inputs/*.jpg sudoku photo and save annotated figures
    under results/."""
    # Tile size is hard-coded for the CV_digits template set for now.
    tilesize_px = 64
    if not os.path.exists('results'):
        os.mkdir('results')
    for i, img_filepath in enumerate(glob.glob('inputs/*.jpg')):
        # Time processing of image into predicted sudoku grid and solving it
        t_start = time.time()
        sudoku_grid, predicted_sudoku_grid, solution = processImage(img_filepath, tilesize_px)
        tot_time = time.time() - t_start
        print(f'Took {tot_time:.3f} seconds total to process an input image')
        print('Sudoku Grid Prediction:')
        print(predicted_sudoku_grid)
        if solution is not None:
            predicted_sudoku_grid = solution
            print('Completed Sudoku Grid:')
            print(predicted_sudoku_grid)
        # Visualize and save the annotated figure.
        plotResults(sudoku_grid, predicted_sudoku_grid, tilesize_px)
        plt.suptitle(img_filepath)
        plt.savefig(f'results/result_{i:02d}.jpg')
        # BUG FIX: close figures after saving — the original leaked one open
        # matplotlib figure per input image across the loop.
        plt.close('all')
if __name__ == '__main__':
    main()
|
"""Current bezpy version."""
__version__ = '0.0.2'
|
import weakref
import gc
class C(object):
    """Toy class used to demonstrate weakref.proxy behaviour.

    Note: this file uses Python 2 print statements throughout.
    """
    def foo(self):
        print "inside foo()"
def fact(n):
    """Return n! computed recursively.

    BUG FIX: the original returned ``n`` for ``n <= 1``, so ``fact(0)``
    (and any n < 1) yielded n instead of the mathematically correct 1.
    Callers with n >= 1 (e.g. fact(100) below) are unaffected.
    """
    if n <= 1:
        return 1
    return n * fact(n-1)
def getWR():
    """Create a C instance and return only a weak proxy to it.

    The proxy is usable while the strong reference ``c`` exists; after
    ``del c`` removes the last strong reference, dereferencing the returned
    proxy raises ReferenceError (once the object is actually collected).
    """
    c = C()
    wr = weakref.proxy(c)
    # Attribute reads/writes through the proxy are forwarded to the referent.
    wr.attr = "test attr"
    print wr.attr, c.attr
    wr.foo()
    del c
    return wr
# Demonstration: the proxy's referent lost its last strong reference inside
# getWR, so every use of wr below is expected to raise ReferenceError.
wr = getWR()
fact(100) # try to clear some memory
def recurse(f, n):
    # Call f() at recursion depth n — used to run gc.collect deep in a stack.
    if n:
        return recurse(f, n - 1)
    return f()
recurse(gc.collect, 50)
gc.collect()
# Method call through a dead proxy.
try:
    wr.foo()
except ReferenceError as e:
    print e
# Attribute read through a dead proxy.
try:
    print wr.attr
except ReferenceError as e:
    print e
# Attribute write through a dead proxy.
try:
    wr.attr = "val2"
except ReferenceError as e:
    print e
|
# Do an experiment here.
# NOTE(review): the "_file_begin_"/"_file_end_" markers suggest an external
# harness captures stdout and writes the lines between the markers to the
# named file — confirm against the consuming tool. The CSV column meanings
# are not documented here and cannot be inferred from this file alone.
print("_file_begin_", "servo.txt")
# Dump data logs here. It will be saved to "servo.txt".
print("4,8037,2,0,2,1021,2,0,4500,38498")
print("9,8037,2,0,2,1622,2,3,27000,41289")
print("14,8051,2,0,2,1357,2,13,13500,43731")
print("19,8051,2,0,2,1210,2,23,4500,46521")
print("24,8044,2,0,2,2551,2,18,58500,48963")
print("29,8044,2,0,2,1015,2,41,-9000,51753")
print("34,8030,2,0,2,2573,2,34,54000,54195")
print("39,8030,2,0,2,1842,2,55,20500,56986")
print("44,8028,2,0,2,2008,3,61,25000,59427")
print("49,8028,2,0,2,2396,3,66,38500,62218")
print("54,8016,2,0,2,3375,3,70,77000,64660")
print("59,8016,2,0,2,2369,4,88,32000,67450")
print("64,8010,2,0,2,2965,4,90,54500,69892")
print("69,8010,2,0,2,2253,5,106,48000,46500")
print("74,8036,2,0,2,2245,5,106,48000,46500")
print("_file_end_")
print("_file_begin_", "control.txt")
# Dump data logs here. It will be saved to "control.txt".
print("2,1,2,0,2,4500,2,1,2,0,0,0,4500,135230008")
print("7,6,2,0,2,27000,2,9,2,3,0,0,27000,1073898496")
print("12,11,2,0,2,13500,2,16,2,13,0,0,13500,65")
print("17,16,2,0,2,4500,2,24,2,23,0,0,4500,65")
print("22,21,2,0,2,58500,2,31,2,18,0,0,58500,65")
print("27,26,2,0,2,-9000,2,39,2,41,0,0,-9000,65")
print("32,31,2,0,2,54000,2,46,2,34,0,0,54000,7")
print("37,36,2,0,2,20500,3,54,2,55,25000,0,-4500,1073898496")
print("42,41,2,0,2,25000,3,61,3,61,25000,0,0,65")
print("47,46,2,0,2,38500,3,69,3,66,25000,0,13500,65")
print("52,51,2,0,2,77000,4,76,3,70,50000,0,27000,65")
print("57,56,2,0,2,32000,4,84,4,88,50000,0,-18000,1073898496")
print("62,61,2,0,2,54500,4,91,4,90,50000,0,4500,65")
print("67,66,2,0,2,48000,5,100,5,106,75000,0,-27000,65")
print("72,71,2,0,2,48000,5,100,5,106,75000,0,-27000,65")
print("_file_end_")
|
import unittest
from dcp.problems.bst.ds import BST
from dcp.problems.bst.floor_ceiling import floor_ceiling1
class Test_FloorCeiling1(unittest.TestCase):
    """Tests for floor_ceiling1 on a fixed BST."""

    def setUp(self):
        # Build the shared fixture tree once per test instead of duplicating
        # the construction in every test method (was an empty `pass`).
        self.tree = BST()
        for v in [7, 5, 10, -1, 6, 25]:
            self.tree.insert(v)

    def test_case1(self):
        # 8 is absent: floor is 7, ceiling is 10.
        assert floor_ceiling1(self.tree.root, 8) == (7, 10)

    def test_case2(self):
        # 6 is present: floor and ceiling are both 6.
        assert floor_ceiling1(self.tree.root, 6) == (6, 6)
|
from slackbot.bot import Bot
import logging
import os
def main():
    """Configure logging and run the slack bot (blocks until the bot exits)."""
    logging.basicConfig(level=logging.INFO)
    logging.info("Running")
    bot = Bot()
    bot.run()
if __name__ == "__main__":
    main()
|
from django.db import models
from django.contrib import admin
class Player(models.Model):
    """A player, identified by a unique name.

    Note: __unicode__ indicates this codebase targets Python 2 / old Django.
    """
    name = models.CharField(max_length=50, unique=True)
    def __unicode__(self):
        return self.name
class Team(models.Model):
    """A team with its league standing counters."""
    # NOTE(review): name is nullable, so __unicode__ may return None — confirm
    # whether null=True/blank=True is intended for a unique display name.
    name = models.CharField(max_length=200, unique=True, null=True, blank=True)
    players = models.ManyToManyField(Player)
    # Counters maintained externally (see ScoreAdmin.delete, which decrements
    # them when a Score is removed).
    points = models.IntegerField()
    games = models.IntegerField()
    def __unicode__(self):
        return self.name
class Score(models.Model):
    """A single recorded game result between two teams on a given date."""
    # on_delete made explicit: CASCADE matches the implicit pre-Django-2.0
    # default, so behaviour is unchanged while the model stays valid on
    # Django 2.0+ where on_delete is required.
    home = models.ForeignKey(Team, related_name="home", on_delete=models.CASCADE)
    away = models.ForeignKey(Team, related_name="away", on_delete=models.CASCADE)
    home_score = models.IntegerField()
    away_score = models.IntegerField()
    when = models.DateField()
class PlayerAdmin(admin.ModelAdmin):
    """Admin list view for players: show and search by name."""
    list_display = ["name"]
    search_fields = ["name"]
class ScoreAdmin(admin.ModelAdmin):
    """Admin for scores with a custom delete action that also rolls back the
    affected teams' points/games counters."""
    list_display = ["home", "home_score", "away", "away_score"]
    search_fields = ["home", "away"]
    actions = ["delete"]
    def delete(self, request, queryset):
        # change team data: minus points and games.
        # Scoring scheme implied here: win = 2 points, draw = 1 point each.
        # NOTE(review): read-modify-write on points/games is not atomic and
        # can race under concurrent admin requests — F() expressions would be
        # safer; confirm before changing behaviour.
        for score in queryset:
            homeTeam = score.home
            awayTeam = score.away
            homeTeam.games = homeTeam.games - 1
            awayTeam.games = awayTeam.games - 1
            if score.home_score > score.away_score:
                homeTeam.points = homeTeam.points - 2
            elif score.home_score < score.away_score:
                awayTeam.points = awayTeam.points - 2
            else:
                homeTeam.points = homeTeam.points - 1
                awayTeam.points = awayTeam.points - 1
            homeTeam.save()
            awayTeam.save()
        queryset.delete()
    delete.short_description = "delete selected"
    def get_actions(self, request):
        # Hide Django's built-in bulk delete so only the counter-aware
        # "delete" action above is offered.
        actions = super(ScoreAdmin, self).get_actions(request)
        if 'delete_selected' in actions:
            del actions['delete_selected']
        return actions
class TeamAdmin(admin.ModelAdmin):
    """Admin list view for teams: show name and points, search by name."""
    list_display = ["name", "points"]
    search_fields = ["name"]
# Register the models with their customised admin classes.
admin.site.register(Player, PlayerAdmin)
admin.site.register(Score, ScoreAdmin)
admin.site.register(Team, TeamAdmin)
|
import pytest
from merchantsguide.commands import MineralQueryCommand, MineralUpdateCommand, NumberQueryCommand, NumeralUpdateCommand
from merchantsguide.commands import UnknownCommand, BaseCommand
from merchantsguide.registry import Registry
def test_update_numeral():
    """NumeralUpdateCommand stores alien-word -> Roman-numeral mappings and
    overwrites existing ones."""
    r = Registry()
    r.reset()
    # add four definitions and assert that nothing is returned
    assert NumeralUpdateCommand('bork', 'I').execute() is None
    assert NumeralUpdateCommand('kmar', 'V').execute() is None
    assert NumeralUpdateCommand('gromp', 'X').execute() is None
    assert NumeralUpdateCommand('urx', 'L').execute() is None
    # check that only these four definitions exist and are correct
    assert len(r.alien_numerals) == 4
    assert r.get_numeral('bork') == 'I'
    assert r.get_numeral('kmar') == 'V'
    assert r.get_numeral('gromp') == 'X'
    assert r.get_numeral('urx') == 'L'
    # update one definition and check for correctness
    assert NumeralUpdateCommand('urx', 'C').execute() is None
    assert len(r.alien_numerals) == 4
    assert r.get_numeral('urx') == 'C'
def test_query_number():
    """NumberQueryCommand converts alien-numeral sequences to decimal values
    and reports unknown words / invalid Roman sequences."""
    r = Registry()
    r.reset()
    # define some numerals
    NumeralUpdateCommand('bork', 'I').execute()
    NumeralUpdateCommand('kmar', 'V').execute()
    NumeralUpdateCommand('gromp', 'X').execute()
    NumeralUpdateCommand('urx', 'L').execute()
    # check for correct calculation
    assert NumberQueryCommand(['bork']).execute() == "bork is 1"
    assert NumberQueryCommand(['urx']).execute() == "urx is 50"
    # gromp urx bork bork == XLII == 42
    assert NumberQueryCommand(['gromp', 'urx', 'bork', 'bork']).execute()[-2:] == "42"
    # assert that invalid inputs are handled correctly
    assert NumberQueryCommand(['blork']).execute()[:7] == "Unknown"
    # 'LL' is not a valid Roman numeral
    assert NumberQueryCommand(['urx', 'urx']).execute()[:9] == "Could not"
def test_update_mineral():
    """MineralUpdateCommand derives a per-unit price from a total price and
    an alien-numeral quantity, overwriting prior entries; errors leave the
    registry unchanged."""
    r = Registry()
    r.reset()
    # define some numerals
    NumeralUpdateCommand('bork', 'I').execute()
    NumeralUpdateCommand('kmar', 'V').execute()
    NumeralUpdateCommand('gromp', 'X').execute()
    NumeralUpdateCommand('urx', 'L').execute()
    # add some minerals
    assert MineralUpdateCommand('Iron', ['bork', 'bork'], 99).execute() is None
    assert MineralUpdateCommand('Gold', ['gromp'], 2500).execute() is None
    # verify the registry status (99 / 2 and 2500 / 10)
    assert len(r.mineral_prices) == 2
    assert r.get_mineral('Iron') == 49.5
    assert r.get_mineral('Gold') == 250.0
    # uh-oh! Gold is worthless now! (500 / 50)
    MineralUpdateCommand('Gold', ['urx'], 500).execute()
    assert len(r.mineral_prices) == 2
    assert r.get_mineral('Gold') == 10.0
    # check error cases, verify that nothing is changed
    assert MineralUpdateCommand('Iron', ['bork', 'bark'], 99).execute()[:7] == 'Unknown'
    assert MineralUpdateCommand('Iron', ['kmar', 'kmar'], 99).execute()[:5] == 'Could'
    assert r.get_mineral('Iron') == 49.5
def test_query_mineral():
    """MineralQueryCommand prices a quantity of a known mineral and reports
    unknown minerals/numerals and invalid Roman sequences."""
    r = Registry()
    r.reset()
    # define some numerals
    NumeralUpdateCommand('bork', 'I').execute()
    NumeralUpdateCommand('kmar', 'V').execute()
    NumeralUpdateCommand('gromp', 'X').execute()
    NumeralUpdateCommand('urx', 'L').execute()
    # add some minerals
    MineralUpdateCommand('Iron', ['bork', 'bork'], 99).execute()
    MineralUpdateCommand('Gold', ['gromp'], 2500).execute()
    # check that mineral prices are calculated correctly (slices pick the
    # numeric amount out of the formatted answer string)
    assert MineralQueryCommand(['gromp'], 'Iron').execute()[-12:-7] == " 495 "
    assert MineralQueryCommand(['bork'], 'Gold').execute()[-12:-7] == " 250 "
    # check that unknown minerals/numerals and invalid numbers are handled correctly
    assert MineralQueryCommand(['glorp'], 'Iron').execute()[:7] == "Unknown"
    assert MineralQueryCommand(['bork'], 'Unobtainium').execute()[:7] == "Unknown"
    assert MineralQueryCommand(['kmar', 'kmar'], 'Iron').execute()[:9] == "Could not"
def test_reprs_dont_fail():
    """Smoke test: building every command repr must not raise.

    Note: chained != only compares adjacent pairs, so this does not prove all
    reprs are mutually distinct — it mainly exercises the __repr__ code paths.
    """
    assert repr(NumeralUpdateCommand('bork', 'I')) != repr(MineralUpdateCommand('Iron', ['bork'], 23))\
        != repr(NumberQueryCommand(['bork'])) != repr(MineralQueryCommand(['bork'], 'Iron'))\
        != repr(UnknownCommand())
|
# Read the current salary and the raise percentage, then report the raise.
salario = float(input('Valor do salário: R$'))
aumento = float(input('Valor do aumento (ex.: 5%): '))
# Raise amount in currency units (aumento is a percentage of the salary).
r = salario * aumento / 100
# BUG FIX: the final salary is the salary plus the computed raise amount
# ``r``; the original added the raw percentage ``aumento`` and left ``r``
# unused.
salfinal = salario + r
print(f'''Salário: R${salario:.2f}
Aumento: {aumento:.1f}%
→ Salário final: R${salfinal:.2f}''')
|
import os
from typing import Mapping
from flask import Flask
def create_app(test_config: Mapping = None) -> Flask:
    """Flask application factory.

    Parameters
    ----------
    test_config : Mapping, optional
        When given (tests), it is applied directly and the instance config
        file is not read.

    Returns
    -------
    Flask
        The configured application.
    """
    app = Flask(import_name='app', instance_relative_config=True)
    if test_config is None:
        # Load the instance config from ../instance/config.json when present.
        try:
            app.config.from_json('config.json')
        except OSError:
            # BUG FIX: narrowed from a bare ``except:``. A missing/unreadable
            # instance config is fine (run on defaults), but a syntactically
            # invalid config now fails loudly instead of being swallowed.
            pass
    else:
        app.config.from_mapping(test_config)
    # Create the instance folder if it doesn't already exist.
    try:
        os.makedirs(app.instance_path)
    except OSError:
        # Already exists (or not creatable) — the app can still run.
        pass
    return app
|
"""Support for Atlantic Electrical Heater IO controller."""
import logging
from typing import List
from homeassistant.components.climate import ClimateEntity
from homeassistant.components.climate.const import (
HVAC_MODE_HEAT,
HVAC_MODE_OFF,
SUPPORT_TARGET_TEMPERATURE,
)
from homeassistant.const import ATTR_TEMPERATURE, TEMP_CELSIUS
from ..coordinator import TahomaDataUpdateCoordinator
from ..tahoma_entity import TahomaEntity
_LOGGER = logging.getLogger(__name__)
COMMAND_GET_LEVEL = "getLevel"
COMMAND_SET_LEVEL = "setLevel"
CORE_LEVEL_STATE = "core:LevelState"
class DimmerExteriorHeating(TahomaEntity, ClimateEntity):
    """Representation of TaHoma IO Atlantic Electrical Heater.

    The device exposes a dimmer level (core:LevelState). This entity maps it
    onto the climate "temperature" scale inverted: displayed value is
    ``100 - level``, so a level of 100 means the heater is off.
    """
    def __init__(self, device_url: str, coordinator: TahomaDataUpdateCoordinator):
        """Init method."""
        super().__init__(device_url, coordinator)
        # Remember the current (inverted) level so HEAT can restore it after OFF.
        self._saved_level = 100 - self.select_state(CORE_LEVEL_STATE)
    @property
    def supported_features(self) -> int:
        """Return the list of supported features."""
        return SUPPORT_TARGET_TEMPERATURE
    @property
    def temperature_unit(self) -> str:
        """Return the unit of measurement used by the platform."""
        # The "temperature" is really a percentage; Celsius is only nominal.
        return TEMP_CELSIUS
    @property
    def min_temp(self) -> float:
        """Return minimum percentage."""
        return 0
    @property
    def max_temp(self) -> float:
        """Return maximum percentage."""
        return 100
    @property
    def target_temperature(self):
        """Return the temperature we try to reach (inverted dimmer level)."""
        return 100 - self.select_state(CORE_LEVEL_STATE)
    async def async_set_temperature(self, **kwargs) -> None:
        """Set new target temperature."""
        level = kwargs.get(ATTR_TEMPERATURE)
        if level is None:
            return
        # Invert back to the device's level convention, then refresh state.
        await self.async_execute_command(COMMAND_SET_LEVEL, 100 - int(level))
        await self.async_execute_command(COMMAND_GET_LEVEL)
    @property
    def hvac_mode(self) -> str:
        """Return hvac operation ie. heat, cool mode."""
        # Level 100 corresponds to "fully dimmed" = heater off.
        if self.select_state(CORE_LEVEL_STATE) == 100:
            return HVAC_MODE_OFF
        return HVAC_MODE_HEAT
    @property
    def hvac_modes(self) -> List[str]:
        """Return the list of available hvac operation modes."""
        return [HVAC_MODE_OFF, HVAC_MODE_HEAT]
    async def async_set_hvac_mode(self, hvac_mode: str) -> None:
        """Set new target hvac mode."""
        level = 0
        if hvac_mode == HVAC_MODE_HEAT:
            # Restore the level that was active before switching off.
            level = self._saved_level
        else:
            # Turning off: remember the current level for the next HEAT.
            self._saved_level = self.target_temperature
        await self.async_execute_command(COMMAND_SET_LEVEL, 100 - int(level))
        await self.async_execute_command(COMMAND_GET_LEVEL)
|
# %% [markdown]
# This is a simple notebook for H2O AutoML prediction.
# MLflow is used as the tracking tool, since experiments take a long time to
# complete and it is hard to manage too many experiments by hand.
#%%
# Importing necessary libraries
import os
import pandas as pd
from sklearn.model_selection import train_test_split
import h2o
from h2o.automl import H2OAutoML
import mlflow
import mlflow.h2o
# %%
# Initialize the local H2O server (16 GB heap for AutoML).
h2o.init(max_mem_size='16G')
# Initialize the mlflow experiment: create it on the first run, look it up on
# reruns when it already exists.
mlflow.set_tracking_uri(f'..{os.sep}mlruns')
experiment_name = 'automl-benchmark'
try:
    experiment = mlflow.create_experiment(experiment_name)
except Exception:
    # BUG FIX: narrowed from a bare ``except:`` so SystemExit/KeyboardInterrupt
    # are no longer swallowed; any creation failure falls back to lookup.
    experiment = mlflow.get_experiment_by_name(experiment_name)
mlflow.set_experiment(experiment_name)
# Read the benchmark seeds (one integer per line).
seed_path = f'..{os.sep}data{os.sep}seeds.txt'
seeds = []
with open(seed_path, mode='r') as file:
    for seed in file:
        # BUG FIX: the original called ``seed.strip(r'/n')`` and discarded
        # the result (str.strip returns a new string, and r'/n' is not a
        # newline escape); it only worked because int() tolerates surrounding
        # whitespace. Strip properly and convert.
        seeds.append(int(seed.strip()))
dataset_name = 'taxi_trip_duration'
data = pd.read_pickle(f'..{os.sep}data{os.sep}{dataset_name}{os.sep}{dataset_name}.pkl')
#%%
run_time_secs = 600
target_column = 'trip_duration'
for seed in seeds:
    with mlflow.start_run(run_name='h2o'):
        # Split data into two parts (train, valid)
        train, valid = train_test_split(data, random_state=seed)
        train_df = h2o.H2OFrame(train)
        # The validation frame must share column names/types with train.
        test_column_names = train_df.columns
        test_column_types = train_df.types
        valid_df = h2o.H2OFrame(valid, column_names=test_column_names, column_types=test_column_types)
        # Identify predictors and response
        x = train_df.columns
        x.remove(target_column)
        # NOTE(review): trip_duration with RMSLE looks like a regression
        # problem, yet the target is converted to a factor (classification)
        # here — confirm this is intended before relying on the results.
        train_df[target_column] = train_df[target_column].asfactor()
        valid_df[target_column] = valid_df[target_column].asfactor()
        # Run AutoML for run_time_secs seconds, selecting models by RMSLE.
        aml = H2OAutoML(max_runtime_secs=run_time_secs,
                        seed=seed,
                        stopping_metric='RMSLE',
                        sort_metric='RMSLE')
        aml.train(x=x, y=target_column, training_frame=train_df, validation_frame=valid_df)
        # Track the result and the winning model in mlflow.
        mlflow.log_metric('RMSLE', aml.leader.rmsle(valid=True))
        mlflow.log_param('seed', seed)
        mlflow.log_param('run_time', run_time_secs)
        mlflow.log_param('dataset_name', dataset_name)
        mlflow.log_param('model_name', aml.leaderboard[0, 0])
        mlflow.h2o.log_model(aml.leader, 'model')
|
"""
Solution to HSPT 2017 - Naughty or Nice
Solution idea - On a particular day, we can easily determine which
house is the first and the last house to be pranked using simple
math in constant time. Unfortunately, we can not update all the houses
in the range because with the large bounds of the problem, this will take
far too long to compute and keep count. However, something very important
to note is that the maximum skip size is 10, so we can abuse this fact.
Our final runtime will be O(n), but Python is somewhat slow when it comes to
these types of operations so the same solution in Java/C will be much faster.
"""
# T test cases; each gives h houses, d days, then d (start, skip, count) triples.
T = int(input())
while T > 0:
    h, d = map(int, input().split(" "))
    # Index of the most-pranked house seen so far.
    best = 0
    # Sol stores the final answer, add and dele are used to compute final answer
    sol = [0]*h
    add = [0]*11
    dele = [0]*11
    # Initialize add and dele as 2D arrays of size [11][h], since 1 - 10 are valid skip sizes
    for i in range(11):
        add[i] = [0]*h
        dele[i] = [0]*h
    for i in range(d):
        # s = 1-based start house, k = skip size, p = number of houses pranked
        s,k,p=map(int, input().split(" "))
        s-=1
        # +1 since we know a range of updates starts at index s
        add[k][s]+=1
        # +1 to denote that we need to stop 1 range at index (p-1)*k+s
        if (p-1)*k + s < h:
            dele[k][(p-1)*k+s]+=1
    # Sweep through each house in order
    for i in range(h):
        # Sweep through all possible skip sizes
        for j in range(1,11):
            # If add[j][i] is non-zero we know santa hit this house with a skip size of j, add[j][i] times
            sol[i]+=add[j][i]
            # dele[j][i] ranges ended at this position, so get rid of them
            add[j][i]-=dele[j][i]
            # Santa moves j positions forward, so we need to update the add array to account for this
            if(i+j<h):
                add[j][i+j]+=add[j][i]
        # Track the house with the highest prank count (earliest wins ties).
        if sol[best] < sol[i]:
            best = i
    print("House "+str(best+1)+" should get the biggest and best gift next Christmas.")
    T-=1
|
"""
Tasks for the grades app
"""
import logging
from celery import group
from celery.result import GroupResult
from django.contrib.auth.models import User
from django.core.cache import caches
from django.db import IntegrityError
from django.db.models import OuterRef, Exists
from django_redis import get_redis_connection
from courses.models import CourseRun, Course
from grades import api
from grades.constants import FinalGradeStatus
from grades.models import (
FinalGrade,
ProctoredExamGrade,
MicromastersCourseCertificate,
CourseRunGradingStatus,
CombinedFinalGrade,
)
from micromasters.celery import app
from micromasters.utils import chunks, now_in_utc
CACHE_ID_BASE_STR = "freeze_grade_{0}"
log = logging.getLogger(__name__)
cache_redis = caches['redis']
@app.task
def generate_course_certificates_for_fa_students():
    """
    Creates any missing unique course-user MicromastersCourseCertificates
    for courses in live financial-aid programs.
    """
    courses = Course.objects.filter(
        program__live=True,
        program__financial_aid_availability=True
    )
    for course in courses:
        # Certificates are only issued once the course has frozen run grades.
        if not course.has_frozen_runs():
            continue
        # Subquery: does a certificate already exist for this (course, user)?
        course_certificates = MicromastersCourseCertificate.objects.filter(
            course=course,
            user=OuterRef('user')
        )
        # Find users that passed the course but don't have a certificate yet
        users_need_cert = FinalGrade.objects.annotate(
            course_certificate=Exists(course_certificates)
        ).filter(
            course_run__course=course,
            status=FinalGradeStatus.COMPLETE,
            passed=True,
            course_certificate=False
        ).values_list('user', flat=True)
        if course.has_exam:
            # need also to pass exam (and the exam grades must be released)
            users_need_cert = ProctoredExamGrade.objects.filter(
                course=course,
                passed=True,
                exam_run__date_grades_available__lte=now_in_utc(),
                user__in=users_need_cert
            ).values_list('user', flat=True)
        for user in users_need_cert:
            try:
                MicromastersCourseCertificate.objects.get_or_create(
                    user_id=user,
                    course=course
                )
            # NOTE(review): get_or_create does not normally raise DoesNotExist;
            # confirm whether that exception type is actually reachable here.
            except (IntegrityError, MicromastersCourseCertificate.DoesNotExist):
                # Log and continue — one failure must not block other users.
                log.exception(
                    "Unable to fetch or create certificate for user id: %d and course: %s",
                    user,
                    course.title
                )
@app.task
def create_combined_final_grades():
    """
    Creates any missing CombinedFinalGrades for courses (in live financial-aid
    programs) that have both frozen runs and an exam.
    """
    courses = Course.objects.filter(
        program__live=True,
        program__financial_aid_availability=True
    )
    for course in courses:
        if course.has_frozen_runs() and course.has_exam:
            # Only exams whose grades have been released count.
            exam_grades = ProctoredExamGrade.objects.filter(
                course=course,
                passed=True,
                exam_run__date_grades_available__lte=now_in_utc()
            )
            # Users who already have a combined grade for this course.
            users_with_grade = set(CombinedFinalGrade.objects.filter(course=course).values_list('user', flat=True))
            for exam_grade in exam_grades:
                if exam_grade.user.id not in users_with_grade:
                    api.update_or_create_combined_final_grade(exam_grade.user, course)
@app.task
def find_course_runs_and_freeze_grades():
    """
    Async task that takes care of finding all the course
    runs that can freeze the final grade to their students.

    Args:
        None

    Returns:
        None
    """
    # Fan out one sub-task per freezable run so each proceeds independently.
    for freezable_run in CourseRun.get_freezable():
        freeze_course_run_final_grades.delay(freezable_run.id)
@app.task
def freeze_course_run_final_grades(course_run_id):
    """
    Async task manager to freeze all the users' final grade in a course run.

    Coordinates a whole freeze round: checks preconditions, finalizes any
    previous round of sub-tasks (revoking unfinished ones), and either marks
    the run complete or fans out a new group of
    freeze_users_final_grade_async sub-tasks, caching the group result id so
    the next invocation can pick it up.

    Args:
        course_run_id (int): a course run id

    Returns:
        None
    """
    course_run = CourseRun.objects.get(id=course_run_id)
    # no need to do anything if the course run is not ready
    if not course_run.can_freeze_grades:
        log.info('the grades course "%s" cannot be frozen yet', course_run.edx_course_key)
        return
    # if it has already completed, do not do anything
    if CourseRunGradingStatus.is_complete(course_run):
        log.info('Final Grades freezing for course run "%s" has already been completed', course_run.edx_course_key)
        return
    # cache id string for this task
    cache_id = CACHE_ID_BASE_STR.format(course_run.edx_course_key)
    # try to get the result id from a previous iteration of this task for this course run
    group_results_id = cache_redis.get(cache_id)
    # if the id is not none, it means that this task already run before for this course run
    # so we need to check if its subtasks have finished
    if group_results_id is not None:
        # delete the entry from the cache (if needed it will be added again later)
        cache_redis.delete(cache_id)
        # extract the results from the id
        results = GroupResult.restore(group_results_id, app=app)
        # if the subtasks are not done, revoke them
        results.revoke()
        # delete the results anyway
        results.delete()
    # extract the users to be frozen for this course
    user_ids_qset = api.get_users_without_frozen_final_grade(course_run).values_list('id', flat=True)
    # find number of users for which cache could not be updated
    con = get_redis_connection("redis")
    failed_users_cache_key = api.CACHE_KEY_FAILED_USERS_BASE_STR.format(course_run.edx_course_key)
    failed_users_count = con.llen(failed_users_cache_key)
    # get the list of users that failed authentication last run of the task
    failed_users_list = list(map(int, con.lrange(failed_users_cache_key, 0, failed_users_count)))
    users_need_freeze = list(user_ids_qset)
    users_left = list(set(users_need_freeze) - set(failed_users_list))
    # if there are no more users to be frozen, just complete the task
    if not users_left:
        log.info('Completing grading with %d users getting refresh cache errors', len(failed_users_list))
        CourseRunGradingStatus.set_to_complete(course_run)
        con.delete(failed_users_cache_key)
        return
    # if the task reaches this point, it means there are users still to be processed
    # clear the list for users for whom cache update failed
    con.delete(failed_users_cache_key)
    # create an entry in with pending status ('pending' is the default status)
    CourseRunGradingStatus.create_pending(course_run=course_run)
    # create a group of subtasks to be run in parallel
    # NOTE(review): chunks are taken from user_ids_qset (all unfrozen users),
    # not users_left; previously-failed users are retried, presumably on
    # purpose since their failure list was just cleared -- confirm intended.
    job = group(
        freeze_users_final_grade_async.s(list_user_ids, course_run.id) for list_user_ids in chunks(user_ids_qset)
    )
    results = job.apply_async()
    # save the result ID in the celery backend
    results.save()
    # put the results id in the cache to be retrieved and finalized later
    cache_redis.set(cache_id, results.id, None)
@app.task
def freeze_users_final_grade_async(user_ids, course_run_id):
    """
    Async task to freeze the final grade in a course run for a list of users.

    Any per-user failure is logged and the remaining users are still
    processed.

    Args:
        user_ids (list): a list of django user ids
        course_run_id (int): a course run id

    Returns:
        None
    """
    course_run = CourseRun.objects.get(id=course_run_id)
    for user in User.objects.filter(id__in=user_ids):
        try:
            api.freeze_user_final_grade(user, course_run)
        except Exception:
            # Narrowed from a bare ``except:`` so task-control signals such
            # as SystemExit/KeyboardInterrupt still propagate; everything
            # else is logged with its traceback and the loop continues.
            log.exception(
                'Impossible to freeze final grade for user "%s" in course %s',
                user.username, course_run.edx_course_key
            )
|
# Plotting helpers for membrane fluctuation-mode analysis.
import os
import sys
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
# Make the sibling 'lib' directory importable so 'common' can be found
sys.path.append(os.path.join(os.path.dirname(__file__), '..', 'lib'))
from common import moving_average
def suq_curve(q, N, A, kc, gamma):
    r"""Height-fluctuation spectrum: <|u(q)|^2> = (N/A) / (kc*q^4 + gamma*q^2).

    Args:
        q: wavenumber (scalar or array)
        N: number of lipids per leaflet
        A: membrane area
        kc: bending modulus
        gamma: surface tension term
    """
    number_density = N / A
    return number_density / (kc * q**4 + gamma * q**2)
def graph_sim_membranemodes(sim,
                            ax,
                            color = 'b',
                            **kwargs):
    r""" Plotting function for membrane modes averaged over several simulations.

    Averages the fluctuation spectrum su(q) across all seeds of ``sim``,
    plots the mean with error bars on ``ax`` (log-log), then fits the window
    [q_cutoff, 1.0) with suq_curve both without and with a tension term and
    overlays both fit curves.

    Args:
        sim: simulation whose ``seeds`` each expose a dataframe ``df`` with
            'x_fft', 'su_fft', 'uq_2d_fft_qcutoff' and 'membrane_area'
            columns plus ``lipids.nlipids_per_leaflet`` -- assumed from
            usage, TODO confirm against callers
        ax: matplotlib axes to draw on
        color: color shared by the data points and fit lines
        **kwargs: pass ``xlabel=True`` to label the x axis
    """
    # Mean q-cutoff and membrane area across seeds (used by the fits below).
    # (The original also tracked a never-used max FFT length; removed.)
    qcutoff_mean = []
    area_mean = []
    for sd in sim.seeds:
        qcutoff_mean.append(np.nanmean(sd.df['uq_2d_fft_qcutoff'].to_numpy()))
        area_mean.append(np.nanmean(sd.df['membrane_area'].to_numpy()))
    qcutoff_mean = np.mean(qcutoff_mean)
    area_mean = np.mean(area_mean)

    # Stack each seed's NaN-stripped (x, su) FFT columns side by side.
    x_fft_arr = None
    su_fft_arr = None
    nlipids_per_leaflet = None
    for sd in sim.seeds:
        nlipids_per_leaflet = sd.lipids.nlipids_per_leaflet
        x_fft = sd.df['x_fft'].to_numpy()
        su_fft = sd.df['su_fft'].to_numpy()
        x_fft = x_fft[~np.isnan(x_fft)]
        su_fft = su_fft[~np.isnan(su_fft)]
        # Fix: the original compared types (type(a) != type(b)) to detect the
        # first iteration, which is fragile; test for None explicitly.
        if x_fft_arr is None:
            x_fft_arr = x_fft
            su_fft_arr = su_fft
        else:
            x_fft_arr = np.column_stack((x_fft_arr, x_fft))
            su_fft_arr = np.column_stack((su_fft_arr, su_fft))

    # Fix: the original guard was `type(x_fft_arr) == None`, which is always
    # False, so an empty simulation crashed in np.mean below.
    if x_fft_arr is None:
        print("Didn't have any FFT components")
        return

    # X coordinates should not change between seeds, but average anyway.
    x_fft_mean = np.mean(x_fft_arr, axis = 1)
    su_fft_mean = np.mean(su_fft_arr, axis = 1)
    su_fft_std = np.std(su_fft_arr, axis = 1)
    ax.errorbar(x_fft_mean, su_fft_mean, yerr = su_fft_std, label = 'FFT', color = color, marker = 's', linestyle = 'none', markerfacecolor = 'none', capsize = 5)

    # Fit window: first index above the mean q-cutoff up to q = 1.0.
    idx = np.int32(np.where(np.greater(x_fft_mean, qcutoff_mean))[0][0])
    jdx = np.int32(np.where(np.greater(x_fft_mean, 1.0))[0][0])
    # Initial kc guess from the first point inside the fit window.
    kcguess1 = nlipids_per_leaflet / area_mean / su_fft_mean[idx] / (x_fft_mean[idx]**4)
    from scipy.optimize import curve_fit
    # kc-only fit (gamma pinned to 0) and a joint (kc, gamma) fit.
    popt_kc, pcov_kc = curve_fit(lambda q, kc: suq_curve(q, nlipids_per_leaflet, area_mean, kc, 0.0), x_fft_mean[idx:jdx], su_fft_mean[idx:jdx], bounds = ([0.0, np.inf]), p0 = [kcguess1])
    popt_ga, pcov_ga = curve_fit(lambda q, kc, gamma: suq_curve(q, nlipids_per_leaflet, area_mean, kc, gamma), x_fft_mean[idx:jdx], su_fft_mean[idx:jdx], bounds = ([0.0, -np.inf], [np.inf, np.inf]), p0 = [kcguess1, 0.0])
    print(f"Simuation fit values:")
    print(f"  kc(guess):    {kcguess1}")
    print(f"  kc only:      {popt_kc[0]}")
    print(f"  kc (both):    {popt_ga[0]}")
    print(f"  gamma (both): {popt_ga[1]}")
    # Fix: both fit curves are evaluated on x_fft_mean; the original mixed in
    # the last seed's raw x_fft array, plotting the fits against stale data.
    ax.plot(x_fft_mean[idx:jdx], suq_curve(x_fft_mean[idx:jdx], N = nlipids_per_leaflet, A = area_mean, kc = popt_kc[0], gamma = 0.0), color = color, linestyle = '--')
    ax.plot(x_fft_mean[idx:jdx], suq_curve(x_fft_mean[idx:jdx], N = nlipids_per_leaflet, A = area_mean, kc = popt_ga[0], gamma = popt_ga[1]), color = color, linestyle = ':')

    # Fluctuation spectra are conventionally shown on log-log axes.
    ax.set_yscale('log')
    ax.set_xscale('log')
    if 'xlabel' in kwargs and kwargs['xlabel'] == True: ax.set_xlabel(r'q ($\sigma^{-1}$)')
|
#!/usr/bin/env python
import collections
from PyQt4 import QtCore, QtGui
class Notification(QtGui.QWidget):
    """A styled pop-up message widget with fade-in/fade-out animations.

    Shows text (optionally with an icon and clickable links) over a parent
    widget.  An optional timeout auto-dismisses the notification; hovering
    the mouse pauses the countdown.
    """
    # Emitted right before the closing fade-out starts.
    aboutToClose = QtCore.pyqtSignal()
    # Emitted whenever setText() replaces the displayed message.
    contentChanged = QtCore.pyqtSignal()

    def __init__(self, text, parent, timeout=None, icon=None, links=None,
                 recyclable=None):
        """
        Args:
            text: message to display (plain or rich text)
            parent: widget the notification floats over
            timeout: auto-dismiss delay in ms; None keeps it open
            icon: optional QIcon rendered to the left of the text
            links: optional {href: callable} mapping for link clicks
            recyclable: truthy makes the timeout hide (reusable) instead
                of close (destroyed)
        """
        super(Notification, self).__init__(parent)
        # Free the widget's resources once it is closed.
        self.setAttribute(QtCore.Qt.WA_DeleteOnClose)
        self.horizontalLayout = QtGui.QHBoxLayout(self)
        self.horizontalLayout.setSpacing(0)
        self.horizontalLayout.setMargin(0)
        # ------------------- Elements
        self.pixmap = None
        if icon is not None:
            self.pixmap = QtGui.QLabel(self)
            # NOTE(review): the pixmap is sized from the label's height
            # *before* layout runs -- verify this yields the intended size.
            self.pixmap.setPixmap(icon.pixmap(
                QtCore.QSize(
                    self.pixmap.height(),
                    self.pixmap.height()
                )
            ))
            self.pixmap.setAutoFillBackground(True)
            self.horizontalLayout.addWidget(self.pixmap)
        self.label = QtGui.QLabel(self)
        self.label.setText(text)
        self.label.setAutoFillBackground(True)
        self.horizontalLayout.addWidget(self.label)
        if links is not None:
            self.links = links
            self.label.linkActivated.connect(self.linkHandler)
        # Single-shot countdown that dismisses the notification.
        self.timeoutTimer = QtCore.QTimer(self)
        self.timeoutTimer.setSingleShot(True)
        # ---------- Animation
        #self.shadow = QtGui.QGraphicsDropShadowEffect()
        #self.shadow.setBlurRadius(10)
        self.goe = QtGui.QGraphicsOpacityEffect(self)
        self.setGraphicsEffect(self.goe)
        # Fade in
        self.animationIn = QtCore.QPropertyAnimation(self.goe, "opacity")
        self.animationIn.setDuration(300)
        self.animationIn.setStartValue(0)
        self.animationIn.setEndValue(1.0)
        # Fade out
        self.animationOut = QtCore.QPropertyAnimation(self.goe, "opacity")
        self.animationOut.setDuration(300)
        self.animationOut.setStartValue(1.0)
        self.animationOut.setEndValue(0)
        if timeout is not None:
            self.timeoutTimer.setInterval(timeout)
            # Start the countdown only once the fade-in has finished.
            self.animationIn.finished.connect(self.timeoutTimer.start)
            self.timeoutTimer.timeout.connect(recyclable and self.hide or self.close)

    def setText(self, text):
        """Replace the message, resize to fit, and notify listeners."""
        self.label.setText(text)
        self.adjustSize()
        self.contentChanged.emit()

    def applyStyle(self, background, foreground, padding = 5, border = 1,
                   radius = 5):
        """Apply a rounded, bordered stylesheet to the label (and icon).

        When an icon is present the icon and label share one visual border:
        the icon keeps the left rounding, the label the right.
        """
        label_style = "; ".join((
            "background-color: %s" % background,
            "color: %s" % foreground,
            "border-color: %s" % foreground,
            "border: %dpx solid %s" % (border, foreground),
            "padding: %dpx" % padding))
        if self.pixmap is not None:
            self.pixmap.setStyleSheet("; ".join((
                "background-color: %s" % background,
                "color: %s" % foreground,
                "border: %dpx solid %s" % (border, foreground),
                "border-right: 0px",
                "padding: %dpx" % padding,
                "border-top-left-radius: %dpx" % radius,
                "border-bottom-left-radius: %dpx" % radius
            )))
            label_style = "; ".join((
                label_style,
                "border-left: 0px",
                "border-top-right-radius: %dpx" % radius,
                "border-bottom-right-radius: %dpx" % radius
            ))
        else:
            label_style = "; ".join((
                label_style,
                "border-radius: %dpx" % radius))
        self.label.setStyleSheet(label_style)

    def show(self, text=None):
        """Show the widget with a fade-in; optionally set new text first."""
        if text is not None:
            self.setText(text)
        self.setWindowOpacity(0.0)
        super(Notification, self).show()
        self.animationIn.start()

    def hide(self):
        """Fade out, then hide (keeps the widget alive for reuse)."""
        # NOTE(review): every call adds another finished->hide connection;
        # repeated hide/close cycles stack connections -- confirm harmless.
        self.animationOut.finished.connect(super(Notification, self).hide)
        self.animationOut.start()

    def close(self):
        """Announce closing, fade out, then destroy the widget."""
        self.aboutToClose.emit()
        self.animationOut.finished.connect(super(Notification, self).close)
        self.animationOut.start()

    def linkHandler(self, link):
        """Invoke the callback registered for the clicked link, if callable."""
        callback = self.links.get(link, None)
        if isinstance(callback, collections.Callable):
            callback()

    def enterEvent(self, event):
        """Pause the auto-dismiss countdown while the mouse hovers."""
        if self.timeoutTimer.isActive():
            self.timeoutTimer.stop()

    def leaveEvent(self, event):
        """Resume the countdown when the mouse leaves.

        NOTE(review): this starts the timer even when no timeout was set;
        in that case nothing is connected to timeout, so firing is a no-op.
        """
        self.timeoutTimer.start()
class OverlayNotifier(QtCore.QObject):
    """Creates and manages Notification widgets stacked over a parent widget.

    Tracks live notifications, re-anchors them to the parent's bottom-right
    corner on resize, and exposes message/status/tooltip factory helpers.
    """
    # Pixels between stacked notifications and from the parent's edges.
    margin = 10
    # Default auto-dismiss interval (ms) for message/tooltip notifications.
    timeout = 2000

    def __init__(self, parent = None):
        super(OverlayNotifier, self).__init__(parent)
        # Watch the parent so notifications can be re-anchored on resize.
        parent.installEventFilter(self)
        self.notifications = []
        self.palette = QtGui.QPalette()
        self.font = QtGui.QFont()
        self.background_role = QtGui.QPalette.ToolTipBase
        self.foreground_role = QtGui.QPalette.ToolTipText

    def setFont(self, font):
        """Set the font applied to newly created notifications."""
        self.font = font

    def setBackgroundRole(self, role):
        """Set the palette role used for notification backgrounds."""
        self.background_role = role

    def setForegroundRole(self, role):
        """Set the palette role used for notification text and borders."""
        self.foreground_role = role

    def setPalette(self, palette):
        """Set the palette the style colors are resolved from."""
        self.palette = palette

    def eventFilter(self, obj, event):
        """Re-anchor all notifications whenever the watched widget resizes."""
        if event.type() == QtCore.QEvent.Resize:
            self._fix_positions()
        return super(OverlayNotifier, self).eventFilter(obj, event)

    def _remove_notification(self):
        """Drop the closing notification (the signal sender) and restack."""
        notification = self.sender()
        notification.aboutToClose.disconnect(self._remove_notification)
        notification.contentChanged.disconnect(self._fix_positions)
        self.notifications.remove(notification)
        self._fix_positions()

    def _fix_positions(self):
        """Stack notifications upward from their parent's bottom-right corner."""
        offsets = {}
        for notification in self.notifications:
            parent = notification.parent()
            # Per-parent running vertical offset, seeded with the margin.
            offset = offsets.setdefault(parent, self.margin)
            rect = parent.geometry()
            x = rect.width() - notification.width() - self.margin
            y = rect.height() - notification.height() - offset
            notification.setGeometry(x, y,
                                     notification.width(),
                                     notification.height())
            offsets[parent] += (notification.height() + self.margin)

    def _notification(self, message, title="", frmt="text", timeout=None, icon=None,
                      links=None, widget=None, recyclable=False):
        """Build and style a Notification; shared factory for the helpers below.

        Fix: ``links`` previously defaulted to a mutable ``{}`` (a shared
        object across calls); it now defaults to None and is normalized
        here, which preserves the original behavior.
        """
        if links is None:
            links = {}
        if title:
            title = "%s:\n" % title if frmt == "text" else "<h4>%s</h4>" % title
            message = title + message
        if frmt == "text" and links:
            message = "<pre>%s</pre>" % message
        if links:
            # Append a right-aligned footer with one anchor per link key.
            message += "<div style='text-align: right; font-size: small;'>"
            for key in links.keys():
                message += "<a href='%s'>%s</a>" % (key, key.title())
            message += "</div>"
        notification = Notification(message,
                                    widget or self.parent(),
                                    timeout,
                                    icon,
                                    links,
                                    recyclable)
        # --------------- Style
        notification.setFont(self.font)
        background = self.palette.color(self.background_role).name()
        color = self.palette.color(self.foreground_role).name()
        notification.applyStyle(background, color)
        notification.adjustSize()
        return notification

    def message(self, *args, **kwargs):
        """Create a tracked notification that auto-dismisses (default timeout)."""
        kwargs.setdefault("timeout", self.timeout)
        notification = self._notification(*args, **kwargs)
        notification.aboutToClose.connect(self._remove_notification)
        notification.contentChanged.connect(self._fix_positions)
        self.notifications.insert(0, notification)
        self._fix_positions()
        return notification

    def status(self, *args, **kwargs):
        """Create a tracked notification with no default timeout (sticky)."""
        notification = self._notification(*args, **kwargs)
        notification.aboutToClose.connect(self._remove_notification)
        notification.contentChanged.connect(self._fix_positions)
        self.notifications.insert(0, notification)
        self._fix_positions()
        return notification

    def tooltip(self, *args, **kwargs):
        """Create an untracked notification positioned at the ``point`` kwarg."""
        kwargs.setdefault("timeout", self.timeout)
        point = kwargs.pop("point", QtCore.QPoint(self.margin, self.margin))
        notification = self._notification(*args, **kwargs)
        notification.setGeometry(point.x(), point.y(),
                                 notification.width(), notification.height())
        return notification
|
from flask import Flask, url_for, redirect, json, request, make_response, Response, stream_with_context
import atp_classes, os, gzip, glob
# Application-wide singletons: the Flask app plus config, cache decorator,
# application DB, Hive DB and login manager from the project's atp_classes.
app = Flask(__name__)
config = atp_classes.Config()
# Sessions are signed with the secret configured for this deployment.
app.secret_key = config.get_config()['session_secret']
cache = atp_classes.Cache()
app_db = atp_classes.AppDB()
hive_db = atp_classes.HiveDB()
app_login = atp_classes.AppLogin(app)
def get_attributes_from_db():
    """Load every attribute document and attach its precomputed SQL expression string."""
    attributes = []
    for record in app_db.get_collection('attributes'):
        attribute = atp_classes.Attribute(record)
        # Cache the rendered expression so templates/queries don't recompute it.
        attribute.expression_string = attribute.logical_expression.convert_to_string()
        attributes.append(attribute)
    return attributes
@app.route('/')
@app.route('/<path:path>')
@app_login.required_login
def index(path=None):
    """Serve the single-page app shell for any authenticated path."""
    # Context manager closes the file promptly; the original leaked the
    # handle until garbage collection.
    with open('static/index.html') as index_file:
        return make_response(index_file.read())
@app.route('/handleLogin', methods=['GET', 'POST'])
def login():
    """Authenticate a user from a JSON POST body; GET redirects to the login form.

    Returns a JSON status payload for POSTs.  The response deliberately does
    not reveal whether the username or the password was wrong.
    """
    if request.method == 'POST':
        # Parse the body once instead of re-parsing it for each field.
        credentials = json.loads(request.data)
        user = atp_classes.User.find_user_by_username(credentials['username'])
        if user and atp_classes.User.validate_login(user.password, credentials['password']):
            user_obj = atp_classes.User(str(user._id), user.username)
            app_login.log_user_in(user_obj)
            return json.dumps({"status": 'success'})
        return json.dumps({"status": 'failed'})
    return redirect(url_for('login_form', next=request.args.get("next")))
@app.route('/login/')
def login_form():
    """Show the SPA login page, or bounce already-authenticated users to the index."""
    if app_login.current_user.is_authenticated:
        return redirect(url_for('index'))
    # Context manager avoids leaking the file handle (the original did not
    # close the file it opened).
    with open('static/index.html') as index_file:
        return make_response(index_file.read())
@app.route('/isUserAuthenticated/')
def is_user_authenticated():
    """Report whether the current session is logged in (and the username if so)."""
    current = app_login.current_user
    if not current.is_authenticated:
        return json.dumps({"status": False})
    return json.dumps({"status": True, "username": current.username})
@app.route('/isUserAdmin/')
def is_user_admin():
    """Report whether the current session belongs to an authenticated admin."""
    current = app_login.current_user
    if current.is_authenticated and current.is_admin():
        return json.dumps({"status": True})
    return json.dumps({"status": False})
@app.route('/logout/')
def logout():
    """Terminate the current session and send the user back to the root page."""
    app_login.log_user_out()
    return redirect('/')
@app.route('/queryHive/', methods=['POST'])
@app_login.required_login
@cache
def query_hive():
    """Count rows matching each chosen attribute and each pair of attributes.

    The POST body carries 'chosenAttributes' (a list of {'id': ...}).  One
    SELECT is built with a population column plus one SUM(CASE ...) column
    per attribute and per attribute pair, optionally restricted to the
    configured partition condition.  Returns the single result row as JSON.

    Raises:
        Exception: when the Hive backend returns an error instead of rows.
    """
    form_chosen_attributes = json.loads(request.data)['chosenAttributes']
    # Config is invariant per request: look it up once, not per attribute.
    big_data_config = config.get_config()['database']["bigData"]
    partition_condition = big_data_config['partitionCondition']
    # Keep only the DB attributes whose id the client posted.
    chosen_attributes = []
    for dbattribute in get_attributes_from_db():
        for cattribute in form_chosen_attributes:
            if str(dbattribute._id) == cattribute['id']:
                chosen_attributes.append(dbattribute)
    query_string = ''
    for index, attribute in enumerate(chosen_attributes):
        if partition_condition != '':
            if not query_string:
                query_string = 'SELECT SUM(CASE WHEN {partitionCondition} THEN 1 ELSE 0 END) total_idp'\
                    .format(partitionCondition=partition_condition)
            query_string += ''',
                SUM(CASE WHEN {expression} AND {partitionCondition} THEN 1 ELSE 0 END) total_{id}'''\
                .format(id=attribute._id, expression=attribute.logical_expression.convert_to_string(),
                        partitionCondition=partition_condition)
            # One pairwise-overlap column for every later attribute.
            for attribute2 in chosen_attributes[(index + 1):]:
                query_string += ''',
                SUM(CASE WHEN ({expression} AND {expression2} AND {partitionCondition}) THEN 1 ELSE 0 END) total_{id1}_{id2}''' \
                    .format(id1=attribute._id, id2=attribute2._id,
                            expression=attribute.logical_expression.convert_to_string(),
                            expression2=attribute2.logical_expression.convert_to_string(),
                            partitionCondition=partition_condition)
        else:
            # Fix: the original reassigned query_string unconditionally on
            # every iteration here, discarding all previously accumulated
            # attribute columns; guard it like the partitioned branch does.
            if not query_string:
                query_string = '''SELECT COUNT(1) total_idp'''
            query_string += ''',
                SUM(CASE WHEN {expression} THEN 1 ELSE 0 END) total_{id}''' \
                .format(id=attribute._id, expression=attribute.logical_expression.convert_to_string())
            for attribute2 in chosen_attributes[(index + 1):]:
                query_string += ''',
                SUM(CASE WHEN ({expression} AND {expression2}) THEN 1 ELSE 0 END) total_{id1}_{id2}''' \
                    .format(id1=attribute._id, id2=attribute2._id,
                            expression=attribute.logical_expression.convert_to_string(),
                            expression2=attribute2.logical_expression.convert_to_string())
    query_string += '''
        FROM {tableName}'''\
        .format(tableName=big_data_config['tableName'])
    results = hive_db.execute_query(query_string)
    if not isinstance(results, list):
        # The Hive helper returns an error payload instead of raising.
        raise Exception(results)
    return json.dumps(results[0])
@app.route('/queryHive/segments', methods=['POST'])
@app_login.required_login
@cache
def query_hive_segments():
    """Count the total population and the rows matching a posted expression.

    The POST body carries 'logical_expression'; the counts are optionally
    restricted to the configured partition condition.  Returns the single
    result row as JSON.

    Raises:
        Exception: when the Hive backend returns an error instead of rows.
    """
    form_logical_expression = json.loads(request.data)['logical_expression']
    query_logical_expression = atp_classes.LogicalExpression(form_logical_expression)
    # Config is invariant per request: look it up once.
    big_data_config = config.get_config()['database']["bigData"]
    partition_condition = big_data_config['partitionCondition']
    if partition_condition != '':
        query_string = '''SELECT SUM(CASE WHEN {partitionCondition} THEN 1 ELSE 0 END) total_idp,
            SUM(CASE WHEN {expression} AND {partitionCondition} THEN 1 ELSE 0 END) total_seg_idp
        ''' \
            .format(expression=query_logical_expression.convert_to_string(),
                    partitionCondition=partition_condition)
    else:
        query_string = '''SELECT COUNT(1) total_idp,
            SUM(CASE WHEN {expression} THEN 1 ELSE 0 END) total_seg_idp
        ''' \
            .format(expression=query_logical_expression.convert_to_string())
    # The FROM clause is identical in both branches; append it once.
    query_string += '''FROM {tableName}'''\
        .format(tableName=big_data_config['tableName'])
    results = hive_db.execute_query(query_string)
    if not isinstance(results, list):
        raise Exception(results)
    return json.dumps(results[0])
@app.route('/queryHive/segments/ids', methods=['POST'])
@app_login.required_login
def query_hive_segments_ids():
    """Stream the ids of all rows matching a posted logical expression.

    Builds a Hive query that aggregates the population/segment counts and
    explodes the matching ids, then streams the rows through a formatter:
    the first row carries a JSON header (counts and output filename),
    subsequent rows carry one id per line.
    """
    form_logical_expression = json.loads(request.data)['logical_expression']
    query_logical_expression = atp_classes.LogicalExpression(form_logical_expression)
    if config.get_config()['database']["bigData"]['partitionCondition'] != '':
        query_string = '''SELECT id, total_idp, total_seg_idp
            FROM (SELECT SUM(CASE WHEN {partitionCondition} THEN 1 ELSE 0 END) total_idp,
            SUM(CASE WHEN {expression} AND {partitionCondition} THEN 1 ELSE 0 END) total_seg_idp,
            COLLECT_LIST(CASE WHEN {expression} AND {partitionCondition} THEN id ELSE NULL END) id_list
            FROM {tableName}) aggregateTable
            LATERAL VIEW explode(id_list) idTable as id
            ''' \
            .format(expression=query_logical_expression.convert_to_string(),
                    tableName=config.get_config()['database']["bigData"]['tableName'],
                    partitionCondition=config.get_config()['database']["bigData"]['partitionCondition'])
    else:
        query_string = '''SELECT id, total_idp, total_seg_idp
            FROM (SELECT COUNT(1) total_idp,
            SUM(CASE WHEN {expression} THEN 1 ELSE 0 END) total_seg_idp,
            COLLECT_LIST(CASE WHEN {expression} THEN id ELSE NULL END) id_list
            FROM {tableName}) aggregateTable
            LATERAL VIEW explode(id_list) idTable as id
            ''' \
            .format(expression=query_logical_expression.convert_to_string(),
                    tableName=config.get_config()['database']["bigData"]['tableName'])
    # Function to pass to generator to format data from results returned by Hive
    def result_formatter(row, index, filename=''):
        # First row: JSON header plus the first id; later rows: one id per line.
        if index == 1:
            # NOTE(review): the firstId= kwarg has no matching placeholder in
            # the format string and is silently ignored -- confirm leftover.
            format_string = '{{"total_idp":{idp_count},"total_seg_idp":{seg_count},"filename":"{output_file}"}}'\
                .format(idp_count=row['total_idp'], seg_count=row['total_seg_idp'], firstId=row['id'], output_file=filename)
            return format_string, str(row['id'])
        else:
            return None, "\n" + str(row['id'])
    return Response(hive_db.to_file_generator_execute_query(query_string, result_formatter, 3000000))
@app.route('/getAttributesList/')
@app_login.required_login
def get_attributes():
    """Return the id/name pairs of every attribute as JSON."""
    summaries = [
        {"id": attribute._id, "name": attribute.name}
        for attribute in get_attributes_from_db()
    ]
    return json.dumps(summaries, default=atp_classes.JSONHandler.JSONHandler)
@app.route('/admin/getAttributesList/')
@app_login.required_login
def get_admin_attributes():
    """Return the full attribute documents for the admin UI as JSON."""
    full_attributes = get_attributes_from_db()
    return json.dumps(full_attributes, default=atp_classes.JSONHandler.JSONHandler)
@app.route('/admin/getFieldsList/')
@app_login.required_login
@cache
def get_admin_fields_list():
    """Return all field documents, sorted by data source then name, as JSON."""
    sort_order = [('data_source', atp_classes.AppDB.ASCENDING),
                  ('name', atp_classes.AppDB.ASCENDING)]
    fields = app_db.get_collection('fields', sort_order)
    return json.dumps(fields, default=atp_classes.JSONHandler.JSONHandler)
@app.route('/admin/updateAttribute/', methods=['POST'])
@app_login.required_login
def update_attribute():
    """Persist an edited attribute document posted by the admin UI."""
    payload = json.loads(request.data)['updateAttribute']
    updated = app_db.update_collection('attributes', payload)
    return json.dumps(updated, default=atp_classes.JSONHandler.JSONHandler)
@app.route('/admin/addAttribute/', methods=['POST'])
@app_login.required_login
def add_attribute():
    """Create a new attribute document posted by the admin UI."""
    payload = json.loads(request.data)['addAttribute']
    created = app_db.add_to_collection('attributes', payload)
    return json.dumps(created, default=atp_classes.JSONHandler.JSONHandler)
@app.route('/admin/removeAttribute/', methods=['POST'])
@app_login.required_login
def remove_attribute():
    """Delete an attribute document; report whether anything was removed."""
    payload = json.loads(request.data)['removeAttribute']
    removed_count = app_db.remove_from_collection('attributes', payload)
    return json.dumps({"status": removed_count > 0})
@app.route('/admin/getUsers/')
@app_login.required_login
@app_login.required_admin
def get_users():
    """Return all user documents with the password field blanked out."""
    sanitized_users = []
    for user_doc in app_db.get_collection('users'):
        # Never ship password hashes to the admin UI.
        user_doc["password"] = ''
        sanitized_users.append(user_doc)
    return json.dumps(sanitized_users, default=atp_classes.JSONHandler.JSONHandler)
@app.route('/admin/updateUser/', methods=['POST'])
@app_login.required_login
@app_login.required_admin
def update_user():
    """Persist an edited user document, hashing the submitted password."""
    payload = json.loads(request.data)['updateUser']
    # Only the hash is ever stored.
    payload['password'] = atp_classes.User.generate_hash(payload['password'])
    updated = app_db.update_collection('users', payload)
    return json.dumps(updated, default=atp_classes.JSONHandler.JSONHandler)
@app.route('/admin/addUser/', methods=['POST'])
@app_login.required_login
@app_login.required_admin
def add_user():
    """Create a new user document, hashing the submitted password."""
    payload = json.loads(request.data)['addUser']
    # Only the hash is ever stored.
    payload['password'] = atp_classes.User.generate_hash(payload['password'])
    created = app_db.add_to_collection('users', payload)
    return json.dumps(created, default=atp_classes.JSONHandler.JSONHandler)
@app.route('/admin/removeUser/', methods=['POST'])
@app_login.required_login
@app_login.required_admin
def remove_user():
    """Delete a user document; report whether anything was removed."""
    payload = json.loads(request.data)['removeUser']
    removed_count = app_db.remove_from_collection('users', payload)
    return json.dumps({"status": removed_count > 0})
@app.route("/downloadIDs/<filename>")
@app_login.required_login
def download_ids(filename):
    """Stream the gzip-compressed id files matching ``filename`` to the client."""
    def read_file():
        # Fix: os.environ.get avoids a KeyError when TMPDIR is unset
        # (the original indexed os.environ directly).
        tmp_dir = os.environ.get('TMPDIR') or './tmp'
        for file_part in glob.glob(tmp_dir + '/' + filename + '*.txt.gz'):
            # Fix: context manager guarantees the handle is closed even if
            # the client disconnects mid-stream.
            with gzip.open(file_part, 'rb') as f:
                # Decompress and yield in fixed-size chunks.
                while True:
                    piece = f.read(1024)
                    if not piece:
                        break
                    yield piece
    return Response(stream_with_context(read_file()))
@app.route("/downloadIDsStatus/<filename>")
@app_login.required_login
def download_ids_status(filename):
    """Report whether the background id-file build for ``filename`` is done.

    Returns 'processing' while the .txt.build marker file exists, 'done'
    otherwise.
    """
    # Fix: os.environ.get avoids a KeyError when TMPDIR is unset.
    tmp_dir = os.environ.get('TMPDIR') or './tmp'
    if os.path.isfile(tmp_dir + '/' + filename + '.txt.build'):
        return 'processing'
    return 'done'
@app.errorhandler(Exception)
def handle_exceptions(err):
    """Convert any uncaught exception into a short 500 text response."""
    message = str(err)
    # Truncate long messages so error responses stay small.
    if len(message) > 150:
        message = message[:150] + '...'
    return make_response(message, 500)
if __name__ == '__main__':
    # Development entry point; the PORT env var overrides the configured port.
    app.run(debug=True, host=config.get_config()['host'], threaded=True,
            port=int(os.getenv('PORT', config.get_config()['port'])))
|
def progress(count, total, suffix=''):
    """Render a 60-character text progress bar, redrawn in place on stdout.

    Args:
        count: amount of work completed so far
        total: total amount of work; values <= 0 are treated as complete
        suffix: optional text appended while the bar is still in progress
    """
    bar_len = 60
    # Fix: the original raised ZeroDivisionError when total == 0.
    if total <= 0:
        count = total = 1
    filled_len = int(round(bar_len * count / float(total)))
    percents = round(100.0 * count / float(total), 1)
    bar = '=' * filled_len + '-' * (bar_len - filled_len)
    if percents < 100:
        # Carriage return redraws the bar in place; flush=True (not the
        # int 1) makes the intent explicit.
        print('\r[{}] {}{} ...{}'.format(bar, percents, '%', suffix), end='', flush=True)
    else:
        # Final update gets a newline so subsequent output starts cleanly.
        print('\r[{}] {}{}'.format(bar, percents, '%'), end='\n', flush=True)
|
# Read input
N, M = map(int, input().split())
# Use the closed-form sum of an (arithmetic * geometric) series
ans = (100 * N + 1800 * M) * 2**M
# Print the answer
print(ans)
|
#!/usr/bin/env python
from optparse import OptionParser
import os
import subprocess
import shutil
import logging
import signal
import re
# Directory containing this script, and the virtualenv directory it manages.
pwd = os.path.abspath(os.path.dirname(__file__))
vedir = os.path.abspath(os.path.join(pwd, "ve"))
def kill_daemons(sig=signal.SIGKILL):
    """Send ``sig`` to every 'npact-*' process owned by the current user.

    Args:
        sig: signal to deliver; defaults to SIGKILL.
    """
    uid = os.getuid()
    # List this user's processes as "PID COMMAND" lines.
    cmd = ['ps', 'x', '-U', str(uid), '-o', 'pid,command']
    proc = subprocess.Popen(cmd, stdout=subprocess.PIPE)
    # Skip the ps header row.
    lines = proc.stdout.readlines()[1:]
    logging.debug("Searching %d processes for this user.", len(lines))
    for l in lines:
        l = l.strip()
        # NOTE(review): on Python 3 these lines would be bytes and this str
        # pattern would not match; the script targets Python 2 (see the
        # print statement in __main__).
        m = re.match('(\\d+) (npact-.*)', l)
        if m:
            pid, name = m.groups()
            logging.warning("Killing proc %s %r", pid, name)
            os.kill(int(pid), sig)
def cleanup_existing():
    """Remove any previously built virtualenv so the rebuild starts clean."""
    if not os.path.exists(vedir):
        return
    logging.info("Removing existing virtual environment")
    shutil.rmtree(vedir)
def init_virtualenv():
    """Create the 've' virtualenv and install requirements.txt into it.

    Exits the whole process with the subprocess's return code if either
    step fails.
    """
    logging.info("Creating virtualenvironment")
    virtualenv_support_dir = os.path.abspath(
        os.path.join(pwd, "lib", "virtualenv_support"))
    # Bootstrap the env with the bundled virtualenv.py and support packages.
    ret = subprocess.call(["python", "virtualenv.py",
                           "--extra-search-dir=%s" % virtualenv_support_dir,
                           "--prompt=(npact)",
                           vedir],
                          cwd=pwd)
    if ret:
        logging.critical("Failed creating virtualenv: %s", ret)
        exit(ret)
    logging.debug("Installing libraries")
    # Install dependencies with the virtualenv's own pip.
    cmd = [os.path.join(vedir, 'bin', 'pip'), "install",
           # "--index-url=''",
           "--requirement", os.path.join(pwd, "requirements.txt")]
    ret = subprocess.call(cmd, cwd=pwd)
    if ret:
        logging.critical("Failed installing libraries from requirements.txt")
        exit(ret)
def create_aux_directories():
    """Ensure the auxiliary 'webroot' directory exists."""
    webroot_missing = not os.path.exists('webroot')
    if webroot_missing:
        os.makedirs('webroot')
def build_pynpact():
    """Run make in the pynpact directory to build its C components.

    A failure is logged as critical but is not fatal; the rest of the
    bootstrap proceeds without the C programs.
    """
    logging.info("Building pynpact C code.")
    pynpact_dir = os.path.join(os.path.dirname(__file__), "pynpact/")
    ret = subprocess.call(["make"], cwd=pynpact_dir)
    if ret:
        logging.critical("Make failed; C programs may not be availalbe.")
if __name__ == '__main__':
    # Command-line front end: with no args run every bootstrap step in
    # order; otherwise run only the steps named on the command line.
    parser = OptionParser("""%prog [options] [Command ...]
    Runs the initialization and building code to get the system into a
    working state after a fresh checkout.
    When given with no commands does the full process. Specific steps
    can be invoked by name, including:
    * kill-daemons
    * cleanup-existing
    * init-virtualenv
    * create-aux-directories
    * build-pynpact
    """)
    parser.add_option("-v", "--verbose", action="store_true", dest="verbose",
                      help="Show more verbose log messages.")
    (options, args) = parser.parse_args()
    # The verbose flag controls the log level for every step below.
    logging.basicConfig(level=(options.verbose and logging.DEBUG or logging.INFO),
                        format="%(asctime)s %(levelname)-8s %(message)s",
                        datefmt='%H:%M:%S')
    if len(args) == 0:
        # Full bootstrap, in dependency order.
        cleanup_existing()
        kill_daemons()
        init_virtualenv()
        create_aux_directories()
        build_pynpact()
        # Python 2 print statement -- this script targets Python 2.
        print "\nSuccessfully finished bootstrap.py!"
    else:
        # Map each CLI name (dashes) to the matching function (underscores).
        for arg in args:
            globals()[arg.replace('-','_')]()
|
# Copyright cocotb contributors
# Licensed under the Revised BSD License, see LICENSE for details.
# SPDX-License-Identifier: BSD-3-Clause
import ctypes
import pytest
import cocotb.utils
class TestHexDump:
    """Tests for cocotb.utils.hexdump argument handling."""

    def test_int_illegal(self):
        # Integers were accepted historically; now they must raise.
        with pytest.raises(TypeError):
            cocotb.utils.hexdump(1)

    def test_str_deprecated(self):
        # str input still works but warns; its output must match bytes input.
        with pytest.warns(DeprecationWarning) as warnings:
            dump_str = cocotb.utils.hexdump("\x20\x65\x00\xff")
        last_message = str(warnings[-1].message)
        assert "str" in last_message
        assert "bytes instead" in last_message
        assert cocotb.utils.hexdump(b"\x20\x65\x00\xff") == dump_str
class TestHexDiffs:
    """Tests for cocotb.utils.hexdiffs argument handling."""

    def test_int_illegal(self):
        # Integers were accepted historically; now they must raise.
        with pytest.raises(TypeError):
            cocotb.utils.hexdiffs(0, 1)

    def test_str_deprecated(self):
        # str inputs still work but warn; the diff must match bytes inputs.
        with pytest.warns(DeprecationWarning) as warnings:
            diff_str = cocotb.utils.hexdiffs("\x20\x65\x00\xff", "\x20\x00\x65")
        last_message = str(warnings[-1].message)
        assert "str" in last_message
        assert "bytes instead" in last_message
        assert cocotb.utils.hexdiffs(b"\x20\x65\x00\xff", b"\x20\x00\x65") == diff_str
def test_pack_deprecated():
    """cocotb.utils.pack warns and returns the struct's raw bytes."""
    class Example(ctypes.Structure):
        _fields_ = [("a", ctypes.c_byte), ("b", ctypes.c_byte)]

    example = Example(a=0xCC, b=0x55)
    with pytest.warns(DeprecationWarning):
        packed = cocotb.utils.pack(example)
    assert packed == bytes(example)
def test_unpack_deprecated():
    """cocotb.utils.unpack warns and behaves like a memoryview copy-in."""
    class Example(ctypes.Structure):
        _fields_ = [("a", ctypes.c_byte), ("b", ctypes.c_byte)]

    via_unpack = Example(a=0xCC, b=0x55)
    via_memoryview = Example(a=0xCC, b=0x55)
    payload = b"\x01\x02"
    with pytest.warns(DeprecationWarning):
        cocotb.utils.unpack(via_unpack, payload)
    assert via_unpack.a == 1 and via_unpack.b == 2
    # Reference behaviour: writing through a memoryview gives the same result.
    memoryview(via_memoryview).cast("B")[:] = payload
    assert via_memoryview.a == 1 and via_memoryview.b == 2
|
import pytest
from determined.experimental import Determined, ModelSortBy
from tests import config as conf
from tests import experiment as exp
@pytest.mark.e2e_cpu
def test_model_registry() -> None:
    """End-to-end test of the model registry.

    Exercises the full lifecycle against a live master: create a model,
    mutate metadata and labels, archive/unarchive, register/list/delete
    versions, and finally list and delete models.  Ordering of the steps
    matters because each assertion checks DB state produced by the previous
    call.
    """
    # Produce a finished experiment so there is a checkpoint to register.
    exp_id = exp.run_basic_test(
        conf.fixtures_path("mnist_pytorch/const-pytorch11.yaml"),
        conf.tutorials_path("mnist_pytorch"),
        None,
    )
    d = Determined(conf.make_master_url())
    # Create a model and validate twiddling the metadata.
    mnist = d.create_model("mnist", "simple computer vision model", labels=["a", "b"])
    assert mnist.metadata == {}
    mnist.add_metadata({"testing": "metadata"})
    db_model = d.get_model(mnist.model_id)
    # Make sure the model metadata is correct and correctly saved to the db.
    assert mnist.metadata == db_model.metadata
    assert mnist.metadata == {"testing": "metadata"}
    # Confirm DB assigned username
    assert db_model.username == "determined"
    # add_metadata merges: existing keys are kept ...
    mnist.add_metadata({"some_key": "some_value"})
    db_model = d.get_model(mnist.model_id)
    assert mnist.metadata == db_model.metadata
    assert mnist.metadata == {"testing": "metadata", "some_key": "some_value"}
    # ... and values for keys that already exist are overridden.
    mnist.add_metadata({"testing": "override"})
    db_model = d.get_model(mnist.model_id)
    assert mnist.metadata == db_model.metadata
    assert mnist.metadata == {"testing": "override", "some_key": "some_value"}
    mnist.remove_metadata(["some_key"])
    db_model = d.get_model(mnist.model_id)
    assert mnist.metadata == db_model.metadata
    assert mnist.metadata == {"testing": "override"}
    mnist.set_labels(["hello", "world"])
    db_model = d.get_model(mnist.model_id)
    assert mnist.labels == db_model.labels
    assert db_model.labels == ["hello", "world"]
    # confirm patch does not overwrite other fields
    assert db_model.metadata == {"testing": "override"}
    # archive and unarchive
    assert mnist.archived is False
    mnist.archive()
    db_model = d.get_model(mnist.model_id)
    assert db_model.archived is True
    mnist.unarchive()
    db_model = d.get_model(mnist.model_id)
    assert db_model.archived is False
    # Register a version for the model and validate the latest.
    checkpoint = d.get_experiment(exp_id).top_checkpoint()
    model_version = mnist.register_version(checkpoint.uuid)
    assert model_version.model_version == 1
    latest_version = mnist.get_version()
    assert latest_version is not None
    assert latest_version.checkpoint.uuid == checkpoint.uuid
    # Version name and notes round-trip through the DB.
    latest_version.set_name("Test 2021")
    db_version = mnist.get_version()
    assert db_version is not None
    assert db_version.name == "Test 2021"
    latest_version.set_notes("# Hello Markdown")
    db_version = mnist.get_version()
    assert db_version is not None
    assert db_version.notes == "# Hello Markdown"
    # Run another basic test and register its checkpoint as a version as well.
    # Validate the latest has been updated.
    exp_id = exp.run_basic_test(
        conf.fixtures_path("mnist_pytorch/const-pytorch11.yaml"),
        conf.tutorials_path("mnist_pytorch"),
        None,
    )
    checkpoint = d.get_experiment(exp_id).top_checkpoint()
    model_version = mnist.register_version(checkpoint.uuid)
    assert model_version.model_version == 2
    latest_version = mnist.get_version()
    assert latest_version is not None
    assert latest_version.checkpoint.uuid == checkpoint.uuid
    # Ensure the correct number of versions are present.
    all_versions = mnist.get_versions()
    assert len(all_versions) == 2
    # Test deletion of model version
    latest_version.delete()
    all_versions = mnist.get_versions()
    assert len(all_versions) == 1
    # Create some more models and validate listing models.
    tform = d.create_model("transformer", "all you need is attention")
    d.create_model("object-detection", "a bounding box model")
    models = d.get_models(sort_by=ModelSortBy.NAME)
    assert [m.name for m in models] == ["mnist", "object-detection", "transformer"]
    # Test model labels combined
    tform.set_labels(["world", "test", "zebra"])
    labels = d.get_model_labels()
    # NOTE(review): label ordering here appears to reflect usage frequency
    # then insertion -- confirm against the get_model_labels API contract.
    assert labels == ["world", "hello", "test", "zebra"]
    # Test deletion of model
    tform.delete()
    models = d.get_models(sort_by=ModelSortBy.NAME)
    assert [m.name for m in models] == ["mnist", "object-detection"]
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
import logging
logger = logging.getLogger(__package__)
|
#!/usr/bin/env python
import roslib; roslib.load_manifest('wiimote')
import rospy,math
from sensor_msgs.msg import JoyFeedbackArray
from sensor_msgs.msg import JoyFeedback
from wiimote.msg import State
# Roll angle of the wiimote in integer degrees (0-360); 444 is an
# out-of-range sentinel meaning "no accelerometer sample processed yet".
siita=444
# Current speed level (1..4); LEDs 0..sp-1 are lit to display it.
sp=2
# Previous wiimote State message, kept for button edge detection.
past=State()
def callback(data):
    """Handle one incoming wiimote State message.

    Derives a roll angle from the zeroed accelerometer vector, rumbles when
    the controller is near the wrap-around point, adjusts the speed level on
    button press edges (mirroring it in the LED feedback), then republishes
    the shared feedback message.  Mutates the module-level state declared
    global below.
    """
    global past,led0,led1,led2,led3,rum,siita,sp
    # Only recompute the angle when the accelerometer reading changed.
    if not (data.linear_acceleration_zeroed == past.linear_acceleration_zeroed):
        # Normalise (x, y) to a unit vector, then take its angle; the +180
        # offset maps atan2's (-180, 180] range onto (0, 360].
        x=data.linear_acceleration_zeroed.x/math.sqrt(pow((data.linear_acceleration_zeroed.x),2)+pow((data.linear_acceleration_zeroed.y),2))
        y=data.linear_acceleration_zeroed.y/math.sqrt(pow((data.linear_acceleration_zeroed.x),2)+pow((data.linear_acceleration_zeroed.y),2))
        siita=180+math.degrees(math.atan2(y,x))
        siita=int(siita)
        # Rumble while the roll angle is within 10 degrees of wrap-around.
        if(10>siita or siita>350):
            rum.intensity=1
        else:
            rum.intensity=0
    # Button edge detection: act only on the press (False -> True) edge.
    # NOTE(review): the button-index -> physical-button mapping below is
    # taken from the inline comments -- confirm against the wiimote driver.
    if data.buttons[1]==True and past.buttons[1]==False:
        #accel
        pass
    elif data.buttons[0]==True and past.buttons[0]==False:
        #Brake
        pass
    elif data.buttons[6]==True and past.buttons[6]==False:
        #Left
        pass
    elif data.buttons[7]==True and past.buttons[7]==False:
        #Right
        pass
    elif data.buttons[2]==True and past.buttons[2]==False:
        # Speed up: clamp at level 4, and light the LED for the new level.
        if sp>=4:
            pass
        else:
            sp+=1
            msg.array[sp-1].intensity=1
        #Speed Up
    elif data.buttons[3]==True and past.buttons[3]==False:
        # Speed down: clamp at level 1, and switch off the old level's LED.
        if sp<=1:
            pass
        else:
            sp-=1
            msg.array[sp].intensity=0
        #Speed down
    elif data.buttons[5]==True and past.buttons[5]==False:
        #LED ON
        pass
    elif data.buttons[5]==False and past.buttons[5]==True:
        #LED OFF
        pass
    print(siita)
    pub.publish(msg)
    # Remember this message for the next edge comparison.
    past=data
# rospy requires the node to be initialised before any publishers or
# subscribers are registered (the original registered the subscriber first).
rospy.init_node('ledControlTester', anonymous=True)
pub = rospy.Publisher('/joy/set_feedback', JoyFeedbackArray, queue_size=10)
#stm = rospy.Publisher('/stm', stm_carrot, queue_size=1)
# Build the feedback message once: four LEDs plus a single rumble actuator.
led0 = JoyFeedback()
led0.type = JoyFeedback.TYPE_LED
led0.id = 0
led1 = JoyFeedback()
led1.type = JoyFeedback.TYPE_LED
led1.id = 1
led2 = JoyFeedback()
led2.type = JoyFeedback.TYPE_LED
led2.id = 2
led3 = JoyFeedback()
led3.type = JoyFeedback.TYPE_LED
led3.id = 3
rum = JoyFeedback()
rum.type = JoyFeedback.TYPE_RUMBLE
rum.id = 0
msg = JoyFeedbackArray()
msg.array = [led0, led1, led2, led3, rum]
# LEDs 0..sp-1 indicate the current speed level; sp starts at 2.
led0.intensity = 1
led1.intensity = 1
# Subscribe last: rospy dispatches callbacks on a background thread as soon
# as the subscription is registered, so `msg` and the feedback objects above
# must already exist before the first callback can fire.
rospy.Subscriber('/wiimote/state', State, callback, queue_size=10)
if __name__ == '__main__':
    try:
        rospy.spin()
    except KeyboardInterrupt:
        # The original called ser.close() and sys.exit() here, but `ser` was
        # never defined and `sys` never imported, so shutdown would have
        # raised NameError.  There is nothing to clean up for this node.
        pass
|
#encoding:utf-8
'''
@author: look
@copyright: 1999-2020 Alibaba.com. All rights reserved.
@license: Apache Software License 2.0
@contact: 390125133@qq.com
'''
import os
import re
import sys,csv
import threading
import random
import time
import traceback
# Make the repository root importable when this module is executed directly.
BaseDir=os.path.dirname(__file__)
sys.path.append(os.path.join(BaseDir,'../..'))
from mobileperf.common.log import logger
from mobileperf.android.tools.androiddevice import AndroidDevice
from mobileperf.android.globaldata import RuntimeData
from mobileperf.common.utils import TimeUtils
class DeviceMonitor(object):
    '''Monitors device-side state while a test runs.

    Tracks the foreground activity (optionally relaunching the app when it
    leaves a whitelist of activities) and can poll whether the package under
    test has been uninstalled.
    '''
    def __init__(self, device_id, packagename, interval=1.0, main_activity=None,
                 activity_list=None, event=None, activity_queue=None):
        '''
        :param device_id: adb serial number of the device to monitor
        :param packagename: package name of the app under test
        :param interval: polling interval in seconds for the activity monitor
        :param list main_activity: main entry activities of the module; one is
            relaunched at random when the current activity falls outside
            activity_list
        :param list activity_list: whitelist of allowed activities; empty
            means no restriction
        :param event: threading.Event that is set once the app is found
            uninstalled
        :param activity_queue: optional queue fed [timestamp, activity] pairs
        '''
        self.uninstall_flag = event
        self.device = AndroidDevice(device_id)
        self.packagename = packagename
        self.interval = interval
        # None sentinels instead of mutable default arguments, so separate
        # instances never share (and mutate) the same default list objects.
        self.main_activity = main_activity if main_activity is not None else []
        self.activity_list = activity_list if activity_list is not None else []
        self.stop_event = threading.Event()
        self.activity_queue = activity_queue
        self.current_activity = None

    def start(self, starttime):
        # Launch the activity-monitor thread; the uninstall checker stays
        # disabled, matching previous behaviour.
        self.activity_monitor_thread = threading.Thread(target=self._activity_monitor_thread)
        self.activity_monitor_thread.start()
        logger.debug("DeviceMonitor activitymonitor has started...")
        # self.uninstaller_checker_thread = threading.Thread(target=self._uninstaller_checker_thread)
        # self.uninstaller_checker_thread.start()
        # logger.debug("DeviceMonitor uninstaller checker has started...")

    def stop(self):
        # Thread.isAlive() was removed in Python 3.9; is_alive() is the
        # portable spelling.
        if self.activity_monitor_thread.is_alive():
            self.stop_event.set()
            self.activity_monitor_thread.join(timeout=1)
            self.activity_monitor_thread = None
            if self.activity_queue:
                self.activity_queue.task_done()
            logger.debug("DeviceMonitor stopped!")

    def _activity_monitor_thread(self):
        '''Poll the foreground activity every `interval` seconds.

        Each sample is appended to current_activity.csv and, when configured,
        pushed onto activity_queue; when the activity leaves activity_list, a
        randomly chosen main_activity is restarted.
        '''
        activity_title = ("datetime", "current_activity")
        self.activity_file = os.path.join(RuntimeData.package_save_path, 'current_activity.csv')
        try:
            with open(self.activity_file, 'a+') as af:
                csv.writer(af, lineterminator='\n').writerow(activity_title)
        except Exception as e:
            logger.error("file not found: " + str(self.activity_file))
        while not self.stop_event.is_set():
            try:
                before = time.time()
                self.current_activity = self.device.adb.get_current_activity()
                collection_time = time.time()
                activity_list = [collection_time, self.current_activity]
                if self.activity_queue:
                    logger.debug("activity monitor thread activity_list: " + str(activity_list))
                    self.activity_queue.put(activity_list)
                if self.current_activity:
                    logger.debug("current activity: " + self.current_activity)
                    if self.main_activity and self.activity_list:
                        if self.current_activity not in self.activity_list:
                            start_activity = self.packagename + "/" + self.main_activity[
                                random.randint(0, len(self.main_activity) - 1)]
                            logger.debug("start_activity:" + start_activity)
                            self.device.adb.start_activity(start_activity)
                    activity_tuple = (TimeUtils.getCurrentTime(), self.current_activity)
                    # Append this sample to the CSV log.
                    try:
                        with open(self.activity_file, 'a+') as writer:
                            writer_p = csv.writer(writer, lineterminator='\n')
                            writer_p.writerow(activity_tuple)
                    except RuntimeError as e:
                        logger.error(e)
                time_consume = time.time() - before
                delta_inter = self.interval - time_consume
                logger.debug("get app activity time consumed: " + str(time_consume))
                if delta_inter > 0:
                    time.sleep(delta_inter)
            except Exception as e:
                # Log the full traceback but keep the monitor loop alive.
                s = traceback.format_exc()
                logger.debug(s)
        if self.activity_queue:
            self.activity_queue.task_done()

    # This check does not need to run at a high frequency.
    def _uninstaller_checker_thread(self):
        '''
        Poll whether the target package is still installed; once it is found
        uninstalled, signal the main thread via uninstall_flag so the program
        can terminate.
        '''
        while not self.stop_event.is_set():
            before = time.time()
            is_installed = self.device.adb.is_app_installed(self.packagename)
            if not is_installed:
                # threading._Event does not exist in Python 3; the public
                # threading.Event is the correct class to test against.
                if self.uninstall_flag and isinstance(self.uninstall_flag, threading.Event):
                    logger.debug("uninstall flag is set, as the app has checked uninstalled!")
                    self.uninstall_flag.set()
            time_consume = time.time() - before
            delta_inter = self.interval*10 - time_consume
            logger.debug("check installed app: " + self.packagename +", time consumed: " + str(time_consume) + ", is installed: " + str(is_installed))
            if delta_inter > 0:
                time.sleep(delta_inter)
if __name__ == '__main__':
    # Manual smoke test: monitor a fixed device/package for one minute.
    device_monitor = DeviceMonitor("NVGILZSO99999999", "com.kaola", 2)
    device_monitor.start(time.time())
    time.sleep(60)
    device_monitor.stop()
|
from qweNet.qweNet import *
from data_util.generate_bc_feature import generate_bc_feature
import networkx as nx
import matplotlib.pyplot as plt
import numpy as np
import time
from scipy.stats import kendalltau as kd
import torch.optim as optim
# Hyper-parameters for QweNet training / evaluation.
EMBEDDING_SIZE = 128
REG_HIDDEN = (int)(EMBEDDING_SIZE / 2)  # regression head hidden width
MIN_SIZE = 100  # smallest synthetic training graph (nodes)
MAX_SIZE = 200  # largest synthetic training graph (nodes)
MAX_EPOCH = 10000
N_VALID = 10  # number of validation graphs
N_TRAIN = 100
BATCH_SIZE = 16
LEARNING_RATE = 0.0002
max_bp_iter = 5  # neighbor propagation steps
def main():
    """Evaluate a pre-trained QweNet model on a real-world graph and print
    top-k overlap, Kendall tau and wall-clock run time."""
    # NOTE(review): despite the original "Train" banner, inTraining is False,
    # so this only runs evaluation -- confirm intent.
    model = QweNet(max_bp_iter, 3, EMBEDDING_SIZE, EMBEDDING_SIZE,
                   REG_HIDDEN, 1, 0, inTraining=False)
    tool = QweTool()
    metrics = tool.evaluateRealData(
        model,
        "/root/CS280_QWE/model/nrange_iter_100_200_5000.pkl",
        "/p300/data/GNN_Data/Real/amazon.txt",
        "/p300/data/GNN_Data/Real/exact_bc/amazon.txt",
    )
    top001, top005, top01, kendal, run_time = metrics
    print("%f, %f, %f, %f, %f" % (top001, top005, top01, kendal, run_time))


if __name__ == "__main__":
    main()
|
import numpy as np
import torch
import torch.nn as nn
import torch.autograd as autograd
class RandomMirror(nn.Module):
    """Randomly flips a 4-D input (e.g. NCHW) along the width axis (dim 3).

    With probability ``mirror_prob`` the input is mirrored; otherwise it is
    returned unchanged.
    """

    def __init__(self, mirror_prob):
        super(RandomMirror, self).__init__()
        self.mirror_prob = mirror_prob
        # Cached reversal index; rebuilt when the input width or device changes.
        self.inv_idx = None

    def forward(self, x):
        # Fixes vs. the original:
        #  * the reversal index must have the length of dim 3 (the flipped
        #    dim); the old code read x.size(2) and broke on non-square input;
        #  * the cached index is invalidated when the width changes instead
        #    of being reused stale;
        #  * the index is created directly on x's device, so CPU tensors work
        #    (the deprecated Variable(...).cuda(...) call required a GPU).
        if np.random.rand() < self.mirror_prob:
            width = x.size(3)
            if (self.inv_idx is None or self.inv_idx.numel() != width
                    or self.inv_idx.device != x.device):
                self.inv_idx = torch.arange(
                    width - 1, -1, -1, device=x.device).long()
            return x.index_select(3, self.inv_idx)
        return x
class Mirror(nn.Module):
    """Deterministically flips a 4-D input (e.g. NCHW) along dim 3 (width)."""

    def __init__(self):
        super(Mirror, self).__init__()

    def forward(self, x):
        # Use the size of the dimension actually being flipped (dim 3); the
        # original read x.size(2), which broke for non-square inputs.
        width = x.size(3)
        inv_idx = torch.arange(width - 1, -1, -1, device=x.device).long()
        return x.index_select(3, inv_idx)
|
from django.utils.translation import ugettext_lazy as _
from mayan.apps.common.apps import MayanAppConfig
from mayan.apps.common.menus import menu_object, menu_secondary, menu_tools
from mayan.apps.navigation.classes import SourceColumn
from .classes import StatisticLineChart, StatisticNamespace
from .links import (
link_execute, link_namespace_details, link_namespace_list,
link_statistics, link_view
)
class StatisticsApp(MayanAppConfig):
    """Django app config for the statistics app.

    ready() registers list columns for statistic charts and binds the
    navigation links for charts, namespaces and the tools menu.
    """
    app_namespace = 'statistics'
    app_url = 'statistics'
    has_static_media = True
    has_tests = True
    name = 'mayan.apps.mayan_statistics'
    # Exclude chart.js development/sample files from collected static media.
    static_media_ignore_patterns = (
        'statistics/node_modules/chart.js/book.*',
        'statistics/node_modules/chart.js/karma.conf.*',
        'statistics/node_modules/chart.js/samples/*',
        'statistics/node_modules/chart.js/src/*',
        'statistics/node_modules/chart.js/*docs*',
    )
    verbose_name = _('Statistics')

    def ready(self):
        super().ready()
        SourceColumn(
            attribute='schedule',
            # Translators: Schedule here is a noun, the 'schedule' at
            # which the statistic will be updated
            include_label=True, label=_('Schedule'),
            source=StatisticLineChart
        )
        SourceColumn(
            attribute='get_last_update', include_label=True,
            label=_('Last update'), source=StatisticLineChart
        )
        # Per-object links for charts and namespaces.
        menu_object.bind_links(
            links=(link_execute, link_view), sources=(StatisticLineChart,)
        )
        menu_object.bind_links(
            links=(link_namespace_details,), sources=(StatisticNamespace,)
        )
        menu_secondary.bind_links(
            links=(link_namespace_list,),
            sources=(StatisticNamespace, 'statistics:namespace_list')
        )
        menu_tools.bind_links(links=(link_statistics,))
|
import pytest
from pathlib import Path
from timeit import repeat
from statistics import mean
from ..json import dumps, JsonOpt, loads
# Short aliases for the pytest markers used in this module.
xfail = pytest.mark.xfail
parametrize = pytest.mark.parametrize
# Benchmark fixtures: two real-world JSON payloads of different shapes.
base = Path(__file__).parent / 'json_test_data'
paths = [base/f for f in ('twitter.json', 'github.json')]
def ops_per_sec(n, *vals):
    """Given `n` operations and one wall-clock duration per repeat, return
    (operations per second, mean duration, total duration)."""
    avg = mean(vals)
    return n * (1 / avg), avg, sum(vals, 0)
class JsonTests:
    """Micro-benchmark comparing dumps() on bytes vs. str output paths."""

    def test_bytes_vs_str(self):
        # For each fixture: verify the file round-trips through UTF-8, then
        # time dumps() with and without the DECODE (str output) option.
        for path in paths:
            raw = path.read_bytes()
            assert raw.decode().encode() == raw
            data = loads(raw)
            n = int(.5e3)
            bfunc = lambda: dumps(data)
            sfunc = lambda: dumps(data, opts=JsonOpt.DECODE)
            self.run(f'{path.name} ({round(len(raw)/1000, 2)}kb)', bfunc, sfunc, n)
            print(' ')
        # assert 0

    def run(self, lbl, mfn, ifn, n=int(1e3), rep=2, r=3):
        """Time both callables and print an ops/sec comparison.

        :param lbl: label for the printed report
        :param mfn: "B" (bytes-path) callable
        :param ifn: "S" (str-path) callable
        :param n: iterations per repeat; rep: repeats; r: rounding digits
        """
        mres, mt, mtt = ops_per_sec(n, *repeat(mfn, number=n, repeat=rep, globals=locals()))
        ires, it, itt = ops_per_sec(n, *repeat(ifn, number=n, repeat=rep, globals=locals()))
        if mres > ires:
            d = f'B {round(mres/ires, r)}x faster'
        else:
            d = f'S {round(ires/mres, r)}x faster'
        M, I = f'{round(mtt, r)} secs'.ljust(12) + f' avg {round(mt, r)} secs'.ljust(16) \
            + f'{round(mres, r)} ops/sec'.ljust(16+r), \
            f'{round(itt, r)} secs'.ljust(12) + f' avg {round(it, r)} secs'.ljust(16) \
            + f'{round(ires, r)} ops/sec'.ljust(16+r)
        print(f'{lbl}\n  {rep} x {n} ({rep * n}) ops == {d}\n - B={M!s}\n - S={I!s}')
|
# -*- coding: utf-8 -*-
"""
Created on Thu Mar 14 21:57:02 2019
@author: Ashwin
"""
import matplotlib.pyplot as plt
import numpy as np
# Load precomputed arrays produced by an earlier pipeline step.
# NOTE(review): array shapes are assumed from the indexing below (first axis
# indexes points; pixel_centers/image_squares are 2-D per point) -- confirm.
cartesian_coordinates = np.load('data/cartesian_coordinates.npy')
pixel_centers = np.load('data/pixel_centers.npy')
image_squares = np.load('data/image_squares.npy')
point_to_display = 1000
cartesian_coordinate_single = cartesian_coordinates[point_to_display]
# Replicate the single-channel images to 3 channels for imshow display.
pixel_center_single = np.repeat(pixel_centers[point_to_display, :, :, None], 3, axis=-1)
image_square_single = np.repeat(image_squares[point_to_display, :, :, None], 3, axis=-1)
# NOTE(review): banner says "point 0" but point_to_display is 1000 -- the
# message appears stale.
print('Display data for point 0')
print('Cartesian coordinate is: {}'.format(cartesian_coordinate_single))
plt.figure()
plt.imshow(pixel_center_single)
plt.figure()
plt.imshow(image_square_single)
#image = np.repeat(images_[3000], 3, axis=-1)
#plt.figure()
#plt.imshow(image, cmap=plt.cm.gray)
#
# Regroup the per-point squares into a 56x56 grid of 64x64 patches and take
# the bottom-right quadrant of the grid.
pc_reshape = np.reshape(image_squares, [56,56,64,64])
bottom_quad = pc_reshape[28:, 28:, :,:]
|
#!/usr/bin/env python
#-*- coding:utf-8 -*-
from lib.net import IP, TCP
|
from rest_framework.test import APIRequestFactory
from rest_framework.reverse import reverse
from django.db.models import Count
from contentcuration.management.commands.setup import create_user
from contentcuration.management.commands.setup import create_channel
from contentcuration.models import User
from contentcuration.models import Channel
from contentcuration.views.admin import AdminUserListView
from contentcuration.views.admin import AdminChannelListView
from base import BaseAPITestCase
# Module-level view callables shared by all admin-API tests below.
get_users = AdminUserListView.as_view()
get_channels = AdminChannelListView.as_view()
class TestGetUsersAndChannels(BaseAPITestCase):
    """Tests for the admin user/channel list endpoints.

    setUpTestData builds a fixed population: an admin, a mix of
    admin/inactive/chef users, one private chef channel, one public chef
    channel and three plain channels; the tests then exercise filtering,
    pagination and ordering of the two admin list views.
    """

    @classmethod
    def setUpTestData(self):
        # NOTE(review): classmethod first parameter is named `self`; it is
        # actually the class object (cls) -- works, but confusing.
        self.admin_user = self.first_user = create_user('a@a.com', 'a', 'a', 'a', True)
        # Private channel with a ricecooker version set (a "chef" channel).
        dummy_chef_channel = create_channel('a_chef_channel')
        dummy_chef_channel.ricecooker_version = 'ricecooker of the future!'
        dummy_chef_channel.public = False
        dummy_chef_channel.save()
        self.first_channel = dummy_chef_channel
        # Public chef channel; name sorts last, used by ordering tests.
        dummy_chef_channel_public = create_channel('z_chef_channel_public')
        dummy_chef_channel_public.ricecooker_version = 'ricecooker of the future!'
        dummy_chef_channel_public.public = True
        dummy_chef_channel_public.save()
        self.last_channel = dummy_chef_channel_public
        self.dummy_users = []
        self.dummy_channels = [dummy_chef_channel]
        # Create 4 users of each flavour: admin, inactive, and chef
        # (chefs get edit rights on the chef channel).
        num_users = 4
        for x in ['is_admin', 'is_not_active', 'is_chef']:
            for i in range(num_users):
                name = 'user_%s_%i' % (x, i)
                user = create_user(name+'@a.com', name, name, name, x == 'is_admin')
                user.is_active = x != 'is_not_active'
                user.save()
                self.dummy_users.append(user)
                if x == 'is_chef':
                    dummy_chef_channel.editors.add(user)
        for i in range(3):
            name = 'channel%i' % i
            channel = create_channel(name, "", [self.dummy_users[i]])
            self.dummy_channels.append(channel)
        self.last_user = create_user('z@z.com', 'z', 'z', 'z', True)

    # def test_get_users_for_channel(self):
    #     url = reverse('get_users')
    #     channel = self.dummy_channels[0]
    #     request = APIRequestFactory().get(url, {'editable_channels__contains': channel.id})
    #     request.user = self.admin_user
    #     response = get_users(request)
    #     self.assertEqual(response.data.get('count'), channel.editors.count())

    def test_get_admin_users(self):
        # Filter users on is_admin=True and compare against the ORM count.
        url = reverse('get_users')
        request = APIRequestFactory().get(url, {'is_admin': True})
        request.user = self.admin_user
        response = get_users(request)
        self.assertEqual(response.data.get('count'), User.objects.filter(is_admin=True).count())

    def test_get_not_admin_users(self):
        url = reverse('get_users')
        request = APIRequestFactory().get(url, {'is_admin': False})
        request.user = self.admin_user
        response = get_users(request)
        self.assertEqual(response.data.get('count'), User.objects.exclude(is_admin=True).count())

    def test_get_chef_users(self):
        # A "chef" user edits at least one channel with a ricecooker_version.
        url = reverse('get_users')
        users = User.objects.annotate(chef_channels=Count('editable_channels__ricecooker_version'))
        num_chef_users = users.filter(chef_channels__gt=0).count()
        request = APIRequestFactory().get(url, {'chef_channels_count__gt': 0})
        request.user = self.admin_user
        response = get_users(request)
        self.assertEqual(response.data.get('count'), num_chef_users)

    def test_get_non_chef_users(self):
        url = reverse('get_users')
        users = User.objects.annotate(chef_channels=Count('editable_channels__ricecooker_version'))
        num_non_chef_users = users.filter(chef_channels=0).count()
        request = APIRequestFactory().get(url, {'chef_channels_count': 0})
        request.user = self.admin_user
        response = get_users(request)
        self.assertEqual(response.data.get('count'), num_non_chef_users)

    def test_get_users_paginated(self):
        url = reverse('get_users')
        request = APIRequestFactory().get(url, {'page_size': 1})
        request.user = self.admin_user
        response = get_users(request)
        self.assertEqual(len(response.data.get('results')), 1)

    def test_get_users_sort_by_email_asc(self):
        # 'a@a.com' is the lexicographically first email in the fixture.
        url = reverse('get_users')
        request = APIRequestFactory().get(url, {'ordering': 'email'})
        request.user = self.admin_user
        response = get_users(request)
        self.assertEqual(response.data.get('results')[0]['email'], self.first_user.email)

    def test_get_users_sort_by_last_name_desc(self):
        # NOTE(review): name says "last_name" but the ordering key is -email.
        url = reverse('get_users')
        request = APIRequestFactory().get(url, {'ordering': '-email'})
        request.user = self.admin_user
        response = get_users(request)
        self.assertEqual(response.data.get('results')[0]['email'], self.last_user.email)

    def test_get_all_channels(self):
        url = reverse('get_channels')
        request = APIRequestFactory().get(url)
        request.user = self.admin_user
        response = get_channels(request)
        self.assertEqual(response.data.get('count'), Channel.objects.all().count())

    # def test_get_channels_for_user(self):
    #     url = reverse('get_channels')
    #     user = self.dummy_users[0]
    #     request = APIRequestFactory().get(url, {'user_id': user.id})
    #     request.user = self.admin_user
    #     response = get_channels(request)
    #     self.assertEqual(response.data.get('count'), user.editable_channels.count())

    def test_get_live_channels(self):
        # "Live" means not deleted.
        url = reverse('get_channels')
        request = APIRequestFactory().get(url, {'is_live': True})
        request.user = self.admin_user
        response = get_channels(request)
        self.assertEqual(response.data.get('count'), Channel.objects.exclude(deleted=True).count())

    def test_get_live_and_public_channels(self):
        url = reverse('get_channels')
        request = APIRequestFactory().get(url, {
            'deleted': False,
            'public': True
        })
        request.user = self.admin_user
        response = get_channels(request)
        self.assertEqual(
            response.data.get('count'),
            Channel.objects.exclude(deleted=True).filter(public=True).count()
        )

    def test_get_ricecooker_channels(self):
        url = reverse('get_channels')
        request = APIRequestFactory().get(url, {
            'ricecooker_version__isnull': False,
        })
        request.user = self.admin_user
        response = get_channels(request)
        self.assertEqual(
            response.data.get('count'),
            Channel.objects.exclude(ricecooker_version=None).count()
        )

    def test_get_public_ricecooker_channels(self):
        url = reverse('get_channels')
        request = APIRequestFactory().get(url, {
            'ricecooker_version__isnull': False,
            'public': True,
        })
        request.user = self.admin_user
        response = get_channels(request)
        channels = Channel.objects.exclude(ricecooker_version=None).filter(public=True)
        self.assertEqual(
            response.data.get('count'),
            channels.count()
        )

    def test_sort_by_name_asc(self):
        # 'a_chef_channel' sorts first; 'z_chef_channel_public' sorts last.
        url = reverse('get_channels')
        request = APIRequestFactory().get(url, {'ordering': 'name'})
        request.user = self.admin_user
        response = get_channels(request)
        self.assertEqual(response.data.get('results')[0]['name'], self.first_channel.name)

    def test_sort_by_name_desc(self):
        url = reverse('get_channels')
        request = APIRequestFactory().get(url, {'ordering': '-name'})
        request.user = self.admin_user
        response = get_channels(request)
        self.assertEqual(response.data.get('results')[0]['name'], self.last_channel.name)
|
import csv
import os
import sys
import cPickle
import operator
from cnn_text_trainer.rw import datasets
__author__ = 'devashish.shankar'
#TODO clean up this. Move to core maybe?
def evaluate(data,outputf):
    """
    Ported from initial version. TODO refactor to accept new format of data and clean up this code
    :param data: array containing outputs in old format: [[prob_pred,pred_label,actual_label,text],...]
    :param outputf: output directory

    Writes three CSVs (info_*, output_*, misclassification_*) named after the
    test and model files, and prints accuracy plus a per-label F-score.

    NOTE(review): reads the module globals `testfile`, `modelfile` and
    `labels` set in the __main__ block below -- this function cannot be
    called standalone.
    """
    # Summary/info file: dataset size, label count, accuracy, per-label stats.
    filept=open(outputf+"/info_"+testfile.split("/")[-1].split(".")[0]+"_"+modelfile.split("/")[-1].split(".")[0]+".csv", "wb")
    filep=csv.writer(filept)
    filep.writerow(["Number of data-points ",len(data)])
    print "Number of data-points: "+str(len(data))
    filep.writerow(["Number of labels ",len(labels)])
    print "Number of labels: "+str(len(labels))
    # Accuracy = fraction of rows where predicted label == actual label.
    perf=float(len([row[1] for row in data if row[1]==row[2]]))/float(len(data))
    filep.writerow(["Accuracy ",str(perf*100)+"%"])
    filep.writerow([])
    print "Performance: "+str(perf*100)+"%\n"
    # Sort rows by predicted probability, most confident first.
    data.sort(key=operator.itemgetter(0),reverse=True)
    y_pred=[row[1] for row in data]
    y_true=[row[2] for row in data]
    for n in labels:
        # Per-label confusion counts and F1 (expressed as a percentage).
        tp=float(sum([(y_true[i]==n) and (y_pred[i]==n) for i in range(len(y_true))]))
        tn=float(sum([(y_true[i]!=n) and (y_pred[i]!=n) for i in range(len(y_true))]))
        fp=float(sum([(y_true[i]!=n) and (y_pred[i]==n) for i in range(len(y_true))]))
        fn=float(sum([(y_true[i]==n) and (y_pred[i]!=n) for i in range(len(y_true))]))
        # NOTE(review): divides by zero if a label never occurs in either
        # y_true or y_pred.
        fscore=(200*tp)/(2*tp+fp+fn)
        filep.writerow(["Label ",n])
        filep.writerow(["F-score ",str(fscore)+"%"])
        filep.writerow(["TP ",int(tp),"FP ",int(fp),"TN ",int(tn),"FN ",int(fn)])
        filep.writerow([])
        print "F-score for label-"+str(n)+" is: "+str(fscore)+"%"
    filept.close()
    print "Printing output file"
    # Full predictions file.
    with open(outputf+"/output_"+testfile.split("/")[-1].split(".")[0]+"_"+modelfile.split("/")[-1].split(".")[0]+".csv", "wb") as f:
        writer = csv.writer(f)
        writer.writerow(["probabilities","y_predicted","y_actual","tweets"])
        for line in data:
            writer.writerow(line)
    print "Printing misclassification file"
    # Same format, but only the rows the model got wrong.
    with open(outputf+"/misclassification_"+testfile.split("/")[-1].split(".")[0]+"_"+modelfile.split("/")[-1].split(".")[0]+".csv", "wb") as f:
        writer = csv.writer(f)
        writer.writerow(["probabilities","y_predicted","y_actual","tweets"])
        for line in data:
            if line[1]!=line[2]:
                writer.writerow(line)
if __name__=="__main__":
    # CLI: testing.py <model> <testfile> <outputdir> <preprocess> <load_word_vecs>
    if len(sys.argv)<6:
        print "Usage: testing.py"
        print "\t<model file path>"
        print "\t<testing file path>"
        print "\t<folder to store detailed output analysis>"
        print "\t<true/false preprocess>"
        print "\t<load word vectors? (true/false). This will give accuracy gains, but will have a lot of memory pressure. If false, words not encountered during training are skipped while testing >"
        exit(0)
    # Allow models pickled on a GPU machine to be loaded on CPU-only hosts.
    import theano
    theano.config.experimental.unpickle_gpu_on_cpu = True
    testfile=sys.argv[2]
    modelfile=sys.argv[1]
    outputdir=sys.argv[3]
    preprocess=sys.argv[4].lower()
    load_word_vecs = sys.argv[5].lower()=="true"
    if not os.path.exists(outputdir):
        print "Output dir ",outputdir, " doesn't exist. Creating it"
        os.makedirs(outputdir)
    else:
        print "Using Output dir ",outputdir,". Any previous results in this dir on same dataset might get overwritten. "
    model = cPickle.load(open(modelfile,"rb"))
    if load_word_vecs:
        print "Loading word vectors"
        model.add_global_word_vecs({})
        print "Loading word vectors done"
    sentences,vocab, labels = datasets.build_data(testfile,preprocess)
    # Use the label set the model was trained with, not the test file's.
    labels = model.get_labels()
    output = model.classify(sentences)
    #Free memory
    del model
    print "Removed model from memory"
    #Format the output to earlier format
    #TODO evaluate function should be changed to accept newer format, which is cleaner
    data = []
    for i in range(len(output[0])):
        actual_label = sentences[i]['y']
        text = sentences[i]['text']
        predicted_label = output[0][i]
        predicted_prob = output[1][i][predicted_label]
        data.append([predicted_prob,labels[predicted_label],labels[actual_label],text])
    evaluate(data,outputdir)
|
HTK_FEEDBACK_EMAIL_SUBJECT = 'New feedback from Hacktoolkit form'
|
import numpy as np
import matplotlib.pyplot as plt
import math
import vmath
# Show what the vmath extension module exposes.
print(vmath)
print(dir(vmath))
# Spot-check the scalar wrappers by evaluating each expression string.
S = \
[
    "vmath.sqrt_( 2 )",
    "vmath.sin_ ( 0 )",
    "vmath.cos_ ( 0 )",
    "vmath.tan_ ( 0 )",
    "vmath.sin_ (math.pi/4)",
    "vmath.cos_ (math.pi/4)",
    "vmath.tan_ (math.pi/4)",
]
length_of_longest_string = len(max(S, key=len))
print(length_of_longest_string)
for item in S:
    # Right-pad each expression so the results line up in one column.
    pad_string = " " * (length_of_longest_string - len(item))
    print(item + pad_string + ":" + format(eval(item), '9.6f'))
# Vectorised variants: each call fills a preallocated output array in place.
# NOTE(review): in-place output semantics assumed from the (x, y) call
# shape -- confirm against the vmath module's documentation.
x = np.arange(-1, 10, 0.001)
y = np.empty_like(x)
vmath.Sqrt_(x, y)
plt.xlim(-1, 10)
plt.ylim(-1, 10)
plt.plot(x, y)
plt.show()
x = np.arange(0, 2 * np.pi, 0.001)
y = np.empty_like(x)
vmath.Sin_(x, y)
plt.xlim(0, 2*math.pi)
plt.ylim(-math.pi, math.pi)
plt.plot(x, y)
plt.show()
x = np.arange(0, 2 * np.pi, 0.001)
y = np.empty_like(x)
vmath.Cos_(x, y)
plt.xlim(0, 2*math.pi)
plt.ylim(-math.pi, math.pi)
plt.plot(x, y)
plt.show()
x = np.arange(0, 2 * np.pi, 0.001)
y = np.empty_like(x)
vmath.Tan_(x, y)
plt.xlim(0, 2*math.pi)
plt.ylim(-math.pi, math.pi)
plt.plot(x, y)
plt.show()
|
from functools import reduce
# Identity function: returns its argument unchanged.
identity = lambda x: x
# Truthiness predicate (alias of bool) used by the filters below.
is_truthy = bool
# Empty-string separator used by join().
NOTHING = ''
def to_dict(named_tuple):
    """Convert a namedtuple instance into a plain field -> value dict."""
    return {field: value for field, value in zip(named_tuple._fields, named_tuple)}
def enum_values(enum):
    """Lazily yield the .value of every member of *enum*, in definition order."""
    for member in enum:
        yield member.value
def get_enum_member(enum, value):
    """Return the first member of *enum* whose value equals *value*, or None."""
    return next((member for member in enum if member.value == value), None)
def join(*items):
    """Concatenate all *items* into a single string with no separator."""
    return ''.join(items)
def curry(prior, *additional):
    """Build a wrapper that invokes *prior* with the wrapper's positional
    arguments followed by *additional*.

    Note: despite the name, the bound arguments are appended on the right
    (a right-partial), matching the original behaviour.
    """
    def curried(*args):
        combined = args + additional
        return prior(*combined)
    return curried
def skip_falsy_and_join(*items):
    """Join only the truthy items, silently dropping '' / None-like values."""
    truthy = (item for item in items if is_truthy(item))
    return join(*truthy)
|
import numpy as np
import logging
from multiprocessing import Pool, cpu_count
# Module-level logger; configuration is left to the application.
logger = logging.getLogger(__name__)
# Default worker-pool size: one process per CPU core.
NCPUS = cpu_count()
def get_frequency_grid(times,
                       samplesperpeak=5,
                       nyquistfactor=5,
                       minfreq=None,
                       maxfreq=None,
                       returnf0dfnf=False):
    '''Build a linear frequency grid for a period search.

    The grid spacing is 1/(baseline * samplesperpeak), where baseline is the
    total time span of *times*.  When minfreq is omitted the grid starts at
    half a grid step; when maxfreq is omitted the grid length defaults to
    0.5 * samplesperpeak * nyquistfactor * nsamples.

    Returns (f0, df, Nf, grid) if returnf0dfnf is True, otherwise just the
    grid array.
    '''
    baseline = times.max() - times.min()
    nsamples = times.size
    df = 1. / baseline / samplesperpeak
    f0 = minfreq if minfreq is not None else 0.5 * df
    if maxfreq is not None:
        Nf = int(np.ceil((maxfreq - f0) / df))
    else:
        Nf = int(0.5 * samplesperpeak * nyquistfactor * nsamples)
    grid = f0 + df * np.arange(Nf)
    if returnf0dfnf:
        return f0, df, Nf, grid
    return grid
def phase_magseries(times, mags, period, epoch, wrap=True, sort=True):
    '''
    Phase-fold the magnitude time series at the given period and epoch.

    Non-finite times/mags are dropped first.  If sort is True, the result is
    ordered by phase.  If wrap is True, the phased series is duplicated with
    a copy shifted by -1.0 prepended (the arrays double in length).

    Returns a dict with keys 'phase', 'mags', 'period' and 'epoch'.
    '''
    # Keep only rows where both the time and the magnitude are finite.
    good = np.isfinite(mags) & np.isfinite(times)
    good_times = times[good]
    good_mags = mags[good]
    # Fractional part of elapsed cycles = phase in [0, 1).
    cycles = (good_times - epoch) / period
    out = {
        'phase': cycles - np.floor(cycles),
        'mags': good_mags,
        'period': period,
        'epoch': epoch,
    }
    if sort:
        order = np.argsort(out['phase'])
        out['phase'] = out['phase'][order]
        out['mags'] = out['mags'][order]
    if wrap:
        out['phase'] = np.concatenate((out['phase'] - 1.0, out['phase']))
        out['mags'] = np.concatenate((out['mags'], out['mags']))
    return out
def stellingwerf_pdm_theta(times, mags, errs, frequency,
                           binsize=0.05, minbin=9):
    '''
    Compute the Stellingwerf phase-dispersion-minimisation theta statistic
    at a single test frequency.

    The series is phase-folded at 1/frequency (epoch = first time stamp),
    split into phase bins of width `binsize`, and theta is the pooled
    in-bin variance divided by the overall variance.  Bins with `minbin`
    or fewer points are ignored.  `errs` is accepted for interface
    compatibility but unused.
    '''
    period = 1.0 / frequency
    epoch = times[0]
    folded = phase_magseries(times, mags, period, epoch,
                             wrap=False, sort=True)
    phases = folded['phase']
    pmags = folded['mags']
    # Assign each point to a phase bin.
    bin_edges = np.arange(0.0, 1.0, binsize)
    bin_of_point = np.digitize(phases, bin_edges)
    variances = []
    counts = []
    goodbins = 0
    for binidx in np.unique(bin_of_point):
        members = pmags[bin_of_point == binidx]
        # Only bins with more than minbin points contribute to theta.
        if members.size > minbin:
            variances.append(np.var(members, ddof=1))
            counts.append(members.size)
            goodbins += 1
    # Pooled within-bin variance over the total (sample) variance.
    variances = np.array(variances)
    counts = np.array(counts)
    pooled = np.sum(variances * (counts - 1)) / (np.sum(counts) - goodbins)
    return pooled / np.var(pmags, ddof=1)
def stellingwerf_pdm_worker(task):
    '''
    Parallel-map worker: unpack one task tuple and score it.

    task = (times, mags, errs, frequency, binsize, minbin)

    Returns the PDM theta value for the frequency, or NaN if the
    calculation fails, so a single bad frequency cannot kill the pool map.
    '''
    times, mags, errs, frequency, binsize, minbin = task
    try:
        return stellingwerf_pdm_theta(times, mags, errs, frequency,
                                      binsize=binsize, minbin=minbin)
    except Exception:
        return np.nan
def stellingwerf_pdm(stimes,
                     smags,
                     serrs,
                     autofreq=True,
                     startp=None,
                     endp=None,
                     normalize=False,
                     stepsize=1.0e-4,
                     phasebinsize=0.05,
                     mindetperbin=9,
                     nbestpeaks=5,
                     periodepsilon=0.1,
                     sigclip=10.0,
                     nworkers=None,
                     verbose=True):
    '''This runs a parallel Stellingwerf PDM period search.

    stimes, smags, serrs are the prepared time-series arrays. NOTE(review):
    the 's' prefix suggests they are expected to be sigma-clipped upstream;
    `sigclip` and `serrs` are only recorded in the output kwargs / passed to
    the workers here, never applied -- confirm against the callers.

    If autofreq is True, the trial frequency grid comes from
    get_frequency_grid(); otherwise a linear grid with spacing `stepsize` is
    built between the frequencies corresponding to endp and startp. Each
    trial frequency is scored with the PDM theta statistic (lower is better)
    across a multiprocessing Pool of `nworkers` processes.

    Returns a dict with the best period (minimum theta), up to `nbestpeaks`
    mutually distinct best periods and their theta values, the full
    periodogram arrays, and the kwargs used (for reproducibility).
    '''
    # get the frequencies to use
    if startp:
        endf = 1.0/startp
    else:
        # default start period is 0.1 day
        endf = 1.0/0.1
    if endp:
        startf = 1.0/endp
    else:
        # default end period is length of time series
        startf = 1.0/(stimes.max() - stimes.min())
    # if we're not using autofreq, then use the provided frequencies
    if not autofreq:
        frequencies = np.arange(startf, endf, stepsize)
        if verbose:
            logger.info(
                'using %s frequency points, start P = %.3f, end P = %.3f' %
                (frequencies.size, 1.0/endf, 1.0/startf)
            )
    else:
        # this gets an automatic grid of frequencies to use
        frequencies = get_frequency_grid(stimes,
                                         minfreq=startf,
                                         maxfreq=endf)
        if verbose:
            logger.info(
                'using autofreq with %s frequency points, '
                'start P = %.3f, end P = %.3f' %
                (frequencies.size,
                 1.0/frequencies.max(),
                 1.0/frequencies.min())
            )
    # map to parallel workers; cap the worker count at the machine's CPUs
    if (not nworkers) or (nworkers > NCPUS):
        nworkers = NCPUS
        if verbose:
            logger.info('using %s workers...' % nworkers)
    pool = Pool(nworkers)
    # renormalize the working mags to zero and scale them so that the
    # variance = 1 for use with our LSP functions
    if normalize:
        nmags = (smags - np.median(smags))/np.std(smags)
    else:
        nmags = smags
    # one task tuple per trial frequency (see stellingwerf_pdm_worker)
    tasks = [(stimes, nmags, serrs, x, phasebinsize, mindetperbin)
             for x in frequencies]
    lsp = pool.map(stellingwerf_pdm_worker, tasks)
    pool.close()
    pool.join()
    del pool
    lsp = np.array(lsp)
    periods = 1.0/frequencies
    # find the nbestpeaks for the periodogram: 1. sort the lsp array by
    # lowest value first 2. go down the values until we find five values
    # that are separated by at least periodepsilon in period
    # make sure to filter out the non-finite values of lsp
    finitepeakind = np.isfinite(lsp)
    finlsp = lsp[finitepeakind]
    finperiods = periods[finitepeakind]
    # finlsp might not have any finite values if the period finding
    # failed. if so, argmin will return a ValueError.
    # (PDM: the best period MINIMIZES theta, hence argmin, not argmax)
    try:
        bestperiodind = np.argmin(finlsp)
    except ValueError:
        logger.error('no finite periodogram values for '
                     'this mag series, skipping...')
        return {'bestperiod':np.nan,
                'bestlspval':np.nan,
                'nbestpeaks':nbestpeaks,
                'nbestlspvals':None,
                'nbestperiods':None,
                'lspvals':None,
                'periods':None,
                'method':'pdm',
                'kwargs':{'startp':startp,
                          'endp':endp,
                          'stepsize':stepsize,
                          'normalize':normalize,
                          'phasebinsize':phasebinsize,
                          'mindetperbin':mindetperbin,
                          'autofreq':autofreq,
                          'periodepsilon':periodepsilon,
                          'nbestpeaks':nbestpeaks,
                          'sigclip':sigclip}}
    sortedlspind = np.argsort(finlsp)
    sortedlspperiods = finperiods[sortedlspind]
    sortedlspvals = finlsp[sortedlspind]
    # NOTE(review): prevbestlspval is assigned but never used below
    prevbestlspval = sortedlspvals[0]
    # now get the nbestpeaks
    nbestperiods, nbestlspvals, peakcount = (
        [finperiods[bestperiodind]],
        [finlsp[bestperiodind]],
        1
    )
    prevperiod = sortedlspperiods[0]
    # find the best nbestpeaks in the lsp and their periods
    for period, lspval in zip(sortedlspperiods, sortedlspvals):
        if peakcount == nbestpeaks:
            break
        perioddiff = abs(period - prevperiod)
        bestperiodsdiff = [abs(period - x) for x in nbestperiods]
        # print('prevperiod = %s, thisperiod = %s, '
        #       'perioddiff = %s, peakcount = %s' %
        #       (prevperiod, period, perioddiff, peakcount))
        # this ensures that this period is different from the last
        # period and from all the other existing best periods by
        # periodepsilon to make sure we jump to an entire different peak
        # in the periodogram
        if (perioddiff > (periodepsilon*prevperiod) and
            all(x > (periodepsilon*prevperiod) for x in bestperiodsdiff)):
            nbestperiods.append(period)
            nbestlspvals.append(lspval)
            peakcount = peakcount + 1
        prevperiod = period
    return {'bestperiod':finperiods[bestperiodind],
            'bestlspval':finlsp[bestperiodind],
            'nbestpeaks':nbestpeaks,
            'nbestlspvals':nbestlspvals,
            'nbestperiods':nbestperiods,
            'lspvals':lsp,
            'periods':periods,
            'method':'pdm',
            'kwargs':{'startp':startp,
                      'endp':endp,
                      'stepsize':stepsize,
                      'normalize':normalize,
                      'phasebinsize':phasebinsize,
                      'mindetperbin':mindetperbin,
                      'autofreq':autofreq,
                      'periodepsilon':periodepsilon,
                      'nbestpeaks':nbestpeaks,
                      'sigclip':sigclip}}
|
import pandas as pd
from datacode.typing import StrList
from datacode.typing import IntOrNone
from datacode.summarize.subset.missing.detail.textfuncs import (
missing_more_than_str,
missing_more_than_pct_str,
missing_tolerance_count_str,
id_count_str
)
def by_id_pct_long_df(df: pd.DataFrame, byvars: StrList, id_var: str,
                      count_with_missings_var: str, missing_tolerance: IntOrNone = 0,
                      missing_quantile: IntOrNone = None,
                      missing_display_str: str = 'Missing') -> pd.DataFrame:
    """
    Build a long DataFrame, one row per byvars combination, containing the
    total id count and the percentage of ids whose count_with_missings_var
    data is missing beyond the allowed tolerance.

    Note: uses missing_quantile if specified, else uses missing_tolerance
    (mirroring _by_id_count_long_df).
    """
    counts = _by_id_count_long_df(
        df,
        byvars,
        id_var,
        count_with_missings_var,
        missing_tolerance=missing_tolerance,
        missing_quantile=missing_quantile,
        missing_display_str=missing_display_str
    )

    shared_args = (missing_tolerance, missing_quantile, missing_display_str, id_var)
    missing_col = missing_more_than_str(*shared_args)
    within_col = missing_tolerance_count_str(*shared_args)
    total_col = id_count_str(id_var)
    pct_col = missing_more_than_pct_str(*shared_args)

    # total ids = ids over the missing tolerance + ids within it
    counts[total_col] = counts[missing_col] + counts[within_col]
    # share of ids over tolerance, expressed as a percentage
    counts[pct_col] = (counts[missing_col] / counts[total_col]) * 100

    # the two raw count columns are intermediates, not part of the output
    counts.drop([missing_col, within_col], axis=1, inplace=True)
    return counts
def _by_id_count_long_df(df: pd.DataFrame, byvars: StrList, id_var: str,
                         count_with_missings_var: str, missing_tolerance: IntOrNone = 0,
                         missing_quantile: IntOrNone = None,
                         missing_display_str: str = 'Missing') -> pd.DataFrame:
    """
    Count, for each combination of byvars, how many ids have "too much"
    missing data in count_with_missings_var versus how many are within
    tolerance.

    Note: uses missing_quantile if specified, else uses missing_tolerance

    Args:
        df: long DataFrame containing id_var, byvars and count_with_missings_var
        byvars: grouping variables
        id_var: entity identifier variable
        count_with_missings_var: variable whose missing values are assessed
        missing_tolerance: max number of missing observations per
            (id, byvars) group before the id counts as missing
        missing_quantile: max allowed missing fraction; takes precedence
            over missing_tolerance when not None
        missing_display_str: label used when building output column names

    Returns:
        DataFrame with byvars columns plus two count columns (ids over the
        missing threshold, ids within it)

    Raises:
        ValueError: if missing_tolerance and missing_quantile are both None
        ValueError: if columns _one or _pct are in df
    """
    if '_one' in df.columns or '_pct' in df.columns:
        raise ValueError('will override column _one and _pct in df')

    # Count total rows (_one, never null -> group size) and non-missing
    # rows per (id, byvars) group. df.assign operates on a copy, so the
    # caller's DataFrame is never mutated (the previous implementation
    # temporarily added and dropped a column on df itself, briefly
    # changing the caller's data and breaking under concurrent use).
    by_firm_counts = (
        df.assign(_one=1)
          .groupby([id_var] + byvars)[['_one', count_with_missings_var]]
          .count()
          .reset_index()
    )

    if missing_quantile is not None:
        # fraction of non-missing observations per group (coverage)
        by_firm_counts['_pct'] = by_firm_counts[count_with_missings_var] / by_firm_counts['_one']
        minimum_allowed_coverage = (1 - missing_quantile)
        missing_df = by_firm_counts[by_firm_counts['_pct'] < minimum_allowed_coverage]
        full_df = by_firm_counts[by_firm_counts['_pct'] >= minimum_allowed_coverage]
    elif missing_tolerance is not None:
        # a group is "missing" when its missing count exceeds the tolerance
        missing_df = by_firm_counts[by_firm_counts[count_with_missings_var] + missing_tolerance < by_firm_counts['_one']]
        full_df = by_firm_counts[by_firm_counts[count_with_missings_var] + missing_tolerance >= by_firm_counts['_one']]
    else:
        raise ValueError('pass one of missing_tolerance and missing_quantile. got both as None')

    # Collapse to one row per byvars combination: count ids on each side,
    # filling 0 where a combination has no ids in one of the categories.
    name_args = (missing_tolerance, missing_quantile, missing_display_str, id_var)
    missing_counts = missing_df.groupby(byvars)[id_var].count()
    missing_counts.name = missing_more_than_str(*name_args)
    full_counts = full_df.groupby(byvars)[id_var].count()
    full_counts.name = missing_tolerance_count_str(*name_args)

    by_id_var = pd.concat([missing_counts, full_counts], axis=1).fillna(0)
    return by_id_var.reset_index()
|
from flask_script import Manager
from resume import app, db, Professor, Course
manager = Manager(app)  # Flask-Script command-line manager wrapping the app
# reset the database and seed it with two professors and their four courses
@manager.command
def deploy():
    """Drop all tables, recreate the schema, and insert the sample data."""
    db.drop_all()
    db.create_all()
    # faculty
    monk = Professor(name='Ellen Monk', dept='MIS')
    bayley = Professor(name='Elizabeth Bayley', dept='ECON')
    # courses, each linked to its professor
    MISY261 = Course(number='MISY261', title='Business Information Systems', desc='Introduction to management information systems', professor=monk)
    MISY380 = Course(number='MISY380', title='Enterprise Resource Planning Systems', desc='Focuses on large scale enterprise resource planning systems, their development, functionality, and implementation', professor=monk)
    ECON101 = Course(number='ECON101', title='Introduction to Microeconomics', desc='Introduces supply and demand concepts with basic economic graphs', professor=bayley)
    ECON300 = Course(number='ECON300', title='Intermediate Microeconomic Theory', desc='Price determination and income distribution in a market economy; the behavior of firms and industry under conditions of pure and imperfect competition', professor=bayley)
    db.session.add(monk)
    db.session.add(bayley)
    db.session.add(MISY261)
    db.session.add(MISY380)
    db.session.add(ECON101)
    db.session.add(ECON300)
    db.session.commit()
@manager.command
def hello():
    """Print a greeting; a smoke test for the Flask-Script CLI wiring."""
    # The original used the Python 2-only print statement (print "hello"),
    # which is a SyntaxError on Python 3. The call form below behaves
    # identically on Python 2 and 3 for a single argument.
    print("hello")
# Hand control to Flask-Script's command dispatcher (deploy / hello).
if __name__ == "__main__":
    manager.run()
|
from django.shortcuts import render
import django_filters
from django_filters.rest_framework import DjangoFilterBackend
from drf_spectacular.utils import extend_schema, OpenApiParameter, OpenApiExample
from drf_spectacular.types import OpenApiTypes
from rest_framework import viewsets, permissions, views, generics, status
from rest_framework.response import Response
from rest_framework.decorators import api_view
from django.db.models import Count, F
from django.utils.timezone import now
from ..serializers import *
from ..models import *
# Canned response returned by DeviceView.get when the request passes
# fake=true; its shape mirrors the serialized reservation data that
# endpoint otherwise returns.
FAKE_DATA = [{
    'id': 1,
    'region': {'id': 1, 'name': '新图 E100', 'group': 1, 'capacity': 100},
    'time': {'id': 1, 'from_time': '2021-03-27T21:14:19.191815+08:00',
             'to_time': '2021-03-27T22:14:19.191821+08:00'},
    'user': {'id': 1, 'username': 'Alex Chi', 'user_info': {'fingerprint_id': 1, 'face_id': None}}}]
class DeviceView(views.APIView):
    """Device-facing API endpoint.

    A hardware device authenticates with its per-device ``api_key`` and can
    (a) fetch the reservations for its region within a time window, and
    (b) report a seat check-in / first-time fingerprint enrollment.
    """
    @extend_schema(
        parameters=[
            OpenApiParameter('id', OpenApiTypes.INT,
                             OpenApiParameter.PATH, description='设备 ID'),
            OpenApiParameter('api_key', OpenApiTypes.STR,
                             description='API Key'),
            OpenApiParameter('fake', OpenApiTypes.BOOL, description='返回测试数据'),
            OpenApiParameter(
                'from_time', OpenApiTypes.DATETIME, description='起始时间'),
            OpenApiParameter('to_time', OpenApiTypes.DATETIME,
                             description='终止时间'),
        ],
        responses=DeviceReservationSerializer(many=True),
    )
    def get(self, request, id, format=None):
        """Get the user information for the current location (device region)."""
        device_id = id
        api_key = request.GET.get('api_key')
        # NOTE(review): raises Device.DoesNotExist (-> HTTP 500) for an
        # unknown device id; consider get_object_or_404.
        device = Device.objects.get(id=device_id)
        if device.api_key == api_key:
            # test hook: return canned data instead of querying real tables
            if request.GET.get('fake') == 'true':
                return Response(FAKE_DATA, status.HTTP_200_OK)
            # reservations for this device's region whose start time falls
            # inside [from_time, to_time]
            # NOTE(review): missing from_time/to_time query params pass None
            # into the filter -- confirm callers always supply both.
            reservations = Reservation.objects.filter(
                region=device.region,
                time__from_time__gte=request.GET.get('from_time'),
                time__from_time__lte=request.GET.get('to_time')).select_related()
            return Response(DeviceReservationSerializer(reservations, many=True).data, status.HTTP_200_OK)
        else:
            return Response('invalid credentials', status.HTTP_401_UNAUTHORIZED)
    @extend_schema(
        parameters=[
            OpenApiParameter('id', OpenApiTypes.INT,
                             OpenApiParameter.PATH, description='设备 ID')
        ],
        request=DeviceModifySerializer(),
        responses=ErrorSerializer(),
    )
    def post(self, request, id, format=None):
        """
        Handle the seat check-in logic.

        If a user is taking a seat, a reservation ID must be provided.
        If the user is enrolling fingerprint data on first seating, a
        fingerprint ID and the user's one-time verification token must
        be provided.
        """
        device_id = id
        device = Device.objects.get(id=device_id)
        serializer = DeviceModifySerializer(
            data=request.data, context={'request': request})
        if serializer.is_valid():
            if device.api_key == serializer.data['api_key']:
                if 'reservation_id' in serializer.data:
                    # first, mark the user's reservation as "seated"
                    reservation = Reservation.objects.get(
                        id=serializer.data['reservation_id'])
                    reservation.is_present = True
                    reservation.save()
                if 'fingerprint_id' in serializer.data:
                    # then, update the user's identity information
                    # TODO: verify the one-time token
                    user = User.objects.get(id=serializer.data['user_id'])
                    user_info, created = UserInfo.objects.get_or_create(
                        user=user)
                    user_info.fingerprint_id = serializer.data['fingerprint_id']
                    user_info.save()
                return Response('', status.HTTP_200_OK)
            else:
                return Response('invalid credentials', status.HTTP_401_UNAUTHORIZED)
        else:
            return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
|
from datetime import datetime
from django.conf import settings
from django.db.models import CharField, DateTimeField
from jeevesdb.JeevesModel import JeevesModel as Model, JeevesForeignKey as ForeignKey
from jeevesdb.JeevesModel import label_for
import pytz
from sourcetrans.macro_module import macros, jeeves
import JeevesLib
# An example model.
# Right now self-reference is either impossible or difficult because JeevesForeignKey
# only supports a model class (not a string) as the related object. (TODO fix this.)
class UserProfile(Model):
    """A user account.

    The ``email`` field is policy-protected: ``jeeves_get_private_email``
    supplies the substitute value shown when ``jeeves_restrict_userprofilelabel``
    returns False for the viewing context -- presumably following the
    jeevesdb label_for protocol; confirm against the jeevesdb documentation.
    """
    username = CharField(max_length=256)
    name = CharField(max_length=512)
    email = CharField(max_length=256)
    @staticmethod
    def jeeves_get_private_email(user):
        # Value substituted for `email` when the viewer may not see the real one.
        return "[redacted]"
    @staticmethod
    @label_for('email')
    @jeeves
    def jeeves_restrict_userprofilelabel(user, ctxt):
        # Only the profile owner (the viewing context itself) sees `email`.
        return user == ctxt
    @jeeves
    def has_event(self,event):
        # True if this user is attached to `event` as guest or host.
        return (EventGuest.objects.get(event=event, guest=self) != None) \
            or (EventHost.objects.get(event=event, host=self) != None)
    @jeeves
    def get_events(self):
        # All events this user participates in, as guest or as host.
        guest_events = EventGuest.objects.filter(guest=self).all()
        host_events = EventHost.objects.filter(host=self).all()
        events = []
        for event in guest_events:
            events.append(event.event)
        for event in host_events:
            events.append(event.event)
        return events
class Event(Model):
    """An event with visibility-controlled fields.

    When ``visibility`` is 'G' (guests only), the name/location/time/
    description fields are replaced for non-participants by the
    ``jeeves_get_private_*`` placeholder values via the
    ``jeeves_restrict_event`` label below.
    """
    VISIBILITY = (('E', 'Everyone'), ('G', 'Guests' ))
    name = CharField(max_length=256)
    location = CharField(max_length=512)
    time = DateTimeField()
    description = CharField(max_length=1024)
    visibility = CharField(max_length=1, choices=VISIBILITY, default='E')
    @jeeves
    def has_host(self, host):
        return EventHost.objects.get(event=self, host=host) != None
    @jeeves
    def has_guest(self, guest):
        return EventGuest.objects.get(event=self, guest=guest) != None
    # Placeholder values shown to viewers who fail the visibility check.
    @staticmethod
    def jeeves_get_private_name(event):
        return "Private event"
    @staticmethod
    def jeeves_get_private_location(event):
        return "Undisclosed location"
    @staticmethod
    def jeeves_get_private_time(event):
        # NOTE(review): returns "now" (UTC) at access time rather than a
        # fixed sentinel timestamp -- confirm this is intended.
        return datetime.now(tz=pytz.utc)
    @staticmethod
    def jeeves_get_private_description(event):
        return "An event."
    @staticmethod
    @label_for('name', 'location', 'time', 'description')
    @jeeves
    def jeeves_restrict_event(event, ctxt):
        # Guests-only events are visible only to their hosts and guests.
        if event.visibility == 'G':
            return event.has_host(ctxt) or event.has_guest(ctxt)
        else:
            return True
class EventHost(Model):
    """Relates events to hosts.
    """
    event = ForeignKey(Event, null=True)  # the hosted event
    host = ForeignKey(UserProfile, null=True)  # the hosting user
class EventGuest(Model):
    """Relates events to guests.
    """
    event = ForeignKey(Event, null=True)  # the attended event
    guest = ForeignKey(UserProfile, null=True)  # the attending user
from django.dispatch import receiver
from django.db.models.signals import post_syncdb
import os
import sys
# reference to this module, used as the signal sender below
current_module = sys.modules[__name__]
@receiver(post_syncdb, sender=current_module)
def dbSynced(sender, **kwargs):
    """After syncdb creates this app's tables, load sample data in DEBUG mode."""
    # NOTE(review): execfile() is Python 2 only, as is the post_syncdb
    # signal (removed in modern Django); under Python 3 this would need
    # exec(open(path).read()) and a different signal.
    if settings.DEBUG:
        execfile(os.path.join(settings.BASE_DIR, '..', 'SampleData.py'))
|
#
# Resource.py
#
# (c) 2020 by Andreas Kraft
# License: BSD 3-Clause License. See the LICENSE file for further details.
#
# Base class for all resources
#
from Logging import Logging
from Constants import Constants as C
from Configuration import Configuration
import Utils, CSE
import datetime, random
# Future TODO: Check RO/WO etc for attributes (list of attributes per resource?)
class Resource(object):
    """Base class for all resources.

    A resource is a thin wrapper around a JSON dictionary (``self.json``)
    that holds the resource's attributes. Item access (``self[...]``),
    attribute access (``self.ri`` via ``__getattr__``) and ``in`` are all
    forwarded to that dictionary. A few internal bookkeeping values are
    stored in the same dictionary under dunder keys and stripped on output.
    """
    # internal attribute keys kept inside self.json but removed by asJSON()
    _rtype = '__rtype__'  # resource type name
    _srn = '__srn__'      # structured resource name (path)
    _node = '__node__'    # associated node
    def __init__(self, tpe, jsn=None, pi=None, ty=None, create=False, inheritACP=False, readOnly=False, rn=None):
        """Initialize from an optional JSON structure.

        tpe: resource type name used as the embedding key in the JSON.
        jsn: initial attributes, either embedded under tpe or flat.
        pi/ty: parent resource id and numeric resource type.
        create: True when a brand-new resource is being created (forces a
        unique 'ri'); False when merely instantiating from storage.
        """
        self.tpe = tpe
        self.readOnly = readOnly
        self.inheritACP = inheritACP
        self.json = {}
        if jsn is not None:
            if tpe in jsn:
                self.json = jsn[tpe].copy()
            else:
                self.json = jsn.copy()
        else:
            pass
            # TODO Exception?
        # NOTE(review): self.json is always a dict at this point ({} or a
        # copy), so this guard can never be False.
        if self.json is not None:
            if self.tpe is None: # and self._rtype in self:
                self.tpe = self.__rtype__
            self.setAttribute('ri', Utils.uniqueRI(self.tpe), overwrite=False)
            # override rn if given
            if rn is not None:
                self.setAttribute('rn', rn, overwrite=True)
            # Check uniqueness of ri. otherwise generate a new one. Only when creating
            # TODO: could be a BAD REQUEST?
            if create:
                while Utils.isUniqueRI(ri := self.attribute('ri')) == False:
                    Logging.logWarn("RI: %s is already assigned. Generating new RI." % ri)
                    self.setAttribute('ri', Utils.uniqueRI(self.tpe), overwrite=True)
            # Create an RN if there is none
            self.setAttribute('rn', Utils.uniqueRN(self.tpe), overwrite=False)
            # Set some more attributes: creation/lastmod/expiration times, state
            ts = Utils.getResourceDate()
            self.setAttribute('ct', ts, overwrite=False)
            self.setAttribute('lt', ts, overwrite=False)
            self.setAttribute('et', Utils.getResourceDate(Configuration.get('cse.expirationDelta')), overwrite=False)
            self.setAttribute('st', 0, overwrite=False)
            if pi is not None:
                self.setAttribute('pi', pi, overwrite=False)
            if ty is not None:
                self.setAttribute('ty', ty)
            #
            ## Note: ACPI is set in activate()
            #
            # Remove empty / null attributes from json
            self.json = {k: v for (k, v) in self.json.items() if v is not None }
            # determine and add the srn
            self[self._srn] = Utils.structuredPath(self)
            self[self._rtype] = self.tpe
    # Default encoding implementation. Overwrite in subclasses
    def asJSON(self, embedded=True, update=False, noACP=False):
        """Serialize to a plain dict, optionally embedded under the type key."""
        # remove (from a copy) all internal attributes before printing
        jsn = self.json.copy()
        for k in [ self._rtype, self._srn, self._node]:
            if k in jsn:
                del jsn[k]
        if noACP:
            if 'acpi' in jsn:
                del jsn['acpi']
        if update:
            # strip attributes that must not appear in an UPDATE payload
            # NOTE(review): unlike the loop above these deletions are not
            # guarded by `k in jsn`; a missing key (e.g. 'mgd') would raise
            # KeyError -- confirm these keys are always present here.
            for k in [ 'ri', 'ty', 'pi', 'ct', 'lt', 'st', 'rn', 'mgd']:
                del jsn[k]
        return { self.tpe : jsn } if embedded else jsn
    # This method is called to activate a resource. This is not always the
    # case, e.g. when a resource object is just used temporarily.
    # NO notification on activation/creation!
    # Implemented in sub-classes.
    def activate(self, originator):
        """Validate and finish creation; returns a (success, rc) tuple."""
        Logging.logDebug('Activating resource: %s' % self.ri)
        if not (result := self.validate(originator, create=True))[0]:
            return result
        # Note: CR is set in RegistrationManager
        # Handle ACPI assignments here
        if self.inheritACP:
            self.delAttribute('acpi')
        else:
            if self.ty != C.tAE: # Don't handle AE's here. This is done in the RegistrationManager
                #adminACPIRI = Configuration.get('cse.adminACPI')
                defaultACPIRI = Configuration.get('cse.defaultACPI')
                if self.acpi is None:
                    self.setAttribute('acpi', [ defaultACPIRI ]) # Set default ACPIRIs
                    #self.setAttribute('acpi', [ adminACPIRI, defaultACPIRI ]) # Set admin and default ACPIRIs
            # else:
            #     if not adminACPIRI in self.acpi:
            #         self.acpi.append(adminACPIRI)
        self.setAttribute(self._rtype, self.tpe, overwrite=False)
        return (True, C.rcOK)
    # Deactivate an active resource.
    # Send notification on deletion
    def deactivate(self, originator):
        """Notify subscribers and delete all sub-resources (returns None)."""
        Logging.logDebug('Deactivating and removing sub-resources: %s' % self.ri)
        # First check notification because the subscription will be removed
        # when the subresources are removed
        CSE.notification.checkSubscriptions(self, C.netResourceDelete)
        # Remove subresources
        rs = CSE.dispatcher.subResources(self.ri)
        for r in rs:
            self.childRemoved(r, originator)
            CSE.dispatcher.deleteResource(r, originator)
    # Update this resource with (new) fields.
    # Call validate() afterward to react on changes.
    def update(self, jsn=None, originator=None):
        """Merge new attributes in; returns a (success, rc) tuple."""
        if jsn is not None:
            if self.tpe not in jsn:
                Logging.logWarn("Update types don't match")
                return (False, C.rcContentsUnacceptable)
            j = jsn[self.tpe] # get structure under the resource type specifier
            for key in j:
                # Leave out some attributes that a client may not change
                if key in ['ct', 'lt', 'pi', 'ri', 'rn', 'st', 'ty']:
                    continue
                self[key] = j[key] # copy new value
        # - state and lt
        if 'st' in self.json: # Update the state
            self['st'] += 1
        if 'lt' in self.json: # Update the lastModifiedTime
            self['lt'] = Utils.getResourceDate()
        # Do some extra validations, if necessary
        if not (res := self.validate(originator))[0]:
            return res
        # Check subscriptions
        CSE.notification.checkSubscriptions(self, C.netResourceUpdate)
        return (True, C.rcOK)
    # Child was added to the resource.
    def childAdded(self, childResource, originator):
        CSE.notification.checkSubscriptions(self, C.netCreateDirectChild, childResource)
    # Child was removed from the resource.
    def childRemoved(self, childResource, originator):
        CSE.notification.checkSubscriptions(self, C.netDeleteDirectChild, childResource)
    # MUST be implemented by each class
    def canHaveChild(self, resource):
        raise NotImplementedError('canHaveChild()')
    # Is be called from child class
    def _canHaveChild(self, resource, allowedChildResourceTypes):
        from .Unknown import Unknown # Unknown imports this class, therefore import only here
        return resource['ty'] in allowedChildResourceTypes or isinstance(resource, Unknown)
    # Validate a resource. Usually called within activate() or
    # update() methods.
    def validate(self, originator=None, create=False):
        """Basic ID validation; returns a (success, rc) tuple."""
        Logging.logDebug('Validating resource: %s' % self.ri)
        if (not Utils.isValidID(self.ri) or
            not Utils.isValidID(self.pi) or
            not Utils.isValidID(self.rn)):
            Logging.logDebug('Invalid ID ri: %s, pi: %s, rn: %s)' % (self.ri, self.pi, self.rn))
            return (False, C.rcContentsUnacceptable)
        return (True, C.rcOK)
    #########################################################################
    #
    # Attribute handling
    #
    def setAttribute(self, name, value, overwrite=True):
        # name may be an XPath-style path into the JSON structure
        Utils.setXPath(self.json, name, value, overwrite)
    def attribute(self, key, default=None):
        if '/' in key: # search in path
            return Utils.findXPath(self.json, key, default)
        if self.hasAttribute(key):
            return self.json[key]
        return default
    def hasAttribute(self, key):
        # TODO check sub-elements as well
        return key in self.json
    def delAttribute(self, key):
        if self.hasAttribute(key):
            del self.json[key]
    def __setitem__(self, key, value):
        self.setAttribute(key, value)
    def __getitem__(self, key):
        return self.attribute(key)
    def __delitem__(self, key):
        self.delAttribute(key)
    def __contains__(self, key):
        return self.hasAttribute(key)
    def __getattr__(self, name):
        # NOTE: unknown attribute names return None (the attribute()
        # default) instead of raising AttributeError.
        return self.attribute(name)
    #########################################################################
    #
    # Misc utilities
    #
    def __str__(self):
        return str(self.asJSON())
    def __eq__(self, other):
        # Equality is by resource id only.
        # NOTE(review): defining __eq__ without __hash__ makes instances
        # unhashable on Python 3 -- confirm resources are never used as
        # dict keys / set members.
        return self.ri == other.ri
    def isModifiedSince(self, other):
        # compares the lastModifiedTime timestamp strings
        return self.lt > other.lt
    def retrieveParentResource(self):
        (parentResource, _) = CSE.dispatcher.retrieveResource(self.pi)
        return parentResource
|
# see table at page 26 for the PDF document
# GUIDA ALLA COMPILAZIONE DELLE FATTURE ELETTRONICHE E DELL’ESTEROMETRO
# from AdE (Agenzia Delle Entrate), version 1.6 - 2022/02/04
# https://www.agenziaentrate.gov.it/portale/documents/20143/451259/Guida_compilazione-FE_2021_07_07.pdf/e6fcdd04-a7bd-e6f2-ced4-cac04403a768
# see also:
# - https://agenziaentrate.gov.it/portale/documents/20143/296703/Variazioni+alle+specifiche+tecniche+fatture+elettroniche2021-07-02.pdf # noqa
# - https://www.agenziaentrate.gov.it/portale/web/guest/schede/comunicazioni/fatture-e-corrispettivi/faq-fe/risposte-alle-domande-piu-frequenti-categoria/compilazione-della-fattura-elettronica # noqa
# VAT "nature" codes: the reason an invoice line carries no VAT.
NATURA_IVA = (
    "N1",
    "N2",
    "N2.1",  # not subject to VAT under arts. 7 to 7-septies of D.P.R. 633/72
    "N2.2",  # not subject to VAT - other cases
    "N3",
    "N3.1",  # not taxable - exports
    "N3.2",  # not taxable - intra-EU supplies
    "N3.3",  # not taxable - supplies to San Marino
    "N3.4",  # not taxable - transactions treated as export supplies
    "N3.5",  # not taxable - following declarations of intent
    "N3.6",  # not taxable - other transactions
    "N4",
    "N5",
    "N6",
    "N6.1",  # reverse charge - transfer of scrap and other recovered materials
    "N6.2",  # reverse charge - transfer of gold and silver pursuant to
             # Law 7/2000, and of used jewellery to OPO
    "N6.3",  # reverse charge - subcontracting in the construction sector
    "N6.4",  # reverse charge - transfer of buildings
    "N6.5",  # reverse charge - transfer of mobile phones
    "N6.6",  # reverse charge - transfer of electronic products
    "N6.7",  # reverse charge - services in the construction sector and related sectors
    "N6.8",  # reverse charge - energy sector transactions
    "N6.9",  # reverse charge - other cases
    "N7",
)
# see pages 1 to 25 for the PDF document
# GUIDA ALLA COMPILAZIONE DELLE FATTURE ELETTRONICHE E DELL’ESTEROMETRO
# from AdE (Agenzia Delle Entrate), version 1.6 - 2022/02/04
# https://www.agenziaentrate.gov.it/portale/documents/20143/451259/Guida_compilazione-FE_2021_07_07.pdf/e6fcdd04-a7bd-e6f2-ced4-cac04403a768
# Document type codes for electronic invoices.
TIPO_DOCUMENTO = (
    "TD01",  # invoice
    "TD02",  # advance / down payment on an invoice
    "TD03",  # advance / down payment on a fee note
    "TD04",  # credit note
    "TD05",  # debit note
    "TD06",  # fee note (parcella)
    "TD07",  # simplified invoice
    "TD08",  # simplified credit note
    "TD09",  # simplified debit note
    "TD16",  # integration of an invoice under internal reverse charge
    "TD17",  # integration/self-invoice for purchase of services from abroad
    "TD18",  # integration for purchase of intra-EU goods
    "TD19",  # integration/self-invoice for purchase of goods ex art. 17(2) D.P.R. 633/72
    "TD20",  # self-invoice for regularization and integration of invoices
             # (ex art. 6(8) and (9-bis) D.Lgs. 471/97 or art. 46(5) D.L. 331/93)
    "TD21",  # self-invoice for exceeding the VAT exemption ceiling (splafonamento)
    "TD22",  # extraction of goods from a VAT warehouse
    "TD23",  # extraction of goods from a VAT warehouse with payment of VAT
    "TD24",  # deferred invoice under art. 21(4), third sentence, lett. a), D.P.R. 633/72
    "TD25",  # deferred invoice under art. 21(4), third sentence, lett. b), D.P.R. 633/72
    "TD26",  # transfer of depreciable assets and internal transfers (ex art. 36 D.P.R. 633/72)
    "TD27",  # invoice for self-consumption or free-of-charge supplies without recourse
)
# Copied from Documentazione valida a partire dal 1 ottobre 2020
# Rappresentazione tabellare del tracciato fattura ordinaria - excel
# Tax regime codes of the invoice issuer.
REGIME_FISCALE = (
    "RF01",  # ordinary regime
    "RF02",  # minimum taxpayers (art. 1(96-117), L. 244/07)
    "RF04",  # agriculture, related activities and fishing (arts. 34 and 34-bis, DPR 633/72)
    "RF05",  # sale of salt and tobacco (art. 74(1), DPR 633/72)
    "RF06",  # match trade (art. 74(1), DPR 633/72)
    "RF07",  # publishing (art. 74(1), DPR 633/72)
    "RF08",  # management of public telephony services (art. 74(1), DPR 633/72)
    "RF09",  # resale of public transport and parking tickets (art. 74(1), DPR 633/72)
    "RF10",  # entertainment, games and other activities per the tariff
             # attached to DPR 640/72 (art. 74(6), DPR 633/72)
    "RF11",  # travel and tourism agencies (art. 74-ter, DPR 633/72)
    "RF12",  # farm-stay hospitality / agriturismo (art. 5(2), L. 413/91)
    "RF13",  # door-to-door sales (art. 25-bis(6), DPR 600/73)
    "RF14",  # resale of used goods, works of art, antiques or collectors' items (art. 36, DL 41/95)
    "RF15",  # auction agencies for works of art, antiques or collectors' items (art. 40-bis, DL 41/95)
    "RF16",  # cash-basis VAT for public administrations (art. 6(5), DPR 633/72)
    "RF17",  # cash-basis VAT (art. 32-bis, DL 83/2012)
    "RF18",  # other
    "RF19",  # flat-rate regime (art. 1(54-89), L. 190/2014)
)
# Copied from Documentazione valida a partire dal 1 ottobre 2020
# Rappresentazione tabellare del tracciato fattura ordinaria - excel
# Professional social-security fund (cassa previdenziale) codes.
TIPO_CASSA = (
    "TC01",  # national pension and welfare fund for lawyers and attorneys
    "TC02",  # pension fund for certified public accountants (dottori commercialisti)
    "TC03",  # pension and welfare fund for surveyors (geometri)
    "TC04",  # national pension and welfare fund for self-employed engineers and architects
    "TC05",  # national notary fund
    "TC06",  # national pension and welfare fund for bookkeepers and commercial experts
    "TC07",  # national welfare body for commercial agents and representatives (ENASARCO)
    "TC08",  # national pension and welfare body for labour consultants (ENPACL)
    "TC09",  # national pension and welfare body for physicians (ENPAM)
    "TC10",  # national pension and welfare body for pharmacists (ENPAF)
    "TC11",  # national pension and welfare body for veterinarians (ENPAV)
    "TC12",  # national pension and welfare body for agricultural employees (ENPAIA)
    "TC13",  # pension fund for employees of shipping companies and maritime agencies
    "TC14",  # national pension institute for Italian journalists (INPGI)
    "TC15",  # national welfare institution for orphans of Italian health workers (ONAOSI)
    "TC16",  # autonomous supplementary welfare fund for Italian journalists (CASAGIT)
    "TC17",  # pension body for industrial experts and graduate industrial experts (EPPI)
    "TC18",  # multi-category pension and welfare body (EPAP)
    "TC19",  # national pension and welfare body for biologists (ENPAB)
    "TC20",  # national pension and welfare body for the nursing profession (ENPAPI)
    "TC21",  # national pension and welfare body for psychologists (ENPAP)
    "TC22",  # INPS
)
# Copied from Documentazione valida a partire dal 1 ottobre 2020
# Rappresentazione tabellare del tracciato fattura ordinaria - excel
# Payment method codes.
MODALITA_PAGAMENTO = (
    "MP01",  # cash
    "MP02",  # cheque
    "MP03",  # banker's draft
    "MP04",  # cash at the Treasury
    "MP05",  # bank transfer
    "MP06",  # promissory note (vaglia cambiario)
    "MP07",  # bank payment slip
    "MP08",  # payment card
    "MP09",  # direct debit (RID)
    "MP10",  # utilities direct debit (RID utenze)
    "MP11",  # fast direct debit (RID veloce)
    "MP12",  # bank receipt (RIBA)
    "MP13",  # payment-by-notice slip (MAV)
    "MP14",  # tax-office receipt (quietanza erario)
    "MP15",  # giro transfer to special accounting accounts
    "MP16",  # bank domiciliation
    "MP17",  # postal domiciliation
    "MP18",  # postal current-account slip
    "MP19",  # SEPA Direct Debit
    "MP20",  # SEPA Direct Debit CORE
    "MP21",  # SEPA Direct Debit B2B
    "MP22",  # withholding on sums already collected
    "MP23",  # PagoPA
)
# Copied from Documentazione valida a partire dal 1 ottobre 2020
# Rappresentazione tabellare del tracciato fattura ordinaria - excel
# Withholding tax type codes.
TIPO_RITENUTA = (
    "RT01",  # withholding for natural persons
    "RT02",  # withholding for legal persons
    "RT03",  # INPS contribution
    "RT04",  # ENASARCO contribution
    "RT05",  # ENPAM contribution
    "RT06",  # other social-security contribution
)
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Tue Mar 10 12:50:59 2020
@author: Luke
"""
#==============================================================================
# SUMMARY
#==============================================================================
# 18 May 2020
# plots maps of climate change impacts
#==============================================================================
# IMPORT
#==============================================================================
import xarray as xr
import numpy as np
import matplotlib as mpl
from mpl_toolkits.basemap import Basemap
import matplotlib.pyplot as plt
import matplotlib.gridspec as gridspec
#==============================================================================
# FUNCTION
#==============================================================================
def plot_p3(wt_annual,wt_jja_scaled,iisig_data,lat,lon,outDIR,flag_svplt):
    """Plot and optionally save figure 3: maps of climate-change impacts on lakes.

    Top row (panels a, b): global maps of annual lake temperature change (°C)
    and JJA lake temperature change scaled by global mean air temperature
    change (°C/°C).  Bottom row (panels c-e): north-polar maps of ice index
    changes (onset, break-up, duration) in days.

    Parameters
    ----------
    wt_annual : 2-D array
        Annual lake temperature change on the lat/lon grid.
        # assumes shape (lat, lon) matching the meshgrid below — TODO confirm
    wt_jja_scaled : 2-D array
        JJA lake temperature change divided by global mean air temperature change.
    iisig_data : iterable of 2-D arrays
        Three fields, in order: ice onset, ice break-up, ice duration changes (days).
    lat, lon : 1-D arrays
        Grid coordinate vectors; meshgridded in place below.
    outDIR : str
        Output directory used when saving the figure.
    flag_svplt : int
        1 saves the figure as <outDIR>/f3.png at 500 dpi; 0 skips saving.
    """
#==============================================================================
# general
#==============================================================================
    # font settings
    title_font = 15
    cbtitle_font = 15
    tick_font = 12
    arrow_font = 14
    # list of figure panel ids
    letters = ['a', 'b', 'c',\
               'd', 'e', 'f',\
               'g', 'h', 'i',\
               'j', 'k', 'l']
    lettercount = 0
    # NOTE: overwrites the 1-D inputs with 2-D meshgrid arrays for pcolormesh
    lon, lat = np.meshgrid(lon, lat)
#==============================================================================
# initialize
#==============================================================================
    f = plt.figure(figsize=(15,14))
    # top row: two global maps
    gs1 = gridspec.GridSpec(1,2)
    ax1 = f.add_subplot(gs1[0])
    ax2 = f.add_subplot(gs1[1])
    # rect=[left, bottom, right, top]
    gs1.tight_layout(figure=f, rect=[0, 0.65, 1, 1])
    # bottom row: three polar maps
    gs2 = gridspec.GridSpec(1,3)
    ax3 = f.add_subplot(gs2[0])
    ax4 = f.add_subplot(gs2[1])
    ax5 = f.add_subplot(gs2[2])
    gs2.tight_layout(figure=f, rect=[0, 0.1, 1, 0.65])
#==============================================================================
# watertemp annual plotting
#==============================================================================
    # continent fill color
    col_cont='white'
    # ocean fill color
    col_ocean='whitesmoke'
    # zero change color
    col_zero='gray'
#========== COLORBAR ==========#
    # identify colors
    cmap_whole = plt.cm.get_cmap('RdBu')
    cmap55 = cmap_whole(0.01)
    cmap50 = cmap_whole(0.05)   #blue
    cmap45 = cmap_whole(0.1)
    cmap40 = cmap_whole(0.15)
    cmap35 = cmap_whole(0.2)
    cmap30 = cmap_whole(0.25)
    cmap25 = cmap_whole(0.3)
    cmap20 = cmap_whole(0.325)
    cmap10 = cmap_whole(0.4)
    cmap5 = cmap_whole(0.475)
    cmap0 = col_zero
    cmap_5 = cmap_whole(0.525)
    cmap_10 = cmap_whole(0.6)
    cmap_20 = cmap_whole(0.625)
    cmap_25 = cmap_whole(0.7)
    cmap_30 = cmap_whole(0.75)
    cmap_35 = cmap_whole(0.8)
    cmap_40 = cmap_whole(0.85)
    cmap_45 = cmap_whole(0.9)
    cmap_50 = cmap_whole(0.95)  #red
    cmap_55 = cmap_whole(0.99)
    # declare list of colors for discrete colormap of colorbar
    cmap = mpl.colors.ListedColormap([cmap_45,cmap_35,cmap_25,cmap_10,cmap_5,cmap0,
                                      cmap5,cmap10,cmap20,cmap30,cmap40],N=11)
    # set color of over/under arrows in colorbar
    cmap.set_over(cmap50)
    cmap.set_under(cmap_50)
    # colorbar args
    values = [-5,-4,-3,-2,-1,-0.5,0.5,1,2,3,4,5]
    tick_locs = [-5,-4,-3,-2,-1,0,1,2,3,4,5]
    norm = mpl.colors.BoundaryNorm(values,cmap.N)
    # colorbar label
    cblabel = 'Δ lake temperature (°C)'
    # bbox (arrow plot relative to this axis)
    cb_x0 = 0.063
    cb_y0 = 0.675
    cb_xlen = 0.4
    cb_ylen = 0.015
#========== ARROWS ==========#
    # blue arrow label
    bluelabel = 'Colder'
    x0_bluelab = 0.25
    y0_bluelab = -2.8
    # blue arrow
    x0_bluearr = 0.495
    y0_bluearr = -3.3
    xlen_bluearr = -0.4
    ylen_bluearr = 0
    # red arrow label
    redlabel = 'Warmer'
    x0_redlab = 0.75
    y0_redlab = -2.8
    # red arrow
    x0_redarr = 0.505
    y0_redarr = -3.3
    xlen_redarr = 0.4
    ylen_redarr = 0
    # general
    arrow_width = 0.25
    arrow_linew = 0.1
    arrow_headwidth = 0.5
    arrow_headlength = 0.06
#========== PLOTTING ==========#
    lettercount += 1
    m = Basemap(llcrnrlon=-170, llcrnrlat=-60, urcrnrlon=180, urcrnrlat=90, suppress_ticks=False);
    m.ax = ax1
    m.drawcoastlines(linewidth=0.1);
    m.drawmapboundary(fill_color=col_ocean)
    m.fillcontinents(color=col_cont);
    m.pcolormesh(lon,lat,wt_annual,latlon=True,cmap=cmap,norm=norm,vmax=5,vmin=-5,zorder=3)
    ax1.set_title(letters[lettercount-1],loc='left',pad=10,fontsize=title_font,fontweight='bold')
    ax1.set_title('Annual',loc='center',pad=10,fontsize=title_font)
    ax1.tick_params(labelbottom=False, labeltop=False, labelleft=False, labelright=False,
                    bottom=False, top=False, left=False, right=False, color='0.2',\
                    labelcolor='0.2',width=0.4,direction="in",length=2.5)
    ax1.spines['bottom'].set_color('0.2')
    ax1.spines['bottom'].set_linewidth(0.4)
    ax1.spines['top'].set_color('0.2')
    ax1.spines['top'].set_linewidth(0.4)
    ax1.xaxis.label.set_color('0.2')
    ax1.spines['left'].set_color('0.2')
    ax1.spines['left'].set_linewidth(0.4)
    ax1.spines['right'].set_color('0.2')
    ax1.spines['right'].set_linewidth(0.4)
    ax1.yaxis.label.set_color('0.2')
    # colorbar setup
    cbax = f.add_axes([cb_x0, cb_y0, cb_xlen, cb_ylen])
    cb = mpl.colorbar.ColorbarBase(ax=cbax, cmap=cmap,
                                   norm=norm,
                                   spacing='proportional',
                                   orientation='horizontal',
                                   extend='both',
                                   ticks=tick_locs)
    cb.set_label(cblabel,size=cbtitle_font)
    cb.ax.xaxis.set_label_position('top');
    cb.ax.tick_params(labelcolor='0.2', labelsize=tick_font, color='0.2',length=2.5, width=0.4, direction='out'); #change color of ticks?
    cb.ax.set_xticklabels([r'$\leq$-5','-4','-3','-2','-1','0','1','2','3','4',r'5$\leq$'])
    cb.outline.set_edgecolor('0.2')
    cb.outline.set_linewidth(0.4)
    #arrows
    plt.text(x0_redlab, y0_redlab, redlabel, size=arrow_font, ha='center', va='center')
    plt.text(x0_bluelab, y0_bluelab, bluelabel, size=arrow_font, ha='center', va='center')
    plt.arrow(x0_bluearr, y0_bluearr, xlen_bluearr, ylen_bluearr, width=arrow_width, linewidth=arrow_linew,\
              shape='left', head_width=arrow_headwidth, head_length=arrow_headlength,\
              facecolor=cmap_40, edgecolor='k', clip_on=False)
    plt.arrow(x0_redarr, y0_redarr, xlen_redarr, ylen_redarr, width=arrow_width, linewidth=arrow_linew,\
              shape='right', head_width=arrow_headwidth, head_length=arrow_headlength,\
              facecolor=cmap40, edgecolor='k', clip_on=False)
#==============================================================================
# watertemp jja scaled plotting
#==============================================================================
    # continent fill color
    col_cont='white'
    # ocean fill color
    col_ocean='whitesmoke'
    # zero change color
    col_zero='gray'
#========== COLORBAR ==========#
    # colorbar colormap setup
    cmap_whole = plt.cm.get_cmap('PRGn')
    cmap55 = cmap_whole(0.01)
    cmap50 = cmap_whole(0.05)   #purple
    cmap45 = cmap_whole(0.1)
    cmap40 = cmap_whole(0.15)
    cmap35 = cmap_whole(0.2)
    cmap30 = cmap_whole(0.25)
    cmap25 = cmap_whole(0.3)
    cmap20 = cmap_whole(0.35)
    cmap10 = cmap_whole(0.4)
    cmap0 = col_zero
    cmap_5 = cmap_whole(0.55)
    cmap_10 = cmap_whole(0.6)
    cmap_20 = cmap_whole(0.65)
    cmap_25 = cmap_whole(0.7)
    cmap_30 = cmap_whole(0.75)
    cmap_35 = cmap_whole(0.8)
    cmap_40 = cmap_whole(0.85)
    cmap_45 = cmap_whole(0.9)
    cmap_50 = cmap_whole(0.95)  #green
    cmap_55 = cmap_whole(0.99)
    # list of discrete colors as colormap for colorbar
    cmap = mpl.colors.ListedColormap([cmap_50,cmap_40,cmap_30,cmap_20,
                                      cmap20,cmap30,cmap40,cmap50], N=8)
    # set color of over/under arrows on colorbar
    cmap.set_over(cmap55)
    cmap.set_under(cmap_55)
    # colorbar args
    values = [0,0.25,0.5,0.75,1,1.25,1.5,1.75,2]
    tick_locs = np.arange(0,2.25,0.25)
    norm = mpl.colors.BoundaryNorm(values,cmap.N)
    # colorbar label
    cblabel = 'Δ lake temperature / Δ global mean air temperature (°C/°C)'
    # bbox (arrow plot relative to this axis)
    cb_x0 = 0.553
    cb_y0 = 0.675
    cb_xlen = 0.4
    cb_ylen = 0.015
#========== ARROWS ==========#
    # green arrow label
    greenlabel = 'Low sensitivity'
    x0_greenlab = 0.25
    y0_greenlab = -2.8
    # green arrow
    x0_greenarr = 0.495
    y0_greenarr = -3.3
    xlen_greenarr = -0.5
    ylen_greenarr = 0
    # purple arrow label
    purplelabel = 'High sensitivity'
    x0_purplelab = 0.75
    y0_purplelab = -2.8
    # purple arrow
    x0_purplearr = 0.505
    y0_purplearr = -3.3
    xlen_purplearr = 0.5
    ylen_purplearr = 0
    # general
    arrow_width = 0.25
    arrow_linew = 0.1
    arrow_headwidth = 0.5
    arrow_headlength = 0.06
#========== PLOTTING ==========#
    lettercount += 1
    m = Basemap(llcrnrlon=-170, llcrnrlat=-60, urcrnrlon=180, urcrnrlat=90, suppress_ticks=False);
    m.ax = ax2
    m.drawcoastlines(linewidth=0.1);
    m.drawmapboundary(fill_color=col_ocean)
    m.fillcontinents(color=col_cont);
    m.pcolormesh(lon,lat,wt_jja_scaled,latlon=True,cmap=cmap,norm=norm,vmax=2,vmin=0,zorder=3)
    ax2.set_title(letters[lettercount-1],loc='left',pad=10,fontsize=title_font,fontweight='bold')
    ax2.set_title('JJA',loc='center',pad=10,fontsize=title_font)
    ax2.tick_params(labelbottom=False, labeltop=False, labelleft=False, labelright=False,
                    bottom=False, top=False, left=False, right=False, color='0.2',\
                    labelcolor='0.2',width=0.4,direction="in",length=2.5)
    ax2.spines['bottom'].set_color('0.2')
    ax2.spines['bottom'].set_linewidth(0.4)
    ax2.spines['top'].set_color('0.2')
    ax2.spines['top'].set_linewidth(0.4)
    ax2.xaxis.label.set_color('0.2')
    ax2.spines['left'].set_color('0.2')
    ax2.spines['left'].set_linewidth(0.4)
    ax2.spines['right'].set_color('0.2')
    ax2.spines['right'].set_linewidth(0.4)
    ax2.yaxis.label.set_color('0.2')
    # colorbar setup
    cbax2 = f.add_axes([cb_x0, cb_y0, cb_xlen, cb_ylen])
    cb = mpl.colorbar.ColorbarBase(ax=cbax2, cmap=cmap,
                                   norm=norm,
                                   spacing='proportional',
                                   orientation='horizontal',
                                   extend='both',
                                   ticks=tick_locs)
    cb.set_label(cblabel,size=cbtitle_font)
    cb.ax.xaxis.set_label_position('top');
    cb.ax.tick_params(labelcolor='0.2', labelsize=tick_font, color='0.2',length=2.5, width=0.4, direction='out'); #change color of ticks?
    cb.ax.set_xticklabels([r'$\leq$0','0.25','0.5','0.75','1','1.25','1.5','1.75',r'2$\leq$'])
    cb.outline.set_edgecolor('0.2')
    cb.outline.set_linewidth(0.4)
    #arrows
    plt.text(x0_purplelab, y0_purplelab, purplelabel, size=arrow_font, ha='center', va='center')
    plt.text(x0_greenlab, y0_greenlab, greenlabel, size=arrow_font, ha='center', va='center')
    plt.arrow(x0_greenarr, y0_greenarr, xlen_greenarr, ylen_greenarr, width=arrow_width, linewidth=arrow_linew,\
              shape='left', head_width=arrow_headwidth, head_length=arrow_headlength,\
              facecolor=cmap_40, edgecolor='k', clip_on=False)
    plt.arrow(x0_purplearr, y0_purplearr, xlen_purplearr, ylen_purplearr, width=arrow_width, linewidth=arrow_linew,\
              shape='right', head_width=arrow_headwidth, head_length=arrow_headlength,\
              facecolor=cmap40, edgecolor='k', clip_on=False)
#==============================================================================
# ice index plotting
#==============================================================================
    # continent fill
    col_cont='white'
    # ocean fill
    col_ocean='whitesmoke'
    # zero change color
    col_zero='gray'
    # list of ice index titles
    ice_titles = ['Ice onset', 'Ice break-up', 'Ice duration']
#========== COLORBAR ==========#
    # identify colors
    cmap_whole = plt.cm.get_cmap('RdBu_r')
    cmap55 = cmap_whole(0.01)
    cmap50 = cmap_whole(0.05)   #blue
    cmap45 = cmap_whole(0.1)
    cmap40 = cmap_whole(0.15)
    cmap35 = cmap_whole(0.2)
    cmap30 = cmap_whole(0.25)
    cmap25 = cmap_whole(0.3)
    cmap20 = cmap_whole(0.325)
    cmap10 = cmap_whole(0.4)
    cmap5 = cmap_whole(0.475)
    cmap0 = col_zero
    cmap_5 = cmap_whole(0.525)
    cmap_10 = cmap_whole(0.6)
    cmap_20 = cmap_whole(0.625)
    cmap_25 = cmap_whole(0.7)
    cmap_30 = cmap_whole(0.75)
    cmap_35 = cmap_whole(0.8)
    cmap_40 = cmap_whole(0.85)
    cmap_45 = cmap_whole(0.9)
    cmap_50 = cmap_whole(0.95)  #red
    cmap_55 = cmap_whole(0.99)
    # declare list of colors for discrete colormap of colorbar
    cmap = mpl.colors.ListedColormap([cmap_45,cmap_40,cmap_35,cmap_30,cmap_25,cmap_10,cmap0,
                                      cmap5,cmap10,cmap20,cmap30,cmap35,cmap40],N=13)
    # set color of over/under arrows in colorbar
    cmap.set_over(cmap55)
    cmap.set_under(cmap_55)
    # colorbar args
    values = [-90,-75,-60,-45,-30,-15,-5,5,15,30,45,60,75,90]
    tick_locs = [-90,-75,-60,-45,-30,-15,0,15,30,45,60,75,90]
    norm = mpl.colors.BoundaryNorm(values,cmap.N)
    # bbox (arrow plot relative to this axis)
    cb_x0 = 0.235
    cb_y0 = 0.175
    cb_xlen = 0.55
    cb_ylen = 0.015
    # colorbar label
    cblabel = 'Δ ice index (days)'
#========== ARROWS ==========#
    # blue arrow label
    bluelabel = 'Later date (panels a,b) or longer duration (panel c)'
    x0_bluelab = 0.85
    y0_bluelab = -2.7
    # blue arrow
    x0_bluearr = 0.505
    y0_bluearr = -3.3
    xlen_bluearr = 0.8
    ylen_bluearr = 0
    # red arrow label
    redlabel = 'Earlier date (panels a,b) or shorter duration (panel c)'
    x0_redlab = 0.14
    y0_redlab = -2.7
    # red arrow
    x0_redarr = 0.495
    y0_redarr = -3.3
    xlen_redarr = -0.8
    ylen_redarr = 0
    # general
    arrow_width = 0.25
    arrow_linew = 0.1
    arrow_headwidth = 0.5
    arrow_headlength = 0.06
#========== PLOTTING ==========#
    # one north-polar panel per ice index field
    ax_list = [ax3,ax4,ax5]
    count = 0
    for array,ax in zip(iisig_data,ax_list):
        count += 1
        lettercount += 1
        m = Basemap(projection='npaeqd',round=True,boundinglat=20,\
                    lat_0=80,lon_0=0,resolution='l');
        m.ax = ax
        m.drawcoastlines(linewidth=0.05);
        m.drawmapboundary(linewidth=0.15,fill_color=col_ocean);
        m.fillcontinents(color=col_cont);
        m.pcolormesh(lon,lat,array,latlon=True,cmap=cmap,norm=norm,vmax=90,vmin=-90,zorder=3)
        ax.set_title(letters[lettercount-1],loc='left',fontsize=title_font,fontweight='bold')
        if count<=3:
            ax.set_title(ice_titles[count-1],loc='center',pad=10,fontsize=title_font)
    # colorbar setup (shared by the three ice panels)
    cbax = f.add_axes([cb_x0, cb_y0, cb_xlen, cb_ylen])
    cb = mpl.colorbar.ColorbarBase(ax=cbax,cmap=cmap,
                                   norm=norm,
                                   spacing='proportional',
                                   orientation='horizontal',
                                   extend='both',
                                   ticks=tick_locs)
    cb.set_label(cblabel,size=cbtitle_font)
    cb.ax.xaxis.set_label_position('top');
    cb.ax.tick_params(labelcolor='0.2',labelsize=tick_font,color='0.2',\
                      length=2.5,width=0.35,direction='out');
    cb.ax.set_xticklabels([r'$\leq$-90','-75','-60','-45','-30','-15',\
                           '0','15','30','45','60','75',r'90$\leq$'])
    cb.outline.set_edgecolor('0.2')
    cb.outline.set_linewidth(0.4)
    # arrow setup
    plt.text(x0_bluelab, y0_bluelab, bluelabel, size=arrow_font, ha='center', va='center')
    plt.text(x0_redlab, y0_redlab, redlabel, size=arrow_font, ha='center', va='center')
    plt.arrow(x0_bluearr, y0_bluearr, xlen_bluearr, ylen_bluearr, width=arrow_width, linewidth=arrow_linew,\
              shape='right', head_width=arrow_headwidth, head_length=arrow_headlength,\
              facecolor=cmap40, edgecolor='k', clip_on=False)
    plt.arrow(x0_redarr, y0_redarr, xlen_redarr, ylen_redarr, width=arrow_width, linewidth=arrow_linew,\
              shape='left', head_width=arrow_headwidth, head_length=arrow_headlength,\
              facecolor=cmap_40, edgecolor='k', clip_on=False)
    plt.show()
    # save figure
    if flag_svplt == 0:
        # saving disabled; expression is a deliberate no-op
        None
    elif flag_svplt == 1:
        f.savefig(outDIR+'/f3.png',bbox_inches='tight',dpi=500)
|
from django.db import models
class ReportUser(models.Model):
    """A user-submitted report about another user."""
    # Both ids are stored as free-form strings rather than foreign keys.
    user_id = models.CharField(max_length=250)
    reported_by = models.CharField(max_length=250)
    reason = models.CharField(max_length=250)
    # Set once, automatically, when the report row is created.
    timestamp = models.DateTimeField(auto_now_add=True)
class CreateShopRecommendation(models.Model):
    """A shop recommendation created by one user for another.

    ``user_id`` identifies the author and ``recommended_for`` the recipient;
    both are stored as free-form strings rather than foreign keys.
    """
    user_id = models.CharField(max_length=250)
    recommended_for = models.CharField(max_length=250)
    name_of_shop = models.CharField(max_length=100)
    item = models.CharField(max_length=100)
    phone_number = models.CharField(max_length=15, blank=True)
    landmark = models.CharField(max_length=100)
    extra_instruction = models.CharField(max_length=250, blank=True)
    description_of_shop = models.CharField(max_length=250)
    # Whether the recipient has seen the recommendation yet.
    # Fixed: BooleanField takes a bool default, not the int 0.
    read_by_user = models.BooleanField(default=False)
|
from random import choice
class Comparator:
    """Baseline matching strategy: pick one client key uniformly at random."""

    def compare(self, clients, table):
        # `table` is accepted for interface parity but is not consulted here.
        candidate_ids = list(clients)
        return [choice(candidate_ids)]
|
import unittest
import tempfile
import shutil
import numpy as np
import os
from dicomml.cases.case import DicommlCase
from dicomml.tasks.main import run_task
from tests import sample_case_config
class TestLTS(unittest.TestCase):
    """Integration tests for the dicomml Ray Tune training task."""

    def get_config(self):
        # Build a small population-based-training run over the temp dataset
        # created in setUp; keeps iterations and sample counts tiny.
        from ray import tune
        scheduler = tune.schedulers.PopulationBasedTraining(
            time_attr="training_iteration",
            metric='jaccard_score',
            mode='max',
            perturbation_interval=2,
            hyperparam_mutations={
                "optimizer_lr": lambda: np.random.uniform(0.001, 1),
                "model_dropoutrate": lambda: np.random.uniform(0.05, 0.15)},
            quantile_fraction=0.5,
            resample_probability=1.0,
            log_config=True,
            require_attrs=True)
        return dict(
            metric='jaccard_score',
            mode='max',
            trainable_config=dict(
                train_iterations_per_step=2,
                eval_iterations_per_step=1,
                train_path=os.path.join(self.folder_in, 'train', '*.zip'),
                eval_path=os.path.join(self.folder_in, 'eval', '*.zip'),
                train_batch_size=2,
                eval_batch_size=2,
                transformations={
                    'transforms.array.Cut': dict(
                        x_range=[0, 80],
                        y_range=[0, 90]),
                    'transforms.array.Pad': dict(target_shape=[128, 128]),
                    'transforms.array.Window': dict(window='soft_tissue')},
                export_config=dict(
                    include_diagnoses=False,
                    include_rois=True),
                loss_function=('nn.CrossEntropyLoss', dict()),
                eval_metrics={'jaccard_score': dict()},
                model_class='models.unet.UNETModel',
                model_config=dict(n_classes=2, block_depth=3),
                optimizer_class='optim.Adam',
                prediction_target='class'),
            scheduler=scheduler,
            stop={"training_iteration": 6},
            num_samples=2,
            reuse_actors=True,
            resources_per_trial=dict(
                cpu=0.25,
                gpu=0))

    def setUp(self):
        # Write small synthetic train/eval cases into a temp directory.
        self.folder_in = tempfile.mkdtemp()
        for i in range(10):
            DicommlCase(**sample_case_config(
                caseid='train-case-{i}'.format(i=i),
                n_images=10
            )).save(os.path.join(self.folder_in, 'train'))
        for i in range(4):
            DicommlCase(**sample_case_config(
                caseid='eval-case-{i}'.format(i=i),
                n_images=10
            )).save(os.path.join(self.folder_in, 'eval'))

    def test_task(self):
        # Run the full training task on a local, single-CPU Ray instance.
        import ray
        ray.init(num_cpus=1, num_gpus=0, local_mode=True)
        result = run_task(
            task_class='tasks.tasks.DicommlTrain',
            config=self.get_config())
        self.assertIsInstance(result, dict)

    def test_step_save_load(self):
        # One optimization step, then a checkpoint save/load round trip.
        from dicomml.tasks.trainable import DicommlTrainable
        trainable = DicommlTrainable(
            config=self.get_config()['trainable_config'])
        _ = trainable.step()
        with tempfile.TemporaryDirectory() as tmp_checkpoint_dir:
            _dir = trainable.save_checkpoint(tmp_checkpoint_dir)
            trainable.load_checkpoint(_dir)

    def tearDown(self):
        # Remove the synthetic dataset created in setUp.
        shutil.rmtree(self.folder_in)
|
# Atomic-unit conversion factors and electromagnetic constants.
au2fs = 2.41888432651e-2    # a.u. of time -> femtoseconds
au2as = 24.1888432651       # a.u. of time -> attoseconds
au2k = 315775.13            # hartree -> kelvin
au2ev = 27.2116             # hartree -> electronvolt
au2kev = 27.2116e-3         # hartree -> kilo-electronvolt
au2mev = 27.2116e3          # hartree -> milli-electronvolt (meV)
au2wn = au2wavenumber = 219474.6305             # hartree -> wavenumber (cm^-1)
wavenumber2hartree = wavenum2au = 4.55633525277e-06   # cm^-1 -> hartree
ev2wavenumber = 8065.73     # eV -> cm^-1
au2debye = 2.541765         # a.u. of electric dipole -> debye; NOTE(review): original note said hbar^2/(m_e * e) — looks like e*a0, confirm
au2nm = bohr2nanometer = 0.0529177249           # bohr -> nanometer
au2angstrom = bohr2angstrom = 0.529177249       # bohr -> angstrom
#hartree2nanometer =
ev2nm = electronvolt2nanometer = 1239.84193     # E[eV] * lambda[nm] product
# i.e. a 1 eV photon has a wavelength of ~1239.842 nm
alpha = 0.0072973525693     # fine structure constant
eps0 = epsilon_0 = 8.85418781762e-12    # vacuum permittivity, farad m^-1
c0 = speed_of_light = 299792458.0       # speed of light, m s^-1
imp0 = 376.730313668        # impedance of free space, ohm
|
# Copyright (c) 2018 Ansible, Inc.
# All Rights Reserved.
from ansiblelint import AnsibleLintRule
class NoTabsRule(AnsibleLintRule):
    """Lint rule flagging lines that contain literal tab characters."""
    id = '203'
    shortdesc = 'Most files should not contain tabs'
    description = 'Tabs can cause unexpected display issues. Use spaces'
    tags = ['formatting']

    def match(self, file, line):
        # A line matches (is flagged) when it contains at least one TAB.
        has_tab = '\t' in line
        return has_tab
|
# -*- coding: utf-8 -*-
from textlytics.sentiment.lexicons import SentimentLexicons
|
"""
##############################################################################
PyDraw 1.1: simple canvas paint program and object mover/animator.
Uses time.sleep loops to implement object move loops, such that only
one move can be in progress at once; this is smooth and fast, but see
the widget.after and thread-based subclasses here for other techniques.
Version 1.1 has been updated to run under Python 3.X (2.X not supported)
##############################################################################
"""
helpstr = """--PyDraw version 1.1--
Mouse commands:
Left = Set target spot
Left+Move = Draw new object
Double Left = Clear all objects
Right = Move current object
Middle = Select closest object
Middle+Move = Drag current object
Keyboard commands:
w=Pick border width c=Pick color
u=Pick move unit s=Pick move delay
o=Draw ovals r=Draw rectangles
l=Draw lines a=Draw arcs
d=Delete object 1=Raise object
2=Lower object f=Fill object
b=Fill background p=Add photo
z=Save postscript x=Pick pen modes
?=Help other=clear text
"""
import time, sys
from tkinter import *
from tkinter.filedialog import *
from tkinter.messagebox import *
# Default photo directory; can be overridden via a command-line argument
# (see the __main__ guard at the bottom of this file).
PicDir = '../gifs'
# Pick a monospace help font sized appropriately for the platform.
if sys.platform[:3] == 'win':
    HelpFont = ('courier', 9, 'normal')
else:
    HelpFont = ('courier', 12, 'normal')
# Option cycles: the active value is always entry [0]; the option handler
# rotates each list to advance to the next setting.
pickDelays = [0.01, 0.025, 0.05, 0.10, 0.25, 0.0, 0.001, 0.005]  # move-loop sleep, seconds
pickUnits = [1, 2, 4, 6, 8, 10, 12]       # pixels per animation step
pickWidths = [1, 2, 5, 10, 20]            # object outline widths
pickFills = [None,'white','blue','red','black','yellow','green','purple']
pickPens = ['elastic', 'scribble', 'trails']
class MovingPics:
    """Canvas paint/animation demo: draw shapes, then move or drag them.

    Animation uses time.sleep loops (see module docstring), so only one
    move can be in progress at a time.
    """
    def __init__(self, parent=None):
        # Build the canvas and wire up mouse and keyboard event handlers.
        canvas = Canvas(parent, width=500, height=500, bg= 'white')
        canvas.pack(expand=YES, fill=BOTH)
        canvas.bind('<ButtonPress-1>', self.onStart)
        canvas.bind('<B1-Motion>', self.onGrow)
        canvas.bind('<Double-1>', self.onClear)
        canvas.bind('<ButtonPress-3>', self.onMove)
        canvas.bind('<Button-2>', self.onSelect)
        canvas.bind('<B2-Motion>', self.onDrag)
        parent.bind('<KeyPress>', self.onOptions)
        self.createMethod = Canvas.create_oval
        self.canvas = canvas
        self.moving = []      # objects currently being animated
        self.images = []      # keep photo references alive (else GC'd)
        self.object = None    # last drawn/selected canvas object
        self.where = None     # last significant mouse event
        self.scribbleMode = 0
        parent.title('PyDraw - Moving Pictures 1.1')
        parent.protocol('WM_DELETE_WINDOW', self.onQuit)
        self.realquit = parent.quit
        self.textInfo = self.canvas.create_text(
                            5, 5, anchor=NW,
                            font=HelpFont,
                            text='Press ? for help')
    def onStart(self, event):
        # Left press: remember the spot; the next drag draws a new object.
        self.where = event
        self.object = None
    def onGrow(self, event):
        # Left drag: grow the object from the start point to the pointer.
        canvas = event.widget
        if self.object and pickPens[0] == 'elastic':
            canvas.delete(self.object)
        self.object = self.createMethod(canvas,
                                self.where.x, self.where.y,     # start
                                event.x, event.y,               # stop
                                fill=pickFills[0], width=pickWidths[0])
        if pickPens[0] == 'scribble':
            self.where = event                  # from here next time
    def onClear(self, event):
        # Double-left: wipe the canvas and recreate the help banner.
        if self.moving: return                  # ok if moving but confusing
        event.widget.delete('all')              # use all tag
        self.images = []
        self.textInfo = self.canvas.create_text(
                            5, 5, anchor=NW,
                            font=HelpFont,
                            text='Press ? for help')
    def plotMoves(self, event):
        # Compute per-axis step size and step counts for an animated move.
        diffX = event.x - self.where.x          # plan animated moves
        diffY = event.y - self.where.y          # horizontal then vertical
        reptX = abs(diffX) // pickUnits[0]      # incr per move, number moves
        reptY = abs(diffY) // pickUnits[0]      # from last to event click
        incrX = pickUnits[0] * ((diffX > 0) or -1)   # 3.x // trunc div required
        incrY = pickUnits[0] * ((diffY > 0) or -1)
        return incrX, reptX, incrY, reptY
    def onMove(self, event):
        # Right click: animate the current object to the clicked spot.
        traceEvent('onMove', event, 0)          # move current object to click
        object = self.object                    # ignore some ops during mv
        if object and object not in self.moving:
            msecs = int(pickDelays[0] * 1000)
            parms = 'Delay=%d msec, Units=%d' % (msecs, pickUnits[0])
            self.setTextInfo(parms)
            self.moving.append(object)
            canvas = event.widget
            incrX, reptX, incrY, reptY = self.plotMoves(event)
            for i in range(reptX):
                canvas.move(object, incrX, 0)
                canvas.update()
                time.sleep(pickDelays[0])
            for i in range(reptY):
                canvas.move(object, 0, incrY)
                canvas.update()                 # update runs other ops
                time.sleep(pickDelays[0])       # sleep until next move
            self.moving.remove(object)
            if self.object == object: self.where = event
    def onSelect(self, event):
        # Middle click: make the closest object the current object.
        self.where = event
        self.object = self.canvas.find_closest(event.x, event.y)[0]  # tuple
    def onDrag(self, event):
        # Middle drag: move the current object with the pointer.
        diffX = event.x - self.where.x          # OK if object in moving
        diffY = event.y - self.where.y          # throws it off course
        self.canvas.move(self.object, diffX, diffY)
        self.where = event
    def onOptions(self, event):
        # Dispatch one-key commands via a key -> callable table.
        keymap = {
            'w': lambda self: self.changeOption(pickWidths, 'Pen Width'),
            'c': lambda self: self.changeOption(pickFills, 'Color'),
            'u': lambda self: self.changeOption(pickUnits, 'Move Unit'),
            's': lambda self: self.changeOption(pickDelays, 'Move Delay'),
            'x': lambda self: self.changeOption(pickPens, 'Pen Mode'),
            'o': lambda self: self.changeDraw(Canvas.create_oval, 'Oval'),
            'r': lambda self: self.changeDraw(Canvas.create_rectangle, 'Rect'),
            'l': lambda self: self.changeDraw(Canvas.create_line, 'Line'),
            'a': lambda self: self.changeDraw(Canvas.create_arc, 'Arc'),
            'd': MovingPics.deleteObject,
            '1': MovingPics.raiseObject,
            '2': MovingPics.lowerObject,        # if only 1 call pattern
            'f': MovingPics.fillObject,         # use unbound method objects
            'b': MovingPics.fillBackground,     # else lambda passed self
            'p': MovingPics.addPhotoItem,
            'z': MovingPics.savePostscript,
            '?': MovingPics.help}
        try:
            keymap[event.char](self)
        except KeyError:
            self.setTextInfo('Press ? for help')
    def changeDraw(self, method, name):
        # Switch the shape drawn by left-drag.
        self.createMethod = method              # unbound Canvas method
        self.setTextInfo('Draw Object=' + name)
    def changeOption(self, list, name):
        # Rotate the option list so the next setting becomes entry [0].
        list.append(list[0])
        del list[0]
        self.setTextInfo('%s=%s' % (name, list[0]))
    def deleteObject(self):
        # Erase the current object (never the help banner).
        if self.object != self.textInfo:        # ok if object in moving
            self.canvas.delete(self.object)     # erases but move goes on
            self.object = None
    def raiseObject(self):
        if self.object:                         # ok if moving
            self.canvas.tkraise(self.object)    # raises while moving
    def lowerObject(self):
        if self.object:
            self.canvas.lower(self.object)
    def fillObject(self):
        # Recolor the current object using the active fill/width options.
        if self.object:
            type = self.canvas.type(self.object)
            if type == 'image':
                pass
            elif type == 'text':
                self.canvas.itemconfig(self.object, fill=pickFills[0])
            else:
                self.canvas.itemconfig(self.object,
                            fill=pickFills[0], width=pickWidths[0])
    def fillBackground(self):
        self.canvas.config(bg=pickFills[0])
    def addPhotoItem(self):
        # Place a GIF photo at the last clicked spot.
        if not self.where: return
        filetypes=[('Gif files', '.gif'), ('All files', '*')]
        file = askopenfilename(initialdir=PicDir, filetypes=filetypes)
        if file:
            image = PhotoImage(file=file)               # load image
            self.images.append(image)                   # keep reference
            self.object = self.canvas.create_image(     # add to canvas
                                self.where.x, self.where.y,   # at last spot
                                image=image, anchor=NW)
    def savePostscript(self):
        file = asksaveasfilename()
        if file:
            self.canvas.postscript(file=file)   # save canvas to file
    def help(self):
        self.setTextInfo(helpstr)
        #showinfo('PyDraw', helpstr)
    def setTextInfo(self, text):
        # Replace the banner text and keep it on top of other objects.
        self.canvas.dchars(self.textInfo, 0, END)
        self.canvas.insert(self.textInfo, 0, text)
        self.canvas.tkraise(self.textInfo)
    def onQuit(self):
        if self.moving:
            self.setTextInfo("Can't quit while move in progress")
        else:
            self.realquit()     # std wm delete: err msg if move in progress
def traceEvent(label, event, fullTrace=True):
    """Print a debug trace of *event*.

    Prints *label*; when *fullTrace* is truthy, also dumps every non-dunder
    attribute of *event* with its value.

    Fixed: the loop variable was misspelled (``atrr`` vs ``attr``), so any
    call with ``fullTrace`` true raised NameError.
    """
    print(label)
    if fullTrace:
        for attr in dir(event):
            if attr[:2] != '__':    # skip dunder noise
                print(attr, '=>', getattr(event, attr))
if __name__ == '__main__':
    # Allow an alternate photo directory on the command line; the default
    # '../gifs' relative path fails when run from elsewhere.
    from sys import argv
    if len(argv) == 2:
        PicDir = argv[1]
    # Build the GUI and enter the Tk event loop.
    root = Tk()
    MovingPics(root)
    root.mainloop()
|
"""
*F♯ - Level 8*
"""
from ..._pitch import Pitch
__all__ = ["Fs8"]
class Fs8(
    Pitch,
):
    """Pitch subclass for F-sharp at level 8 (per the module docstring)."""
    pass
|
# This code is auto-generated.
import http.client as http_client
import logging
import os
import numpy as np
from joblib import load
from scipy import sparse
from sagemaker_containers.beta.framework import encoders
from sagemaker_containers.beta.framework import worker
from sagemaker_sklearn_extension.externals import read_csv_data
def _is_inverse_label_transform():
"""Returns True if if it's running in inverse label transform."""
return os.getenv('AUTOML_TRANSFORM_MODE') == 'inverse-label-transform'
def _is_feature_transform():
"""Returns True if it's running in feature transform mode."""
return os.getenv('AUTOML_TRANSFORM_MODE') == 'feature-transform'
def _sparsify_if_needed(x):
"""Returns a sparse matrix if the needed for encoding to sparse recordio protobuf."""
if os.getenv('AUTOML_SPARSE_ENCODE_RECORDIO_PROTOBUF') == '1' \
and not sparse.issparse(x):
return sparse.csr_matrix(x)
return x
def _split_features_target(x):
"""Returns the features and target by splitting the input array."""
if os.getenv('AUTOML_TRANSFORM_MODE') == 'feature-transform':
return _sparsify_if_needed(x), None
if sparse.issparse(x):
return x[:, 1:], x[:, 0].toarray()
return _sparsify_if_needed(x[:, 1:]), np.ravel(x[:, 0])
def model_fn(model_dir):
    """Load the serialized model.

    The SageMaker Scikit-learn model server loads the model by invoking
    this method.

    Parameters
    ----------
    model_dir: str
        the directory where the model files reside

    Returns
    -------
    : AutoMLTransformer
        deserialized model object that can be used for model serving
    """
    model_path = os.path.join(model_dir, 'model.joblib')
    return load(filename=model_path)
def predict_fn(input_object, model):
    """Generate a prediction for *input_object* using *model*.

    The SageMaker Scikit-learn model server invokes this method with the
    return value of input_fn.  In inverse-label-transform mode the loaded
    model's inverse_label_transform is applied; otherwise the model's
    transform method is applied.

    Parameters
    ----------
    input_object : array-like
        the object returned from input_fn
    model : AutoMLTransformer
        the model returned from model_fn

    Returns
    -------
    : ndarray
        transformed input data or inverse transformed label data
    """
    # Pass error responses from input_fn straight through.
    if isinstance(input_object, worker.Response):
        return input_object
    if _is_inverse_label_transform():
        # Fixed: np.float / np.int were deprecated aliases of the builtins
        # (removed in NumPy 1.24); use the builtins directly.
        return model.inverse_label_transform(
            input_object.ravel().astype(float).astype(int)
        )
    try:
        return model.transform(input_object)
    except ValueError as e:
        # Surface bad input as an HTTP 400 rather than a server error.
        return worker.Response(
            response='{}'.format(str(e) or 'Unknown error.'),
            status=http_client.BAD_REQUEST
        )
def input_fn(request_body, request_content_type):
    """Decode the request body to a 2D numpy array.

    The SageMaker Scikit-learn model server invokes this method to
    deserialize the request data into an object for prediction.

    Parameters
    ----------
    request_body : str
        the request body
    request_content_type : str
        the media type for the request body

    Returns
    -------
    : array-like
        decoded data as 2D numpy array
    """
    # Default to CSV when no content type is supplied; strip parameters
    # such as "; charset=...".
    if request_content_type:
        content_type = request_content_type.lower()
    else:
        content_type = "text/csv"
    content_type = content_type.split(";")[0].strip()
    if content_type != 'text/csv':
        return worker.Response(
            response=f"'{request_content_type}' is an unsupported content type.",
            status=http_client.UNSUPPORTED_MEDIA_TYPE
        )
    byte_buffer = request_body.encode() if isinstance(request_body, str) else request_body
    val = read_csv_data(source=byte_buffer)
    logging.info(f"Shape of the requested data: '{val.shape}'")
    return val
def output_fn(prediction, accept_type):
    """Encode *prediction* according to the requested accept type.

    The SageMaker Scikit-learn model server invokes this method with the
    result of prediction and serializes it to the response MIME type; it
    expects numpy-array-like input.

    Parameters
    ----------
    prediction : array-like
        the object returned from predict_fn
    accept_type : str
        the expected MIME type of the response

    Returns
    -------
    : Response obj
        serialized predictions in accept type
    """
    # Pass error responses from earlier stages straight through.
    if isinstance(prediction, worker.Response):
        return prediction
    if _is_inverse_label_transform():
        # Inverse-transformed labels are only served as CSV.
        if accept_type != 'text/csv':
            return worker.Response(
                response=f"Accept type '{accept_type}' is not supported "
                         f"during inverse label transformation.",
                status=413
            )
        return worker.Response(
            response=encoders.encode(prediction, accept_type),
            status=http_client.OK,
            mimetype=accept_type
        )
    if isinstance(prediction, tuple):
        features, target = prediction
    else:
        features, target = _split_features_target(prediction)
    if accept_type == 'application/x-recordio-protobuf':
        encoded = encoders.array_to_recordio_protobuf(
            _sparsify_if_needed(features).astype('float32'),
            target.astype('float32') if target is not None else target
        )
        return worker.Response(
            response=encoded,
            status=http_client.OK,
            mimetype=accept_type
        )
    if accept_type == 'text/csv':
        # Re-attach the target as the leading column for CSV output.
        if target is not None:
            features = np.column_stack(
                (np.ravel(target),
                 features.todense() if sparse.issparse(features) else features)
            )
        return worker.Response(
            response=encoders.encode(features, accept_type),
            status=http_client.OK,
            mimetype=accept_type
        )
    return worker.Response(
        response=f"Accept type '{accept_type}' is not supported.",
        status=http_client.NOT_ACCEPTABLE
    )
def execution_parameters_fn():
    """Return the response for the execution-parameters request used by SageMaker Batch transform.

    For models generated by AutoML Jobs, the MaxPayloadInMB is 1MB for
    feature transform during inference and defaults to 6MB otherwise.
    """
    if _is_feature_transform():
        body = '{"MaxPayloadInMB":1}'
    else:
        body = '{"MaxPayloadInMB":6}'
    return worker.Response(
        response=body,
        status=http_client.OK,
        mimetype="application/json"
    )
|
import torch.optim as optim
import torch.nn as nn
from dl_modules.metric.psnr import PSNR
from dl_modules.metric.ssim import SSIM
from lpips import LPIPS
from dl_modules.loss import VGGPerceptual, LSGANGenLoss, \
LSGANDisLoss, LSGANDisFakeLoss, LSGANDisRealLoss
# Optimizer state dicts restored into new optimizers when not None
# (see get_gen_optimizer / get_dis_optimizer below).
gen_opt_state_dict = None
dis_opt_state_dict = None
# Coefficient for the GAN loss term; usage not visible here — confirm at call site.
gan_loss_coeff = 0.0
# Learning rates: initial generator LR and discriminator LR.
init_gen_lr = 0.001
dis_lr = 0.0001
# Lazily-constructed metric singletons, filled in by the getters below.
lpips = None
ssim = None
psnr = None
def get_lpips() -> nn.Module:
    """Return the process-wide LPIPS metric, constructing it lazily on first use."""
    global lpips
    lpips = LPIPS(verbose=False) if lpips is None else lpips
    return lpips
def get_ssim() -> nn.Module:
    """Return the process-wide SSIM metric module, creating it on first use."""
    global ssim
    if ssim is not None:
        return ssim
    ssim = SSIM()
    return ssim
def get_psnr() -> nn.Module:
    """Return the process-wide PSNR metric module, creating it on first use."""
    global psnr
    if psnr is not None:
        return psnr
    psnr = PSNR()
    return psnr
def get_super_loss() -> nn.Module:
    """Build the supervised generator loss (VGG perceptual + L1 + edge terms)."""
    loss = VGGPerceptual(l1_coeff=0.01, features_coeff=1, edge_coeff=7.5)
    return loss
def get_gen_loss() -> nn.Module:
    """Build the LSGAN adversarial loss for the generator."""
    loss = LSGANGenLoss()
    return loss
def get_dis_loss() -> nn.Module:
    """Build the combined LSGAN loss for the discriminator."""
    loss = LSGANDisLoss()
    return loss
def get_dis_fake_loss() -> nn.Module:
    """Build the LSGAN discriminator loss for fake (generated) samples."""
    loss = LSGANDisFakeLoss()
    return loss
def get_dis_real_loss() -> nn.Module:
    """Build the LSGAN discriminator loss for real samples."""
    loss = LSGANDisRealLoss()
    return loss
def get_gen_optimizer(net: nn.Module) -> optim.Optimizer:
    """Create the generator Adam optimizer, restoring saved state when present."""
    global gen_opt_state_dict
    opt = optim.Adam(net.parameters(), lr=init_gen_lr, betas=(0.5, 0.999))
    if gen_opt_state_dict is not None:
        # Resume from a previously captured optimizer state.
        opt.load_state_dict(gen_opt_state_dict)
    return opt
def get_dis_optimizer(net: nn.Module) -> optim.Optimizer:
    """Create the discriminator Adam optimizer, restoring saved state when present."""
    global dis_opt_state_dict
    opt = optim.Adam(net.parameters(), lr=dis_lr, betas=(0.5, 0.999))
    if dis_opt_state_dict is not None:
        # Resume from a previously captured optimizer state.
        opt.load_state_dict(dis_opt_state_dict)
    return opt
def update_optimizer(optimizer: optim.Optimizer, params: tuple) -> None:
    """Apply scheduled hyper-parameters to an optimizer in place.

    Args:
        optimizer: Optimizer whose parameter groups are updated.
        params: Schedule tuple; only ``params[0]`` (the learning rate)
            is currently used.
    """
    # Hoist the lookup out of the loop; the dead trailing `pass` that
    # followed the loop was removed.
    lr = params[0]
    for group in optimizer.param_groups:
        group['lr'] = lr
|
# :coding: utf-8
import os.path
import click
import synes
@click.command()
@click.argument("width", type=int)
@click.argument("input_path", type=click.Path(exists=True))
@click.option("-o", "--output_path", type=click.Path())
def main(input_path, width, output_path):
    """Translate an image (.png/.jpg) or audio (.wav) file via synes."""
    handlers = {
        ".png": synes.translate_image,
        ".jpg": synes.translate_image,
        ".wav": synes.translate_audio,
    }
    ext = os.path.splitext(input_path)[1]
    handler = handlers.get(ext)
    if handler is None:
        raise RuntimeError(
            "Unable to translate file, unknown file type '{}'".format(ext)
        )
    handler(input_path, width, output_path)
|
"""PDB2PQR Version number.
Store the version here so:
* we don't load dependencies by storing it in :file:`__init__.py`
* we can import it in setup.py for the same reason
* we can import it into your module
"""
__version__ = "3.5.0"
|
# encoding: utf-8
import re
from sect import cluster
from lxml.html import fromstring
# Override not_body_rate() below if you need to penalize non-body blocks;
# customization is optional.
def not_body_rate(block):
    """Hook rating how strongly *block* looks like non-body content.

    The default implementation treats every block as body content.
    The return value has to be a float or an integer; lbscore dampens a
    block's score by ``0.72 ** rate`` when the value is positive.
    """
    return 0
# Scores a layout block as body text, weighted by its position in the page.
def lbscore(block, factor):
    """Score *block* as candidate body text, weighted by *factor*."""
    text = block.a_droped_text
    # Require at least 20 characters of link-removed text per anchor;
    # otherwise treat the block as having no usable text at all.
    if len(text) < 20 * block.num_of_a:
        text = u''
    score = len(text) * factor
    penalty = not_body_rate(block)
    if penalty > 0:
        # Dampen blocks flagged as non-body content.
        score *= 0.72 ** penalty
    return score
# Scores a candidate title block based on its position and any header element.
def lbttlscore(block, factor):
    """Score *block* as a candidate title, weighted by *factor*.

    Shorter texts score higher (roughly normalised around a regular title
    length), and an ``<h1>``-``<h6>`` wrapper boosts the score, with h1
    boosted the most.
    """
    # Fix: use raw strings for the regexes — '\s' in a plain string is an
    # invalid escape sequence and deprecated in modern Python.
    text = re.compile(r'\s').sub('', block.text)
    if not len(text):
        return 0
    # NOTE(review): the denominator is the raw (unstripped) text length,
    # while the emptiness test above uses the stripped text — preserved
    # as-is from the original.
    score = (factor / len(block.text)) * 100
    m = re.compile(r'<h([1-6]).*?>', re.DOTALL).match(block.body)
    if m:
        # h1 - h6: a smaller header number yields a larger boost.
        score *= float(6.0 / float(m.groups()[0])) * 4.63
    return score
# clusters continuous high score blocks
def lbcluster(blocks, score = lbscore, threshold = 100, continuous_factor = 1.62, decay_factor = .93):
    """Group consecutive high-scoring blocks into clusters.

    Args:
        blocks: Iterable of layout blocks (in document order).
        score: Scoring function ``(block, factor) -> number``.
        threshold: Minimum (possibly continuity-boosted) score a block
            needs to join or start a cluster.
        continuous_factor: Bonus multiplier while a cluster is being
            extended; it fades as non-member blocks go by.
        decay_factor: Per-block decay of the position factor, so blocks
            further down the page score lower.

    Returns:
        List of ``cluster`` objects; the first cluster may stay empty.
    """
    factor = 1.0
    continuous = 1.0
    clusts = [cluster()]
    for b in blocks:
        if len(clusts[-1].body) > 0:
            # An open cluster exists: its continuity bonus fades with every
            # block seen since it was last extended.
            continuous /= continuous_factor
        if len(b.text) == 0 or len(b.a_droped_text) == 0:
            # Skip blocks with no text or no link-removed text.
            continue
        factor *= decay_factor
        points = score(b, factor)
        if points * continuous > threshold:
            # Extend the current cluster and restore the continuity bonus.
            clusts[-1].append(b).add_score(points * continuous)
            continuous = continuous_factor
        elif points > threshold:
            # Strong enough on its own: start a fresh cluster here.
            clusts.append(cluster([b], points))
            factor = 1.0
            continuous = continuous_factor
    return clusts
|
from assay_interchange import app
from flask import request
from assay_interchange.lib import convert_data, validate_data
@app.route('/', methods=['GET'])
def index():
    """Serve the static single-page UI."""
    return app.send_static_file('index.html')
@app.route('/', methods=['POST'])
def index_post():
    """Convert the posted assay data and return the converted payload."""
    return convert_data(request)
@app.route('/validate', methods=['POST'])
def validate():
    """Validate the posted assay data and return the validation result."""
    return validate_data(request)
|
'''Make tints and shades of a main color and return them as hex color strings.'''
import typing
from random import randint
def make_Tint(color: str, percent: int) -> str:
    """Lighten *color* ('#rrggbb') toward white by *percent* percent."""
    percent = abs(percent)
    if percent > 100:
        raise ValueError('percent must be between 0 and 100')
    hex_digits = color.lstrip('#')
    channels = [int(hex_digits[i:i + 2], 16) for i in (0, 2, 4)]
    ratio = percent / 100
    # Move each channel the given fraction of the way toward 255.
    tinted = [round(value + (255 - value) * ratio) for value in channels]
    return '#' + ''.join(format(value, '02x') for value in tinted)
def make_Shade(color: str, percent: int) -> str:
    """Darken *color* ('#rrggbb') toward black by *percent* percent.

    A shade of 0% returns the color unchanged and 100% returns black,
    mirroring make_Tint's semantics and the tints-and-shades reference
    linked below.
    """
    # Bug fix: take abs() before the range check (matching make_Tint) and
    # scale by (1 - percent/100) instead of (percent/100); the old formula
    # returned black for 0% and the unchanged color for 100%.
    percent = abs(percent)
    if percent > 100:
        raise ValueError('percent must be between 0 and 100')
    color = color.lstrip('#')
    rgb = tuple(int(color[i:i+2], 16) for i in (0, 2, 4))
    rgb = tuple(map(lambda x: round(x * (1 - percent / 100)), rgb))
    rgb = tuple(map(lambda x: format(x, '#04x'), rgb))
    return '#' + rgb[0][2::] + rgb[1][2::] + rgb[2][2::]
# https://github.com/edelstone/tints-and-shades
def random_hex_color(python_hex_format=False):
    """Return a random 24-bit color string.

    Args:
        python_hex_format: When True return Python hex format ('0xrrggbb');
            otherwise return CSS format ('#rrggbb').

    Bug fix: the True branch previously returned bare ``None`` instead of
    the hex-formatted color.
    """
    value = format(randint(0, 16777215), '#08x')
    if python_hex_format:
        return value
    return value.replace('0x', '#')
|
import sys
sys.path.append('../')
from geometry_translation.options.train_options import TrainOptions
from geometry_translation.utils.visualizer import Visualizer
from geometry_translation.models import create_model
from geometry_translation.data_loader import get_data_loader
from datetime import datetime
import os
class Trainer:
    """Drives training and validation of a geometry-translation model.

    Args:
        opt: Parsed training options (epoch count, logging frequencies,
            checkpoint paths, ...).
        model: Model exposing set_input/optimize_parameters/test/
            save_networks/update_learning_rate hooks.
        train_dl: Training data loader.
        val_dl: Validation data loader.
        visualizer: Visualizer for images and loss curves.
    """
    def __init__(self, opt, model, train_dl, val_dl, visualizer):
        self.opt = opt
        self.model = model
        self.train_dl = train_dl
        self.val_dl = val_dl
        self.visualizer = visualizer

    def fit(self):
        """Run the full training loop, validating and checkpointing as configured.

        Bug fix: this method previously referenced the module-level globals
        ``opt``/``model``/``train_dl``/``visualizer`` instead of the
        instance attributes, so Trainer only worked when constructed from
        this file's ``__main__`` block; every reference now goes through
        ``self``.
        """
        best_f1_score = 0.0
        # training phase
        tot_iters = 0
        for epoch in range(1, self.opt.n_epochs + 1):
            print(f'epoch {epoch}/{self.opt.n_epochs}')
            ep_time = datetime.now()
            for i, data in enumerate(self.train_dl):
                self.model.train()
                self.model.set_input(data)
                iter_loss_all, iter_loss_road, iter_loss_cl, iter_metrics, iter_road_metrics = self.model.optimize_parameters()
                iter_metrics = iter_metrics.numpy()
                iter_road_metrics = iter_road_metrics.numpy()
                print("[Epoch %d/%d] [Batch %d/%d] [Loss: %f] [Road Loss: %f] [CL Loss: %f] [Precision: %f] [Recall: %f] [F1: %f] [Road IOU: %f] [CL IOU: %f]" % (epoch, self.opt.n_epochs, i, len(self.train_dl), iter_loss_all.item(), iter_loss_road.item(), iter_loss_cl.item(), iter_metrics[0], iter_metrics[1], iter_metrics[2], iter_road_metrics[3], iter_metrics[3]))
                tot_iters += 1
                if tot_iters % self.opt.display_freq == 0:  # display images on visdom and save images to a HTML file
                    save_result = tot_iters % self.opt.update_html_freq == 0
                    self.model.compute_visuals()
                    self.visualizer.display_current_results(self.model.get_current_visuals(), epoch, save_result)
                if tot_iters % self.opt.print_freq == 0:  # print training losses
                    losses = self.model.get_current_losses()
                    if self.opt.display_id > 0:
                        self.visualizer.plot_current_losses(epoch, i / len(self.train_dl), losses)
                # validating phase
                if tot_iters % self.opt.sample_interval == 0:
                    self.model.eval()
                    tot_loss = 0
                    tot_metrics = 0
                    tot_road_metrics = 0
                    # Distinct loop variable so the batch index `i` above is
                    # not clobbered by the validation pass.
                    for val_data in self.val_dl:
                        self.model.set_input(val_data)
                        _, iter_loss, iter_metrics, iter_road_metrics = self.model.test()
                        tot_loss += iter_loss.item()
                        tot_metrics += iter_metrics.numpy()
                        tot_road_metrics += iter_road_metrics.numpy()
                    tot_loss /= len(self.val_dl)
                    tot_metrics /= len(self.val_dl)
                    tot_road_metrics /= len(self.val_dl)
                    # Checkpoint whenever validation F1 improves.
                    if tot_metrics[2] > best_f1_score:
                        best_f1_score = tot_metrics[2]
                        self.model.save_networks('latest')
                        self.model.save_networks(epoch)
                    # NOTE(review): road_iou is taken from tot_road_metrics[3]
                    # but cl_iou from tot_metrics[3] — preserved from the
                    # original; confirm the intended indices.
                    with open(os.path.join(self.opt.checkpoints_dir, self.opt.name, 'results.txt'), 'a') as f:
                        f.write('epoch\t{}\titer\t{}\tloss\t{:.6f}\tprecision\t{:.4f}\trecall\t{:.4f}\tf1\t{:.4f}\troad_iou\t{:.4f}\tcl_iou\t{:.4f}\n'.format(epoch, tot_iters, tot_loss, tot_metrics[0], tot_metrics[1], tot_metrics[2], tot_road_metrics[3], tot_metrics[3]))
                    # The `with` block closes the file; the redundant
                    # explicit f.close() was removed.
            print('=================time cost: {}==================='.format(datetime.now() - ep_time))
            self.model.update_learning_rate()
if __name__ == '__main__':
    # Parse CLI options, build the model/data/visualization stack, and train.
    opt = TrainOptions().parse()
    model = create_model(opt)
    # Load networks / create schedulers according to the parsed options.
    model.setup(opt)
    train_dl = get_data_loader(opt.dataroot, 'train')
    val_dl = get_data_loader(opt.dataroot, 'val')
    visualizer = Visualizer(opt)
    trainer = Trainer(opt, model, train_dl, val_dl, visualizer)
    trainer.fit()
|
# type: ignore[misc]
"""Custom widgets for yt-dlg"""
# -*- coding: future_annotations -*-
from datetime import timedelta
from pathlib import Path
from typing import TYPE_CHECKING, Callable
import wx
import wx.lib.masked as masked
from .darktheme import DARK_BACKGROUND_COLOUR, DARK_FOREGROUND_COLOUR, dark_mode
from .utils import IS_WINDOWS
if TYPE_CHECKING:
from .downloadmanager import DownloadItem
from .mainframe import MainFrame
from .optionsframe import OptionsFrame
_: "Callable[[str], str]" = wx.GetTranslation
def crt_command_event(event: wx.PyEventBinder, event_id: int = 0) -> wx.CommandEvent:
    """Shortcut to create command events.

    Args:
        event: Event binder whose ``typeId`` seeds the new event.
        event_id: Optional id to associate with the event (default 0).
    """
    return wx.CommandEvent(event.typeId, event_id)
# noinspection PyUnresolvedReferences,PyPep8Naming
class ListBoxWithHeaders(wx.ListBox):
    """Custom ListBox object that supports 'headers'.

    Headers are unselectable items; normal items are stored with
    TEXT_PREFIX prepended so the two kinds can be told apart.

    Attributes:
        NAME (str): Default name for the name argument of the __init__.
        TEXT_PREFIX (str): Text to add before normal items in order to
            distinguish them (normal items) from headers.
        EVENTS (list): List with events to overwrite to avoid header selection.
    """

    NAME = "listBoxWithHeaders"

    TEXT_PREFIX = "        "

    EVENTS: "list[wx.PyEventBinder]" = [
        wx.EVT_LEFT_DOWN,
        wx.EVT_LEFT_DCLICK,
        wx.EVT_RIGHT_DOWN,
        wx.EVT_RIGHT_DCLICK,
        wx.EVT_MIDDLE_DOWN,
        wx.EVT_MIDDLE_DCLICK,
    ]

    # noinspection PyShadowingBuiltins
    def __init__(
        self,
        parent,
        id=wx.ID_ANY,
        pos=wx.DefaultPosition,
        size=wx.DefaultSize,
        choices=[],  # mutable default: only read here, never mutated
        style=0,
        validator=wx.DefaultValidator,
        name=NAME,
    ) -> None:
        # NOTE(review): `choices` is both passed to the base constructor and
        # appended again below via AppendItems — a non-empty `choices` would
        # seemingly be added twice; confirm callers always pass no choices.
        super().__init__(parent, id, pos, size, choices, style, validator, name)
        self.__headers: set[str] = set()
        # Ignore all key events i'm bored to handle the header selection
        self.Bind(wx.EVT_KEY_DOWN, lambda event: None)
        # Make sure that a header is never selected
        self.Bind(wx.EVT_LISTBOX, self._on_listbox)
        for _event in self.EVENTS:
            self.Bind(_event, self._disable_header_selection)
        # Append the items in our own way in order to add the TEXT_PREFIX
        self.AppendItems(choices)

    def _disable_header_selection(self, event) -> None:
        """Stop event propagation if the selected item is a header."""
        row = self.HitTest(event.GetPosition())
        event_skip = True
        if row != wx.NOT_FOUND and self.GetString(row) in self.__headers:
            event_skip = False
        event.Skip(event_skip)

    def _on_listbox(self, event) -> None:
        """Make sure no header is selected."""
        if event.GetString() in self.__headers:
            self.Deselect(event.GetSelection())
        event.Skip()

    def _add_prefix(self, string: str) -> str:
        """Return *string* marked as a normal (non-header) item."""
        return self.TEXT_PREFIX + string

    def _remove_prefix(self, string: str) -> str:
        """Strip the normal-item marker from *string*, if present."""
        if string[: len(self.TEXT_PREFIX)] == self.TEXT_PREFIX:
            return string[len(self.TEXT_PREFIX) :]
        return string

    # wx.ListBox methods

    def FindString(self, string: str, **kwargs) -> int:
        """Find *string*, retrying with the item prefix when not found."""
        index = super().FindString(string, **kwargs)
        if index == wx.NOT_FOUND:
            # This time try with prefix
            index = super().FindString(self._add_prefix(string), **kwargs)
        return index

    def GetStringSelection(self) -> str:
        """Return the selected string without the item prefix."""
        return self._remove_prefix(super().GetStringSelection())

    def GetString(self, index: int) -> str:
        """Return the string at *index* (prefix removed, '' if invalid)."""
        if index < 0 or index >= self.GetCount():
            # Return empty string based on the wx.ListBox docs
            # for some reason parent GetString does not handle
            # invalid indices
            return ""
        return self._remove_prefix(super().GetString(index))

    def InsertItems(self, items: "list[str]", pos: int) -> None:
        """Insert *items* at *pos*, marking them as normal items."""
        items = [self._add_prefix(item) for item in items]
        super().InsertItems(items, pos)

    def SetSelection(self, index: int) -> None:
        """Select *index*, refusing (and deselecting) when it is a header."""
        if self.GetString(index) in self.__headers:
            self.Deselect(self.GetSelection())
        else:
            super().SetSelection(index)

    def SetString(self, index: int, string: str) -> None:
        """Replace the string at *index*, keeping the header set in sync."""
        old_string = self.GetString(index)
        if old_string in self.__headers and string != old_string:
            self.__headers.remove(old_string)
            self.__headers.add(string)
        super().SetString(index, string)

    def SetStringSelection(self, string: str) -> bool:
        """Select *string*; returns False (and does nothing) for headers."""
        if string in self.__headers:
            return False
        self.SetSelection(
            self.FindString(
                string,
            )
        )
        return True

    # wx.ItemContainer methods

    def AppendItems(self, strings: "list[str]", with_prefix: bool = True) -> None:
        """Append *strings*, optionally marking them as normal items."""
        if with_prefix:
            strings = [self._add_prefix(string) for string in strings]
        super().AppendItems(strings)

    def Clear(self) -> None:
        """Remove every item and forget all headers."""
        self.__headers.clear()
        super().Clear()

    def Delete(self, index: int) -> None:
        """Delete the item at *index*, keeping the header set in sync."""
        string: str = self.GetString(index)
        if string in self.__headers:
            self.__headers.remove(string)
        super().Delete(index)

    # Extra methods

    def add_header(self, header_string: str) -> int:
        """Append *header_string* as an unselectable header item."""
        self.__headers.add(header_string)
        return super().Append(header_string)

    def add_item(
        self,
        item: str,
        with_prefix: bool = True,
        clientData: "dict[str, str] | None" = None,
    ) -> None:
        """Append a single normal item with optional client data."""
        if with_prefix:
            item = self._add_prefix(item)
        super().Append(item, clientData)

    def add_items(self, items: "list[str]", with_prefix: bool = True) -> None:
        """Append every string in *items* as a normal item."""
        for item in items:
            self.add_item(item, with_prefix)
# noinspection PyPep8Naming
class ListBoxComboPopup(wx.ComboPopup):
    """ListBoxWithHeaders as a popup.

    ``self.value`` holds the committed selection index and ``self.curitem``
    (created in Init, which wx calls as part of the ComboPopup protocol)
    tracks the item currently under the mouse.
    """

    def __init__(
        self, parent: "wx.ComboCtrl | None" = None, darkmode: bool = False
    ) -> None:
        super().__init__()
        self.__parent = parent
        # Created lazily in Create(); None until then.
        self.__listbox: "ListBoxWithHeaders | None" = None
        self.__dark_mode: bool = darkmode
        self.value = -1

    def _on_motion(self, event) -> None:
        """Track the hovered row, highlighting it when selectable."""
        row: int = self.__listbox.HitTest(event.GetPosition())
        if row != wx.NOT_FOUND:
            self.__listbox.SetSelection(row)
            # Headers refuse selection, so IsSelected() filters them out.
            self.curitem = row if self.__listbox.IsSelected(row) else wx.NOT_FOUND

    # noinspection PyUnusedLocal
    def _on_left_down(self, event) -> None:
        """Commit the hovered item and close the popup."""
        self.value = self.curitem
        if self.value >= 0:
            self.Dismiss()

    # wx.ComboPopup methods

    # noinspection PyAttributeOutsideInit
    def Init(self) -> None:
        """ComboPopup protocol: reset selection state."""
        self.value = self.curitem = -1

    def Create(self, parent: wx.ComboCtrl, **kwargs) -> bool:
        """ComboPopup protocol: build the list box and wire its events."""
        # Create components
        self.__listbox = ListBoxWithHeaders(parent, style=wx.LB_SINGLE)
        if self.__dark_mode:
            self.__listbox.SetBackgroundColour(DARK_BACKGROUND_COLOUR)
            self.__listbox.SetForegroundColour(DARK_FOREGROUND_COLOUR)
        self.__listbox.Bind(wx.EVT_MOTION, self._on_motion)
        self.__listbox.Bind(wx.EVT_LEFT_DOWN, self._on_left_down)
        return True

    def GetControl(self) -> "ListBoxWithHeaders | None":
        """ComboPopup protocol: return the wrapped list box."""
        return self.__listbox

    def AddItem(
        self,
        item: str,
        with_prefix: bool = True,
        clientData: "dict[str, str] | None" = None,
    ) -> None:
        """Append a single normal item to the popup list."""
        self.__listbox.add_item(item, with_prefix, clientData)

    def AddItems(self, items: "list[str]", with_prefix: bool = True) -> None:
        """Append several normal items to the popup list."""
        self.__listbox.add_items(items, with_prefix)

    def GetStringValue(self) -> str:
        """ComboPopup protocol: string for the committed selection."""
        return self.__listbox.GetString(self.value)

    def GetSelection(self) -> int:
        """Return the committed selection index."""
        return self.value

    def SetSelection(self, index: int) -> None:
        """Select *index* (headers are refused) and sync the combo text."""
        self.__listbox.SetSelection(index)
        if self.__listbox.IsSelected(index):
            self.value = index
            self.__parent.SetValue(self.GetStringValue())

    def SetStringSelection(self, string: str) -> None:
        """Select the item matching *string*, when it is selectable."""
        index: int = self.__listbox.FindString(
            string,
        )
        self.__listbox.SetSelection(index)
        if index != wx.NOT_FOUND and self.__listbox.GetSelection() == index:
            self.value = index
            self.SetSelection(self.value)

    def Clear(self) -> None:
        """Empty both the combo text and the popup list."""
        self.__parent.SetValue("")
        self.__listbox.Clear()

    def IsListEmpty(self) -> bool:
        """Return True when the popup list holds no items."""
        return self.__listbox.GetCount() == 0

    def OnDismiss(self) -> None:
        """ComboPopup protocol: fall back to the first item when nothing was chosen."""
        if self.value < 0:
            self.value = 0
            self.__listbox.SetSelection(self.value)
# noinspection PyPep8Naming
class ExtComboBox(wx.ComboBox):
    """wx.ComboBox keeping a bounded, duplicate-free history of values.

    Args:
        parent: Parent window.
        max_items (int): Maximum number of items to keep, or -1 for
            unlimited. The oldest item is evicted when the limit is hit.
    """

    def __init__(self, parent, max_items=-1, *args, **kwargs):
        super().__init__(parent, *args, **kwargs)
        assert max_items > 0 or max_items == -1
        self.max_items = max_items

    def Append(self, new_value):
        """Append *new_value* unless already present, evicting the oldest
        item when the max_items limit is exceeded."""
        if self.FindString(new_value) == wx.NOT_FOUND:
            super().Append(new_value)
            if self.max_items != -1 and self.GetCount() > self.max_items:
                self.SetItems(self.GetStrings()[1:])

    def SetValue(self, new_value):
        """Select *new_value*, appending it first if it is not present.

        Bug fix: after appending a previously unknown value the stale
        ``wx.NOT_FOUND`` index was passed to SetSelection, which deselects
        instead of selecting the new value; the index is now re-resolved.
        """
        index = self.FindString(new_value)
        if index == wx.NOT_FOUND:
            self.Append(new_value)
            index = self.FindString(new_value)
        self.SetSelection(index)

    def LoadMultiple(self, items_list):
        """Append every value of *items_list* (duplicates are ignored)."""
        for item in items_list:
            self.Append(item)
class DoubleStageButton(wx.Button):
    """Button that toggles between two (label, bitmap) presentations.

    Args:
        parent: Parent window.
        labels: Tuple with exactly two labels, one per stage.
        bitmaps: Tuple with zero or two bitmaps, one per stage.
        bitmap_pos: Where the bitmap sits relative to the label.
    """

    def __init__(self, parent, labels, bitmaps, bitmap_pos=wx.TOP, *args, **kwargs):
        super().__init__(parent, *args, **kwargs)
        assert isinstance(labels, tuple) and isinstance(bitmaps, tuple)
        assert len(labels) == 2
        assert len(bitmaps) in [0, 2]
        self.labels = labels
        self.bitmaps = bitmaps
        self.bitmap_pos = bitmap_pos
        self._stage = 0
        self._set_layout()

    def _set_layout(self):
        """Apply the label (and bitmap, when provided) of the current stage."""
        self.SetLabel(self.labels[self._stage])
        if len(self.bitmaps):
            self.SetBitmap(self.bitmaps[self._stage], self.bitmap_pos)

    def change_stage(self):
        """Flip to the other stage and refresh the presentation."""
        self._stage = 1 if self._stage == 0 else 0
        self._set_layout()

    def set_stage(self, new_stage):
        """Jump directly to *new_stage* (0 or 1) and refresh."""
        assert new_stage in [0, 1]
        self._stage = new_stage
        self._set_layout()
class MessageDialog(wx.Dialog):
    """Modal Yes/No message dialog with an information icon.

    Ends its modal loop with the id of the pressed button
    (wx.ID_YES or wx.ID_NO); "No" is the default button.
    """

    # Keep the plain native style on Windows; allow maximizing elsewhere.
    STYLE = (
        wx.DEFAULT_DIALOG_STYLE
        if IS_WINDOWS
        else wx.DEFAULT_DIALOG_STYLE | wx.MAXIMIZE_BOX
    )

    def __init__(self, parent, message, title, _dark_mode=False):
        super().__init__(parent, wx.ID_ANY, title, style=self.STYLE)
        self.parent = parent
        self.message = message
        self._dark_mode = _dark_mode
        # Create components
        self.panel = wx.Panel(self)
        self.buttons: dict[str, wx.Button] = {
            "yes": wx.Button(self.panel, wx.ID_YES, _("Yes")),
            "no": wx.Button(self.panel, wx.ID_NO, _("No")),
        }
        info_bmp = wx.ArtProvider.GetBitmap(wx.ART_INFORMATION, wx.ART_MESSAGE_BOX)
        info_icon = wx.StaticBitmap(self.panel, wx.ID_ANY, info_bmp)
        msg_text = wx.StaticText(self.panel, wx.ID_ANY, message)
        # Set sizers
        vertical_sizer = wx.BoxSizer(wx.VERTICAL)
        message_sizer = wx.BoxSizer(wx.HORIZONTAL)
        message_sizer.Add(info_icon)
        message_sizer.AddSpacer(10)
        message_sizer.Add(msg_text, flag=wx.EXPAND | wx.TOP | wx.BOTTOM, border=5)
        vertical_sizer.Add(message_sizer, 1, wx.ALL, border=10)
        buttons_sizer = wx.StdDialogButtonSizer()
        for button in self.buttons.values():
            button.Bind(wx.EVT_BUTTON, self._on_close)
            buttons_sizer.AddButton(button)
        self.buttons["no"].SetDefault()
        buttons_sizer.Realize()
        vertical_sizer.Add(buttons_sizer, flag=wx.EXPAND | wx.ALL, border=10)
        self.panel.SetSizer(vertical_sizer)
        width, height = self.panel.GetBestSize()
        # Extra vertical room beyond the computed best size.
        self.SetSize((width, int(height * 1.8)))
        # Set Dark Theme
        dark_mode(self.panel, self._dark_mode)
        self.Center()

    def _on_close(self, event):
        """End the modal loop with the id of the clicked button."""
        self.EndModal(event.GetEventObject().GetId())
class ButtonsChoiceDialog(wx.Dialog):
    """Modal dialog offering one button per choice plus Cancel.

    Ends its modal loop with the 1-based choice index used as the button id
    (or wx.ID_CANCEL).
    """

    # Keep the plain native style on Windows; allow maximizing elsewhere.
    STYLE = (
        wx.DEFAULT_DIALOG_STYLE
        if IS_WINDOWS
        else wx.DEFAULT_DIALOG_STYLE | wx.MAXIMIZE_BOX
    )

    def __init__(self, parent, choices, message, title, _dark_mode=False):
        super().__init__(parent, wx.ID_ANY, title, style=self.STYLE)
        self._dark_mode = _dark_mode
        self.buttons: dict[str, wx.Button] = {}
        # Create components
        self.panel = wx.Panel(self)
        info_bmp = wx.ArtProvider.GetBitmap(wx.ART_INFORMATION, wx.ART_MESSAGE_BOX)
        info_icon = wx.StaticBitmap(self.panel, wx.ID_ANY, info_bmp)
        msg_text = wx.StaticText(self.panel, wx.ID_ANY, message)
        self.buttons["cancel"] = wx.Button(self.panel, wx.ID_CANCEL, _("Cancel"))
        # Choice buttons are keyed "1", "2", ... and use that number as id.
        for index, label in enumerate(choices):
            key: str = str(index + 1)
            self.buttons[key] = wx.Button(self.panel, int(key), label)
        # Get the maximum button width & height
        max_width = max_height = -1
        for button in self.buttons.values():
            button_width, button_height = button.GetSize()
            if button_width > max_width:
                max_width = button_width
            if button_height > max_height:
                max_height = button_height
        max_width += 10
        # Set buttons width & bind events
        for button in self.buttons.values():
            if button != self.buttons["cancel"]:
                button.SetMinSize((max_width, max_height))
            else:
                # On Cancel button change only the height
                button.SetMinSize((-1, max_height))
            button.Bind(wx.EVT_BUTTON, self._on_close)
        # Set sizers
        vertical_sizer = wx.BoxSizer(wx.VERTICAL)
        message_sizer = wx.BoxSizer(wx.HORIZONTAL)
        message_sizer.Add(info_icon)
        message_sizer.AddSpacer(10)
        message_sizer.Add(msg_text, flag=wx.EXPAND | wx.TOP | wx.BOTTOM, border=5)
        vertical_sizer.Add(message_sizer, 1, wx.ALL, border=10)
        buttons_sizer = wx.BoxSizer(wx.HORIZONTAL)
        # NOTE(review): only choice buttons "1" and "2" are laid out here;
        # additional choices are created above but never added to a sizer —
        # confirm callers always pass exactly two choices.
        for button in (self.buttons["1"], self.buttons["2"]):
            buttons_sizer.Add(button)
            buttons_sizer.AddSpacer(5)
        buttons_sizer.AddSpacer(1)
        buttons_sizer.Add(self.buttons["cancel"])
        vertical_sizer.Add(buttons_sizer, flag=wx.EXPAND | wx.ALL, border=10)
        self.panel.SetSizer(vertical_sizer)
        width, height = self.panel.GetBestSize()
        # Extra vertical room beyond the computed best size.
        self.SetSize((width, int(height * 1.5)))
        # Set Dark Theme
        dark_mode(self.panel, self._dark_mode)
        self.Center()

    def _on_close(self, event):
        """End the modal loop with the id of the clicked button."""
        self.EndModal(event.GetEventObject().GetId())
class ClipDialog(wx.Dialog):
    """Dialog for picking a clip start/end timespan for a download item.

    Reads any previously stored timespan from the item's external-downloader
    options and strips those options from the item (see _clean_options).
    """

    FRAME_SIZE = (195, 170) if IS_WINDOWS else (350, 250)

    # yt-dl options that carry the ffmpeg clip timespan.
    CHECK_OPTIONS = ("--external-downloader", "--external-downloader-args")

    def __init__(
        self,
        parent: "MainFrame",
        download_item: "DownloadItem",
        _dark_mode: bool = False,
    ):
        super().__init__(
            parent, wx.ID_ANY, title=_("Clip Multimedia"), style=wx.DEFAULT_DIALOG_STYLE
        )
        self.download_item = download_item
        # Pre-populate the two time controls from any stored timespan.
        clip_start, clip_end = self._get_timespans()
        self._dark_mode = _dark_mode
        # Create components
        self.panel = wx.Panel(self)
        sizer = wx.BoxSizer(wx.VERTICAL)
        # Row: "Clip start" label + masked time control with spin buttons.
        h_time_box = wx.BoxSizer(wx.HORIZONTAL)
        start_label = wx.StaticText(self.panel, wx.ID_ANY, _("Clip start") + ":")
        h_time_box.Add(start_label, 0, wx.ALIGN_CENTRE | wx.ALL, 5)
        h_time_box.AddStretchSpacer(1)
        self.clip_start = masked.TimeCtrl(
            self.panel, wx.ID_ANY, value=clip_start, fmt24hr=True, name="startTime"
        )
        height = self.clip_start.GetSize().height
        spin1 = wx.SpinButton(
            self.panel, wx.ID_ANY, wx.DefaultPosition, (-1, height), wx.SP_VERTICAL
        )
        self.clip_start.BindSpinButton(spin1)
        hbox1 = wx.BoxSizer(wx.HORIZONTAL)
        hbox1.Add(self.clip_start, 0, wx.ALIGN_CENTRE)
        hbox1.Add(spin1, 0, wx.ALIGN_CENTRE)
        h_time_box.Add(hbox1, 0, wx.EXPAND | wx.ALL, 5)
        sizer.Add(h_time_box, 0, wx.EXPAND | wx.ALL, 5)
        # Row: "Clip end" label + masked time control with spin buttons.
        h_time_box = wx.BoxSizer(wx.HORIZONTAL)
        end_label = wx.StaticText(self.panel, wx.ID_ANY, _("Clip end") + ":")
        h_time_box.Add(end_label, 0, wx.ALIGN_CENTRE | wx.ALL, 5)
        h_time_box.AddStretchSpacer(1)
        spin2 = wx.SpinButton(
            self.panel, wx.ID_ANY, wx.DefaultPosition, (-1, height), wx.SP_VERTICAL
        )
        self.clip_end = masked.TimeCtrl(
            self.panel,
            wx.ID_ANY,
            value=clip_end,
            fmt24hr=True,
            name="endTime",
            spinButton=spin2,
        )
        hbox2 = wx.BoxSizer(wx.HORIZONTAL)
        hbox2.Add(self.clip_end, 0, wx.ALIGN_CENTRE)
        hbox2.Add(spin2, 0, wx.ALIGN_CENTRE)
        h_time_box.Add(hbox2, 0, wx.EXPAND | wx.ALL, 5)
        sizer.Add(h_time_box, 0, wx.EXPAND | wx.ALL, 5)
        line = wx.StaticLine(
            self.panel, wx.ID_ANY, size=(-1, -1), style=wx.LI_HORIZONTAL
        )
        sizer.Add(line, 0, wx.EXPAND | wx.TOP, 5)
        buttons_sizer = wx.StdDialogButtonSizer()
        btn = wx.Button(self.panel, wx.ID_OK)
        btn.Bind(wx.EVT_BUTTON, self._on_close)
        btn.SetDefault()
        buttons_sizer.AddButton(btn)
        btn = wx.Button(self.panel, wx.ID_CANCEL)
        buttons_sizer.AddButton(btn)
        buttons_sizer.Realize()
        sizer.Add(buttons_sizer, 0, wx.ALIGN_CENTER_HORIZONTAL | wx.ALL, 5)
        self.panel.SetSizer(sizer)
        # Set Dark Theme
        dark_mode(self.panel, self._dark_mode)
        self.SetSize(self.FRAME_SIZE)
        self.Center()
        # TODO: Make better decision
        self._clean_options()

    def _clean_options(self):
        """
        Clean the CHECK_OPTIONS from self.download_item.options
        """
        options = []
        for idx, opt in enumerate(self.download_item.options):
            if opt in self.CHECK_OPTIONS:
                try:
                    opt_arg = self.download_item.options[idx + 1]
                    # Drop the option's argument too when it is the ffmpeg
                    # downloader or an ffmpeg -ss/-to timespan string.
                    if opt_arg == "ffmpeg" or "-ss" in opt_arg or "-to" in opt_arg:
                        self.download_item.options.pop(idx + 1)
                except IndexError:
                    # Option was the last token; nothing to pop.
                    pass
                continue
            options.append(opt)
        if options:
            self.download_item.options = options

    def _get_timespans(self) -> "tuple[str, str]":
        """
        Get the TimeSpan if CHECK_OPTIONS[1] in self.download_item.options

        Returns:
            Tuple of strings with the clip_start and clip_end in format HH:MM:SS
        """
        external_downloader_args: "str | None" = None
        downloader_args: "list[str] | None" = None
        clip_start = clip_end = index = 0
        for idx, option in enumerate(self.download_item.options):
            if self.CHECK_OPTIONS[1] == option:
                try:
                    external_downloader_args = self.download_item.options[idx + 1]
                    index = idx
                except IndexError:
                    # No exist timespans
                    self.download_item.options.pop(idx)
                break
        if external_downloader_args:
            downloader_args = external_downloader_args.split()
        # Expected shape: ['-ss', '<start>', '-to', '<end>'] (4 tokens).
        if downloader_args and len(downloader_args) == 4:
            # Clean quotes (simple/double)
            try:
                clip_start = int(downloader_args[1].strip("'\""))
                clip_end = int(downloader_args[-1].strip("'\""))
            except ValueError:
                # Malformed values: drop the stale option and its argument.
                self.download_item.options.pop(index + 1)
                self.download_item.options.pop(index)
        wx_clip_start = str(timedelta(seconds=clip_start))
        wx_clip_end = str(timedelta(seconds=clip_end))
        return wx_clip_start, wx_clip_end

    def _on_close(self, event):
        """Validate the timespan; keep the dialog open when it is invalid."""
        _clip_start = int(self.clip_start.GetValue(as_wxTimeSpan=True).GetSeconds())
        _clip_end = int(self.clip_end.GetValue(as_wxTimeSpan=True).GetSeconds())
        if _clip_start > _clip_end or _clip_start == _clip_end:
            wx.MessageBox(
                _("Invalid timespan"), _("Warning"), wx.OK | wx.ICON_EXCLAMATION
            )
            return
        self.EndModal(event.GetEventObject().GetId())
class ShutdownDialog(wx.Dialog):
    """Countdown confirmation dialog that auto-accepts when time runs out.

    A wx.Timer ticks every TIMER_INTERVAL ms, re-rendering *message*
    (formatted with the remaining seconds) and ending the modal loop with
    wx.ID_OK once the countdown reaches zero.
    """

    # Keep the plain native style on Windows; allow maximizing elsewhere.
    STYLE = (
        wx.DEFAULT_DIALOG_STYLE
        if IS_WINDOWS
        else wx.DEFAULT_DIALOG_STYLE | wx.MAXIMIZE_BOX
    )

    TIMER_INTERVAL = 1000  # milliseconds

    def __init__(self, parent, timeout, message, *args, **kwargs):
        super().__init__(parent, wx.ID_ANY, *args, style=self.STYLE, **kwargs)
        assert timeout > 0
        self.timeout = timeout
        # Template expected to contain a '{}' placeholder for the seconds.
        self.message = message
        # Create components
        panel = wx.Panel(self)
        info_bmp = wx.ArtProvider.GetBitmap(wx.ART_INFORMATION, wx.ART_MESSAGE_BOX)
        info_icon = wx.StaticBitmap(panel, wx.ID_ANY, info_bmp)
        self.msg_text = msg_text = wx.StaticText(panel, wx.ID_ANY, self._get_message())
        ok_button = wx.Button(panel, wx.ID_OK, _("OK"))
        cancel_button = wx.Button(panel, wx.ID_CANCEL, _("Cancel"))
        # Set layout
        vertical_sizer = wx.BoxSizer(wx.VERTICAL)
        message_sizer = wx.BoxSizer(wx.HORIZONTAL)
        message_sizer.Add(info_icon)
        # NOTE(review): AddSpacer with a tuple is a legacy wxPython form
        # (modern wx expects an int) — confirm against the wx version in use.
        message_sizer.AddSpacer((10, 10))
        message_sizer.Add(msg_text, flag=wx.EXPAND)
        vertical_sizer.Add(message_sizer, 1, wx.ALL, border=10)
        buttons_sizer = wx.BoxSizer(wx.HORIZONTAL)
        buttons_sizer.Add(ok_button)
        buttons_sizer.AddSpacer((5, -1))
        buttons_sizer.Add(cancel_button)
        vertical_sizer.Add(buttons_sizer, flag=wx.ALIGN_RIGHT | wx.ALL, border=10)
        panel.SetSizer(vertical_sizer)
        width, height = panel.GetBestSize()
        # NOTE(review): float dimensions are passed to SetSize here (no int
        # cast, unlike the other dialogs) — confirm wx accepts them.
        self.SetSize((width * 1.3, height * 1.3))
        self.Center()
        # Set up timer
        self.timer = wx.Timer(self)
        self.Bind(wx.EVT_TIMER, self._on_timer, self.timer)
        self.timer.Start(self.TIMER_INTERVAL)

    def _get_message(self):
        """Render the message template with the seconds remaining."""
        return self.message.format(self.timeout)

    # noinspection PyUnusedLocal
    def _on_timer(self, event):
        """Tick down one second; auto-accept when the countdown expires."""
        self.timeout -= 1
        self.msg_text.SetLabel(self._get_message())
        if self.timeout <= 0:
            self.EndModal(wx.ID_OK)

    def Destroy(self):
        """Stop the countdown timer before destroying the dialog."""
        self.timer.Stop()
        return super().Destroy()
# noinspection PyUnresolvedReferences
class LogGUI(wx.Frame):
    """Simple window for reading the STDERR.

    Attributes:
        FRAME_SIZE (tuple): Tuple that holds the frame size (width, height).

    Args:
        parent (MainFrame): Frame parent (may be None).
    """

    FRAME_SIZE: "tuple[int, int]" = (750, 200)

    def __init__(self, parent: "MainFrame | OptionsFrame | None" = None):
        super().__init__(parent, title=_("Log Viewer"), size=self.FRAME_SIZE)
        self.parent = parent
        # Bug fix: parent defaults to None, but the previous code read
        # parent.app_icon unconditionally and raised AttributeError.
        self.app_icon: "wx.Icon | None" = (
            parent.app_icon if parent is not None else None
        )
        if self.app_icon:
            self.SetIcon(self.app_icon)
        self.panel = wx.Panel(self)
        self._text_area = wx.TextCtrl(
            self.panel, style=wx.TE_MULTILINE | wx.TE_READONLY | wx.HSCROLL
        )
        sizer = wx.BoxSizer()
        sizer.Add(self._text_area, 1, wx.EXPAND)
        self.panel.SetSizerAndFit(sizer)

    def load(self, filename: str):
        """Load file content on the text area."""
        if Path(filename).exists():
            self._text_area.LoadFile(filename)
|
""" Emits top.v's for various BUFHCE routing inputs. """
import os
import random
random.seed(int(os.getenv("SEED"), 16))
from prjxray import util
from prjxray.lut_maker import LutMaker
from prjxray.db import Database
from io import StringIO
CMT_XY_FUN = util.create_xy_fun(prefix='')
def read_site_to_cmt():
    """ Yields clock sources and which CMT they route within. """
    csv_path = os.path.join(
        os.getenv('FUZDIR'), 'build', 'cmt_regions.csv')
    with open(csv_path) as f:
        for line in f:
            # Each row: site,cmt,<ignored trailing field>
            site, cmt, _ = line.strip().split(',')
            yield site, cmt
class ClockSources(object):
    """ Class for tracking clock sources.

    Some clock sources can be routed to any CMT, for these, cmt='ANY'.
    For clock sources that belong to a CMT, cmt should be set to the CMT of
    the source.
    """

    def __init__(self):
        # cmt -> list of source names registered for that CMT.
        self.sources = {}
        # source name -> cmt it was registered under.
        self.source_to_cmt = {}
        # cmt -> set of sources already handed out (left/right routing only).
        self.used_sources_from_cmt = {}

    def add_clock_source(self, source, cmt):
        """ Adds a source from a specific CMT.

        cmt='ANY' indicates that this source can be routed to any CMT.
        """
        if cmt not in self.sources:
            self.sources[cmt] = []
        self.sources[cmt].append(source)
        self.source_to_cmt[source] = cmt

    def get_random_source(
            self, cmt, uses_left_right_routing=False, no_repeats=False):
        """ Get a random source that is routable to the specific CMT.

        get_random_source will return a source that is either cmt='ANY',
        cmt equal to the input CMT, or the adjecent CMT.

        Returns None when no candidate source is available.
        """
        choices = []
        # Bug fix: cmt='ANY' sources are documented (class docstring and
        # above) as routable to every CMT, but were never added to the
        # candidate list.
        if 'ANY' in self.sources:
            choices.extend(self.sources['ANY'])
        if cmt in self.sources:
            choices.extend(self.sources[cmt])
        if uses_left_right_routing:
            # Also consider the horizontally paired CMT (non-BUFHCE
            # sources only).
            x, y = CMT_XY_FUN(cmt)
            if x % 2 == 0:
                x += 1
            else:
                x -= 1
            paired_cmt = 'X{}Y{}'.format(x, y)
            if paired_cmt in self.sources:
                for source in self.sources[paired_cmt]:
                    if 'BUFHCE' not in source:
                        choices.append(source)
        # Robustness fix: return None instead of raising IndexError when no
        # source exists (matching the left/right exhaustion path below).
        if not choices:
            return None
        random.shuffle(choices)
        if not uses_left_right_routing:
            return choices[0]
        for source in choices:
            source_cmt = self.source_to_cmt[source]
            if source_cmt not in self.used_sources_from_cmt:
                self.used_sources_from_cmt[source_cmt] = set()
            if no_repeats and source in self.used_sources_from_cmt[source_cmt]:
                continue
            # Each CMT only has a limited number of routable clock lines.
            if len(self.used_sources_from_cmt[source_cmt]) >= 14:
                continue
            self.used_sources_from_cmt[source_cmt].add(source)
            return source
        return None
def get_paired_iobs(db, grid, tile_name):
    """ The two IOB33M's above and below the HCLK row have dedicated clock lines.

    Walks horizontally from *tile_name* to the I/O column on the matching
    side, then yields (tile_name, site, voltage_digits) for the master
    IOBs at fixed vertical offsets around the HCLK row.
    """
    gridinfo = grid.gridinfo_at_tilename(tile_name)
    loc = grid.loc_of_tilename(tile_name)
    # _L tiles look rightwards (+1) for the right IOB column; others look
    # leftwards (-1) for the left column.
    if gridinfo.tile_type.endswith('_L'):
        inc = 1
        lr = 'R'
    else:
        inc = -1
        lr = 'L'
    idx = 1
    # Scan horizontally until the HCLK_IOI column is reached.
    while True:
        gridinfo = grid.gridinfo_at_loc((loc.grid_x + inc * idx, loc.grid_y))
        if gridinfo.tile_type.startswith('HCLK_IOI'):
            break
        idx += 1
    # Move from HCLK_IOI column to IOB column
    idx += 1
    # Fixed vertical offsets of the clock-capable IOB tiles around the
    # HCLK row.
    for dy in [-1, -3, 2, 4]:
        iob_loc = (loc.grid_x + inc * idx, loc.grid_y + dy)
        gridinfo = grid.gridinfo_at_loc(iob_loc)
        tile_name = grid.tilename_at_loc(iob_loc)
        assert gridinfo.tile_type.startswith(lr + 'IOB'), (
            gridinfo, lr + 'IOB')
        for site, site_type in gridinfo.sites.items():
            if site_type in ['IOB33M', 'IOB18M']:
                # site_type[-3:-1] extracts the voltage digits ('33'/'18').
                yield tile_name, site, site_type[-3:-1]
def check_allowed(mmcm_pll_dir, cmt):
    """ Check whether the CMT specified is in the allowed direction.

    This function is designed to bias sources to either the left or right
    input lines.
    """
    if mmcm_pll_dir == 'BOTH':
        return True
    if mmcm_pll_dir in ('ODD', 'EVEN'):
        x, _y = CMT_XY_FUN(cmt)
        wanted_parity = 1 if mmcm_pll_dir == 'ODD' else 0
        return (x & 1) == wanted_parity
    assert False, mmcm_pll_dir
def main():
    """
    Emit a randomized Verilog design exercising the HCLK_CMT switch box.

    HCLK_CMT switch box has the following inputs:
    4 IOBs above and below
    14 MMCM outputs
    8 PLL outputs
    4 PHASER_IN outputs
    2 INT connections
    and the following outputs:
    3 PLLE2 inputs
    2 BUFMR inputs
    3 MMCM inputs
    ~2 MMCM -> BUFR???
    """
    # clock_sources feeds BUFHCE inputs (left/right routing restrictions
    # apply); adv_clock_sources feeds MMCM/PLL clock inputs.
    clock_sources = ClockSources()
    adv_clock_sources = ClockSources()
    site_to_cmt = dict(read_site_to_cmt())
    db = Database(util.get_db_root())
    grid = db.grid()

    def gen_sites(desired_site_type):
        # Yield (tile, site) pairs for every site of the requested type.
        for tile_name in sorted(grid.tiles()):
            loc = grid.loc_of_tilename(tile_name)
            gridinfo = grid.gridinfo_at_loc(loc)
            for site, site_type in gridinfo.sites.items():
                if site_type == desired_site_type:
                    yield tile_name, site

    hclk_cmts = set()
    ins = []
    iobs = StringIO()
    hclk_cmt_tiles = set()
    # BUFMRCE sites mark the CMT rows that have an HCLK_CMT tile.
    for tile_name, site in gen_sites('BUFMRCE'):
        cmt = site_to_cmt[site]
        hclk_cmts.add(cmt)
        hclk_cmt_tiles.add(tile_name)

    # Randomized fuzzing knobs, echoed into the output as a Verilog comment
    # so each specimen records its own configuration.
    mmcm_pll_only = random.randint(0, 1)
    mmcm_pll_dir = random.choice(('ODD', 'EVEN', 'BOTH'))
    print(
        '// mmcm_pll_only {} mmcm_pll_dir {}'.format(
            mmcm_pll_only, mmcm_pll_dir))
    have_iob_clocks = random.random() > .1
    iob_clks = {}
    # Instantiate an IBUF for every paired IOB and register its output as a
    # potential clock source.
    for tile_name in sorted(hclk_cmt_tiles):
        for _, site, volt in get_paired_iobs(db, grid, tile_name):
            iob_clock = 'clock_IBUF_{site}'.format(site=site)
            cmt = site_to_cmt[site]
            if cmt not in iob_clks:
                # The empty string entry lets random.choice() later leave a
                # BUFR input unconnected.
                iob_clks[cmt] = ['']
            iob_clks[cmt].append(iob_clock)
            ins.append('input clk_{site}'.format(site=site))
            if have_iob_clocks:
                if check_allowed(mmcm_pll_dir, cmt):
                    clock_sources.add_clock_source(iob_clock, cmt)
                adv_clock_sources.add_clock_source(iob_clock, cmt)
            print(
                """
(* KEEP, DONT_TOUCH, LOC = "{site}" *)
wire clock_IBUF_{site};
IBUF #( .IOSTANDARD("LVCMOS{volt}") ) ibuf_{site} (
.I(clk_{site}),
.O(clock_IBUF_{site})
);
""".format(volt=volt, site=site),
                file=iobs)

    print(
        '''
module top({inputs});
(* KEEP, DONT_TOUCH *)
LUT6 dummy();
'''.format(inputs=', '.join(ins)))
    print(iobs.getvalue())

    luts = LutMaker()
    wires = StringIO()
    bufhs = StringIO()
    # Instantiate every MMCM; its 13 outputs become BUFHCE clock sources when
    # the CMT column is in the allowed direction.
    for _, site in gen_sites('MMCME2_ADV'):
        mmcm_clocks = [
            'mmcm_clock_{site}_{idx}'.format(site=site, idx=idx)
            for idx in range(13)
        ]
        if check_allowed(mmcm_pll_dir, site_to_cmt[site]):
            for clk in mmcm_clocks:
                clock_sources.add_clock_source(clk, site_to_cmt[site])
        # NOTE(review): the wire declaration only lists {c0}..{c5}; {c6}..{c12}
        # rely on Verilog implicit nets at the port connections. Also note the
        # instance is named pll_{site} although it is an MMCME2_ADV — confirm
        # both are intentional.
        print(
            """
wire cin1_{site}, cin2_{site}, clkfbin_{site}, {c0}, {c1}, {c2}, {c3}, {c4}, {c5};
(* KEEP, DONT_TOUCH, LOC = "{site}" *)
MMCME2_ADV pll_{site} (
.CLKIN1(cin1_{site}),
.CLKIN2(cin2_{site}),
.CLKFBIN(clkfbin_{site}),
.CLKOUT0({c0}),
.CLKOUT0B({c1}),
.CLKOUT1({c2}),
.CLKOUT1B({c3}),
.CLKOUT2({c4}),
.CLKOUT2B({c5}),
.CLKOUT3({c6}),
.CLKOUT3B({c7}),
.CLKOUT4({c8}),
.CLKOUT5({c9}),
.CLKOUT6({c10}),
.CLKFBOUT({c11}),
.CLKFBOUTB({c12})
);
""".format(
                site=site,
                c0=mmcm_clocks[0],
                c1=mmcm_clocks[1],
                c2=mmcm_clocks[2],
                c3=mmcm_clocks[3],
                c4=mmcm_clocks[4],
                c5=mmcm_clocks[5],
                c6=mmcm_clocks[6],
                c7=mmcm_clocks[7],
                c8=mmcm_clocks[8],
                c9=mmcm_clocks[9],
                c10=mmcm_clocks[10],
                c11=mmcm_clocks[11],
                c12=mmcm_clocks[12],
            ))

    # Instantiate every PLL; its 7 outputs are handled like the MMCM outputs.
    for _, site in gen_sites('PLLE2_ADV'):
        pll_clocks = [
            'pll_clock_{site}_{idx}'.format(site=site, idx=idx)
            for idx in range(7)
        ]
        if check_allowed(mmcm_pll_dir, site_to_cmt[site]):
            for clk in pll_clocks:
                clock_sources.add_clock_source(clk, site_to_cmt[site])
        print(
            """
wire cin1_{site}, cin2_{site}, clkfbin_{site}, {c0}, {c1}, {c2}, {c3}, {c4}, {c5}, {c6};
(* KEEP, DONT_TOUCH, LOC = "{site}" *)
PLLE2_ADV pll_{site} (
.CLKIN1(cin1_{site}),
.CLKIN2(cin2_{site}),
.CLKFBIN(clkfbin_{site}),
.CLKOUT0({c0}),
.CLKOUT1({c1}),
.CLKOUT2({c2}),
.CLKOUT3({c3}),
.CLKOUT4({c4}),
.CLKOUT5({c5}),
.CLKFBOUT({c6})
);
""".format(
                site=site,
                c0=pll_clocks[0],
                c1=pll_clocks[1],
                c2=pll_clocks[2],
                c3=pll_clocks[3],
                c4=pll_clocks[4],
                c5=pll_clocks[5],
                c6=pll_clocks[6],
            ))

    # Instantiate every BUFHCE; outputs in HCLK_CMT rows become additional
    # clock sources (unless restricted to MMCM/PLL-only sources).
    for tile_name, site in gen_sites('BUFHCE'):
        print(
            """
wire I_{site};
wire O_{site};
(* KEEP, DONT_TOUCH, LOC = "{site}" *)
BUFHCE buf_{site} (
.I(I_{site}),
.O(O_{site})
);
""".format(site=site, ),
            file=bufhs)
        if site_to_cmt[site] in hclk_cmts:
            if not mmcm_pll_only:
                clock_sources.add_clock_source(
                    'O_{site}'.format(site=site), site_to_cmt[site])
            adv_clock_sources.add_clock_source(
                'O_{site}'.format(site=site), site_to_cmt[site])

    # Track how many distinct HCLK row clocks each CMT consumes; the row only
    # has a limited number of them.
    hclks_used_by_cmt = {}
    for cmt in site_to_cmt.values():
        hclks_used_by_cmt[cmt] = set()

    def check_hclk_src(src, src_cmt):
        # Return src if the CMT still has HCLK capacity for it (12 max),
        # otherwise None.
        if len(hclks_used_by_cmt[src_cmt]
               ) >= 12 and src not in hclks_used_by_cmt[src_cmt]:
            return None
        else:
            hclks_used_by_cmt[src_cmt].add(src)
            return src

    # Usually drive the BUFHCE inputs from random sources; occasionally leave
    # them all unconnected.
    if random.random() > .10:
        for tile_name, site in gen_sites('BUFHCE'):
            wire_name = clock_sources.get_random_source(
                site_to_cmt[site],
                uses_left_right_routing=True,
                no_repeats=mmcm_pll_only)
            if wire_name is not None and 'BUFHCE' in wire_name:
                # Looping a BUFHCE to a BUFHCE requires using a hclk in the
                # CMT of the source
                src_cmt = clock_sources.source_to_cmt[wire_name]
                wire_name = check_hclk_src(wire_name, src_cmt)
            if wire_name is None:
                continue
            print(
                """
assign I_{site} = {wire_name};""".format(
                    site=site,
                    wire_name=wire_name,
                ),
                file=bufhs)

    # BUFMRCE sites are intentionally left uninstantiated in this fuzzer.
    for tile_name, site in gen_sites('BUFMRCE'):
        pass

    for l in luts.create_wires_and_luts():
        print(l)
    print(wires.getvalue())
    print(bufhs.getvalue())

    # Each BUFR is fed from a random IOB clock of its CMT (possibly the empty
    # string, i.e. unconnected) and offered as an MMCM/PLL input source.
    for _, site in gen_sites('BUFR'):
        adv_clock_sources.add_clock_source(
            'O_{site}'.format(site=site), site_to_cmt[site])
        print(
            """
wire O_{site};
(* KEEP, DONT_TOUCH, LOC = "{site}" *)
BUFR bufr_{site} (
.I({I}),
.O(O_{site})
);""".format(I=random.choice(iob_clks[site_to_cmt[site]]), site=site))

    # Randomly drive the PLL clock inputs from the advanced source pool.
    for _, site in gen_sites('PLLE2_ADV'):
        for cin in ('cin1', 'cin2', 'clkfbin'):
            if random.random() > .2:
                src = adv_clock_sources.get_random_source(site_to_cmt[site])
                src_cmt = adv_clock_sources.source_to_cmt[src]
                if 'IBUF' not in src and 'BUFR' not in src:
                    # Clocks from input pins do not require HCLK's, all other
                    # sources route from a global row clock.
                    src = check_hclk_src(src, src_cmt)
                    if src is None:
                        continue
                print(
                    """
assign {cin}_{site} = {csrc};
""".format(cin=cin, site=site, csrc=src))

    # Randomly drive the MMCM clock inputs the same way.
    for _, site in gen_sites('MMCME2_ADV'):
        for cin in ('cin1', 'cin2', 'clkfbin'):
            if random.random() > .2:
                src = adv_clock_sources.get_random_source(site_to_cmt[site])
                src_cmt = adv_clock_sources.source_to_cmt[src]
                if 'IBUF' not in src and 'BUFR' not in src:
                    # Clocks from input pins do not require HCLK's, all other
                    # sources route from a global row clock.
                    src = check_hclk_src(src, src_cmt)
                    if src is None:
                        continue
                print(
                    """
assign {cin}_{site} = {csrc};
""".format(cin=cin, site=site, csrc=src))

    print("endmodule")
if __name__ == '__main__':
    # Script entry point: emit the randomized Verilog specimen on stdout.
    main()
|
import mock
import pytest
import requests
from wristband.providers.exceptions import DeployException
from wristband.providers.service_providers import DocktorServiceProvider
@mock.patch.object(DocktorServiceProvider, 'get_docktor_server_config')
@mock.patch('wristband.providers.service_providers.requests')
@mock.patch('wristband.providers.service_providers.App')
def test_docktor_provider_deploy_success(mocked_app,
                                         mocked_requests,
                                         mocked_get_docktor_server_config,
                                         settings,
                                         dummy_app_class):
    """Deploying a release PATCHes the slug URI to the Docktor server."""
    settings.WEBSTORE_URL = 'https://webstore.com'
    mocked_app.objects.get.return_value = dummy_app_class(
        name='foo',
        stage='bar',
        security_zone='test'
    )
    mocked_get_docktor_server_config.return_value = {'username': 'john',
                                                     'password': 'password',
                                                     'uri': 'http://test.com'}
    provider_under_test = DocktorServiceProvider('foo', 'bar')
    provider_under_test.deploy('0.1.0')
    calls = [mock.call().patch('http://test.com/apps/foo',
                               data={'slug_uri': 'https://webstore.com/apps/foo/foo_0.1.0.tgz'}),
             mock.call().patch().raise_for_status()]
    # BUG FIX: `has_calls` is not a Mock assertion method; accessing it just
    # creates a child mock, so the original check silently passed no matter
    # what was called. `assert_has_calls` actually verifies the calls.
    # NOTE(review): if this now fails, the expected spec above (built with
    # `mock.call().patch`) may need to be `mock.call.patch(...)` to match how
    # the provider invokes the requests module — confirm against the provider.
    mocked_requests.assert_has_calls(calls)
@mock.patch.object(DocktorServiceProvider, 'get_docktor_server_config')
@mock.patch('wristband.providers.service_providers.requests')
@mock.patch('wristband.providers.service_providers.App')
def test_deploy_failed(mocked_app,
                       mocked_requests,
                       mocked_get_docktor_server_config,
                       dummy_app_class):
    """An HTTP error from the Docktor server surfaces as a DeployException."""
    # Arrange: a known app plus server config, with the PATCH call blowing up.
    app = dummy_app_class(name='foo', stage='bar', security_zone='test')
    mocked_app.objects.get.return_value = app
    server_config = {'username': 'john',
                     'password': 'password',
                     'uri': 'http://test.com'}
    mocked_get_docktor_server_config.return_value = server_config
    mocked_requests.patch.return_value.raise_for_status.side_effect = requests.HTTPError('test_message')

    # Act / assert: deploy() must translate the HTTPError.
    provider_under_test = DocktorServiceProvider('foo', 'bar')
    with pytest.raises(DeployException):
        provider_under_test.deploy(123)
|
from nanome.util import Vector3, Quaternion, Logs
from . import _Base
from . import _helpers
class _Complex(_Base):
    """Internal representation of a molecular complex (a group of molecules)."""

    @classmethod
    def _create(cls):
        # Factory hook so subclasses can control instantiation.
        return cls()

    def __init__(self):
        super(_Complex, self).__init__()
        # Molecular
        self._name = "complex"
        self._index_tag = 0
        self._split_tag = ""
        self._remarks = {}
        # Rendering
        self._boxed = False
        self._locked = False
        self._visible = True
        self._computing = False
        self._current_frame = 0
        self._selected = False  # selected on live
        self._surface_dirty = False
        self._surface_refresh_rate = -1.0  # Not used yet, future auto surface refresh
        self._box_label = ""
        # Transform
        self._position = Vector3(0, 0, 0)
        # NOTE(review): (0, 0, 0, 0) is a degenerate zero quaternion; if an
        # identity rotation is intended this may need to be (0, 0, 0, 1) —
        # confirm against nanome's Quaternion argument order.
        self._rotation = Quaternion(0, 0, 0, 0)
        self._molecules = []
        self._parent = None

    def _add_molecule(self, molecule):
        # Append a molecule and take ownership of it.
        self._molecules.append(molecule)
        molecule._parent = self

    def _remove_molecule(self, molecule):
        # Detach a molecule from this complex.
        self._molecules.remove(molecule)
        molecule._parent = None

    def _set_molecules(self, molecules):
        # Replace the molecule list, re-parenting every entry.
        self._molecules = molecules
        for molecule in molecules:
            molecule._parent = self

    @Logs.deprecated()
    def get_atom_iterator(self):
        """Deprecated: iterate over every atom of every molecule."""
        iterator = _Complex.AtomIterator(self)
        return iter(iterator)

    class AtomIterator(object):
        """Iterator flattening complex -> molecules -> atoms."""

        def __init__(self, complex):
            self._complex = complex

        def __iter__(self):
            self._molecule = iter(self._complex._molecules)
            self._update_iter()
            return self

        def __next__(self):
            while True:
                try:
                    return next(self._chainAtom)
                except StopIteration:
                    # Current molecule exhausted; advance to the next one.
                    self._update_iter()

        def _update_iter(self):
            # Advance to the next molecule; the StopIteration raised by
            # next() when molecules run out propagates and ends iteration.
            while True:
                molecule = next(self._molecule)
                try:
                    self._chainAtom = molecule.get_atom_iterator()
                    break
                except StopIteration:
                    pass

    def _shallow_copy(self, target=None):
        """Copy this complex's scalar state (not its molecules) onto *target*.

        Creates a fresh complex when *target* is None. Returns the copy.
        """
        # BUG FIX: identity comparison with None instead of `== None`.
        if target is None:
            other = _Complex._create()
        else:
            other = target
        # Molecular
        other._name = self._name
        other._index_tag = self._index_tag
        other._split_tag = self._split_tag
        other._remarks = self._remarks
        # Rendering
        other._boxed = self._boxed
        other._locked = self._locked
        other._visible = self._visible
        other._computing = self._computing
        other._current_frame = self._current_frame
        other._selected = self._selected
        other._surface_dirty = self._surface_dirty
        other._surface_refresh_rate = self._surface_refresh_rate
        other._box_label = self._box_label
        # Transform
        other._position = self._position.get_copy()
        other._rotation = self._rotation.get_copy()
        return other

    def _deep_copy(self):
        """Return a full structural copy of this complex."""
        return _helpers._copy._deep_copy_complex(self)

    def _convert_to_conformers(self, force_conformers=None):
        # BUG FIX: forward force_conformers instead of hard-coding None, so
        # the parameter actually has an effect (it was silently ignored).
        result = _helpers._conformer_helper.convert_to_conformers(
            self, force_conformers)
        return result

    def _convert_to_frames(self):
        result = _helpers._conformer_helper.convert_to_frames(self)
        return result

    def __copy_received_complex(self, new_complex):
        # Overwrite our shallow state and molecule list with the received
        # complex's content.
        if new_complex is not None:
            new_complex._shallow_copy(self)
            self._molecules = new_complex._molecules
|
#!/usr/bin/env python
#encoding=utf8
if __name__ == "__main__":
    # Copy the Douban event info file, stripping "^M" sequences from each line.
    infile = "/home/anthonylife/Doctor/Data/Douban/Event/eventInfo.csv"
    outfile = "/home/anthonylife/Doctor/Code/MyPaperCode/EventRecommendation/data/doubanEventInfo.csv"
    # BUG FIX: both files are now closed deterministically via `with`
    # (the input file was previously never closed at all).
    with open(infile) as rfd, open(outfile, "w") as wfd:
        for line in rfd:
            # NOTE(review): "^M" here is the literal two-character sequence
            # caret + M, not a carriage return. If the intent was to strip CR
            # characters, this should be line.replace("\r", "") — confirm
            # against the input data.
            wfd.write(line.replace("^M", ""))
|
#
# PySNMP MIB module CISCO-IETF-SCTP-EXT-CAPABILITY (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/CISCO-IETF-SCTP-EXT-CAPABILITY
# Produced by pysmi-0.3.4 at Wed May 1 12:01:04 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
# Auto-generated pysmi output: translates the CISCO-IETF-SCTP-EXT-CAPABILITY
# ASN.1 MIB into pysnmp objects. NOTE(review): `mibBuilder` is not defined in
# this module — it is presumably injected into the execution namespace by the
# pysnmp MIB loader; this file is not importable standalone.
OctetString, ObjectIdentifier, Integer = mibBuilder.importSymbols("ASN1", "OctetString", "ObjectIdentifier", "Integer")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
SingleValueConstraint, ConstraintsIntersection, ValueSizeConstraint, ConstraintsUnion, ValueRangeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "SingleValueConstraint", "ConstraintsIntersection", "ValueSizeConstraint", "ConstraintsUnion", "ValueRangeConstraint")
ciscoAgentCapability, = mibBuilder.importSymbols("CISCO-SMI", "ciscoAgentCapability")
NotificationGroup, AgentCapabilities, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "AgentCapabilities", "ModuleCompliance")
iso, ObjectIdentity, TimeTicks, Gauge32, Bits, MibScalar, MibTable, MibTableRow, MibTableColumn, Counter64, NotificationType, Counter32, Unsigned32, IpAddress, Integer32, ModuleIdentity, MibIdentifier = mibBuilder.importSymbols("SNMPv2-SMI", "iso", "ObjectIdentity", "TimeTicks", "Gauge32", "Bits", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "Counter64", "NotificationType", "Counter32", "Unsigned32", "IpAddress", "Integer32", "ModuleIdentity", "MibIdentifier")
DisplayString, TextualConvention = mibBuilder.importSymbols("SNMPv2-TC", "DisplayString", "TextualConvention")
# Module identity and its revision history.
ciscoSctpExtCapability = ModuleIdentity((1, 3, 6, 1, 4, 1, 9, 7, 220))
ciscoSctpExtCapability.setRevisions(('2002-01-21 00:00', '2001-11-30 00:00',))
# The version guards keep compatibility with older pysnmp releases that lack
# the extended descriptive-text APIs.
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    if mibBuilder.loadTexts: ciscoSctpExtCapability.setRevisionsDescriptions(('Updated capabilities to support additional objects and a new notification.', 'Initial version of this MIB module.',))
if mibBuilder.loadTexts: ciscoSctpExtCapability.setLastUpdated('200201210000Z')
if mibBuilder.loadTexts: ciscoSctpExtCapability.setOrganization('Cisco Systems, Inc.')
if mibBuilder.loadTexts: ciscoSctpExtCapability.setContactInfo(' Cisco Systems Customer Service Postal: 170 West Tasman Drive San Jose, CA 95134 USA Tel: +1 800 553-NETS E-mail: cs-sctp@cisco.com')
if mibBuilder.loadTexts: ciscoSctpExtCapability.setDescription('Agent capabilities for the CISCO-IETF-SCTP-EXT-MIB.')
# Agent capability statement for IOS 12.2(4)MB1.
ciscoSctpExtCapabilityV12R024MB1 = AgentCapabilities((1, 3, 6, 1, 4, 1, 9, 7, 220, 1))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    ciscoSctpExtCapabilityV12R024MB1 = ciscoSctpExtCapabilityV12R024MB1.setProductRelease('Cisco IOS 12.2(4)MB1')
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    ciscoSctpExtCapabilityV12R024MB1 = ciscoSctpExtCapabilityV12R024MB1.setStatus('current')
if mibBuilder.loadTexts: ciscoSctpExtCapabilityV12R024MB1.setDescription('IOS 12.2(4)MB1 Cisco CISCO-IETF-SCTP-EXT-MIB.my User Agent MIB capabilities.')
# Agent capability statement for IOS 12.2(4)MB3.
ciscoSctpExtCapabilityV12R0204MB3 = AgentCapabilities((1, 3, 6, 1, 4, 1, 9, 7, 220, 2))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    ciscoSctpExtCapabilityV12R0204MB3 = ciscoSctpExtCapabilityV12R0204MB3.setProductRelease('Cisco IOS 12.2(4)MB3')
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    ciscoSctpExtCapabilityV12R0204MB3 = ciscoSctpExtCapabilityV12R0204MB3.setStatus('current')
if mibBuilder.loadTexts: ciscoSctpExtCapabilityV12R0204MB3.setDescription('IOS 12.2(4)MB3 Cisco CISCO-IETF-SCTP-EXT-MIB.my User Agent MIB capabilities.')
mibBuilder.exportSymbols("CISCO-IETF-SCTP-EXT-CAPABILITY", ciscoSctpExtCapability=ciscoSctpExtCapability, PYSNMP_MODULE_ID=ciscoSctpExtCapability, ciscoSctpExtCapabilityV12R024MB1=ciscoSctpExtCapabilityV12R024MB1, ciscoSctpExtCapabilityV12R0204MB3=ciscoSctpExtCapabilityV12R0204MB3)
|
# Compatibility shim: nmigen was renamed to amaranth. Re-export
# amaranth.build under the old module path and warn importers to migrate.
from amaranth.build import *
import warnings
warnings.warn("instead of nmigen.build, use amaranth.build",
              DeprecationWarning, stacklevel=2)
|
from django.utils.deprecation import MiddlewareMixin
from django.shortcuts import render, redirect
from django.urls import reverse
class LoginCheckMiddleWare(MiddlewareMixin):
    """Route users to the area matching their role.

    Authenticated users may only hit views belonging to their own role's
    module (plus the shared app/static views); anyone else is redirected to
    their home page. Unauthenticated users are funnelled to the login views.
    """

    # user_type -> (views module the role may access, role home redirect)
    ROLE_VIEWS = {
        "1": ("student_information_app.HodViews", "admin_home"),
        "2": ("student_information_app.StaffViews", "staff_home"),
        "3": ("student_information_app.StudentViews", "student_home"),
    }

    # Modules every authenticated role may use.
    SHARED_MODULES = ("student_information_app.views", "django.views.static")

    def process_view(self, request, view_func, view_args, view_kwargs):
        modulename = view_func.__module__
        user = request.user

        if not user.is_authenticated:
            # Unauthenticated traffic may only reach the login endpoints.
            login_allowed = (
                request.path == reverse("login")
                or request.path == reverse("doLogin")
                or modulename == 'student_information_app.views'
                or modulename == 'django.contrib.auth.views'
            )
            if login_allowed:
                return None
            return redirect("login")

        role = self.ROLE_VIEWS.get(user.user_type)
        if role is None:
            # Unknown role: force re-authentication.
            return redirect("login")

        own_module, home = role
        if modulename == own_module or modulename in self.SHARED_MODULES:
            return None
        return redirect(home)
|
import shlex
import subprocess
# BUG FIX: the command was launched twice and the first Popen handle was
# overwritten without being waited on, leaking a child process and an open
# stdout pipe. Run each invocation to completion and drain its output.
# NOTE(review): 'sudo echo 1' twice looks like sudo-credential priming —
# confirm the double invocation is intentional (it is preserved here).
for _ in range(2):
    p = subprocess.Popen(shlex.split('sudo echo 1'), stdout=subprocess.PIPE)
    p.communicate()
|
def readConfig(configName="config.txt"):
    """
    Reads the information from configuration file (config.txt)
    Returns the following information in a list:
    1) The name of the Excel file containing the designated Excel sheet
    2) The name of the Excel sheet to read the data from
    3) The name of the Word document to save the analysis to
    4) A dictionary with the details of each question
       Each entry is arranged as <question no.>: <question type>
    5) A list of strings to leave out from the summary
    6) The number of lines in the summary
    >>> readConfig("Hello.txt")
    Error: Hello.txt not found
    'ERROR'
    >>> readConfig("./config_testing/config-wrong(1).txt")
    Error on line 1: Incorrect file type. Your filename should end in '.xlsx'
    'ERROR'
    >>> readConfig("./config_testing/config-wrong(5).txt")
    Error on line 5: Please enter the number of lines you want in your summary
    'ERROR'
    >>> readConfig("./config_testing/config-wrong(7).txt")
    Error on line 7: Incorrect file type. Your filename should end in '.docx'
    'ERROR'
    >>> readConfig("./config_testing/config-wrong(19++).txt")
    Error on line 20: Please enter either 'demographic', 'numerical', 'categorical' or 'free-response'
    'ERROR'
    >>> readConfig("./config_testing/config-right.txt")
    ('responses_testing.xlsx', 'Form responses 1', 'response_analysis.docx', {'1': 'demographic', '2': 'numeric', '3': 'categorical', '4': 'free-response'}, ['nil', 'na', 'none', '-', '–', 'everything', 'nothing', 'your mum'], 5)
    """
    try:
        # utf-8 keeps the program from crashing on the dashes used in the file
        config = open(configName, "r", encoding="utf-8")
    except OSError:
        print("Error: {} not found".format(configName))
        return "ERROR"  # tells the caller about the error, so the rest of the program is skipped

    # BUG FIX: the file was previously left open on every early-return error
    # path; the `with` block guarantees it is closed.
    with config:
        try:
            # the line is split to get only the second part such that the
            # instruction won't be included
            filename = str(config.readline().replace("\n", "").split(": ")[1])
        except Exception:
            print("Error on line 1: Please enter the name of a file")
            return "ERROR"
        if ".xlsx" not in filename:
            print("Error on line 1: Incorrect file type. Your filename should end in '.xlsx'")
            return "ERROR"

        try:
            sheetname = str(config.readline().replace("\n", "").split(": ")[1])  # the Excel sheet name
        except Exception:
            print("Error on line 2: Please enter a string")
            return "ERROR"

        config.readline()  # line 3 of the config file is empty

        try:
            leaveOut = str(config.readline().replace("\n", "").split(": ")[1]).split(", ")  # words excluded from the summary
        except Exception:
            print("Error on line 4: Please enter a list of strings to exclude from the summary")
            return "ERROR"

        try:
            summLen = int(config.readline().replace("\n", "").split(": ")[1].strip())  # number of summary lines
        except Exception:
            print("Error on line 5: Please enter the number of lines you want in your summary")
            return "ERROR"

        config.readline()  # line 6 of the config file is empty

        try:
            docsname = str(config.readline().replace("\n", "").split(": ")[1])  # the Word document name
        except Exception:
            print("Error on line 7: Please enter the name of a file")
            return "ERROR"
        if ".docx" not in docsname:
            print("Error on line 7: Incorrect file type. Your filename should end in '.docx'")
            return "ERROR"

        # Lines 8-18 hold instructions about the following section plus a few
        # empty lines; skip them.
        for i in range(11):
            config.readline()

        qnTypes = config.readlines()  # the rest: one "<question no.>: <question type>" per line
        questions = {}
        lineCount = 19  # tracks the config line so error messages point at the right place
        for line in qnTypes:
            qn = line.lower().replace("\n", "").split(": ")
            # len(qn) < 2 guards malformed lines lacking the ": " separator,
            # which previously raised an unhandled IndexError.
            if len(qn) < 2 or qn[1] not in ("demographic", "numeric", "categorical", "free-response"):
                print("Error on line {}: Please enter either 'demographic', 'numerical', 'categorical' or 'free-response'".format(lineCount))
                return "ERROR"
            questions[qn[0]] = qn[1]  # key = question number, value = question type
            lineCount += 1

    return filename, sheetname, docsname, questions, leaveOut, summLen
|
import wrapt
def keyword_optional(keyword, before=False, after=False, keep_keyword=False,
                     when_missing=False):
    """Run optional hooks around the decorated function, gated by a kwarg.

    When the decorated function is called with ``keyword`` truthy in its
    kwargs (or ``when_missing`` is truthy and the keyword is absent), the
    ``before`` callable runs before the call and ``after`` runs after it.
    Unless ``keep_keyword`` is set, the keyword is popped from the kwargs
    before the wrapped function sees them.

    Examples:
        def do_thing():
            # ... does something ...

        @keyword_optional('_do_thing', before=do_thing)
        def func(data):
            return data

        func(data, _do_thing=True)
    """
    @wrapt.decorator
    def _execute(wrapped, instance, args, kwargs):
        if keep_keyword:
            do_it = kwargs.get(keyword, when_missing)
        else:
            do_it = kwargs.pop(keyword, when_missing)
        if do_it and before:
            before()
        result = wrapped(*args, **kwargs)
        if do_it and after:
            after()
        return result
    return _execute
|
import json
from loguru import logger
from spade.behaviour import State, FSMBehaviour
from simfleet.customer import CustomerStrategyBehaviour
from simfleet.fleetmanager import FleetManagerStrategyBehaviour
from simfleet.helpers import PathRequestException, distance_in_meters
from simfleet.protocol import REQUEST_PERFORMATIVE, ACCEPT_PERFORMATIVE, REFUSE_PERFORMATIVE, REQUEST_PROTOCOL, \
INFORM_PERFORMATIVE, CANCEL_PERFORMATIVE, PROPOSE_PERFORMATIVE, QUERY_PROTOCOL
from simfleet.transport import TransportStrategyBehaviour
from simfleet.utils import TRANSPORT_WAITING, TRANSPORT_WAITING_FOR_APPROVAL, TRANSPORT_MOVING_TO_CUSTOMER, \
TRANSPORT_NEEDS_CHARGING, TRANSPORT_MOVING_TO_STATION, TRANSPORT_IN_STATION_PLACE, TRANSPORT_CHARGING, \
TRANSPORT_CHARGED, CUSTOMER_WAITING, CUSTOMER_ASSIGNED
################################################################
# #
# FleetManager Strategy #
# #
################################################################
class DelegateRequestBehaviour(FleetManagerStrategyBehaviour):
    """Default FleetManager strategy.

    Forwards every customer request it receives to every registered
    transport agent.
    """

    async def run(self):
        # Register with the directory the first time the behaviour runs.
        if not self.agent.registration:
            await self.send_registration()

        msg = await self.receive(timeout=5)
        logger.debug("Manager received message: {}".format(msg))
        if not msg:
            return

        # Re-address the same message to each known transport and resend it.
        for transport in self.get_transport_agents().values():
            msg.to = str(transport["jid"])
            logger.debug("Manager sent request to transport {}".format(transport["name"]))
            await self.send(msg)
################################################################
# #
# Transport Strategy #
# #
################################################################
class TransportWaitingState(TransportStrategyBehaviour, State):
    """Idle state: wait for customer requests and triage them."""

    async def on_start(self):
        await super().on_start()
        self.agent.status = TRANSPORT_WAITING
        logger.debug("{} in Transport Waiting State".format(self.agent.jid))

    async def run(self):
        msg = await self.receive(timeout=60)
        if not msg:
            # Timed out with no traffic; keep waiting.
            self.set_next_state(TRANSPORT_WAITING)
            return

        logger.debug("Transport {} received: {}".format(self.agent.jid, msg.body))
        content = json.loads(msg.body)

        if msg.get_metadata("performative") != REQUEST_PERFORMATIVE:
            # Anything that is not a request is ignored.
            self.set_next_state(TRANSPORT_WAITING)
            return

        if self.has_enough_autonomy(content["origin"], content["dest"]):
            await self.send_proposal(content["customer_id"], {})
            self.set_next_state(TRANSPORT_WAITING_FOR_APPROVAL)
        else:
            # Not enough battery for the trip: decline and go charge.
            await self.cancel_proposal(content["customer_id"])
            self.set_next_state(TRANSPORT_NEEDS_CHARGING)
class TransportNeedsChargingState(TransportStrategyBehaviour, State):
    """Fetch the station list (if needed) and head to the closest station."""

    async def on_start(self):
        await super().on_start()
        self.agent.status = TRANSPORT_NEEDS_CHARGING
        logger.debug("{} in Transport Needs Charging State".format(self.agent.jid))

    async def run(self):
        # NOTE(review): Python binds this as
        #   (stations is None) or ((len < 1) and (not stations_requested)),
        # so when stations is still None the request is re-sent on every pass
        # regardless of the stations_requested flag. If the flag was meant to
        # suppress re-requests, the intended grouping is probably
        #   (stations is None or len < 1) and not stations_requested — confirm.
        if self.agent.stations is None or len(self.agent.stations) < 1 and not self.get(name="stations_requested"):
            logger.info("Transport {} looking for a station.".format(self.agent.name))
            self.set(name="stations_requested", value=True)
            await self.send_get_stations()

            # Wait (long timeout) for the directory's reply.
            msg = await self.receive(timeout=600)
            if not msg:
                self.set_next_state(TRANSPORT_NEEDS_CHARGING)
                return
            logger.debug("Transport received message: {}".format(msg))
            try:
                content = json.loads(msg.body)
            except TypeError:
                # Empty/None body: treat as no station data.
                content = {}
            performative = msg.get_metadata("performative")
            protocol = msg.get_metadata("protocol")
            if protocol == QUERY_PROTOCOL:
                if performative == INFORM_PERFORMATIVE:
                    # Station catalog received; fall through to selection.
                    self.agent.stations = content
                    logger.info("Transport {} got list of current stations: {}".format(self.agent.name, len(
                        list(self.agent.stations.keys()))))
                elif performative == CANCEL_PERFORMATIVE:
                    # Directory refused; allow a new request on the next pass.
                    logger.info(
                        "Transport {} got a cancellation of request for stations information.".format(self.agent.name))
                    self.set(name="stations_requested", value=False)
                    self.set_next_state(TRANSPORT_NEEDS_CHARGING)
                    return
                else:
                    self.set_next_state(TRANSPORT_NEEDS_CHARGING)
                    return
            else:
                # Unrelated protocol: retry.
                self.set_next_state(TRANSPORT_NEEDS_CHARGING)
                return

        # Pick the station closest to the transport's current position.
        station_positions = []
        for key in self.agent.stations.keys():
            dic = self.agent.stations.get(key)
            station_positions.append((dic['jid'], dic['position']))
        closest_station = min(station_positions,
                              key=lambda x: distance_in_meters(x[1], self.agent.get_position()))
        logger.debug("Closest station {}".format(closest_station))
        station = closest_station[0]
        self.agent.current_station_dest = (station, self.agent.stations[station]["position"])
        logger.info("Transport {} selected station {}.".format(self.agent.name, station))

        try:
            station, position = self.agent.current_station_dest
            await self.go_to_the_station(station, position)
            self.set_next_state(TRANSPORT_MOVING_TO_STATION)
            return
        except PathRequestException:
            # No route to the station: give up and return to waiting.
            logger.error("Transport {} could not get a path to station {}. Cancelling..."
                         .format(self.agent.name, station))
            await self.cancel_proposal(station)
            self.set_next_state(TRANSPORT_WAITING)
            return
        except Exception as e:
            logger.error("Unexpected error in transport {}: {}".format(self.agent.name, e))
            self.set_next_state(TRANSPORT_WAITING)
            return
class TransportMovingToStationState(TransportStrategyBehaviour, State):
    """Travel to the selected station; advance once the station is reached."""

    async def on_start(self):
        await super().on_start()
        self.agent.status = TRANSPORT_MOVING_TO_STATION
        logger.debug("{} in Transport Moving to Station".format(self.agent.jid))

    async def run(self):
        # Already arrived (flag set before this state started): skip the wait.
        if self.agent.get("in_station_place"):
            logger.warning("Transport {} already in station place".format(self.agent.jid))
            await self.agent.request_access_station()
            return self.set_next_state(TRANSPORT_IN_STATION_PLACE)

        # Reset the arrival event BEFORE registering the observer so the
        # callback's set() is not lost, then block until arrival.
        self.agent.transport_in_station_place_event.clear()  # new
        self.agent.watch_value("in_station_place", self.agent.transport_in_station_place_callback)
        await self.agent.transport_in_station_place_event.wait()
        # Arrived: ask the station for access and join its queue.
        await self.agent.request_access_station()  # new
        return self.set_next_state(TRANSPORT_IN_STATION_PLACE)
class TransportInStationState(TransportStrategyBehaviour, State):
    """Queued at the station: wait for the station's ACCEPT before charging."""

    # car arrives to the station and waits in queue until receiving confirmation
    async def on_start(self):
        await super().on_start()
        logger.debug("{} in Transport In Station Place State".format(self.agent.jid))
        self.agent.status = TRANSPORT_IN_STATION_PLACE

    async def run(self):
        msg = await self.receive(timeout=60)
        if not msg:
            # No traffic: keep waiting in the queue.
            self.set_next_state(TRANSPORT_IN_STATION_PLACE)
            return

        content = json.loads(msg.body)
        performative = msg.get_metadata("performative")
        if performative == ACCEPT_PERFORMATIVE and content.get('station_id') is not None:
            logger.debug("Transport {} received a message with ACCEPT_PERFORMATIVE from {}".format(self.agent.name,
                                                                                                   content[
                                                                                                       "station_id"]))
            await self.charge_allowed()
            self.set_next_state(TRANSPORT_CHARGING)
            return

        # BUG FIX: previously an ACCEPT that lacked 'station_id' selected no
        # next state at all, which silently terminates the FSM. Any message
        # that does not grant charging now keeps the transport in the queue.
        self.set_next_state(TRANSPORT_IN_STATION_PLACE)
        return
class TransportChargingState(TransportStrategyBehaviour, State):
    """Charging at a station; leave once the station reports a full charge."""

    # car charges in a station
    async def on_start(self):
        await super().on_start()
        logger.debug("{} in Transport Charging State".format(self.agent.jid))

    async def run(self):
        # await "transport_charged" message
        msg = await self.receive(timeout=60)
        if not msg:
            self.set_next_state(TRANSPORT_CHARGING)
            return
        content = json.loads(msg.body)
        protocol = msg.get_metadata("protocol")
        performative = msg.get_metadata("performative")
        if protocol == REQUEST_PROTOCOL and performative == INFORM_PERFORMATIVE:
            if content["status"] == TRANSPORT_CHARGED:
                self.agent.transport_charged()
                await self.agent.drop_station()
                # TODO: replace this with an event? (original: "canviar per un event?")
                self.set_next_state(TRANSPORT_WAITING)
                return
        else:
            # Unrelated message: keep charging.
            # NOTE(review): a REQUEST/INFORM message whose status is not
            # TRANSPORT_CHARGED selects no next state here (the else pairs
            # with the outer if) — confirm such messages cannot occur,
            # otherwise the FSM would stop.
            self.set_next_state(TRANSPORT_CHARGING)
            return
class TransportWaitingForApprovalState(TransportStrategyBehaviour, State):
    """Proposal sent: wait for the customer's accept/refuse decision."""

    async def on_start(self):
        await super().on_start()
        self.agent.status = TRANSPORT_WAITING_FOR_APPROVAL
        logger.debug("{} in Transport Waiting For Approval State".format(self.agent.jid))

    async def run(self):
        msg = await self.receive(timeout=60)
        if not msg:
            # No answer yet; keep waiting.
            self.set_next_state(TRANSPORT_WAITING_FOR_APPROVAL)
            return
        content = json.loads(msg.body)
        performative = msg.get_metadata("performative")
        if performative == ACCEPT_PERFORMATIVE:
            try:
                logger.debug("Transport {} got accept from {}".format(self.agent.name,
                                                                      content["customer_id"]))
                # new version
                self.agent.status = TRANSPORT_MOVING_TO_CUSTOMER
                # Re-check the battery before committing to the trip.
                if not self.check_and_decrease_autonomy(content["origin"], content["dest"]):
                    await self.cancel_proposal(content["customer_id"])
                    self.set_next_state(TRANSPORT_NEEDS_CHARGING)
                    return
                else:
                    await self.pick_up_customer(content["customer_id"], content["origin"], content["dest"])
                    self.set_next_state(TRANSPORT_MOVING_TO_CUSTOMER)
                    return
            except PathRequestException:
                # No route to the customer: withdraw the proposal.
                logger.error("Transport {} could not get a path to customer {}. Cancelling..."
                             .format(self.agent.name, content["customer_id"]))
                await self.cancel_proposal(content["customer_id"])
                self.set_next_state(TRANSPORT_WAITING)
                return
            except Exception as e:
                logger.error("Unexpected error in transport {}: {}".format(self.agent.name, e))
                await self.cancel_proposal(content["customer_id"])
                self.set_next_state(TRANSPORT_WAITING)
                return
        elif performative == REFUSE_PERFORMATIVE:
            logger.debug("Transport {} got refusal from customer/station".format(self.agent.name))
            self.set_next_state(TRANSPORT_WAITING)
            return
        else:
            # Ignore anything that is neither accept nor refuse.
            self.set_next_state(TRANSPORT_WAITING_FOR_APPROVAL)
            return
class TransportMovingToCustomerState(TransportStrategyBehaviour, State):
    """Drive to the customer; return to waiting once they are aboard."""

    async def on_start(self):
        await super().on_start()
        self.agent.status = TRANSPORT_MOVING_TO_CUSTOMER
        logger.debug("{} in Transport Moving To Customer State".format(self.agent.jid))

    async def run(self):
        # Reset internal flag to False. coroutines calling
        # wait() will block until set() is called
        self.agent.customer_in_transport_event.clear()
        # Registers an observer callback to be run when the "customer_in_transport" is changed
        self.agent.watch_value("customer_in_transport", self.agent.customer_in_transport_callback)
        # block behaviour until another coroutine calls set()
        await self.agent.customer_in_transport_event.wait()
        return self.set_next_state(TRANSPORT_WAITING)
class FSMTransportStrategyBehaviour(FSMBehaviour):
    """Finite-state machine wiring for the transport strategy.

    Declares every state of the transport life-cycle and the allowed
    transitions between them; TRANSPORT_WAITING is the entry state.
    """

    def setup(self):
        # (state name, state instance) pairs; the first entry is the FSM entry point.
        states = [
            (TRANSPORT_WAITING, TransportWaitingState()),
            (TRANSPORT_NEEDS_CHARGING, TransportNeedsChargingState()),
            (TRANSPORT_WAITING_FOR_APPROVAL, TransportWaitingForApprovalState()),
            (TRANSPORT_MOVING_TO_CUSTOMER, TransportMovingToCustomerState()),
            (TRANSPORT_MOVING_TO_STATION, TransportMovingToStationState()),
            (TRANSPORT_IN_STATION_PLACE, TransportInStationState()),
            (TRANSPORT_CHARGING, TransportChargingState()),
        ]
        initial_name, initial_state = states[0]
        self.add_state(initial_name, initial_state, initial=True)
        for name, state in states[1:]:
            self.add_state(name, state)

        # Allowed (source, destination) transitions, annotated with their trigger.
        transitions = [
            (TRANSPORT_WAITING, TRANSPORT_WAITING),  # waiting for messages
            (TRANSPORT_WAITING, TRANSPORT_WAITING_FOR_APPROVAL),  # accepted by customer
            (TRANSPORT_WAITING, TRANSPORT_NEEDS_CHARGING),  # not enough charge
            (TRANSPORT_WAITING_FOR_APPROVAL, TRANSPORT_WAITING_FOR_APPROVAL),  # waiting for approval message
            (TRANSPORT_WAITING_FOR_APPROVAL, TRANSPORT_WAITING),  # transport refused
            (TRANSPORT_WAITING_FOR_APPROVAL, TRANSPORT_MOVING_TO_CUSTOMER),  # going to pick up customer
            (TRANSPORT_NEEDS_CHARGING, TRANSPORT_NEEDS_CHARGING),  # waiting for station list
            (TRANSPORT_NEEDS_CHARGING, TRANSPORT_MOVING_TO_STATION),  # going to station
            (TRANSPORT_NEEDS_CHARGING, TRANSPORT_WAITING),  # exception in go_to_the_station(station, position)
            (TRANSPORT_MOVING_TO_STATION, TRANSPORT_IN_STATION_PLACE),  # arrived to station
            (TRANSPORT_IN_STATION_PLACE, TRANSPORT_IN_STATION_PLACE),  # waiting in station queue
            (TRANSPORT_IN_STATION_PLACE, TRANSPORT_CHARGING),  # begin charging
            (TRANSPORT_CHARGING, TRANSPORT_CHARGING),  # waiting to finish charging
            (TRANSPORT_CHARGING, TRANSPORT_WAITING),  # restart strategy
            (TRANSPORT_MOVING_TO_CUSTOMER, TRANSPORT_MOVING_TO_CUSTOMER),  # still moving
            (TRANSPORT_MOVING_TO_CUSTOMER, TRANSPORT_WAITING),  # picked up customer or arrived to destination ??
        ]
        for source, destination in transitions:
            self.add_transition(source, destination)
################################################################
# #
# Customer Strategy #
# #
################################################################
class AcceptFirstRequestBehaviour(CustomerStrategyBehaviour):
    """
    The default strategy for the Customer agent. By default it accepts the first proposal it receives.
    """

    async def run(self):
        # Lazily discover the fleet managers for our fleet type the first time
        # the behaviour runs.
        if self.agent.fleetmanagers is None:
            await self.send_get_managers(self.agent.fleet_type)
            # Wait (long timeout) for the directory answer.
            msg = await self.receive(timeout=300)
            if msg:
                protocol = msg.get_metadata("protocol")
                if protocol == QUERY_PROTOCOL:
                    performative = msg.get_metadata("performative")
                    if performative == INFORM_PERFORMATIVE:
                        # The message body carries the fleet manager list as JSON.
                        self.agent.fleetmanagers = json.loads(msg.body)
                        logger.info("{} got fleet managers {}".format(self.agent.name, self.agent.fleetmanagers))
                    elif performative == CANCEL_PERFORMATIVE:
                        logger.info("{} got cancellation of request for {} information".format(self.agent.name,
                                                                                               self.agent.type_service))
                    # In either case restart the behaviour on its next cycle.
                    return
        # While unassigned, (re)send a transport request.
        if self.agent.status == CUSTOMER_WAITING:
            await self.send_request(content={})
        msg = await self.receive(timeout=5)
        if msg:
            performative = msg.get_metadata("performative")
            transport_id = msg.sender
            if performative == PROPOSE_PERFORMATIVE:
                if self.agent.status == CUSTOMER_WAITING:
                    logger.debug(
                        "Customer {} received proposal from transport {}".format(self.agent.name, transport_id))
                    # Accept the first proposal that arrives while still waiting.
                    await self.accept_transport(transport_id)
                    self.agent.status = CUSTOMER_ASSIGNED
                else:
                    # Already assigned to a transport: refuse further proposals.
                    await self.refuse_transport(transport_id)
            elif performative == CANCEL_PERFORMATIVE:
                # Only a cancel from OUR assigned transport puts us back to waiting,
                # so a new request is sent on the next cycle.
                if self.agent.transport_assigned == str(transport_id):
                    logger.warning(
                        "Customer {} received a CANCEL from Transport {}.".format(self.agent.name, transport_id))
                    self.agent.status = CUSTOMER_WAITING
|
"""Instruction running module."""
# Official Libraries
# My Modules
from sms.objs.basecode import BaseCode
from sms.syss import messages as msg
from sms.utils.log import logger
__all__ = (
'apply_instructions',
)
# Define Constants
PROC = 'INST RUNNER'
# Main
def apply_instructions(base_data: list) -> list:
    """Apply stored instructions to a list of base-code records.

    Instruction handling itself is not implemented yet (see the
    MSG_UNIMPLEMENT_PROC log below), so this currently validates the input
    and returns a shallow copy of the records unchanged.

    Args:
        base_data: list of BaseCode records.

    Returns:
        A new list containing the same BaseCode records, in order.

    Raises:
        TypeError: if base_data is not a list or any element is not a BaseCode.
    """
    # Explicit validation instead of `assert`: asserts are silently stripped
    # when Python runs with optimizations (-O), so they must not guard input.
    if not isinstance(base_data, list):
        raise TypeError("base_data must be a list, got {!r}".format(type(base_data).__name__))
    logger.debug(msg.PROC_START.format(proc=PROC))
    result = []
    for record in base_data:
        if not isinstance(record, BaseCode):
            raise TypeError("every record must be a BaseCode, got {!r}".format(type(record).__name__))
        result.append(record)
    # NOTE: the actual instruction application is still unimplemented.
    logger.debug(msg.MSG_UNIMPLEMENT_PROC.format(proc=PROC))
    logger.debug(msg.PROC_SUCCESS.format(proc=PROC))
    return result
|
import sys
import itertools
from packet import *
from program import *
class Parser():
    """Index and parse a GPU-style trace file into threadgroups, warps and threads.

    The constructor scans the whole file once (index()) to record where each
    threadgroup's lines begin and end; parse() then re-reads only those line
    ranges on demand, batch by batch.

    Line formats (all numeric fields are hexadecimal):
      JD - job dimensions: 3 grid dims followed by 3 threadgroup dims
      T  - start of a new thread
      P  - instruction packet: pc, #instructions, #deps, deps...
      L  - load packet: bytes, address, pc, #instructions, #deps, deps...
      S  - store packet: bytes, address, pc, #instructions, #deps, deps...
      B  - barrier packet: pc, #instructions, #deps, deps...
    """

    def __init__(self, filename):
        # Threads per warp; capped to the threadgroup size once JD is read.
        self.warp_size = 4
        self.dimensions = []
        self.tg_thread_counter = 0
        # threadgroup id -> first / one-past-last line index of its records.
        self.threadgroup_start_index = {}
        self.threadgroup_end_index = {}
        # Next threadgroup to be handed out by parse().
        self.threadgroup_id = 0
        self.filename = filename
        self.index()
        # Global running counters shared across all parse() calls.
        self.warp_counter = 0
        self.instr_counter = 0

    def index(self):
        """Scan the file once and record each threadgroup's line range."""
        line_count = 0
        thread_id = 0
        threadgroup_id = 0
        line = 1  # truthy placeholder so the read loop runs at least once
        with open(self.filename, "r") as fp:
            while line:
                line = fp.readline()
                if line != "":
                    if line[0:2] == "JD":
                        # Job dimensions: derive threadgroup and job sizes.
                        self.dimensions = [int(x,16) for x in line.split(' ')[1:]]
                        self.tg_size = self.dimensions[3] * self.dimensions[4] * self.dimensions[5]
                        self.num_tgs = self.dimensions[0] * self.dimensions[1] * self.dimensions[2]
                        self.size = self.num_tgs * self.tg_size
                        # A warp can never hold more threads than a threadgroup.
                        self.warp_size = min(self.warp_size, self.tg_size)
                    elif line[0] == "T":
                        # Every tg_size-th thread starts a new threadgroup
                        # (special-cased for the very first thread and for
                        # single-thread groups).
                        if (thread_id) % self.tg_size == 0 and thread_id != 0 and (((thread_id + 1) != self.size) or self.tg_size == 1):
                            self.threadgroup_end_index[threadgroup_id - 1] = line_count
                            self.threadgroup_start_index[threadgroup_id] = line_count
                            threadgroup_id += 1
                        elif thread_id == 0:
                            self.threadgroup_start_index[threadgroup_id] = line_count
                            threadgroup_id += 1
                        thread_id += 1
                    line_count += 1
        # Close off the last threadgroup at end of file.
        self.threadgroup_end_index[threadgroup_id - 1] = line_count

    def parse_all(self):
        """Parse every threadgroup that has not been parsed yet."""
        return self.parse(self.num_tgs)

    def parse(self, n):
        """Parse the next n threadgroups.

        Returns a list with one entry per threadgroup; each entry is a list
        of Warp objects whose threads carry their packet traces.
        """
        # Never run past the end of the job.
        n = min(n, self.num_tgs - self.threadgroup_id)
        trace = []
        for i in list(self.threadgroup_start_index.keys())[self.threadgroup_id:self.threadgroup_id + n]:
            with open(self.filename, "r") as fp:
                trace.append([])
                trace[-1].append(Warp(self.warp_counter, self.warp_size))
                self.warp_counter += 1
                warp_thread_counter = 0
                tg_thread_counter = 0
                # Read only this threadgroup's slice of the file.
                for line in itertools.islice(fp, self.threadgroup_start_index[i], self.threadgroup_end_index[i]):
                    if line[0] == "T":
                        # Open a new warp once the current one is full.
                        if warp_thread_counter == self.warp_size:
                            trace[-1].append(Warp(self.warp_counter, self.warp_size))
                            self.warp_counter += 1
                            warp_thread_counter = 1
                            tg_thread_counter +=1
                            trace[-1][-1].add_thread()
                        else:
                            trace[-1][-1].add_thread()
                            tg_thread_counter += 1
                            warp_thread_counter += 1
                    elif line[0] == "P":
                        # Instruction packet: pc, #instructions, #deps, deps...
                        entry = [int(x,16) for x in line.split(' ')[1:]]
                        pc = entry[0]
                        num_instructions = entry[1]
                        # NOTE(review): instructions are weighted x2 in the
                        # counter -- confirm the intended weighting.
                        self.instr_counter += num_instructions * 2
                        num_deps = entry[2]
                        deps = entry[3:3+num_deps]
                        trace[-1][-1].threads[warp_thread_counter - 1].trace.append(InstructionPacket(pc, num_instructions, deps))
                    elif line[0] == "L":
                        # Load packet: bytes and address precede the common fields.
                        entry = [int(x,16) for x in line.split(' ')[1:]]
                        bytes = entry[0]
                        address = entry[1]
                        pc = entry[2]
                        num_instructions = entry[3]
                        self.instr_counter += num_instructions * 2
                        num_deps = entry[4]
                        deps = entry[5:5+num_deps]
                        trace[-1][-1].threads[warp_thread_counter - 1].trace.append(LoadPacket(pc, num_instructions, deps, bytes, address))
                    elif line[0] == "S":
                        # Store packet: same field layout as a load packet.
                        entry = [int(x,16) for x in line.split(' ')[1:]]
                        bytes = entry[0]
                        address = entry[1]
                        pc = entry[2]
                        num_instructions = entry[3]
                        self.instr_counter += num_instructions * 2
                        num_deps = entry[4]
                        deps = entry[5:5+num_deps]
                        trace[-1][-1].threads[warp_thread_counter - 1].trace.append(StorePacket(pc, num_instructions, deps, bytes, address))
                    elif line[0] == "B":
                        # Barrier packet: same field layout as an instruction packet.
                        entry = [int(x,16) for x in line.split(' ')[1:]]
                        pc = entry[0]
                        num_instructions = entry[1]
                        self.instr_counter += num_instructions * 2
                        num_deps = entry[2]
                        deps = entry[3:3+num_deps]
                        trace[-1][-1].threads[warp_thread_counter - 1].trace.append(BarrierPacket(pc, num_instructions, deps))
        # Remember how far we have parsed.
        self.threadgroup_id += n
        return trace
def test():
    """Smoke-test driver: parse sys.argv[2] threadgroups from the trace file
    named by sys.argv[1] and dump the resulting structure to stdout."""
    trace_parser = Parser(sys.argv[1])
    threadgroups = trace_parser.parse(int(sys.argv[2]))
    print("TEST")
    for threadgroup in threadgroups:
        print("TG")
        for warp in threadgroup:
            print(warp.threads)
            for thread in warp.threads:
                print("Thread:")
                for packet in thread.trace:
                    packet.print()
# Allow running this module directly as a command-line smoke test.
if __name__ == "__main__":
    test()
|
#!/usr/bin/env python
from itertools import imap
import logging
import os.path
import sqlite3
import sys
from tilecloud import Tile, TileCoord, consume
from tilecloud.filter.error import DropErrors
from tilecloud.filter.logger import Logger
from tilecloud.store.mbtiles import MBTilesTileStore
from tilecloud.store.renderingtheworld import RenderingTheWorldTileStore
from tilecloud.store.wmts import WMTSTileStore
# Log everything at DEBUG level; name the logger after this script's filename.
logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger(os.path.basename(sys.argv[0]))
def subdivide(tile):
    """Decide whether *tile* should be split into its children.

    A tile keeps being subdivided only while it rendered without error
    (the WMTS server returns 400 Bad Request when a tile has no data)
    and we have not yet reached zoom level 15.
    """
    if tile.error is not None:
        return False
    return tile.tilecoord.z < 15
def main():
    """Render-the-world driver: seed at zoom 0, subdivide tiles that contain
    data down to zoom 15, and save every rendered tile into a local MBTiles
    file (medford_buildings.mbtiles).

    NOTE(review): uses itertools.imap, so this script is Python 2 only.
    """
    # Create our RenderingTheWorld tile store that will manage the queue and subdivision.
    # We pass it the function that decides whether a tile should be subdivided, and an initial tile.
    rendering_the_world_tilestore = RenderingTheWorldTileStore(subdivide, seeds=(Tile(TileCoord(0, 0, 0)),))
    # Start the tilestream by getting a list of all tiles to be generated.
    tilestream = rendering_the_world_tilestore.list()
    tilestream = imap(Logger(logger, logging.INFO, "get %(tilecoord)s"), tilestream)
    # Create the tile store that will generate our tiles, in this case it's a demo WMTS server at OpenGeo.
    # Getting tiles from this store will either return the tile as a PNG file, or set an error on the tile
    # if there are no features in this tile.
    generate_tilestore = WMTSTileStore(
        url="http://v2.suite.opengeo.org/geoserver/gwc/service/wmts/",
        layer="medford:buildings",
        style="_null",
        format="image/png",
        tile_matrix_set="EPSG:900913",
        tile_matrix=lambda z: "EPSG:900913:{0:d}".format(z),
    )
    tilestream = generate_tilestore.get(tilestream)
    tilestream = imap(Logger(logger, logging.INFO, "got %(tilecoord)s, error=%(error)s"), tilestream)
    # Put the tile back into the RenderingTheWorld tile store. This checks whether the tile should be
    # subdivided, and, if so, adds the tile's children to the list of tiles to be generated.
    tilestream = rendering_the_world_tilestore.put(tilestream)
    # Get rid of tiles that returned an error (i.e. where there was no data).
    tilestream = imap(DropErrors(), tilestream)
    # Store the generated tiles in the output tile store, in our case a local MBTiles file.
    output_tilestore = MBTilesTileStore(sqlite3.connect("medford_buildings.mbtiles"))
    tilestream = output_tilestore.put(tilestream)
    tilestream = imap(Logger(logger, logging.INFO, "saved %(tilecoord)s"), tilestream)
    # Go! (the stream is lazy -- nothing happens until it is consumed)
    consume(tilestream, None)
# Propagate main()'s return value as the process exit status.
if __name__ == "__main__":
    sys.exit(main())
|
from django.test import TestCase
from account.models import User
from main.constants import LANGUAGES
from main.models import Comment, Snippet, Tag, Ticket
class SnippetTestCase(TestCase):
    """Basic persistence checks for the Snippet model."""

    @classmethod
    def setUpTestData(cls) -> None:
        cls.sample = {
            'title': 'simple title',
            'description': 'some description',
            'body': 'code snippet',
            'lang': LANGUAGES[0][1],  # Arduino
        }

    def setUp(self) -> None:
        self.snippet = Snippet.objects.create(**self.sample)

    def test_if_obj_returns_same_fields(self):
        # Rebuild the sample dict from the stored object and compare wholesale.
        stored = {key: getattr(self.snippet, key) for key in self.sample}
        self.assertEqual(stored, self.sample)

    def test_sid_length(self):
        # nbyte is set to 5 by default, which yields a 10-character id.
        self.assertEqual(len(self.snippet.id), 10)
class TagTestCase(TestCase):
    """Basic persistence checks for the Tag model."""

    @classmethod
    def setUpTestData(cls) -> None:
        cls.sample = {
            'name': 'AngularLA',
            'description': 'a framework built in LA',
        }

    def setUp(self) -> None:
        self.tag = Tag.objects.create(**self.sample)

    def test_if_obj_returns_same_fields(self):
        # Rebuild the sample dict from the stored object and compare wholesale.
        stored = {key: getattr(self.tag, key) for key in self.sample}
        self.assertEqual(stored, self.sample)
class TicketTestCase(TestCase):
    """Checks that a Ticket persists its fields, tags and creator."""

    @classmethod
    def setUpTestData(cls) -> None:
        cls.ticket_sample = {
            'title': 'Ticket Title',
            'description': 'Ticket Description',
        }
        cls.user_sample = {
            'username': 'johndoe',
            'display_name': 'John Doe',
            'email': 'john@doe.com',
            'password': 'testPass123',
        }
        cls.tag_sample = {
            'name': 'Tag Name',
            'description': 'Tag Description',
        }

    def setUp(self) -> None:
        _user = User.objects.create(**self.user_sample)
        # Ticket.tags is a ManyToManyField, so the Tag must already exist in
        # the database before it can be linked to the ticket.
        _tag = Tag.objects.create(**self.tag_sample)
        self.ticket = Ticket.objects.create(**self.ticket_sample)
        # M2M links can only be set once the ticket has a primary key.
        self.ticket.tags.set([_tag])
        self.ticket.created_by = _user
        # Persist the creator: plain attribute assignment only updates the
        # in-memory instance. The original code never saved after assigning
        # created_by, leaving the database row without a creator.
        self.ticket.save()

    def test_obj_returns_same_fields(self):
        fields = {
            'title': self.ticket.title,
            'description': self.ticket.description,
        }
        self.assertEqual(fields, self.ticket_sample)

    def test_check_ticket_creator(self):
        self.assertEqual(self.ticket.created_by.username,
                         self.user_sample['username'])
class CommentTestCase(TestCase):
    """Checks Comment creation and its relations to Ticket and User."""

    @classmethod
    def setUpTestData(cls) -> None:
        cls.ticket_sample = {
            'title': 'Ticket Title',
            'description': 'Ticket Description',
        }
        cls.user_sample = {
            'username': 'johndoe',
            'display_name': 'John Doe',
            'email': 'john@doe.com',
            'password': 'testPass123',
        }
        cls.comment_sample = {
            'body': 'Body of the comment',
        }

    def setUp(self) -> None:
        self.ticket = Ticket.objects.create(**self.ticket_sample)
        self.user = User.objects.create(**self.user_sample)
        # Build a per-test dict instead of mutating cls.comment_sample:
        # setUpTestData class attributes are shared across test methods, and
        # the original in-place update leaked stale Ticket/User instances
        # from one test into the next.
        self.comment_data = {
            **self.comment_sample,
            'ticket': self.ticket,
            'created_by': self.user,
        }
        self.comment = Comment.objects.create(**self.comment_data)

    def test_if_obj_returns_same_fields(self):
        fields = {
            'body': self.comment.body,
            'ticket': self.comment.ticket,
            'created_by': self.comment.created_by,
        }
        self.assertEqual(fields, self.comment_data)

    def test_comment_user_authority(self):
        self.assertEqual(self.comment.created_by, self.user)
|
from pycparser import c_parser, c_ast, c_generator
from copy import deepcopy
def rename_function_calls():
    """Placeholder -- renaming of function calls is not implemented yet."""
    pass
def remove_input_port(func_def, ele_name, inports):
    """Rewrite an element function so its input ports become parameters.

    Renames *func_def* to *ele_name*, removes every declaration whose
    initializer is a call to one of the *inports* functions (e.g.
    ``int i = in();``) and reuses those declarations as the function's
    parameter list, in the order given by *inports*.

    Args:
        func_def: pycparser c_ast.FuncDef to transform (mutated in place).
        ele_name: new name for the function.
        inports: list of input-port function names, in parameter order.
    """
    func_def.decl.name = ele_name
    func_def.decl.type.type.declname = ele_name
    stmts = func_def.body.block_items
    new_stmts = []
    port2args = {}  # port name -> its Decl, reused as a parameter below
    for stmt in stmts:
        if type(stmt) == c_ast.Decl and type(stmt.init) == c_ast.FuncCall:
            funccall = stmt.init
            funcname = funccall.name.name
            if funcname in inports:
                if funccall.args:
                    raise Exception("Cannot pass an argument when retrieving data from an input port.")
                # Keep the declaration (without its init call) as a parameter.
                myDecl = deepcopy(stmt)
                myDecl.init = None
                port2args[funcname] = myDecl
                continue
        new_stmts.append(stmt)
    func_def.body.block_items = new_stmts
    params = [port2args[x] for x in inports]
    print func_def.decl.type.args  # debug output (Python 2 print statement)
    func_def.decl.type.args = c_ast.ParamList(params)
    func_def.show()  # debug dump of the rewritten AST
def test():
    """Smoke test: parse a small element, lift its `in` port into a parameter
    and print the regenerated C source (Python 2 print statements)."""
    src = r'''
run(int xxx) {
    int i = in();
    out(i+1);
}
'''
    parser = c_parser.CParser()
    ast = parser.parse(src)
    print ast.ext  # debug: list of top-level declarations
    ast.show()
    remove_input_port(ast.ext[0], "element", ["in"])
    generator = c_generator.CGenerator()
    print generator.visit(ast)


# Run the smoke test whenever this module is executed/imported.
test()
|
### DAY 2 ###
### TASK 1 ###
# I downloaded and saved the input file as a .txt because I was getting a
# 400 Bad Request Error when trying to open it with urllib.request.urlopen.
# Goal: rely on the Python Standard Library as much as possible and avoid
# importing any libraries (itertools/numpy/pandas are not needed here).


def parse_commands(text):
    """Turn the raw puzzle input into a list of (direction, value) pairs.

    Blank lines (e.g. the trailing newline at the end of the file) are
    skipped, so int() is never called on an empty string -- the original
    `split("\\n")` version crashed on a trailing newline.
    """
    commands = []
    for line in text.splitlines():
        if not line.strip():
            continue
        direction, value = line.split(" ")
        commands.append((direction, int(value)))
    return commands


def final_position(commands):
    """Apply the submarine movement instructions.

    'forward' increases the horizontal position; 'down'/'up' increase/decrease
    the depth (the submarine dives, so "down" means deeper).

    Returns:
        (horizontal, depth) after all commands are applied.
    """
    horizontal = 0
    depth = 0
    for direction, value in commands:
        if direction == "forward":
            horizontal += value
        elif direction == "down":
            depth += value
        elif direction == "up":
            depth -= value
    return horizontal, depth


def main():
    """Read input.txt, run the course and print the puzzle answer."""
    # `with` guarantees the file handle is closed (the original left it open).
    with open("input.txt", "r") as f:
        commands = parse_commands(f.read())
    horizontal, depth = final_position(commands)
    print(
        f"If you multiply your final horizontal position by your final depth you get {horizontal * depth}."
    )


if __name__ == "__main__":
    main()
|
import torch.nn as nn
class Discriminator(nn.Module):
    """A small fully-connected discriminator / critic.

    The network tapers from ``input_dim`` down to a single score through two
    LeakyReLU hidden layers. With ``wasserstein=True`` the raw score is
    returned (WGAN critic); otherwise a final Sigmoid maps it to (0, 1).
    """

    def __init__(self, input_dim, wasserstein=False):
        super(Discriminator, self).__init__()
        first_hidden = 2 * input_dim // 3
        second_hidden = input_dim // 3
        layers = [
            nn.Linear(input_dim, first_hidden),
            nn.LeakyReLU(0.2),
            nn.Linear(first_hidden, second_hidden),
            nn.LeakyReLU(0.2),
            nn.Linear(second_hidden, 1),
        ]
        self.model = nn.Sequential(*layers)
        if not wasserstein:
            # A plain GAN discriminator outputs a probability; a Wasserstein
            # critic must stay unbounded. add_module keeps the same submodule
            # name ("activation") as before, so state_dict keys are unchanged.
            self.model.add_module("activation", nn.Sigmoid())

    def forward(self, x):
        return self.model(x)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.