blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 3 281 | content_id stringlengths 40 40 | detected_licenses listlengths 0 57 | license_type stringclasses 2 values | repo_name stringlengths 6 116 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringclasses 313 values | visit_date timestamp[us] | revision_date timestamp[us] | committer_date timestamp[us] | github_id int64 18.2k 668M ⌀ | star_events_count int64 0 102k | fork_events_count int64 0 38.2k | gha_license_id stringclasses 17 values | gha_event_created_at timestamp[us] | gha_created_at timestamp[us] | gha_language stringclasses 107 values | src_encoding stringclasses 20 values | language stringclasses 1 value | is_vendor bool 2 classes | is_generated bool 2 classes | length_bytes int64 4 6.02M | extension stringclasses 78 values | content stringlengths 2 6.02M | authors listlengths 1 1 | author stringlengths 0 175 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
fb125b831dfdd12ef020ba37e389a57e9312650c | 08379db5712432b34767d747b9f078ab30822d74 | /tdd/counter.py | 7b6c18b587d99d5bf211cdcbdafb50576a2a4e78 | [
"MIT"
] | permissive | scotttrumpy/Python-Fizzbuzz | 7fa739d3be5487e9e40f9a2285a96a2ed825b52b | c9bafd436a22533634bbf58458ce82942cb21268 | refs/heads/master | 2021-09-10T15:43:27.110072 | 2018-03-28T19:22:39 | 2018-03-28T19:22:39 | 125,870,390 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 314 | py | def fizzbuzz (count):
if count % 15 == 0:
return('Fizzbuzz')
elif count % 3 == 0:
return('Fizz')
elif count % 5 == 0:
return('Buzz')
else:
return count
def main():
for count in range(1,101):
print fizzbuzz(count)
if __name__=='__main__':
main()
| [
"trumpysm@dukes.jmu.edu"
] | trumpysm@dukes.jmu.edu |
64e7542df83df9bd0d6edf9f81dd3c5add9aef71 | 0800aac473cbb94f3ac263c202979498c326cf18 | /법인세_총설.py | a437c75324c85c0332211d27ad24fe8df470b893 | [] | no_license | DanielHennyKwon/TAX_LIM_JEONG | 8f12e072c044cd17646f196c17b51d1e0cae179e | a263b4e90f0ac78500382047bf7ae72380213ca8 | refs/heads/master | 2023-06-16T10:50:55.111407 | 2021-07-11T02:59:50 | 2021-07-11T02:59:50 | 384,847,297 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,432 | py | # -*- coding: utf-8 -*-
# 2018-12-24 권달현
import 결산의확정, 신고납부절차, 기한후신고, 수정신고, 경정청구, 법인의분류, 세금의종류, 실질과세, 소액주주, 대주주, 중소기업, 이월과세, 과세이연, 세무조정, 소득처분, 법인세비용, 세액계산_구조,세무조정_흐름도
_={
"결산의 확정":결산의확정.결산의확정,
"법인세의 신고납부절차":신고납부절차.법인세,
"기한후신고":기한후신고.법인세,
"수정신고":수정신고._,
"경정청구":경정청구._,
"법인세법상 법인의 분류":법인의분류.법인세,
"법인세의 종류":세금의종류.법인세,
"실질과세":실질과세.법인세,
"소액주주":소액주주.법인세,
"대주주":대주주.법인세,
"중소기업":중소기업._,
"이월과세":이월과세.법인세,
"과세이연":과세이연.법인세,
"세무조정 흐름도":세무조정_흐름도.법인세,
"세무조정":세무조정.법인세,
"소득처분":소득처분.법인세,
"법인의 각 사업연도소득과 과세표준 및 세액계산의 구조":세액계산_구조.법인세,
"법인세비용":법인세비용.법인세,
}
#___________________________________________________
제목='법인세 총설'
tax=_
import wx
class MyFrame(wx.Frame):
    """Main window: a tree of tax topics plus a text box echoing the selection."""

    # Depth of the original eight hand-written nested loops; ``tax`` is a
    # nested dict at most this deep.
    _MAX_DEPTH = 8

    def __init__(self):
        wx.Frame.__init__(self, parent=None, title=제목)
        self.SetSize(420, 320*2)
        self.mainPanel = wx.Panel(self)
        self.expandButton = wx.Button(self.mainPanel, label='펼침')
        self.tree = wx.TreeCtrl(self.mainPanel)
        root = self.tree.AddRoot(제목)
        # Was eight copy-pasted nested for-loops (i..r); the recursion builds
        # the exact same tree and tolerates any nesting up to _MAX_DEPTH.
        self._populate(root, tax, self._MAX_DEPTH)
        self.staticText = wx.TextCtrl(self.mainPanel, style=wx.TE_MULTILINE)
        self.vtBoxSizer = wx.BoxSizer(wx.VERTICAL)
        self.vtBoxSizer.Add(self.expandButton, 0, wx.EXPAND|wx.ALL, 5)
        self.vtBoxSizer.Add(self.tree, 5, wx.EXPAND|wx.ALL, 5)
        self.vtBoxSizer.Add(self.staticText, 0, wx.EXPAND|wx.ALL, 5)
        self.mainPanel.SetSizer(self.vtBoxSizer)
        self.Bind(wx.EVT_BUTTON, self.OnExpandButton, self.expandButton)
        self.Bind(wx.EVT_TREE_SEL_CHANGED, self.OnNodeSelected, self.tree)

    def _populate(self, parent_item, subtree, depth):
        """Append every key of *subtree* under *parent_item*, recursing into
        the corresponding values (nested dicts) for up to *depth* levels."""
        if depth <= 0:
            return
        for key in subtree:
            item = self.tree.AppendItem(parent_item, key)
            if depth > 1:
                self._populate(item, subtree[key], depth - 1)

    def OnExpandButton(self, e):
        """Expand every node of the topic tree."""
        self.tree.ExpandAll()

    def OnNodeSelected(self, e):
        """Copy the selected node's label into the text box below the tree."""
        selected = self.tree.GetSelection()
        self.staticText.SetLabel(self.tree.GetItemText(selected))
        self.mainPanel.Layout()
if __name__=='__main__':
    # Launch the GUI event loop when executed as a script.
    app=wx.App()
    frame=MyFrame()
    frame.Show()
    app.MainLoop()
#___________________________________________________ | [
"cpahouse@naver.com"
] | cpahouse@naver.com |
b080960023d3de4b6813fe57e3f48af239f29069 | 836fcb1fb4db3d2b6d0d9b54c3f916bc599a0b62 | /ClassInterface2.py | 3842cd4a687fcb9ac5f6b695d29199ec4b6d536c | [] | no_license | opickers90/Python3-1 | cf8c544ee17107f535e4431fbe67eb4e218fff70 | 04b2338ddfb7c554cc123677a769b2f4dafbdc5b | refs/heads/master | 2020-04-19T11:01:35.793376 | 2019-02-02T11:11:04 | 2019-02-02T11:11:04 | 168,155,993 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 536 | py | class InsurancePolicy:
def __init__(self, price_of_item):
self.price_of_insured_item = price_of_item
class VehicleInsurance(InsurancePolicy):
    """Vehicle policy: premium rate is a flat 0.1% of the insured price."""

    def get_rate(self):
        rate_factor = .001
        return rate_factor * self.price_of_insured_item
class HomeInsurance(InsurancePolicy):
    """Home policy: premium rate is a flat 0.005% of the insured price."""

    def get_rate(self):
        rate_factor = .00005
        return rate_factor * self.price_of_insured_item
vehicle = VehicleInsurance(2000)
home = HomeInsurance(2000)
def InsuranceTotal(vehicle_or_home):
    """Print the premium rate of any policy object exposing get_rate()."""
    rate = vehicle_or_home.get_rate()
    print(rate)
for insurance in [vehicle, home]:
InsuranceTotal(insurance)
| [
"taufik@LI-320s"
] | taufik@LI-320s |
655a655620f983caafb5b12283dfc5b1c274d08d | de28880dd1c46d0ee2def7e46066d12185fc9a4b | /sketchRnn_clean_v3/tools/utils.py | 9458cfa2b2c2a28c3ca8f8fc65b2e8572ea10cad | [] | no_license | frederictamagnan/PredictDrumFillsInNativeInstrumentsSoundPack | c3712987352a152edf91e893e8af1b23fd17f495 | 2a19d43d5c153340f0a7a50e7314c4763a6089a4 | refs/heads/master | 2020-04-10T04:16:11.417914 | 2019-04-28T16:18:51 | 2019-04-28T16:18:51 | 160,793,133 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,282 | py | PATH = '/home/ftamagna/Documents/_AcademiaSinica/dataset/lpd_5/lpd_5_cleansed/'
PATH_TAGS = [
'/home/ftamagnan/dataset/id_lists/tagtraum/tagtraum_Blues.id',
'/home/ftamagnan/dataset/id_lists/tagtraum/tagtraum_Country.id',
'/home/ftamagnan/dataset/id_lists/tagtraum/tagtraum_Electronic.id',
'/home/ftamagnan/dataset/id_lists/tagtraum/tagtraum_Folk.id',
'/home/ftamagnan/dataset/id_lists/tagtraum/tagtraum_Jazz.id',
'/home/ftamagnan/dataset/id_lists/tagtraum/tagtraum_Latin.id',
'/home/ftamagnan/dataset/id_lists/tagtraum/tagtraum_Metal.id',
'/home/ftamagnan/dataset/id_lists/tagtraum/tagtraum_New-Age.id',
'/home/ftamagnan/dataset/id_lists/tagtraum/tagtraum_Pop.id', # 8
'/home/ftamagnan/dataset/id_lists/tagtraum/tagtraum_Punk.id',
'/home/ftamagnan/dataset/id_lists/tagtraum/tagtraum_Rap.id',
'/home/ftamagnan/dataset/id_lists/tagtraum/tagtraum_Reggae.id',
'/home/ftamagnan/dataset/id_lists/tagtraum/tagtraum_RnB.id',
'/home/ftamagnan/dataset/id_lists/tagtraum/tagtraum_Rock.id', # 13
'/home/ftamagnan/dataset/id_lists/tagtraum/tagtraum_World.id',
'/home/ftamagnan/dataset/id_lists/tagtraum/tagtraum_Unknown.id'
]
PATH_TAGS_ROCK = [
'/home/ftamagna/Documents/_AcademiaSinica/code/LabelDrumFills/id_lists/tagtraum/tagtraum_Rock.id',
]
import os
from random import randint
def tensor_to_numpy(array):
    """Move the given torch tensor to host memory and return it as a NumPy array."""
    host_tensor = array.cpu()
    return host_tensor.data.numpy()
def random_file(filepath_dataset=PATH,path_tags=PATH_TAGS_ROCK):
    ''' Pick one random (directory, filename) pair from the dataset.

    Scans every id-list file in *path_tags*, resolves each listed id to its
    directory under *filepath_dataset* (ids are laid out as
    <root>/<c1>/<c2>/<c3>/<id>/ on disk), collects every .npz file that is
    neither a label nor a metrics file, and returns a random
    (directory_path, filename) tuple.

    Raises IndexError if no candidate file exists.
    '''
    candidates = []  # renamed from ``all`` to avoid shadowing the builtin
    # ITERATE OVER THE TAG LISTS
    for tag_i, tag in enumerate(path_tags):
        print('>>' + tag[29:-3])
        with open(tag, 'r') as f:
            # ITERATE OVER THE FOLDER LISTS
            for i, file in enumerate(f):
                file = file.rstrip()
                middle = '/'.join(file[2:5]) + '/'
                p = filepath_dataset + middle + file
                for npz in os.listdir(p):
                    if 'label' not in npz and 'metrics' not in npz:
                        candidates.append((p+'/', npz))
    # BUG FIX: random.randint is inclusive at both ends, so the original
    # ``randint(0, len(all))`` could return len(all) and raise IndexError.
    return candidates[randint(0, len(candidates) - 1)]
| [
"frederic.tamagnan@gmail.com"
] | frederic.tamagnan@gmail.com |
4d5ad693ce20e9b97da95b0ef67213eb07bc3c1f | 03a10a552e3019e23efb38086b239659183b59c5 | /src/tiles.py | 9a62ef786790072c5d44c7e7c8a4ca34abcb3e98 | [
"MIT"
] | permissive | GreenXenith/zoria | 54a5c067a7d460a9126acf9c2085ad3ee5905fe1 | 30a16baab3643c820613a8c8669ee6235a2cd47c | refs/heads/master | 2023-02-22T14:19:34.674436 | 2021-01-29T22:03:44 | 2021-01-29T22:03:44 | 334,269,203 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,354 | py | import pygame
global registered_tiles
registered_tiles = {}
# Content IDs are currently unused
# They would be used for map-saving
content_ids = []
content_id_map = {}
def register_tile(name, definition):
    # Register a tile definition and assign it the next sequential content id.
    # NOTE: the id is read from len(content_ids) *before* the append below,
    # so ids start at 0 and match the index of ``name`` in ``content_ids``.
    registered_tiles[name] = definition
    content_id_map[name] = len(content_ids)
    content_ids.append(name)
def get_content_id(name):
    """Return the numeric content id registered for *name*, or None if unknown."""
    # Narrowed from a bare ``except:`` that silently hid every kind of error.
    try:
        return content_id_map[name]
    except KeyError:
        return None
def get_tile_from_content_id(id):
    """Return the tile definition for numeric content *id*, or None if unknown."""
    # Narrowed from a bare ``except:``; TypeError covers non-integer ids.
    try:
        return registered_tiles[content_ids[id]]
    except (IndexError, KeyError, TypeError):
        return None
class Tile:
    """A placed map tile; copies non-callable fields from its registered definition."""

    # Class-level defaults, shadowed per-instance by the definition's fields.
    textures = ["none.png"]
    solid = True
    rotation = 0

    def __init__(self, name, pos):
        self.name = name
        self.pos = pos
        # Copy plain data fields (not callbacks) from the registry entry so
        # instance attributes override the class-level defaults above.
        for key, value in registered_tiles[name].items():
            if not callable(value):
                setattr(self, key, value)

    def get(self, key):
        """Return attribute *key*, or None when the tile does not define it."""
        # Replaces a bare try/except that swallowed every error type.
        return getattr(self, key, None)

    def set_rotation(self, rot):
        self.rotation = rot

    def get_rotation(self):
        return self.rotation

    def is_solid(self):
        """True only when the tile's 'solid' flag is exactly True."""
        return self.get("solid") == True

    def on_step(self, dtime, map, player):
        """Invoke the definition's optional on_step callback, if present."""
        definition = registered_tiles[self.name]
        if "on_step" in definition:
            definition["on_step"](self, dtime, map, player)
| [
"24834740+GreenXenith@users.noreply.github.com"
] | 24834740+GreenXenith@users.noreply.github.com |
222c5178a7adba45b63bfa89f62fe5db08357f6f | 31a49331912b64586e4c811403d335d054ad0172 | /cardcalc/__init__.py | 44e34496838c665917e2a4dbfd607e95b2c0266c | [] | no_license | kuwoyuki/kadocalc | 1645827ec385f8d2c5744f5092c38328ad20e10d | cf2e4a8f6d5937c1210b854642eec0f3ac38343b | refs/heads/master | 2022-12-27T11:15:29.924433 | 2020-10-15T16:33:36 | 2020-10-15T16:33:36 | 298,285,986 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 45 | py | from .core import app
def main():
app() | [
"kuwoyuki@gmail.com"
] | kuwoyuki@gmail.com |
3fabfb3eca9c98aa7cfd68daff67bb5e4667a1a7 | 60450fdbd7cf69a73e1a8aa2290691650829cf04 | /PlaneData/serializers.py | 3a5c0657b06e77895f422cb3e15eb8e6d85c1c8e | [] | no_license | les-patates/Hecate_Backend | 65a9fcfec69d110a5c66750086c9b4de810af45a | 091c3bf859bcb8e316f85175966ae981d24e70a7 | refs/heads/master | 2023-08-17T05:28:46.010833 | 2020-04-29T17:24:08 | 2020-04-29T17:24:08 | 259,773,172 | 0 | 0 | null | 2021-09-22T18:56:38 | 2020-04-28T23:14:21 | Python | UTF-8 | Python | false | false | 256 | py | from django.contrib.auth.models import User, Group
from rest_framework import serializers
from .models import Waypoint
class WaypointSerializer(serializers.ModelSerializer):
class Meta:
model = Waypoint
fields = ['lat','lon']
| [
"jomethun@gmx.ch"
] | jomethun@gmx.ch |
fb61ba1d526116dd254bbf94df08247b74f229fe | 5fadda7c33e1c4ba24edfaf0fbd68d19826941b9 | /problems/StroboNumber.py | 6947781f5e2d422069d66ef8976be90d9c95eacd | [] | no_license | jonu4u/DataStructuresInPython | ade71e8eb528bf382846512c812e11e9a05b9c51 | 8731e2ccfbda9323ea5c8629599806cd1c37c3bf | refs/heads/master | 2023-08-01T03:11:48.103722 | 2021-09-13T14:22:15 | 2021-09-13T14:22:15 | 304,824,404 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,144 | py | # 246. Strobogrammatic Number
#
# A strobogrammatic number is a number that looks the same when rotated 180 degrees (looked at upside down).
#
# Write a function to determine if a number is strobogrammatic. The number is represented as a string.
#
#
#
# Example 1:
#
# Input: num = "69"
# Output: true
# Example 2:
#
# Input: num = "88"
# Output: true
# Example 3:
#
# Input: num = "962"
# Output: false
# Example 4:
#
# Input: num = "1"
# Output: true
class Solution(object):
    # LeetCode 246/247: strobogrammatic-number check and brute-force search.
    def isStrobogrammatic(self, num):
        """
        :type num: str
        :rtype: bool
        """
        # Digit -> its 180-degree rotation; digits absent here (2,3,4,5,7)
        # cannot appear in a strobogrammatic number at all.
        symmetry_map={"1":"1","6":"9","8":"8","9":"6","0":"0"}
        new=""
        for elem in num:
            if elem in symmetry_map:
                # Prepending builds the rotated (reversed + mapped) string.
                new=symmetry_map[elem]+new
            else:
                return False
        if new==num:
            return True
        return False

# 247. Strobogrammatic Number II
#
# A strobogrammatic number is a number that looks the same when rotated 180 degrees (looked at upside down).
#
# Find all strobogrammatic numbers that are of length = n.
#
# Example:
#
# Input: n = 2
# Output: ["11","69","88","96"]


    # In this solution we have to jump numbers when they are not in symmetry map
    # This causes TLE in Leetcode when n=10
    def findStrobogrammatic(self, n):
        """
        :type n: int
        :rtype: List[str]
        """
        symmetry_map={"1":"1","6":"9","8":"8","9":"6","0":"0"}
        out=[]
        start=0
        # n-digit numbers start at 10**(n-1) (except n == 1, which includes 0).
        if n>1:
            start=10**(n-1)
        while start <10**n:
            # When the first char is not in the map we jump 10^n-1 numbers
            if str(start)[0] in symmetry_map:
                # is_strob both tests the candidate and advances the counter
                # past numbers that share the first invalid digit.
                start,is_strob=self.is_strob(str(start),symmetry_map,start)
                if is_strob:
                    # start has already been advanced by one inside is_strob.
                    out.append(str(start-1))
            else:
                if str(start)[0]=="2":
                    start=start+(10**(n-1))*4
                else:
                    start=start+10**(n-1)
        return out

    def is_strob(self,num,symmetry_map,loop_ctr):
        # Returns (advanced counter, is_strobogrammatic). The counter jump
        # skips every number sharing the same invalid digit prefix.
        new=""
        for index,elem in enumerate(num):
            if elem not in symmetry_map:
                # When any char is not in the map we jump 10**(len(num)-index-1)) numbers
                # If the elem is 2 we can straightaway jump 2,3,4,5
                if elem=="2":
                    loop_ctr=loop_ctr+(10**(len(num)-index-1))*4
                # When the number is odd the middle number can only contain 0 or 1
                # to be strob so we skip 2-9 here
                # NOTE(review): elem is a str, so ``elem==2`` is always False and
                # this branch is dead; "2" is already handled by the if above.
                elif len(num)%2!=0 and elem==num[len(num)//2] and elem==2:
                    loop_ctr=loop_ctr+(10**(len(num)-index-1))*8
                else:
                    loop_ctr=loop_ctr+(10**(len(num)-index-1))
                return (loop_ctr,False)
            else:
                new=symmetry_map[elem]+new
        if new==num:
            return (loop_ctr+1,True)
        return (loop_ctr+1,False)


    # We have to write more effficient way of checking is_strob
    # Start from middle if it contains 6 then next must be 9.Then fan out and it should have either
    # of symmetry numbers in between or outside
    # Check this out https://leetcode.com/problems/strobogrammatic-number-ii/discuss/934855/python3-faster-than-100
    # def is_strob_eff(self,num,symmetry_map,loop_ctr):
    #     new=""
    #     for index,elem in enumerate(num):
    #         if elem not in symmetry_map:
    #             # When any char is not in the map we jump 10**(len(num)-index-1)) numbers
    #             # If the elem is 2 we can straightaway jump 2,3,4,5
    #             if elem=="2":
    #                 loop_ctr=loop_ctr+(10**(len(num)-index-1))*4
    #             else:
    #                 loop_ctr=loop_ctr+(10**(len(num)-index-1))
    #             return (loop_ctr,False)
    #         else:
    #             new=symmetry_map[elem]+new
    #     if new==num:
    #         return (loop_ctr+1,True)
    #     return (loop_ctr+1,False)
s=Solution()
# print(10**0)
from datetime import datetime
# Timestamps before/after measure the brute-force run time.
print(datetime.now())
# NOTE(review): n=9 scans on the order of 10^8 candidates even with the
# digit-jumping optimisation — expect this call to be very slow.
print(s.findStrobogrammatic(9))
print(datetime.now())
| [
"j.deb@MacBook-Pro.local"
] | j.deb@MacBook-Pro.local |
096bc0ec339e35fa0708f8f760dd66381cdcf5eb | bd64c5926f5a6d17f823d48eb27070a9e4c93e49 | /main.py | f3bd374d376d3a9bb7954013e34465e803abb3d2 | [
"MIT"
] | permissive | ahamlinman/trello-reporter | 9768ef58ea6215f8e59ce38e4f40e751dee7e013 | 601e7f74592f89a6424bf20a38b63153f80091e1 | refs/heads/master | 2021-08-08T01:57:59.260557 | 2020-08-23T04:35:25 | 2020-08-23T04:38:36 | 214,364,617 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,073 | py | #!/usr/bin/env python3
from datetime import datetime, timedelta
from pprint import pprint
import argparse
import json
import os
from dateutil.parser import parse as parse_date
from mailer import send_email
from reporter import Reporter
from trello import TrelloClient
def older_than(date_str, delta_spec):
    """True when *date_str* lies further in the past than timedelta(**delta_spec)."""
    moment = parse_date(date_str)
    age = datetime.now(tz=moment.tzinfo) - moment
    return age > timedelta(**delta_spec)
def build_report(config, trello):
    """Assemble the stale-card report text, or return None when nothing is stale."""
    reporter = Reporter()
    for list_spec in config["lists"]:
        trello_list = trello.list(list_spec["listId"])
        # Cards whose last activity predates the configured time window.
        stale_names = [
            card["name"]
            for card in trello_list["cards"]
            if older_than(card["dateLastActivity"], list_spec["timeDelta"])
        ]
        if stale_names:
            reporter.add_section(trello_list["name"], stale_names)
    if not reporter.sections:
        return None
    return reporter.format(config["heading"])
def run_report(config, email=False):
    """Build the report, then e-mail it (email=True) or print it to stdout."""
    trello = TrelloClient(os.getenv("TRELLO_KEY"), os.getenv("TRELLO_TOKEN"))
    report_text = build_report(config, trello)
    if report_text is None:
        print("(nothing to report)")
        return
    if not email:
        print(report_text)
        return
    result = send_email(config["emailAddress"], config["subject"], report_text)
    pprint(result)
def lambda_handler(event, _context):
    # AWS Lambda entry point: the invocation event doubles as the report
    # config, and the result is always e-mailed.
    run_report(event, True)
def main():
    """CLI entry point: load the JSON config and run the stale-card report."""
    parser = argparse.ArgumentParser(description="Report on old Trello cards.")
    parser.add_argument(
        "--config",
        type=str,
        default="config.json",
        metavar="FILE",
        help="path to the JSON config file " "(default: config.json)",
    )
    parser.add_argument(
        "--email",
        action="store_true",
        help="send an email instead of printing the report",
    )
    args = parser.parse_args()
    with open(args.config, "r") as config_file:
        config = json.load(config_file)
    run_report(config, args.email)
if __name__ == "__main__":
main()
| [
"alex@alexhamlin.co"
] | alex@alexhamlin.co |
17728735f1b7b3b8b62806ac4da7539060848021 | ee4f1849720fe1cce2e2269e60083532f886a3dd | /fblogin.py | 94ca3c22610decab20d7fd06f13b75bb0dd5eb13 | [] | no_license | lavakumar152/amzlogin | eaeebda6c9ab5870963e2b9048321e5ee1ad079f | f9de7e44f3cccf67d5b69bf8197423abd09849d0 | refs/heads/main | 2023-02-12T00:46:41.845023 | 2021-01-11T08:07:59 | 2021-01-11T08:07:59 | 328,585,189 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 521 | py | from selenium import webdriver
from getpass import getpass
username = input('Enter your user name: ')
password = getpass('Enter your password : ')
driver = webdriver.Chrome('E:\\Udemy\\python\\webdriver\\chromedriver.exe')
driver.get('https://www.facebook.com/')
username_textbox = driver.find_element_by_id('email')
username_textbox.send_keys(username)
password_textbox = driver.find_element_by_id('pass')
password_textbox.send_keys(password)
login_button = driver.find_element_by_id('u_0_b')
login_button.submit()
| [
"lavakumar152@gmail.com"
] | lavakumar152@gmail.com |
996050481c43d0fc4419ad5c97ca943a18676fea | 08bb966a6eb75877429630787a7adcb090685cb9 | /web_scraping/link_scrapers/link_scraper_majcom.py | 8ef4bbd895461f08cedc85db64b0f8e976a6fd4e | [
"MIT"
] | permissive | RANDCorporation/policy2vec | 946dd6743b22bafa31926400695b5485136fe9b5 | a309217115b2163241313a239671b2ab84169b3e | refs/heads/master | 2023-04-09T11:10:46.303654 | 2021-04-20T23:41:11 | 2021-04-20T23:41:11 | 359,979,373 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,055 | py | # Network Analysis of Vaccination Strategies
# Copyright (C) 2020 by The RAND Corporation
# See LICENSE and README.md for information on usage and licensing
## Imports
from selenium.webdriver.common.desired_capabilities import DesiredCapabilities
from selenium.webdriver.common.action_chains import ActionChains
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support.ui import Select
from selenium.webdriver.common.keys import Keys
from selenium import webdriver
from time import sleep
import pandas as pd
#import tabula
import time
import os
import pynput
from pynput.keyboard import Key, Controller
import re
import math
import datetime
## Set-up
parent_url = "https://www.e-publishing.af.mil/Product-Index/"
options = webdriver.ChromeOptions()
profile = {"plugins.plugins_list": [{"enabled": False, "name": "Chrome PDF Viewer"}], "download.extensions_to_open": ""}
#option.add_argument(“ -- incognito”)
## set-up the chrome driver
from os.path import dirname, abspath
chromedriver_path = dirname(dirname(abspath(__file__))) + '/chromedriver'
driver = webdriver.Chrome(executable_path=chromedriver_path, options=options)
driver.get(parent_url)
keyboard = Controller()
## Get the AF MAJCOM-Level pubs
#MAJCOM
get_internet = driver.find_element_by_xpath('//*[@id="mvcContainer-449"]/div[2]/div[1]/div[1]/ul/li[4]/a')
get_internet.click()
time.sleep(3)
top_level_url = driver.current_url
print(top_level_url)
## scan through the different MAJCOMs
hrefs = []
titles = []
for m in range(1,11):
## navigate to the top-level page
driver.get(top_level_url)
## select the MAJCOM
get_internet = driver.find_element_by_xpath('//*[@id="cat-2"]/div/ul/li[' + str(m) + ']/a')
print('navigating to: ' + get_internet.text)
get_internet.click()
time.sleep(3)
## select All-Pubs
get_internet = driver.find_element_by_xpath('//*[@id="org-list"]/div[1]/ul/li[1]/a')
get_internet.click()
time.sleep(3)
## find out how many pages there are
s = driver.find_element_by_xpath('//*[@id="data_info"]').text
s = s[s.index('of'):]
num_docs = int(re.sub("[^0-9]", "", s))
num_pages = math.ceil(num_docs/10)
## scan through multiple pages (each of which shows 10 items)
print('scanning through %i pages' %num_pages)
count = 0
for j in range(1,num_pages+1):
## click on the appropriate page number
if j <= 4:
k = j
if j == 5:
k = 5
elif num_pages > 5 and 5 < j and j < num_pages-1:
k = 4
elif num_pages > 5 and j == num_pages-1:
k = 5
elif num_pages > 5 and j == num_pages:
k = 6
get_internet = driver.find_element_by_xpath('//*[@id="data_paginate"]/span/a[' + str(k) + ']')
get_internet.click()
## scan through a single page
for i in range(1,11):
## try/except since the last page will have less than 10 items
try:
element = driver.find_element_by_xpath('//*[@id="data"]/tbody/tr[' + str(i) + ']/td[1]/a')
title_xpath = '//*[@id="data"]/tbody/tr[' + str(i) + ']/td[2]'
titles.append(driver.find_element_by_xpath(title_xpath).text)
hrefs.append(element.get_attribute('href'))
count += 1
except:
pass
print('page: %i, links:%i ' %(j, count))
## Save the links and document titles as a csv file
import pandas as pd
if not os.path.exists('logs'):
os.makedirs('logs')
source_link = [parent_url]*len(hrefs)
source = ['Air Force E-Publishing']*len(hrefs)
date = datetime.datetime.now().strftime("%Y-%m-%d %H:%M")
link_date = [date]*len(hrefs)
df = pd.DataFrame({'Title':titles, 'source':source, 'source link':source_link, 'link':hrefs, 'link date':link_date})
df.to_csv('logs/AF_epubs_majcom.csv', index=False)
### Save the links
#with open('links/AF_epubs_majcom.txt', 'w') as f:
# for item in hrefs:
# f.write("%s\n" % item)
## close the session
driver.quit()
| [
"gshartnett@gmail.com"
] | gshartnett@gmail.com |
a65776b895a6918affe05ddf0fd3dea283e97182 | 52a7fd3ef46cb0a29b42ab11386721ece0f51a56 | /env/lib/python2.7/site-packages/sqlalchemy_utils/expressions.py | 150c6fa6e53759ac18827ad54c0720835a405707 | [] | no_license | nicolas3355/AUBOOST | 95f8b2c0503fd1dfecdbceb9f1a0e88b786a3b4b | 6af5593ef85c675336850e7e1691cb267cb26315 | refs/heads/master | 2016-08-04T03:28:12.180742 | 2015-06-29T17:41:53 | 2015-06-29T17:41:53 | 38,261,406 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,300 | py | import sqlalchemy as sa
from sqlalchemy.sql import expression
from sqlalchemy.ext.compiler import compiles
from sqlalchemy_utils.types import TSVectorType
class tsvector_match(expression.FunctionElement):
type = sa.types.Unicode()
name = 'tsvector_match'
@compiles(tsvector_match)
def compile_tsvector_match(element, compiler, **kw):
args = list(element.clauses)
if len(args) < 2:
raise Exception(
"Function 'tsvector_match' expects atleast two arguments."
)
return '(%s) @@ %s' % (
compiler.process(args[0]),
compiler.process(args[1])
)
class to_tsquery(expression.FunctionElement):
type = sa.types.Unicode()
name = 'to_tsquery'
@compiles(to_tsquery)
def compile_to_tsquery(element, compiler, **kw):
if len(element.clauses) < 1:
raise Exception(
"Function 'to_tsquery' expects atleast one argument."
)
return 'to_tsquery(%s)' % (
', '.join(map(compiler.process, element.clauses))
)
class plainto_tsquery(expression.FunctionElement):
type = sa.types.Unicode()
name = 'plainto_tsquery'
@compiles(plainto_tsquery)
def compile_plainto_tsquery(element, compiler, **kw):
if len(element.clauses) < 1:
raise Exception(
"Function 'plainto_tsquery' expects atleast one argument."
)
return 'plainto_tsquery(%s)' % (
', '.join(map(compiler.process, element.clauses))
)
class tsvector_concat(expression.FunctionElement):
type = TSVectorType()
name = 'tsvector_concat'
@compiles(tsvector_concat)
def compile_tsvector_concat(element, compiler, **kw):
return ' || '.join(map(compiler.process, element.clauses))
class array_get(expression.FunctionElement):
name = 'array_get'
@compiles(array_get)
def compile_array_get(element, compiler, **kw):
args = list(element.clauses)
if len(args) != 2:
raise Exception(
"Function 'array_get' expects two arguments (%d given)." %
len(args)
)
if not hasattr(args[1], 'value') or not isinstance(args[1].value, int):
raise Exception(
"Second argument should be an integer."
)
return '(%s)[%s]' % (
compiler.process(args[0]),
sa.text(str(args[1].value + 1))
)
| [
"nicolaselhaddad.nh@gmail.com"
] | nicolaselhaddad.nh@gmail.com |
934dee79620e8df431610c508d8a4820ac2ae0be | 1e5591520924be7e2e6c86b120887ee3f8a387c9 | /mysite/urls.py | 1cb3674ad75a15cb78bcb638a2868c8455879c90 | [] | no_license | palcu/django-tutorial | 561f306a033f3c2607848c659e75986b1a312a32 | 83147eebd9de649fc1daf249976ba97872d0a489 | refs/heads/master | 2021-01-23T19:34:43.059386 | 2012-05-13T23:35:42 | 2012-05-13T23:35:42 | 4,317,853 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 896 | py | from django.conf.urls import patterns, include, url
from mysite.views import hello, current_datetime, hours_ahead, display_meta
from django.contrib import admin
from contact import views
# Uncomment the next two lines to enable the admin:
# from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
('^hello/$', hello),
('^time/$', current_datetime),
('^meta/$', display_meta),
(r'^time/plus/(\d{1,2})/$', hours_ahead),
# (r'^search/$', views.search),
(r'^contact/$', views.contact),
# Examples:
# url(r'^$', 'mysite.views.home', name='home'),
# url(r'^mysite/', include('mysite.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
)
| [
"alex.palcuie@gmail.com"
] | alex.palcuie@gmail.com |
2abd803bd4688f9318e9e4f81c38cd899ad095d3 | 4018d51c555d923d8b017f4460d19748c646ed82 | /web.py | 852d6f2bf34791d36105a386dd3d49e6fc26c0e7 | [] | no_license | fagan2888/hackernews | c046a680f2b5ee60257f4032f3957665a26c2852 | b64f813e566af46d4dcfb461285571c46d1a1603 | refs/heads/master | 2021-01-26T09:07:21.464909 | 2017-08-18T18:38:03 | 2017-08-18T18:38:03 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,564 | py | import os
import datetime
from flask import Flask, render_template, request
from flask.ext.moment import Moment
from flask.ext.pymongo import PyMongo
from utils import CATEGORIES
COLORS = [
'#D93B3B', '#7cb5ec', '#90ed7d', '#f7a35c',
'#8085e9', '#c015e9', '#2B9658', '#b2b2b2'
]
LIMIT = 30
app = Flask(__name__)
moment = Moment(app)
# Mongo setup
# mongodb://mongo:27017/hn_demo
app.config['MONGO_URI'] = os.environ['MONGO_URI']
mongo = PyMongo(app)
def get_statistics(posts):
    """Count classified posts per category over the last ten whole hours.

    *posts* is presumably a MongoDB collection whose documents carry a
    ``time`` datetime and a ``result.label`` category — TODO confirm schema.
    Returns per-category count series plus the intervals and chart colors.
    """
    data = {}
    # Generate time intervals used to filter posts
    now = datetime.datetime.now()
    # Ten consecutive one-hour windows, oldest first, truncated to the hour.
    # NOTE(review): for i == 0 the end bound is now + 1 hour, so the last
    # window covers the current partial hour.
    time_intervals = [(
        (now-datetime.timedelta(hours=i)).replace(
            minute=0,
            second=0,
            microsecond=0),
        (now-datetime.timedelta(hours=i-1)).replace(
            minute=0,
            second=0,
            microsecond=0))
        for i in reversed(range(10))]

    # Get posts count for each category in the time intervals defined
    for start, end in time_intervals:
        for category in CATEGORIES + ['random']:
            if category not in data:
                data[category] = []
            # One count query per (window, category) pair.
            data[category].append(posts.find({
                'time': {
                    '$gte': start,
                    '$lte': end
                },
                'result.label': category
            }).count())

    return {
        'data': data,
        'time_intervals': time_intervals,
        'colors': COLORS
    }
def search_posts(posts, category, page):
    """Return one LIMIT-sized page of ranked posts, optionally filtered by category."""
    # Only posts that have received a ranking are eligible.
    query = {'ranking': {'$ne': None}}
    if category and category != 'all':
        query['result.label'] = category
    offset = (page - 1) * LIMIT
    ranked = posts.find(query).sort('ranking', 1)
    return ranked.skip(offset).limit(LIMIT)
@app.route('/', methods=['GET'])
@app.route('/news', methods=['GET'])
def index():
    """Render the paginated, optionally category-filtered post listing."""
    page = request.args.get('p')
    category = request.args.get('c') or 'all'
    if not page:
        page = 1
    else:
        # NOTE(review): int() raises ValueError on a non-numeric ?p= value,
        # which surfaces as an HTTP 500 — confirm whether that is intended.
        page = int(page)
    return render_template(
        'index.html',
        posts=search_posts(mongo.db.posts, category, page),
        statistics=get_statistics(mongo.db.posts),
        categories=CATEGORIES + ['random'],
        page=page,
        category=category
    )
@app.route('/feed.xml', methods=['GET'])
def category_rss():
    """Serve the first page of a category's ranked posts as an RSS feed."""
    category = request.args.get('c') or 'all'
    # The feed always shows the first page only.
    page = 1
    return render_template(
        'category_rss.xml',
        posts=search_posts(mongo.db.posts, category, page),
        category=category
    )
if __name__ == '__main__':
app.run(host='0.0.0.0', port=9000, debug=True)
| [
"gonzalosaavedra@gmail.com"
] | gonzalosaavedra@gmail.com |
5467d81e7dab7d114aed26818274068ce59c6255 | 655e0c08f09b96ca7043d0a0e4fa266b3730b916 | /hardWork/kawaii_NLU/kawaii_nlu/utils/mitie_utils.py | 08f010f0a9cae8aa8859113d2ee01e892909838f | [] | no_license | alaa069/alaa.ai | 0904b0000a51bdb0400443171017c536bf4d2125 | a8f917b982b6190b613f30817f744bdac1dd7d21 | refs/heads/master | 2022-11-29T13:49:44.638306 | 2018-04-30T10:35:07 | 2018-04-30T10:35:07 | 126,217,739 | 0 | 1 | null | 2022-11-22T12:56:00 | 2018-03-21T17:39:15 | HTML | UTF-8 | Python | false | false | 2,410 | py | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import os
import typing
from builtins import str
from typing import Any
from typing import Dict
from typing import List
from typing import Optional
from typing import Text
from kawaii_nlu.components import Component
from kawaii_nlu.config import KawaiiNLUConfig
from kawaii_nlu.model import Metadata
if typing.TYPE_CHECKING:
import mitie
class MitieNLP(Component):
    """Pipeline component that loads the MITIE total_word_feature_extractor
    once and shares it with downstream components via the pipeline context."""
    name = "nlp_mitie"

    provides = ["mitie_feature_extractor"]

    def __init__(self, mitie_file, extractor=None):
        self.extractor = extractor
        self.mitie_file = mitie_file
        # Fail fast if the extractor could not be loaded.
        MitieNLP.ensure_proper_language_model(self.extractor)

    @classmethod
    def required_packages(cls):
        # type: () -> List[Text]
        return ["mitie"]

    @classmethod
    def create(cls, config):
        # Build the component from pipeline config; loading the feature
        # extractor file is the expensive step.
        import mitie
        return MitieNLP(config["mitie_file"], mitie.total_word_feature_extractor(config["mitie_file"]))

    @classmethod
    def cache_key(cls, model_metadata):
        # type: (Metadata) -> Optional[Text]
        # Key component caching on the absolute extractor path so models
        # trained against different MITIE files are not mixed up.
        mitie_file = model_metadata.metadata.get("mitie_file", None)
        if mitie_file is not None:
            return cls.name + "-" + str(os.path.abspath(mitie_file))
        else:
            return None

    def provide_context(self):
        # type: () -> Dict[Text, Any]
        # Expose the loaded extractor to later pipeline components.
        return {"mitie_feature_extractor": self.extractor}

    @staticmethod
    def ensure_proper_language_model(extractor):
        # type: (Optional[mitie.total_word_feature_extractor]) -> None
        if extractor is None:
            raise Exception("Failed to load MITIE feature extractor. Loading the model returned 'None'.")

    @classmethod
    def load(cls, model_dir=None, model_metadata=None, cached_component=None, **kwargs):
        # type: (Text, Metadata, Optional[MitieNLP], **Any) -> MitieNLP
        import mitie

        # Reuse the cached component (and its loaded extractor) when possible.
        if cached_component:
            return cached_component

        mitie_file = model_metadata.get("mitie_file")
        return MitieNLP(mitie_file, mitie.total_word_feature_extractor(mitie_file))

    def persist(self, model_dir):
        # type: (Text) -> Dict[Text, Any]
        # Only metadata is persisted; the extractor itself stays on disk at
        # ``mitie_file`` and is re-loaded via ``load``.
        return {
            "mitie_feature_extractor_fingerprint": self.extractor.fingerprint,
            "mitie_file": self.mitie_file
        }
| [
"Alaa.UPPERSKILLS@Alaa-iMac.local"
] | Alaa.UPPERSKILLS@Alaa-iMac.local |
67e4cc76167b1004f444b7678282646e65aa18e4 | d37bbd53b579c62299dfb43f5e67f8c9c772e983 | /synpo/agent/__init__.py | c7d807dfdfd8a3f84e741dbd2b87531335c36ba8 | [
"MIT"
] | permissive | zhmiao/SynPo | 93c330a70fc52b5ca5c7944a25e16b4c9252cf90 | c84a590996ce139e6671bcd356f08664b9c15e8a | refs/heads/master | 2020-05-07T16:11:32.348007 | 2019-04-19T22:00:10 | 2019-04-19T22:00:10 | 180,672,471 | 0 | 0 | null | 2019-04-10T22:19:35 | 2019-04-10T22:19:35 | null | UTF-8 | Python | false | false | 26 | py | from .Grid_agent import *
| [
"frank.hexiang@gmail.com"
] | frank.hexiang@gmail.com |
bafea49166a97ebeb271524d10c9cb4120ce2024 | 952700bb1382d6551f8f584126a3bc8154f5ad1a | /node.py | 93e8f8360f0e7c7ecca02d1f137594e8946f5f0f | [] | no_license | liuyxpp/scriptslyx | 1842fc40e087f68bb41830aab065053212bffe48 | 1a19b302805fe0d5239467c957c79a7db8415b68 | refs/heads/master | 2021-05-19T03:27:02.134434 | 2012-12-14T05:45:23 | 2012-12-14T05:45:23 | 251,507,709 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,660 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
node.py
=======
Node management module based on RPYC.
Copyright (C) 2012 Yi-Xin Liu
"""
import os
import rpyc
import numpy as np
__version__ = 0.1
def get_nodes(nodes_file):
    """Read node names from *nodes_file*, one name per line.

    Blank lines (including lines of only whitespace) are skipped and
    surrounding whitespace is stripped from each name. A missing file
    yields an empty list rather than an error.
    """
    if not os.path.exists(nodes_file):
        return []
    nodes = []
    with open(nodes_file) as handle:
        for raw_line in handle:
            name = raw_line.strip()
            if name:
                nodes.append(name)
    return nodes
def get_free_cores(node):
    """Return the number of mostly-idle CPU cores on *node*.

    Connects via RPyC classic mode and samples per-core usage on the
    remote host for one second; a core counts as "free" when it is more
    than 60% idle. Any connection failure is treated as "no free cores"
    (best effort: an unreachable node simply offers nothing).
    """
    try:
        conn = rpyc.classic.connect(node)
    except Exception:
        # Unreachable/refusing node: report zero rather than propagate.
        return 0
    core_usage = conn.modules.psutil.cpu_percent(interval=1.0, percpu=True)
    core_free = 100.0 - np.array(core_usage)
    return np.sum(core_free > 60.0)
def get_node_with_free_core(nodes):
    """Return the first node in *nodes* reporting at least one free
    core, or an empty string when none (or the list is empty)."""
    return next(
        (candidate for candidate in nodes if get_free_cores(candidate) > 0),
        '')
def test():
    """Smoke-test the helpers against a local ./nodes file.

    Written for Python 2 (print statements), like the rest of the file.
    """
    nodes = get_nodes('nodes')
    print 'Nodes listed in file ./nodes: ', nodes
    # Trailing comma keeps the answer on the same output line (py2 idiom).
    print 'First node with free cores in the nodes list: ',
    print get_node_with_free_core(nodes)
    # NOTE(review): 'c0118' looks like a sample host name from the author's
    # cluster -- adjust for the local environment.
    print 'Number of free cores in c0118:', get_free_cores('c0118')


if __name__ == '__main__':
    test()
| [
"liuyxpp@gmail.com"
] | liuyxpp@gmail.com |
26389f16f25e501b2e3754aa94cfb6e3ae4c2470 | 0e0e7d68a32df763bbeb2fd3bae99183897242be | /Deep-Learning/Lab1/Source/Logistic_Regression.py | cb2b2a45ec85b96c20a8221d3ca630e9e242a645 | [] | no_license | Akshayvamshi/Python | bf876c8b763c8583f34cbd5ec7ae6d668764bc81 | 5a9d9506f0bdd7d4efa37a2562b4e9e798fb17fe | refs/heads/master | 2021-01-23T14:33:56.722065 | 2018-06-06T01:15:49 | 2018-06-06T01:15:49 | 102,693,412 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 2,705 | py | import numpy as np
import matplotlib.pyplot as plt
import tensorflow as tf
# Load the data
# Load the iris data: first two columns are features (sepal length,
# petal length), last column is the 0/1 class label.
data = np.loadtxt('iris.txt', skiprows=1)
X_data = data[:,:2]
Y_data = data[:,2:] # Note this is a matrix

#-------------------------------------------------------------------------------
# Fit
#-------------------------------------------------------------------------------

# For feeding in data (TensorFlow 1.x graph-mode API)
x = tf.placeholder(tf.float32, [None, 2])
y = tf.placeholder(tf.float32, [None, 1])

# Model parameters
W = tf.Variable(tf.zeros([2, 1]))
b = tf.Variable([0.0])

# Define the model: raw logits of a single-unit logistic regression
logits = tf.matmul(x, W) + b

# Loss function: mean sigmoid cross-entropy over the batch
loss = tf.nn.sigmoid_cross_entropy_with_logits(labels=y, logits=logits)
loss = tf.reduce_mean(loss)

# Accuracy: predict class 1 when the logit is >= 0 (i.e. sigmoid >= 0.5)
predict_op = tf.greater_equal(logits, tf.zeros_like(logits))
correct_op = tf.equal(tf.cast(predict_op, tf.float32), y)
accuracy_op = tf.reduce_mean(tf.cast(correct_op, tf.float32))

# Hyperparameters
learning_rate = 0.01
num_epochs = 100

# Optimizer
optimizer = tf.train.GradientDescentOptimizer(learning_rate)
train_op = optimizer.minimize(loss)

# TF session
sess = tf.Session()
sess.run(tf.global_variables_initializer())

# Seed the random number generator for reproducibility
np.random.seed(0)

# Minimize the loss function with per-sample (batch size 1) SGD
for epoch in range(num_epochs):
    # Present each data point once in random order
    idx = np.random.permutation(data.shape[0])
    for i in idx:
        # i:i+1 slicing keeps the 2-D shape the placeholders expect
        feed_dict = {x: X_data[i:i+1], y: Y_data[i:i+1]}
        sess.run(train_op, feed_dict)
    if (epoch+1) % 10 == 0:
        feed_dict = {x: X_data, y: Y_data}
        accuracy = sess.run(accuracy_op, feed_dict)
        print("After {} epochs, accuracy = {}".format(epoch+1, accuracy))

# Print the result
W_val, b_val = sess.run([W, b])
W_val = W_val[:,0]
b_val = b_val[0]
print("W =", W_val)
print("b =", b_val)

def predict(x_):
    """Return 0/1 class predictions for a 2-D feature array."""
    return 1 * sess.run(predict_op, {x: x_})

#-------------------------------------------------------------------------------
# Figure
#-------------------------------------------------------------------------------

# Model predictions
labels = predict(X_data)[:,0]

# Find indices for the two species
idx_0, = np.where(labels == 0)
idx_1, = np.where(labels == 1)

# Plot the data
plt.plot(X_data[idx_0,0], X_data[idx_0,1], 'bo', label='I. versicolor')
plt.plot(X_data[idx_1,0], X_data[idx_1,1], 'ro', label='I. virginica')

# Plot the separating hyperplane: points where W.x + b == 0
x_sep = np.linspace(X_data[:,0].min(), X_data[:,0].max())
y_sep = (-b_val - W_val[0]*x_sep) / W_val[1]
plt.plot(x_sep, y_sep, 'm', label="Decision boundary")

# Legend
plt.legend()

# Axis labels
plt.xlabel("Sepal length (cm)")
# NOTE(review): "legnth" is a typo in the rendered label; left unchanged
# here because it is a runtime string, not a comment.
plt.ylabel("Petal legnth (cm)")

#Plotting the figure
plt.show()
| [
"agr78@mail.umkc.edu"
] | agr78@mail.umkc.edu |
7a281b69c7dcfcffeaa9f04cf7fccff5074a4c5a | 5a39d03a86a1195a9e26da6a6911ebb56e1f8e95 | /users/urls.py | f55681a2d3d9d292c8ea4c45ab4be418d6709509 | [] | no_license | ben-truong-0324/Apella | 954e8d6014aabc24ca5d467ad2c9bd054b48fe85 | 4fcbf8a736a23f60506df57b7910c101acee97a6 | refs/heads/master | 2022-02-20T02:40:36.513074 | 2019-10-02T01:31:34 | 2019-10-02T01:31:34 | 212,232,155 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 732 | py | from django.urls import path
from django.contrib.auth import views as auth_views
from . import views
# URL namespace: reverse these as "users:<name>".
app_name = "users"

urlpatterns = [
    path('', views.index, name='index'),
    path('register/', views.RegisterView.as_view(), name='register'),
    # path('register/org', views.RegisterOrgView.as_view(), name='org_register'),
    path('login/', views.LoginView.as_view(), name = 'login'),
    path('logout/', views.logout_view, name='logout'),
    # Autocomplete endpoint used by organisation selection widgets.
    path('org_autocomplete/', views.OrgAutocomplete.as_view(), name='org_autocomplete'),
    # Profile pages are keyed by username slug.
    path('<slug:username>/profile/', views.UserDetail.as_view(), name ='user_profile'),
    path('<slug:username>/profile/update/', views.UpdateProfileView.as_view(), name ='profile_update'),
]
"ben.truong.0324@gmail.com"
] | ben.truong.0324@gmail.com |
a65f7dc13eb985b6af3bbd498866c5e3df68317a | 0542d44fa5dcdb60d0b9225471803979824edb50 | /pwnable.kr/uaf/uaf.py | a08bc22f6b715ee2fa4620e3e613040ffdae48ae | [] | no_license | Phantomn/CTF | 733e00135e884b5cc5aa1fdcf7819e68f27117da | 48e52619ade5a4f2686a5ab9171567e65044ba4d | refs/heads/master | 2021-10-28T10:49:45.046610 | 2021-10-26T01:26:07 | 2021-10-26T01:26:07 | 97,565,620 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 468 | py | from pwn import *
''' Create file '''
# Payload file: three 8-byte little-endian copies of the same address.
# NOTE(review): 0x401588 is presumably a function address inside the
# target binary for this use-after-free challenge -- specific to that build.
size = 0x18
path = "/tmp/uaf_file"
data = p64(0x401588) * 3
with open(path, 'wb') as f:
    f.write(data)

''' Spawn process '''
# Target takes the allocation size and payload path as argv[1]/argv[2].
p = process(["/home/ubuntu/ctf/pwnable/uaf", str(size), path])
gdb.attach(p)

''' Inputs '''
# Menu choices sent to the target's stdin.
def use():
    p.sendline("1")
def after():
    p.sendline("2")
def free():
    p.sendline("3")

''' Exploit '''
# Free the object, reclaim its chunk twice with file contents, then use it.
if __name__ == '__main__':
    free()
    after()
    after()
    use()
    p.interactive()
| [
"tmdvyr123@gmail.com"
] | tmdvyr123@gmail.com |
e9327ec7e2122b53dc3b6ba3560a822de0e43c08 | 740b4f75dbede3c7d81e2fbd55e81ae62fe88589 | /Mutuales/models.py | 8c847d0be6b02ac64179b7764475e59861de8415 | [] | no_license | juancastelli1/FederacionMut | e45ced865ab3082ceacb678458b7390c9502be54 | 48ec18b4ce92d39306b77b51e10c26e6991d314d | refs/heads/master | 2023-08-17T12:41:48.248851 | 2021-09-29T19:03:40 | 2021-09-29T19:03:40 | 411,431,174 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,849 | py | from django.db import models
from django.db.models.fields import AutoField
from backend.deptos import deptos_tucuman
# Create your models here.
"""
Construyo la entidad para los servicios
"""
class servicios(models.Model):
    """Catalogue of services that a mutual association can offer."""
    # Surrogate primary key.
    id_servicio=models.AutoField(primary_key=True)
    # Service name (Spanish: "servicio").
    servicio=models.CharField(max_length=20)
    created=models.DateTimeField(auto_now_add=True)
    # NOTE(review): auto_now_add on an "updated" field freezes it at creation
    # time; auto_now=True was probably intended -- confirm before changing
    # (a migration would be required).
    updated=models.DateTimeField(auto_now_add=True)
    class Meta:
        db_table='servicios'
        verbose_name='servicio'
        verbose_name_plural='servicios'
        ordering=['servicio']
    def __str__(self):
        # "<id> - <name>" for admin/listing display.
        cadena = str(self.id_servicio) + ' - ' + str(self.servicio)
        return cadena
"""
Construyo la entidad para las omutuales
"""
class mutuales(models.Model):
    """A mutual association (mutual) with a branch in a Tucuman department."""
    id_mutual=models.AutoField(primary_key=True)
    nombre=models.CharField(max_length=100)
    # Branch location, restricted to the department list in backend.deptos.
    sucursal=models.CharField(max_length=30, choices=deptos_tucuman)
    # Direct FK to a single service was replaced by the servicio_mutual
    # many-to-many join table below.
    ##id_servicio=models.ForeignKey(servicios, on_delete=models.CASCADE)
    created=models.DateTimeField(auto_now_add=True)
    # NOTE(review): auto_now_add never updates this field after creation;
    # auto_now=True was probably intended -- confirm before changing.
    updated=models.DateTimeField(auto_now_add=True)
    class Meta:
        db_table='mutuales'
        verbose_name='mutual'
        verbose_name_plural='mutuales'
        ordering=['id_mutual']
    def __str__(self):
        # "<id> - <name> - <branch>" for admin/listing display.
        cadena = str(self.id_mutual) + ' - ' + str(self.nombre) + ' - ' + str(self.sucursal)
        return cadena
class servicio_mutual(models.Model):
    """Join table linking a mutual to the services it offers (many-to-many)."""
    # Uses the bare AutoField imported at module level.
    id_serv_mut = AutoField(primary_key=True)
    id_mutual=models.ForeignKey(mutuales, on_delete=models.CASCADE)
    id_servicio=models.ForeignKey(servicios, on_delete=models.CASCADE)
    class Meta:
        db_table='servicio_mutuales'
        verbose_name='servicio_mutual'
        verbose_name_plural='servicios_mutual'
        ordering=['id_mutual']
        # Each (mutual, service) pair may appear only once.
        unique_together = ('id_mutual', 'id_servicio',)
| [
"juancastelli12345@gmail.com"
] | juancastelli12345@gmail.com |
66770718a60af155f03c7f7a0921f56f40df6693 | 6c19aac22212af668ce7f645973a7ef371ef462f | /weddingapp/migrations/0007_auto_20160313_1233.py | 8cf5f4c260da99cad79d62b986e3baea8715f591 | [] | no_license | josephshambrook/wedding | 37ff3b69a9eb6f422aefbde819c6498ded467ea1 | b4ebbc89b095a300537bca12cf71bc33c66bf596 | refs/heads/master | 2020-04-12T01:42:20.132336 | 2017-09-14T15:45:48 | 2017-09-14T15:45:48 | 53,410,225 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 616 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.9.2 on 2016-03-13 12:33
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Renames invite.group_number -> invite.group_count and re-declares the
    # default on invite.code.

    dependencies = [
        ('weddingapp', '0006_auto_20160313_1146'),
    ]

    operations = [
        migrations.RenameField(
            model_name='invite',
            old_name='group_number',
            new_name='group_count',
        ),
        migrations.AlterField(
            model_name='invite',
            name='code',
            # NOTE(review): b'3886' appears to be a value frozen at
            # makemigrations time (likely from a random-code default on the
            # model); it is historical and must not be edited.
            field=models.CharField(default=b'3886', max_length=6),
        ),
    ]
| [
"josephshambrook@googlemail.com"
] | josephshambrook@googlemail.com |
f578af94f827e03d57627805204eccd982cf649c | 6641608791103374fab4034ae254c6d3b35677be | /bookworm/src/oscar/apps/customer/views.py | 4859edc21040e75902e64213d2dd299215aede3b | [] | no_license | lbput/bookwormPUT | fdab4bc1845e685ddb67c3a29ccb63718388ecdf | 9a1ee125199519788c9637963cb75f55804813c5 | refs/heads/master | 2021-01-18T13:49:05.271426 | 2015-07-23T11:19:03 | 2015-07-23T11:19:03 | 39,562,215 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 25,760 | py | from django.shortcuts import get_object_or_404, redirect
from django.views import generic
from django.core.urlresolvers import reverse, reverse_lazy
from django.core.exceptions import ObjectDoesNotExist
from django import http
from django.contrib import messages
from django.utils.translation import ugettext_lazy as _
from django.contrib.auth import logout as auth_logout, login as auth_login
from django.contrib.sites.models import get_current_site
from django.conf import settings
from oscar.core.utils import safe_referrer
from oscar.views.generic import PostActionMixin
from oscar.apps.customer.utils import get_password_reset_url
from oscar.core.loading import (
get_class, get_profile_class, get_classes, get_model)
from oscar.core.compat import get_user_model
from . import signals
PageTitleMixin, RegisterUserMixin = get_classes(
'customer.mixins', ['PageTitleMixin', 'RegisterUserMixin'])
Dispatcher = get_class('customer.utils', 'Dispatcher')
EmailAuthenticationForm, EmailUserCreationForm, OrderSearchForm = get_classes(
'customer.forms', ['EmailAuthenticationForm', 'EmailUserCreationForm',
'OrderSearchForm'])
PasswordChangeForm = get_class('customer.forms', 'PasswordChangeForm')
ProfileForm, ConfirmPasswordForm = get_classes(
'customer.forms', ['ProfileForm', 'ConfirmPasswordForm'])
UserAddressForm = get_class('address.forms', 'UserAddressForm')
Order = get_model('order', 'Order')
Line = get_model('basket', 'Line')
Basket = get_model('basket', 'Basket')
UserAddress = get_model('address', 'UserAddress')
Email = get_model('customer', 'Email')
ProductAlert = get_model('customer', 'ProductAlert')
CommunicationEventType = get_model('customer', 'CommunicationEventType')
User = get_user_model()
# =======
# Account
# =======
class AccountSummaryView(generic.RedirectView):
    """
    View that exists for legacy reasons and customisability. It commonly gets
    called when the user clicks on "Account" in the navbar.

    Oscar defaults to just redirecting to the profile summary page (and
    that redirect can be configured via OSCAR_ACCOUNT_REDIRECT_URL), but
    it's also likely you want to display an 'account overview' page or
    such like. The presence of this view allows just that, without
    having to change a lot of templates.
    """
    # Name of the URL pattern to redirect to; resolved at request time.
    pattern_name = settings.OSCAR_ACCOUNTS_REDIRECT_URL
class AccountRegistrationView(RegisterUserMixin, generic.FormView):
    """Stand-alone registration page backed by EmailUserCreationForm."""
    form_class = EmailUserCreationForm
    template_name = 'customer/registration.html'
    redirect_field_name = 'next'

    def get(self, request, *args, **kwargs):
        # Authenticated users have nothing to register for.
        if request.user.is_authenticated():
            return redirect(settings.LOGIN_REDIRECT_URL)
        return super(AccountRegistrationView, self).get(
            request, *args, **kwargs)

    def get_logged_in_redirect(self):
        return reverse('customer:summary')

    def get_form_kwargs(self):
        kwargs = super(AccountRegistrationView, self).get_form_kwargs()
        # Pre-fill the email and capture the post-registration destination
        # from the querystring.
        kwargs['initial'] = {
            'email': self.request.GET.get('email', ''),
            'redirect_url': self.request.GET.get(self.redirect_field_name, '')
        }
        kwargs['host'] = self.request.get_host()
        return kwargs

    def get_context_data(self, *args, **kwargs):
        ctx = super(AccountRegistrationView, self).get_context_data(
            *args, **kwargs)
        # "Cancel" goes back to the referring page (safe fallback: '').
        ctx['cancel_url'] = safe_referrer(self.request, '')
        return ctx

    def form_valid(self, form):
        self.register_user(form)
        return redirect(form.cleaned_data['redirect_url'])
class AccountAuthView(RegisterUserMixin, generic.TemplateView):
    """
    This is actually a slightly odd double form view that allows a customer to
    either login or register.

    The two forms are distinguished by prefix and by the name of the submit
    button that was pressed.
    """
    template_name = 'customer/login_registration.html'
    login_prefix, registration_prefix = 'login', 'registration'
    login_form_class = EmailAuthenticationForm
    registration_form_class = EmailUserCreationForm
    redirect_field_name = 'next'

    def get(self, request, *args, **kwargs):
        # Already logged in: skip the page entirely.
        if request.user.is_authenticated():
            return redirect(settings.LOGIN_REDIRECT_URL)
        return super(AccountAuthView, self).get(
            request, *args, **kwargs)

    def get_context_data(self, *args, **kwargs):
        ctx = super(AccountAuthView, self).get_context_data(*args, **kwargs)
        # Only build fresh unbound forms when a bound one wasn't passed in
        # (i.e. when re-rendering after a validation failure).
        if 'login_form' not in kwargs:
            ctx['login_form'] = self.get_login_form()
        if 'registration_form' not in kwargs:
            ctx['registration_form'] = self.get_registration_form()
        return ctx

    def post(self, request, *args, **kwargs):
        # Use the name of the submit button to determine which form to validate
        if u'login_submit' in request.POST:
            return self.validate_login_form()
        elif u'registration_submit' in request.POST:
            return self.validate_registration_form()
        return http.HttpResponseBadRequest()

    # LOGIN

    def get_login_form(self, bind_data=False):
        return self.login_form_class(
            **self.get_login_form_kwargs(bind_data))

    def get_login_form_kwargs(self, bind_data=False):
        kwargs = {}
        kwargs['host'] = self.request.get_host()
        kwargs['prefix'] = self.login_prefix
        kwargs['initial'] = {
            'redirect_url': self.request.GET.get(self.redirect_field_name, ''),
        }
        if bind_data and self.request.method in ('POST', 'PUT'):
            kwargs.update({
                'data': self.request.POST,
                'files': self.request.FILES,
            })
        return kwargs

    def validate_login_form(self):
        form = self.get_login_form(bind_data=True)
        if form.is_valid():
            user = form.get_user()

            # Grab a reference to the session ID before logging in
            old_session_key = self.request.session.session_key

            auth_login(self.request, form.get_user())

            # Raise signal robustly (we don't want exceptions to crash the
            # request handling). We use a custom signal as we want to track the
            # session key before calling login (which cycles the session ID).
            signals.user_logged_in.send_robust(
                sender=self, request=self.request, user=user,
                old_session_key=old_session_key)

            msg = self.get_login_success_message(form)
            messages.success(self.request, msg)

            return redirect(self.get_login_success_url(form))

        ctx = self.get_context_data(login_form=form)
        return self.render_to_response(ctx)

    def get_login_success_message(self, form):
        return _("Welcome back")

    def get_login_success_url(self, form):
        redirect_url = form.cleaned_data['redirect_url']
        if redirect_url:
            return redirect_url

        # Redirect staff members to dashboard as that's the most likely place
        # they'll want to visit if they're logging in.
        if self.request.user.is_staff:
            return reverse('dashboard:index')

        return settings.LOGIN_REDIRECT_URL

    # REGISTRATION

    def get_registration_form(self, bind_data=False):
        return self.registration_form_class(
            **self.get_registration_form_kwargs(bind_data))

    def get_registration_form_kwargs(self, bind_data=False):
        kwargs = {}
        kwargs['host'] = self.request.get_host()
        kwargs['prefix'] = self.registration_prefix
        kwargs['initial'] = {
            'redirect_url': self.request.GET.get(self.redirect_field_name, ''),
        }
        if bind_data and self.request.method in ('POST', 'PUT'):
            kwargs.update({
                'data': self.request.POST,
                'files': self.request.FILES,
            })
        return kwargs

    def validate_registration_form(self):
        form = self.get_registration_form(bind_data=True)
        if form.is_valid():
            self.register_user(form)

            msg = self.get_registration_success_message(form)
            messages.success(self.request, msg)

            return redirect(self.get_registration_success_url(form))

        ctx = self.get_context_data(registration_form=form)
        return self.render_to_response(ctx)

    def get_registration_success_message(self, form):
        return _("Thanks for registering!")

    def get_registration_success_url(self, form):
        redirect_url = form.cleaned_data['redirect_url']
        if redirect_url:
            return redirect_url

        return settings.LOGIN_REDIRECT_URL
class LogoutView(generic.RedirectView):
    """Log the user out, redirect home and delete the cookies listed in
    OSCAR_COOKIES_DELETE_ON_LOGOUT."""
    url = settings.OSCAR_HOMEPAGE
    permanent = False

    def get(self, request, *args, **kwargs):
        auth_logout(request)
        response = super(LogoutView, self).get(request, *args, **kwargs)
        for cookie in settings.OSCAR_COOKIES_DELETE_ON_LOGOUT:
            response.delete_cookie(cookie)
        return response
# =============
# Profile
# =============
class ProfileView(PageTitleMixin, generic.TemplateView):
    """Read-only display of the user's profile fields."""
    template_name = 'customer/profile/profile.html'
    page_title = _('Profile')
    active_tab = 'profile'

    def get_context_data(self, **kwargs):
        ctx = super(ProfileView, self).get_context_data(**kwargs)
        ctx['profile_fields'] = self.get_profile_fields(self.request.user)
        return ctx

    def get_profile_fields(self, user):
        """Collect displayable {name, value} dicts from the custom user
        model's additional fields and, if configured, the profile class."""
        field_data = []

        # Check for custom user model
        for field_name in User._meta.additional_fields:
            field_data.append(
                self.get_model_field_data(user, field_name))

        # Check for profile class
        profile_class = get_profile_class()
        if profile_class:
            try:
                profile = profile_class.objects.get(user=user)
            except ObjectDoesNotExist:
                # No saved profile yet: use an unsaved instance so the
                # field list (with defaults) can still be rendered.
                profile = profile_class(user=user)

            field_names = [f.name for f in profile._meta.local_fields]
            for field_name in field_names:
                if field_name in ('user', 'id'):
                    continue
                field_data.append(
                    self.get_model_field_data(profile, field_name))

        return field_data

    def get_model_field_data(self, model_class, field_name):
        """
        Extract the verbose name and value for a model's field value
        """
        field = model_class._meta.get_field(field_name)
        if field.choices:
            # Use the human-readable label for choice fields.
            value = getattr(model_class, 'get_%s_display' % field_name)()
        else:
            value = getattr(model_class, field_name)
        return {
            'name': getattr(field, 'verbose_name'),
            'value': value,
        }
class ProfileUpdateView(PageTitleMixin, generic.FormView):
    """Edit the user's profile; emails the old address when it changes."""
    form_class = ProfileForm
    template_name = 'customer/profile/profile_form.html'
    communication_type_code = 'EMAIL_CHANGED'
    page_title = _('Edit Profile')
    active_tab = 'profile'
    success_url = reverse_lazy('customer:profile-view')

    def get_form_kwargs(self):
        kwargs = super(ProfileUpdateView, self).get_form_kwargs()
        kwargs['user'] = self.request.user
        return kwargs

    def form_valid(self, form):
        # Grab current user instance before we save form. We may need this to
        # send a warning email if the email address is changed.
        try:
            old_user = User.objects.get(id=self.request.user.id)
        except User.DoesNotExist:
            old_user = None

        form.save()

        # We have to look up the email address from the form's
        # cleaned data because the object created by form.save() can
        # either be a user or profile instance depending whether a profile
        # class has been specified by the AUTH_PROFILE_MODULE setting.
        new_email = form.cleaned_data['email']
        if old_user and new_email != old_user.email:
            # Email address has changed - send a confirmation email to the old
            # address including a password reset link in case this is a
            # suspicious change.
            ctx = {
                'user': self.request.user,
                'site': get_current_site(self.request),
                'reset_url': get_password_reset_url(old_user),
                'new_email': new_email,
            }
            msgs = CommunicationEventType.objects.get_and_render(
                code=self.communication_type_code, context=ctx)
            Dispatcher().dispatch_user_messages(old_user, msgs)

        messages.success(self.request, _("Profile updated"))
        return redirect(self.get_success_url())
class ProfileDeleteView(PageTitleMixin, generic.FormView):
    """Delete the user's account after re-confirming their password."""
    form_class = ConfirmPasswordForm
    template_name = 'customer/profile/profile_delete.html'
    page_title = _('Delete profile')
    active_tab = 'profile'
    success_url = settings.OSCAR_HOMEPAGE

    def get_form_kwargs(self):
        kwargs = super(ProfileDeleteView, self).get_form_kwargs()
        kwargs['user'] = self.request.user
        return kwargs

    def form_valid(self, form):
        # Password already verified by ConfirmPasswordForm.
        self.request.user.delete()
        messages.success(
            self.request,
            _("Your profile has now been deleted. Thanks for using the site."))
        return redirect(self.get_success_url())
class ChangePasswordView(PageTitleMixin, generic.FormView):
    """Change the password and notify the user by email (with a reset
    link, so an unauthorised change can be reverted)."""
    form_class = PasswordChangeForm
    template_name = 'customer/profile/change_password_form.html'
    communication_type_code = 'PASSWORD_CHANGED'
    page_title = _('Change Password')
    active_tab = 'profile'
    success_url = reverse_lazy('customer:profile-view')

    def get_form_kwargs(self):
        kwargs = super(ChangePasswordView, self).get_form_kwargs()
        kwargs['user'] = self.request.user
        return kwargs

    def form_valid(self, form):
        form.save()
        messages.success(self.request, _("Password updated"))

        ctx = {
            'user': self.request.user,
            'site': get_current_site(self.request),
            'reset_url': get_password_reset_url(self.request.user),
        }
        msgs = CommunicationEventType.objects.get_and_render(
            code=self.communication_type_code, context=ctx)
        Dispatcher().dispatch_user_messages(self.request.user, msgs)

        return redirect(self.get_success_url())
# =============
# Email history
# =============
class EmailHistoryView(PageTitleMixin, generic.ListView):
    """Paginated list of emails previously sent to the user."""
    context_object_name = "emails"
    template_name = 'customer/email/email_list.html'
    paginate_by = 20
    page_title = _('Email History')
    active_tab = 'emails'

    def get_queryset(self):
        # Only the requesting user's own emails.
        return Email._default_manager.filter(user=self.request.user)
class EmailDetailView(PageTitleMixin, generic.DetailView):
    """Customer email"""
    template_name = "customer/email/email_detail.html"
    context_object_name = 'email'
    active_tab = 'emails'

    def get_object(self, queryset=None):
        # Filtering on user ensures customers can only view their own emails.
        return get_object_or_404(Email, user=self.request.user,
                                 id=self.kwargs['email_id'])

    def get_page_title(self):
        """Append email subject to page title"""
        return u'%s: %s' % (_('Email'), self.object.subject)
# =============
# Order history
# =============
class OrderHistoryView(PageTitleMixin, generic.ListView):
    """
    Customer order history

    Supports filtering via OrderSearchForm; a search on just an order
    number short-circuits to the order detail page.
    """
    context_object_name = "orders"
    template_name = 'customer/order/order_list.html'
    paginate_by = 20
    model = Order
    form_class = OrderSearchForm
    page_title = _('Order History')
    active_tab = 'orders'

    def get(self, request, *args, **kwargs):
        # Presence of 'date_from' in the querystring signals a search.
        if 'date_from' in request.GET:
            self.form = self.form_class(self.request.GET)
            if not self.form.is_valid():
                self.object_list = self.get_queryset()
                ctx = self.get_context_data(object_list=self.object_list)
                return self.render_to_response(ctx)
            data = self.form.cleaned_data

            # If the user has just entered an order number, try and look it up
            # and redirect immediately to the order detail page.
            if data['order_number'] and not (data['date_to'] or
                                             data['date_from']):
                try:
                    order = Order.objects.get(
                        number=data['order_number'], user=self.request.user)
                except Order.DoesNotExist:
                    # Fall through to the normal filtered listing.
                    pass
                else:
                    return redirect(
                        'customer:order', order_number=order.number)
        else:
            self.form = self.form_class()
        return super(OrderHistoryView, self).get(request, *args, **kwargs)

    def get_queryset(self):
        qs = self.model._default_manager.filter(user=self.request.user)
        if self.form.is_bound and self.form.is_valid():
            qs = qs.filter(**self.form.get_filters())
        return qs

    def get_context_data(self, *args, **kwargs):
        ctx = super(OrderHistoryView, self).get_context_data(*args, **kwargs)
        ctx['form'] = self.form
        return ctx
class OrderDetailView(PageTitleMixin, PostActionMixin, generic.DetailView):
    """Detail page for one of the customer's own orders; supports the
    'reorder' post action to copy the order's lines into the basket."""
    model = Order
    active_tab = 'orders'

    def get_template_names(self):
        return ["customer/order/order_detail.html"]

    def get_page_title(self):
        """
        Order number as page title
        """
        return u'%s #%s' % (_('Order'), self.object.number)

    def get_object(self, queryset=None):
        # Filtering on user prevents viewing other customers' orders.
        return get_object_or_404(self.model, user=self.request.user,
                                 number=self.kwargs['order_number'])

    def do_reorder(self, order):  # noqa (too complex (10))
        """
        'Re-order' a previous order.

        This puts the contents of the previous order into your basket
        """
        # Collect lines to be added to the basket and any warnings for lines
        # that are no longer available.
        basket = self.request.basket
        lines_to_add = []
        warnings = []
        for line in order.lines.all():
            is_available, reason = line.is_available_to_reorder(
                basket, self.request.strategy)
            if is_available:
                lines_to_add.append(line)
            else:
                warnings.append(reason)

        # Check whether the number of items in the basket won't exceed the
        # maximum.
        total_quantity = sum([line.quantity for line in lines_to_add])
        is_quantity_allowed, reason = basket.is_quantity_allowed(
            total_quantity)
        if not is_quantity_allowed:
            messages.warning(self.request, reason)
            self.response = redirect('customer:order-list')
            return

        # Add any warnings
        for warning in warnings:
            messages.warning(self.request, warning)

        for line in lines_to_add:
            options = []
            for attribute in line.attributes.all():
                if attribute.option:
                    options.append({
                        'option': attribute.option,
                        'value': attribute.value})
            basket.add_product(line.product, line.quantity, options)

        if len(lines_to_add) > 0:
            self.response = redirect('basket:summary')
            messages.info(
                self.request,
                _("All available lines from order %(number)s "
                  "have been added to your basket") % {'number': order.number})
        else:
            self.response = redirect('customer:order-list')
            messages.warning(
                self.request,
                _("It is not possible to re-order order %(number)s "
                  "as none of its lines are available to purchase") %
                {'number': order.number})
class OrderLineView(PostActionMixin, generic.DetailView):
    """Customer order line

    Supports re-ordering a single line from a past order.
    """
    def get_object(self, queryset=None):
        order = get_object_or_404(Order, user=self.request.user,
                                  number=self.kwargs['order_number'])
        return order.lines.get(id=self.kwargs['line_id'])

    def do_reorder(self, line):
        # Default destination: back to the order detail page.
        self.response = redirect(
            'customer:order', int(self.kwargs['order_number']))
        basket = self.request.basket

        line_available_to_reorder, reason = line.is_available_to_reorder(
            basket, self.request.strategy)

        if not line_available_to_reorder:
            messages.warning(self.request, reason)
            return

        # We need to pass response to the get_or_create... method
        # as a new basket might need to be created
        self.response = redirect('basket:summary')

        # Convert line attributes into basket options
        options = []
        for attribute in line.attributes.all():
            if attribute.option:
                options.append({'option': attribute.option,
                                'value': attribute.value})
        basket.add_product(line.product, line.quantity, options)

        if line.quantity > 1:
            msg = _("%(qty)d copies of '%(product)s' have been added to your"
                    " basket") % {
                'qty': line.quantity, 'product': line.product}
        else:
            msg = _("'%s' has been added to your basket") % line.product

        messages.info(self.request, msg)
class AnonymousOrderDetailView(generic.DetailView):
    """Order detail page for orders placed without an account; access is
    gated by a verification hash carried in the URL."""
    model = Order
    template_name = "customer/anon_order.html"

    def get_object(self, queryset=None):
        # Check URL hash matches that for order to prevent spoof attacks
        order = get_object_or_404(self.model, user=None,
                                  number=self.kwargs['order_number'])
        if self.kwargs['hash'] != order.verification_hash():
            raise http.Http404()
        return order
# ------------
# Address book
# ------------
class AddressListView(PageTitleMixin, generic.ListView):
    """Customer address book"""
    context_object_name = "addresses"
    template_name = 'customer/address/address_list.html'
    paginate_by = 40
    active_tab = 'addresses'
    page_title = _('Address Book')

    def get_queryset(self):
        """Return customer's addresses"""
        return UserAddress._default_manager.filter(user=self.request.user)
class AddressCreateView(PageTitleMixin, generic.CreateView):
    """Add a new address to the customer's address book."""
    form_class = UserAddressForm
    model = UserAddress
    template_name = 'customer/address/address_form.html'
    active_tab = 'addresses'
    page_title = _('Add a new address')
    success_url = reverse_lazy('customer:address-list')

    def get_form_kwargs(self):
        kwargs = super(AddressCreateView, self).get_form_kwargs()
        # The form ties the new address to the current user.
        kwargs['user'] = self.request.user
        return kwargs

    def get_context_data(self, **kwargs):
        ctx = super(AddressCreateView, self).get_context_data(**kwargs)
        ctx['title'] = _('Add a new address')
        return ctx

    def get_success_url(self):
        messages.success(self.request,
                         _("Address '%s' created") % self.object.summary)
        return super(AddressCreateView, self).get_success_url()
class AddressUpdateView(PageTitleMixin, generic.UpdateView):
    """Edit one of the customer's own addresses."""
    form_class = UserAddressForm
    model = UserAddress
    template_name = 'customer/address/address_form.html'
    active_tab = 'addresses'
    page_title = _('Edit address')
    success_url = reverse_lazy('customer:address-list')

    def get_form_kwargs(self):
        kwargs = super(AddressUpdateView, self).get_form_kwargs()
        kwargs['user'] = self.request.user
        return kwargs

    def get_context_data(self, **kwargs):
        ctx = super(AddressUpdateView, self).get_context_data(**kwargs)
        ctx['title'] = _('Edit address')
        return ctx

    def get_queryset(self):
        # Restrict editable addresses to the requesting user's own.
        return self.request.user.addresses.all()

    def get_success_url(self):
        messages.success(self.request,
                         _("Address '%s' updated") % self.object.summary)
        return super(AddressUpdateView, self).get_success_url()
class AddressDeleteView(PageTitleMixin, generic.DeleteView):
    """Confirm-and-delete page for one of the customer's addresses."""
    model = UserAddress
    template_name = "customer/address/address_delete.html"
    page_title = _('Delete address?')
    active_tab = 'addresses'
    context_object_name = 'address'
    success_url = reverse_lazy('customer:address-list')

    def get_queryset(self):
        # Restrict deletable addresses to the requesting user's own.
        return UserAddress._default_manager.filter(user=self.request.user)

    def get_success_url(self):
        messages.success(self.request,
                         _("Address '%s' deleted") % self.object.summary)
        return super(AddressDeleteView, self).get_success_url()
class AddressChangeStatusView(generic.RedirectView):
    """
    Sets an address as default_for_(billing|shipping)
    """
    url = reverse_lazy('customer:address-list')
    permanent = False

    def get(self, request, pk=None, action=None, *args, **kwargs):
        address = get_object_or_404(UserAddress, user=self.request.user,
                                    pk=pk)
        #  We don't want the user to set an address as the default shipping
        #  address, though they should be able to set it as their billing
        #  address.
        if address.country.is_shipping_country:
            setattr(address, 'is_%s' % action, True)
        elif action == 'default_for_billing':
            setattr(address, 'is_default_for_billing', True)
        else:
            messages.error(request, _('We do not ship to this country'))
        # NOTE(review): save() runs in all branches, including the error
        # branch where no flag was set (a no-op write) -- confirm intended.
        address.save()
        return super(AddressChangeStatusView, self).get(
            request, *args, **kwargs)
| [
"bybek5@gmail.com"
] | bybek5@gmail.com |
79743241092a68390901622aa5ad794b68d72ca6 | e60c2913e39fa596f0c7353c13e633fba9b20f3c | /NewVersion/api.py | d177dac6c10ec53595abc91af5700b3c7bfb99be | [] | no_license | kqia040/MIformulation | 0ecb6f5f58474af6278329f43c380f6add4d4175 | 1a8a32afbb3735c2db23dc54670370e2e6d4b01f | refs/heads/master | 2020-04-07T07:28:22.193506 | 2017-08-08T22:22:31 | 2017-08-08T22:22:31 | 56,551,866 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 113 | py | # -*- coding: utf-8 -*-
"""
Created on Tue Apr 11 10:51:28 2017
API.py
@author: Kun
"""
| [
"kundianqian@gmail.com"
] | kundianqian@gmail.com |
e52c2654ae2511689ba72992d5c50715beeeb6e2 | f4f1add74a2025eeed0c305bacb368449a354964 | /pybin/expRptAcqBalanceInfo.py | ef4ec5a43ec40549c0d41df83d348c20e15c76db | [] | no_license | wxbjava/sandrpt | c3c3b4bffa65144f76991c66aaea397cdf1223fa | 6d2a5f5a8284b9bbb75c13601b5d59453dd64a25 | refs/heads/master | 2020-08-23T00:25:38.306862 | 2019-09-16T04:55:48 | 2019-09-16T04:55:48 | null | 0 | 0 | null | null | null | null | GB18030 | Python | false | false | 21,611 | py | #!/home/acqbat/python36/bin/python3
#-*- coding:gb18030 -*-
#间联系统余额报表,按机构每个机构一个
import cx_Oracle
import sys
import os
from math import fabs
from openpyxl.workbook import Workbook
from utl.common import *
class MchtBalance:
def __init__(self, insIdCd):
self.initAmt = 0.0
self.finalAmt = 0.0
self.insIdCd = insIdCd
self.txnCount = 0 #商户交易笔数
self.txnAmt = 0.0 #商户交易总金额
self.txnCost = 0.0 #总成本
self.errAmt = 0.0 #差错
self.mchtFee = 0.0 #商户手续费
self.mchtStlmAmt = 0.0 #商户结算费
self.payTxnCount = 0 #代付笔数
self.payTxnAmt = 0.0 #代付金额
self.payUnknownCount = 0.0 #未知代付笔数
self.payUnknownAmt = 0.0 #未知代付金额
self.payPayTxnRtn = 0.0 #代付退回金额
def __get_balance_amt(self, db, stlmDate):
sql = "select sum(MCHT_A_PREV_BAL_AT + MCHT_B_PREV_BAL_AT - MCHT_C_PREV_BAL_AT) " \
"from TBL_SAND_BALANCE_INF where host_date ='%s' and INS_ID_CD ='%s'" % (getLastDay(stlmDate), self.insIdCd)
cursor = db.cursor()
cursor.execute(sql)
x = cursor.fetchone()
if x[0] is not None:
self.initAmt = toNumberFmt(x[0]/100)
sql = "select sum(MCHT_A_PREV_BAL_AT + MCHT_B_PREV_BAL_AT - MCHT_C_PREV_BAL_AT) " \
"from TBL_SAND_BALANCE_INF where host_date ='%s' and INS_ID_CD ='%s'" % (stlmDate, self.insIdCd)
cursor.execute(sql)
x = cursor.fetchone()
if x[0] is not None:
self.finalAmt = toNumberFmt(x[0]/100)
cursor.close()
def __get_succ_txn(self, db, stlmDate):
sql = "select count(*), txn_num, nvl(sum(real_trans_amt),0), nvl(sum(ISS_FEE+SWT_FEE+PROD_FEE),0), " \
"nvl(sum(ERR_FEE),0), nvl(sum(mcht_fee),0) " \
"from TBL_STLM_TXN_BILL_DTL where " \
"CHECK_STA ='1' and host_date = '%s' " \
"and ins_id_cd = '%s' group by txn_num" % (stlmDate, self.insIdCd)
print(sql)
cursor = db.cursor()
cursor.execute(sql)
for ltTxn in cursor:
if ltTxn[1] == '1011':
# 消费
self.txnCount = ltTxn[0]
self.txnAmt = toNumberFmt(ltTxn[2])
self.txnCost = toNumberFmt(ltTxn[3])
self.mchtFee = toNumberFmt(ltTxn[5])
elif ltTxn[1] == '1801':
# 代付
self.payTxnCount = ltTxn[0]
self.payTxnAmt = fabs(ltTxn[2])
#计算代理商分润
sql = "select count(*), nvl(sum(trans_amt/100),0) from tbl_acq_txn_log where host_date ='%s' and " \
"txn_num ='1801' and substrb(ADDTNL_DATA,1,2) in ('04','05','06') and " \
"trans_state ='1' and company_cd ='%s'" % (stlmDate, self.insIdCd)
cursor.execute(sql)
x = cursor.fetchone()
self.payTxnCount = self.payTxnCount - x[0]
self.payTxnAmt = toNumberFmt(self.payTxnAmt - x[1])
cursor.close()
self.mchtStlmAmt = toNumberFmt(self.txnAmt - self.errAmt - self.mchtFee)
def __get_oth_txn(self, db, dbacc, stlmDate):
sql = "select count(*), sum(a.txn_amt) from tbl_err_chk_txn_dtl a " \
"left join tbl_mcht_inf b on a.CARD_ACCP_ID = b.mcht_cd " \
"left join tbl_acq_txn_log c on a.key_rsp = c.key_rsp " \
"where a.host_date ='%s' and a.chk_sta='4' and b.company_cd = '%s' " \
" and a.txn_num ='1801' and substr(c.ADDTNL_DATA,1,2) = '02'" % (stlmDate, self.insIdCd)
print(sql)
cursor = db.cursor()
cursor.execute(sql)
x = cursor.fetchone()
if x is not None:
self.payUnknownCount = toNumberFmt(x[0])
self.payUnknownAmt = toNumberFmt(x[1])
cursor.close()
#查找非当日代付退回记录
sql = "select sum(a.TXN_AT - a.TXN_FEE_AT)/100 from " \
"(select * from t_txn_log where host_date ='%s' and TXN_NUM ='801012') a " \
"left join (select * from t_txn_log where TXN_NUM='801011') b " \
"on a.txn_key = b.txn_key where a.host_date != b.host_date and a.ACCP_BRH_ID = '%s' and " \
"length(trim(a.ext_acct_id)) = 15" % (stlmDate, self.insIdCd)
print(sql)
cursor = dbacc.cursor()
cursor.execute(sql)
x = cursor.fetchone()
if x is not None:
self.payPayTxnRtn = toNumberFmt(x[0])
cursor.close()
def getAcctInfo(self, db, dbacc, stlmDate):
self.__get_balance_amt(db, stlmDate)
self.__get_succ_txn(db, stlmDate)
self.__get_oth_txn(db, dbacc, stlmDate)
class AgentBalance:
def __init__(self, insIdCd, dbbat, dbacc, stlmDate):
self.insIdCd = insIdCd
self.dbbat = dbbat
self.dbacc = dbacc
self.stlmDate = stlmDate
self.agentInitAmt = 0.0
self.agentFinalAmt = 0.0
self.agentPay = 0.0
self.agentPayUnknownCount = 0
self.agentPayUnknownAmt = 0
self.agentPayUnknownRtn = 0.0
self.agentIncome = 0.0
self.agentDelayIncome = 0.0
self.companyInitAmt = 0.0
self.companyFinalAmt = 0.0
self.companyIncome = 0.0
self.companyPay = 0.0
self.companyDelayIncome = 0.0
self.agentAcctId = self.__get_agent_acct_id()
self.companyAcctId = self.__get_company_acct_id()
def __get_agent_acct_id(self):
sql = "select ACCT_ID from t_acct_map where ext_acct_id ='%sA' and EXT_ACCT_TYPE ='0000000B'" % self.insIdCd
cursor = self.dbacc.cursor()
cursor.execute(sql)
x = cursor.fetchone()
cursor.close()
if x is not None:
return x[0]
else:
return '0'
def __get_company_acct_id(self):
sql = "select ACCT_ID from t_acct_map where ext_acct_id ='%sB' and EXT_ACCT_TYPE ='0000000B'" % self.insIdCd
cursor = self.dbacc.cursor()
cursor.execute(sql)
x = cursor.fetchone()
cursor.close()
if x is not None:
return x[0]
else:
return '0'
def __get_agent_balance_amt(self):
sql = "select sum(INS_B_PREV_BAL_AT - INS_C_PREV_BAL_AT) " \
"from TBL_SAND_BALANCE_INF where host_date ='%s' and INS_ID_CD ='%s'" % (getLastDay(self.stlmDate), self.insIdCd)
cursor = self.dbbat.cursor()
cursor.execute(sql)
x = cursor.fetchone()
if x[0] is not None:
self.agentInitAmt = toNumberFmt(x[0]/100)
sql = "select sum(INS_B_PREV_BAL_AT - INS_C_PREV_BAL_AT) " \
"from TBL_SAND_BALANCE_INF where host_date ='%s' and INS_ID_CD ='%s'" % (self.stlmDate, self.insIdCd)
cursor.execute(sql)
x = cursor.fetchone()
if x[0] is not None:
self.agentFinalAmt = toNumberFmt(x[0]/100)
sql = "select sum(INS_B_PREV_BAL_AT - INS_B_PREV_AVAIL_AT) from " \
"TBL_SAND_BALANCE_INF where host_date ='%s' and INS_ID_CD ='%s'" % (self.stlmDate, self.insIdCd)
cursor.execute(sql)
x = cursor.fetchone()
if x[0] is not None:
self.agentFinalLockAmt = toNumberFmt(x[0] / 100)
else :
self.agentFinalLockAmt = 0
cursor.close()
def __get_company_balance_amt(self):
sql = "select sum(ACQ_PREV_BAL_AT) " \
"from TBL_SAND_BALANCE_INF where host_date ='%s' and INS_ID_CD ='%s'" % (getLastDay(self.stlmDate), self.insIdCd)
cursor = self.dbbat.cursor()
cursor.execute(sql)
x = cursor.fetchone()
if x[0] is not None:
self.companyInitAmt = toNumberFmt(x[0]/100)
sql = "select sum(ACQ_PREV_BAL_AT) " \
"from TBL_SAND_BALANCE_INF where host_date ='%s' and INS_ID_CD ='%s'" % (self.stlmDate, self.insIdCd)
cursor.execute(sql)
x = cursor.fetchone()
if x[0] is not None:
self.companyFinalAmt = toNumberFmt(x[0]/100)
cursor.close()
def __get_agent_income(self):
sql = "select sum(ALL_PROFITS) from " \
"TBL_INS_PROFITS_TXN_SUM where " \
"host_date <'%s' and INS_ID_CD ='%s' and to_char(REC_UPD_TS, 'YYYYMMDD') = '%s'" % \
(self.stlmDate, self.insIdCd, self.stlmDate)
print(sql)
cursor = self.dbbat.cursor()
cursor.execute(sql)
x = cursor.fetchone()
if x is not None:
self.agentIncome = toNumberFmt(x[0])
cursor.close()
#计算交易日对应收入
sql = "select sum(ALL_PROFITS) from tbl_ins_profits_txn_sum where " \
"host_date <= '%s' and INS_ID_CD ='%s' and CHARGE_STA != '2'" % (self.stlmDate, self.insIdCd)
cursor = self.dbbat.cursor()
cursor.execute(sql)
x = cursor.fetchone()
if x is not None:
self.agentDelayIncome = toNumberFmt(x[0])
cursor.close()
def __get_agent_pay(self):
sql = "select sum(TXN_AT/100) from t_txn_dtl " \
"where ACCEPT_DT ='%s' and acct_id ='%s' " \
"and ACCT_TYPE ='00000002' and INT_TXN_CD in ('01005','01033')" % \
(self.stlmDate, self.agentAcctId)
cursor = self.dbacc.cursor()
cursor.execute(sql)
x = cursor.fetchone()
if x is not None:
self.agentPay = toNumberFmt(x[0])
sql = "select sum(TXN_AT/100) from t_txn_dtl " \
"where ACCEPT_DT ='%s' and acct_id ='%s' " \
"and ACCT_TYPE ='00000002' and INT_TXN_CD in ('01010','01034') and txn_part_cd like '%s%%'" % \
(self.stlmDate, self.agentAcctId, self.stlmDate[4:8])
cursor.execute(sql)
x = cursor.fetchone()
cursor.close()
if x is not None:
self.agentPay = self.agentPay - toNumberFmt(x[0])
def __get_agent_pay_unknown(self):
sql = "select count(*), sum(a.txn_amt) from tbl_err_chk_txn_dtl a " \
"left join tbl_mcht_inf b on a.CARD_ACCP_ID = b.mcht_cd " \
"left join tbl_acq_txn_log c on a.key_rsp = c.key_rsp " \
"where a.host_date ='%s' and a.chk_sta='4' and c.company_cd = '%s' " \
" and a.txn_num ='1801' and substr(c.ADDTNL_DATA,1,2) in ('04','05','06')" % (self.stlmDate, self.insIdCd)
print(sql)
cursor = self.dbbat.cursor()
cursor.execute(sql)
x = cursor.fetchone()
if x is not None:
self.agentPayUnknownCount = toNumberFmt(x[0])
self.agentPayUnknownAmt = toNumberFmt(x[1])
cursor.close()
# 查找非当日代付退回记录
sql = "select sum(a.TXN_AT - a.TXN_FEE_AT)/100 from " \
"(select * from t_txn_log where host_date ='%s' and TXN_NUM in ('801010','801034')) a " \
"left join (select * from t_txn_log where TXN_NUM in ('801005','801033')) b " \
"on a.txn_key = b.txn_key where a.host_date != b.host_date and a.ACCP_BRH_ID = '%s' and " \
"a.acct_id = '%s'" % (self.stlmDate, self.insIdCd, self.agentAcctId)
print(sql)
cursor = self.dbacc.cursor()
cursor.execute(sql)
x = cursor.fetchone()
if x is not None:
self.agentPayUnknownRtn = toNumberFmt(x[0])
cursor.close()
def __get_agent_lockamt(self):
#冻结
sql = "select sum(LOCK_AT)/100 from T_TXN_LOCK where " \
"host_date ='%s' and TXN_TYPE ='01' and acct_id ='%s'" % (self.stlmDate, self.agentAcctId)
self.agentLockAmt = 0
cursor = self.dbacc.cursor()
cursor.execute(sql)
x = cursor.fetchone()
if x[0] is not None:
self.agentLockAmt = toNumberFmt(x[0])
#解冻
sql = "select sum(LOCK_AT)/100 from T_TXN_LOCK where " \
"host_date ='%s' and TXN_TYPE ='02' and acct_id ='%s'" % (self.stlmDate, self.agentAcctId)
cursor.execute(sql)
x = cursor.fetchone()
if x[0] is not None:
self.agentLockAmt = toNumberFmt(self.agentLockAmt - x[0])
cursor.close()
def __get_company_income(self):
sql = "select sum(TXN_AT/100) from t_txn_dtl " \
"where ACCEPT_DT ='%s' and acct_id ='%s' " \
"and ACCT_TYPE ='00000002' and INT_TXN_CD='01004'" % \
(self.stlmDate, self.companyAcctId)
print(sql)
cursor = self.dbacc.cursor()
cursor.execute(sql)
x = cursor.fetchone()
if x is not None:
self.companyIncome = toNumberFmt(x[0])
sql = "select nvl(sum(TXN_AT/100),0) from t_txn_dtl " \
"where ACCEPT_DT ='%s' and acct_id ='%s' " \
"and ACCT_TYPE ='00000002' and INT_TXN_CD='01003' and txn_part_cd not like '%%核销%%' " % \
(self.stlmDate, self.companyAcctId)
cursor.execute(sql)
x = cursor.fetchone()
if x is not None:
self.companyIncome = toNumberFmt(self.companyIncome - x[0])
cursor.close()
# 计算交易日对应收入
sql = "select sum(ALL_PROFITS) from TBL_SAND_ACQ_PROFITS where " \
"host_date = '%s' and INS_ID_CD ='%s'" % (self.stlmDate, self.insIdCd)
cursor = self.dbbat.cursor()
cursor.execute(sql)
x = cursor.fetchone()
if x is not None:
self.companyDelayIncome = toNumberFmt(x[0])
cursor.close()
def __get_company_pay(self):
sql = "select sum(TXN_AT/100) from t_txn_dtl " \
"where ACCEPT_DT ='%s' and acct_id ='%s' " \
"and ACCT_TYPE ='00000002' and CR_DB_CD='0' and txn_part_cd like '%%核销%%'" % \
(self.stlmDate, self.companyAcctId)
cursor = self.dbacc.cursor()
cursor.execute(sql)
x = cursor.fetchone()
cursor.close()
if x is not None:
self.companyPay = toNumberFmt(x[0])
def getAcctInfo(self):
self.__get_agent_balance_amt()
self.__get_agent_income()
self.__get_agent_pay()
self.__get_agent_lockamt()
self.__get_company_balance_amt()
self.__get_company_income()
self.__get_company_pay()
self.__get_agent_pay_unknown()
def insertDb(stlmDate, db, mchtBal, agentBal):
sql = "insert into TBL_RPT_INS_BALANCE_INF (host_date,ins_id_cd,mcht_init_at,txn_count,txn_amt,txn_cost,err_amt," \
"mcht_fee,mcht_stlm_amt,pay_txn_count,pay_txn_amt,pay_unknown_count,pay_unknown_amt,pay_txn_rtn_amt," \
"mcht_sys_diff_at,mcht_final_at,agent_init_at,agent_income,agent_err_amt,agent_pay,agent_pay_unknown_count," \
"agent_pay_unknown_amt,agent_pay_txn_rtn_amt,agent_delay_income,agent_lock_amt,agent_final_lock_amt," \
"agent_final_at,company_init_at,company_income,company_pay,company_delay_income,company_final_at) values (" \
":1,:2,:3,:4,:5,:6,:7,:8,:9,:10,:11,:12,:13,:14,:15,:16,:17,:18,:19," \
":20,:21,:22,:23,:24,:25,:26,:27,:28,:29,:30,:31,:32)"
cursor = db.cursor()
cursor.prepare(sql)
param = (stlmDate, mchtBal.insIdCd, mchtBal.initAmt, mchtBal.txnCount,mchtBal.txnAmt, mchtBal.txnCost,
mchtBal.errAmt, mchtBal.mchtFee,mchtBal.mchtStlmAmt, mchtBal.payTxnCount, mchtBal.payTxnAmt,
mchtBal.payUnknownCount, mchtBal.payUnknownAmt, mchtBal.payPayTxnRtn, 0, mchtBal.finalAmt,
agentBal.agentInitAmt, agentBal.agentIncome, 0, agentBal.agentPay,
agentBal.agentPayUnknownCount, agentBal.agentPayUnknownAmt, agentBal.agentPayUnknownRtn,
agentBal.agentDelayIncome, agentBal.agentLockAmt, agentBal.agentFinalLockAmt,
agentBal.agentFinalAmt,
agentBal.companyInitAmt, agentBal.companyIncome, agentBal.companyPay,
agentBal.companyDelayIncome, agentBal.companyFinalAmt)
cursor.execute(None, param)
cursor.close()
def genRptFunc(stlmDate, db, ws, mchtBal, agentBal):
i = 1
ws.cell(row=i, column=8).value = '间联系统余额报表'
i = i + 1
# 报表头
ws.cell(row=i, column=1).value = '交易日期'
ws.cell(row=i, column=2).value = '商户期初余额'
ws.cell(row=i, column=3).value = '交易笔数'
ws.cell(row=i, column=4).value = '交易金额'
ws.cell(row=i, column=5).value = '总成本'
ws.cell(row=i, column=6).value = '差错费'
ws.cell(row=i, column=7).value = '手续费'
ws.cell(row=i, column=8).value = '商户应出账'
ws.cell(row=i, column=9).value = '代付笔数'
ws.cell(row=i, column=10).value = '代付金额'
ws.cell(row=i, column=11).value = '代付未知笔数'
ws.cell(row=i, column=12).value = '代付未知金额'
ws.cell(row=i, column=13).value = '未知代付退回金额'
ws.cell(row=i, column=14).value = '商户期末余额'
ws.cell(row=i, column=15).value = '机构合作商期初余额'
ws.cell(row=i, column=16).value = '机构合作商收入'
ws.cell(row=i, column=17).value = '机构合作商差错费'
ws.cell(row=i, column=18).value = '机构合作商划款'
ws.cell(row=i, column=19).value = '机构合作商划款未知笔数'
ws.cell(row=i, column=20).value = '机构合作商划款未知金额'
ws.cell(row=i, column=21).value = '机构合作商划款未知金额退回'
ws.cell(row=i, column=22).value = '机构合作商待入账收入'
ws.cell(row=i, column=23).value = '机构合作商冻结解冻金额'
ws.cell(row=i, column=24).value = '机构合作商冻结总额'
ws.cell(row=i, column=25).value = '机构合作商期末余额'
ws.cell(row=i, column=26).value = '杉德收入期初余额'
ws.cell(row=i, column=27).value = '杉德收入'
ws.cell(row=i, column=28).value = '杉德收入划款'
ws.cell(row=i, column=29).value = '杉德待入账收入'
ws.cell(row=i, column=30).value = '杉德收入未结余额'
#值
i = i + 1
ws.cell(row=i, column=1).value = stlmDate
ws.cell(row=i, column=2).value = mchtBal.initAmt
ws.cell(row=i, column=3).value = mchtBal.txnCount
ws.cell(row=i, column=4).value = mchtBal.txnAmt
ws.cell(row=i, column=5).value = mchtBal.txnCost
ws.cell(row=i, column=6).value = mchtBal.errAmt
ws.cell(row=i, column=7).value = mchtBal.mchtFee
ws.cell(row=i, column=8).value = mchtBal.mchtStlmAmt
ws.cell(row=i, column=9).value = mchtBal.payTxnCount
ws.cell(row=i, column=10).value = mchtBal.payTxnAmt
ws.cell(row=i, column=11).value = mchtBal.payUnknownCount
ws.cell(row=i, column=12).value = mchtBal.payUnknownAmt
ws.cell(row=i, column=13).value = mchtBal.payPayTxnRtn
ws.cell(row=i, column=14).value = mchtBal.finalAmt
ws.cell(row=i, column=15).value = agentBal.agentInitAmt
ws.cell(row=i, column=16).value = agentBal.agentIncome
ws.cell(row=i, column=17).value = 0
ws.cell(row=i, column=18).value = agentBal.agentPay
ws.cell(row=i, column=19).value = agentBal.agentPayUnknownCount
ws.cell(row=i, column=20).value = agentBal.agentPayUnknownAmt
ws.cell(row=i, column=21).value = agentBal.agentPayUnknownRtn
ws.cell(row=i, column=22).value = agentBal.agentDelayIncome
ws.cell(row=i, column=23).value = agentBal.agentLockAmt
ws.cell(row=i, column=24).value = agentBal.agentFinalLockAmt
ws.cell(row=i, column=25).value = agentBal.agentFinalAmt
ws.cell(row=i, column=26).value = agentBal.companyInitAmt
ws.cell(row=i, column=27).value = agentBal.companyIncome
ws.cell(row=i, column=28).value = agentBal.companyPay
ws.cell(row=i, column=29).value = agentBal.companyDelayIncome
ws.cell(row=i, column=30).value = agentBal.companyFinalAmt
insertDb(stlmDate, db, mchtBal, agentBal)
def main():
# 数据库连接配置
dbbat = cx_Oracle.connect('%s/%s@%s' % (os.environ['DBUSER'], os.environ['DBPWD'], os.environ['TNSNAME']),encoding='gb18030')
dbacc = cx_Oracle.connect('%s/%s@%s' % (os.environ['ACCDBUSER'], os.environ['ACCDBPWD'], os.environ['TNSNAME']),
encoding='gb18030')
# 获取清算日
if len(sys.argv) == 1:
cursor = dbbat.cursor()
sql = "select BF_STLM_DATE from TBL_BAT_CUT_CTL"
cursor.execute(sql)
x = cursor.fetchone()
stlm_date = x[0]
cursor.close()
else:
stlm_date = sys.argv[1]
print('hostDate %s genRptAcqBalance begin' % stlm_date)
filePath = '%s/%s/' % (os.environ['RPT7HOME'], stlm_date)
#查找机构
sql = "select trim(INS_ID_CD) from TBL_INS_INF where INS_TP ='01'"
cursor = dbbat.cursor()
cursor.execute(sql)
for ltData in cursor:
if ltData[0] is not None:
#查看信息
insIdCd = ltData[0]
mchtBal = MchtBalance(insIdCd)
mchtBal.getAcctInfo(dbbat, dbacc, stlm_date)
agentBal = AgentBalance(insIdCd, dbbat, dbacc, stlm_date)
agentBal.getAcctInfo()
filename = filePath + 'AcqBalanceInf_%s_%s.xlsx' % (insIdCd,stlm_date)
wb = Workbook()
ws = wb.active
genRptFunc(stlm_date, dbbat, ws, mchtBal, agentBal)
wb.save(filename)
wb.close()
cursor.close()
dbbat.commit()
if __name__ == '__main__':
main()
| [
"1326469982@qq.com"
] | 1326469982@qq.com |
a8dc75408e3f6cd7d2d10f760352fdae307c9a2a | 0d1af99b330b3b09cca5535dd396875753048f4f | /tests/test_compose.py | 8cf29fe38423a68ad5b01fd82214ee4be29e0baf | [
"Apache-2.0"
] | permissive | owkin/MONAI | 9ef51e41c776f68e7b7dc87e17759b18e4a043b3 | a1c50211c916ee8d7c18d72f4e8a0721b403418a | refs/heads/master | 2022-12-29T16:16:36.305402 | 2020-09-29T09:58:01 | 2020-09-29T09:58:01 | 254,631,434 | 1 | 0 | Apache-2.0 | 2020-04-15T08:03:45 | 2020-04-10T12:40:13 | Python | UTF-8 | Python | false | false | 5,250 | py | # Copyright 2020 MONAI Consortium
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import unittest
from monai.data import DataLoader, Dataset
from monai.transforms import AddChannel, Compose, Randomizable
from monai.utils import set_determinism
class _RandXform(Randomizable):
def randomize(self):
self.val = self.R.random_sample()
def __call__(self, __unused):
self.randomize()
return self.val
class TestCompose(unittest.TestCase):
def test_empty_compose(self):
c = Compose()
i = 1
self.assertEqual(c(i), 1)
def test_non_dict_compose(self):
def a(i):
return i + "a"
def b(i):
return i + "b"
c = Compose([a, b, a, b])
self.assertEqual(c(""), "abab")
def test_dict_compose(self):
def a(d):
d = dict(d)
d["a"] += 1
return d
def b(d):
d = dict(d)
d["b"] += 1
return d
c = Compose([a, b, a, b, a])
self.assertDictEqual(c({"a": 0, "b": 0}), {"a": 3, "b": 2})
def test_list_dict_compose(self):
def a(d): # transform to handle dict data
d = dict(d)
d["a"] += 1
return d
def b(d): # transform to generate a batch list of data
d = dict(d)
d["b"] += 1
d = [d] * 5
return d
def c(d): # transform to handle dict data
d = dict(d)
d["c"] += 1
return d
transforms = Compose([a, a, b, c, c])
value = transforms({"a": 0, "b": 0, "c": 0})
for item in value:
self.assertDictEqual(item, {"a": 2, "b": 1, "c": 2})
def test_random_compose(self):
class _Acc(Randomizable):
self.rand = 0.0
def randomize(self, data=None):
self.rand = self.R.rand()
def __call__(self, data):
self.randomize()
return self.rand + data
c = Compose([_Acc(), _Acc()])
self.assertNotAlmostEqual(c(0), c(0))
c.set_random_state(123)
self.assertAlmostEqual(c(1), 1.61381597)
c.set_random_state(223)
c.randomize()
self.assertAlmostEqual(c(1), 1.90734751)
def test_randomize_warn(self):
class _RandomClass(Randomizable):
def randomize(self, foo1, foo2):
pass
c = Compose([_RandomClass(), _RandomClass()])
with self.assertWarns(Warning):
c.randomize()
def test_err_msg(self):
transforms = Compose([abs, AddChannel(), round])
with self.assertRaisesRegex(Exception, "AddChannel"):
transforms(42.1)
def test_data_loader(self):
xform_1 = Compose([_RandXform()])
train_ds = Dataset([1], transform=xform_1)
xform_1.set_random_state(123)
out_1 = train_ds[0]
self.assertAlmostEqual(out_1, 0.2045649)
xform_1.set_random_state(123)
train_loader = DataLoader(train_ds, num_workers=0)
out_1 = next(iter(train_loader))
self.assertAlmostEqual(out_1.cpu().item(), 0.2045649)
if sys.platform != "win32": # skip multi-worker tests on win32
set_determinism(seed=123)
train_loader = DataLoader(train_ds, num_workers=1)
out_1 = next(iter(train_loader))
self.assertAlmostEqual(out_1.cpu().item(), 0.0409280)
set_determinism(seed=123)
train_loader = DataLoader(train_ds, num_workers=2)
out_1 = next(iter(train_loader))
self.assertAlmostEqual(out_1.cpu().item(), 0.0409280)
set_determinism(None)
def test_data_loader_2(self):
xform_2 = Compose([_RandXform(), _RandXform()])
train_ds = Dataset([1], transform=xform_2)
xform_2.set_random_state(123)
out_2 = train_ds[0]
self.assertAlmostEqual(out_2, 0.4092510)
xform_2.set_random_state(123)
train_loader = DataLoader(train_ds, num_workers=0)
out_2 = next(iter(train_loader))
self.assertAlmostEqual(out_2.cpu().item(), 0.4092510)
if sys.platform != "win32": # skip multi-worker tests on win32
set_determinism(seed=123)
train_loader = DataLoader(train_ds, num_workers=1)
out_2 = next(iter(train_loader))
self.assertAlmostEqual(out_2.cpu().item(), 0.9892192)
set_determinism(seed=123)
train_loader = DataLoader(train_ds, num_workers=2)
out_1 = next(iter(train_loader))
self.assertAlmostEqual(out_1.cpu().item(), 0.9892192)
set_determinism(None)
if __name__ == "__main__":
unittest.main()
| [
"noreply@github.com"
] | noreply@github.com |
4f11e5a9b9a6bb26ab4f1092072e1ee5fbba1ab3 | 6407cca09a68be7ff28ad8ce31fa43af5d099eb3 | /oldbkp_mao_data/TrafficModels.py | ed28f706a7aa1debf781fa3b45465c1b133c7a7a | [] | no_license | hannnni/lkw_algo | 34936fdda7b6e10112845358ea03a6a17ecf1389 | f6cbd5a14cafe0203097499fd3b836a8cf326c42 | refs/heads/master | 2021-01-18T14:11:10.944867 | 2014-11-11T22:21:58 | 2014-11-11T22:21:58 | 26,492,296 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 39,002 | py |
import os
import string
import random
import csv
import numpy# as np
import scipy
import pygame
import math
global NetworkFileName
#, signalPlanFileName
#filename = 'networkTest.csv'
NetworkFileName = 'NetworkTestFeld.csv'
#signalPlanFileName = 'temporarySp.csv'
"""
Should predefine link Number or should be read from the excle sheet?
"""
global NumberOfLinks
NumberOfLinks = 158#or can be readed from excel tables????, wenn constant , then need to be checked!!!
global simulationCycle, cycleTime, simulationTime
simulationCycle = 3
cycleTime = 70
simulationTime = simulationCycle*cycleTime
#have to be the same as in test.py, or should all passed from there....
class linkInfor:
def __init__(self):
self.linkID = []
self.linkLength = []
self.linkCapacity = []
self.isTurning = []
self.isInLink = []
self.isOutLink = []
self.isSignalized = []
self.mergeFrom = []
self.divergeTo = []
self.extraDivergeTo = []
#just in case that all the variables dont get the initial empty value!!!!
def init_Link(self):
self.linkID = []
self.linkLength = []
self.linkCapacity = []
self.isTurning = []
self.isInLink = []
self.isOutLink = []
self.isSignalized = []
self.mergeFrom = []
self.divergeTo = []
self.extraDivergeTo = []
class CellTransmissionModel:
def __init__(self):
self.linkIDs = []
self.linkCapacitys = []
self.linkMaxFlows = []
self.currentStatuses = []
self.previousStatuses = []
self.currentFlows = []
self.previousFlows = []
#maybe not need if a list of sublist is used
#self.linkIndexForStatus = []
#self.linkIndexForFlow = []
#ID Index:
self.mergeFromIds = []
self.divergeToIds = []
self.extraDivergeToIds =[]
self.mergeFromIdsList = []
self.divergeToIdsList = []
self.extraDivergeToIdsList =[]
#IX Index:
self.mergeFromIxs = []
self.divergeToIxs = []
self.extraDivergeToIxs =[]
self.mergeFromIxsList = []
self.divergeToIxsList = []
self.extraDivergeToIxsList =[]
self.isTurning = []
self.isInLink = []
self.isOutLink = []
self.isSignalized = []
self.turningRatiosDiverge = []
self.turningRatiosExtraDiverge = []
self.signalPlans = []
self.networkLinks =[]
# variable for evaluation:
self.waitingTime = 0
self.overallWaitingTime = 0
def readNetwork(self):
with open(NetworkFileName, 'rb') as csvfile:
spamreader = csv.reader(csvfile, delimiter=' ', quotechar='|')
self.networkLinks = range(NumberOfLinks)
for Ix in range(NumberOfLinks):
self.networkLinks[Ix] = linkInfor()
i = 0
for row in spamreader:
rowlist = row[0].split(';')
self.networkLinks[i].init_Link()
self.networkLinks[i].linkID = int(rowlist[0])
#print self.networkLinks[i].linkID
self.networkLinks[i].linkLength = int(rowlist[1])
#print self.networkLinks[i].linkLength
self.networkLinks[i].linkCapacity = int(rowlist[2])
#print 'No.of Link ' ,i
#print self.networkLinks[i].linkCapacity
self.networkLinks[i].isTurning = int(rowlist[3])
#print self.networkLinks[i].isTurning
self.networkLinks[i].isInLink = int(rowlist[4])
#print self.networkLinks[i].isInLink
self.networkLinks[i].isOutLink = int(rowlist[5])
#print self.networkLinks[i].isOutLink
self.networkLinks[i].isSignalized = int(rowlist[6])
#print self.networkLinks[i].isSignalized
mergeFromList = rowlist[7].split('|')
#print mergeFromList
self.networkLinks[i].mergeFrom = []
#print len(mergeFromList)
if mergeFromList[0]!= '':
for mergeLink in mergeFromList:
self.networkLinks[i].mergeFrom.append(int(mergeLink))
#print self.networkLinks[i].mergeFrom
divergeToList = rowlist[8].split('|')
self.networkLinks[i].divergeTo=[]
if divergeToList[0]!= '':
for divergeLink in divergeToList:
self.networkLinks[i].divergeTo.append(int(divergeLink))
#print 'self.networkLinks[i].divergeTo',self.networkLinks[i].divergeTo
extraDivergeToList = rowlist[9].split('|')
self.networkLinks[i].extraDivergeTo = []
if extraDivergeToList[0]!= '':
for exstraDivergeLink in extraDivergeToList:
self.networkLinks[i].extraDivergeTo.append(int(exstraDivergeLink))
#print self.networkLinks[i].extraDivergeTo
#
i = i+1
#print 'the No. of the link ',i
#return self.networkLinks # x, y = return(a, b), return more than 1 values
# temparary fix time plan, should be get this from GA
def readSignalPlans(self):
with open(signalPlanFileName, 'rb') as csvfile:
spamreader = csv.reader(csvfile, delimiter=' ', quotechar='|')
self.signalPlans = [0]*len(self.extraDivergeToIds)
for l in range(len(self.signalPlans)):
self.signalPlans[l] = []
i = 0
for row in spamreader:
rowlist = row[0].split(';')
for j in range(len(rowlist)):
self.signalPlans[j].append(int(rowlist[j]))
i=i+1
#print 'self.signalPlans', self.signalPlans
def buildCTM(self):
networkForCTM = self.networkLinks
#i = 0
for networkLink in networkForCTM:
self.linkIDs.append(networkLink.linkID)
#print i
#i = i +1
#print networkLink.linkCapacity
self.linkCapacitys.append((networkLink.linkCapacity)*1.6)
self.linkMaxFlows.append((networkLink.linkCapacity)*0.5)
self.isTurning.append(networkLink.isTurning)
self.isInLink.append(networkLink.isInLink)
self.isOutLink.append(networkLink.isOutLink)
self.isSignalized.append(networkLink.isSignalized)
if networkLink.linkID >= 500:
travelSpeed = 5 #speed at turning links
else:
travelSpeed = 12.5 # speed at normal links
travelTime = round(networkLink.linkLength/travelSpeed)
cellNumber = int(travelTime + 2) #include 2 vitual cells
#self.linkIndexForStatus.append(overallLengthStatus)
#overallLengthStatus = cellNumber + overallLengthStatus
currentLinkStatus = []
for cell in range(0,cellNumber):
currentLinkStatus.append(0.0)
self.currentStatuses.append(currentLinkStatus)
#self.linkIndexForFlow.append(overallLengthFlow)
#overallLengthFlow = cellNumber + overallLengthFlow - 1
currentLinkFlow = []
for FlowNumber in range(0,cellNumber-1):
currentLinkFlow.append(0.0)
self.currentFlows.append(currentLinkFlow)
#need Ixs instead of IDs
if len(networkLink.mergeFrom) != 0:
self.mergeFromIds.append(networkLink.linkID)
self.mergeFromIdsList.append(networkLink.mergeFrom)
if len(networkLink.divergeTo) != 0:
self.divergeToIds.append(networkLink.linkID)
self.divergeToIdsList.append(networkLink.divergeTo)
#temporary calculation of turning ratia, should be readed from the file
turningRatioDiverge = []
NumberOfDivergeLinks = len(networkLink.divergeTo)
turningRatioDiverge = [1/float(NumberOfDivergeLinks) for linksnumber in range(NumberOfDivergeLinks)]
self.turningRatiosDiverge.append(turningRatioDiverge)
if len(networkLink.extraDivergeTo) != 0:
self.extraDivergeToIds.append(networkLink.linkID)
self.extraDivergeToIdsList.append(networkLink.extraDivergeTo)
#temporary calculation of turning ratia, should be readed from the file
turningRatioExtraDiverge = []
NumberOfExtraDivergeLinks = len(networkLink.extraDivergeTo)
turningRatioExtraDiverge = [1/float(NumberOfExtraDivergeLinks) for linksnumber in range(NumberOfExtraDivergeLinks)]
self.turningRatiosExtraDiverge.append(turningRatioExtraDiverge)
self.previousStatuses = self.currentStatuses
self.previousFlows = self.currentFlows
#convert link id to link ix in oder to find links easily!!!!
for ml in range(len(self.mergeFromIds)):
linkIx = self.linkIDs.index(self.mergeFromIds[ml])
self.mergeFromIxs.append(linkIx)
MIdsTemp = self.mergeFromIdsList[ml]
MIxTemp = [0]*len(self.mergeFromIdsList[ml])
for mln in range(len(MIxTemp)):
MIxTemp[mln] = self.linkIDs.index(MIdsTemp[mln])
self.mergeFromIxsList.append(MIxTemp)
for dl in range(len(self.divergeToIds)):
linkIx = self.linkIDs.index(self.divergeToIds[dl])
self.divergeToIxs.append(linkIx)
DIdsTemp = self.divergeToIdsList[dl]
DIxTemp = [0]*len(self.divergeToIdsList[dl])
for dln in range(len(DIxTemp)):
DIxTemp[dln] = self.linkIDs.index(DIdsTemp[dln])
self.divergeToIxsList.append(DIxTemp)
#print 'self.divergeToIxsList:', self.divergeToIxsList
for el in range(len(self.extraDivergeToIds)):
linkIx = self.linkIDs.index(self.extraDivergeToIds[el])
self.extraDivergeToIxs.append(linkIx)
EIdsTemp = self.extraDivergeToIdsList[el]
EIxTemp = [0]*len(self.extraDivergeToIdsList[el])
for eln in range(len(EIxTemp)):
EIxTemp[eln] = self.linkIDs.index(EIdsTemp[eln])
self.extraDivergeToIxsList.append(EIxTemp)
def initFlow(self,vehcleInputs):
j = 0
#print 'vehcleInputs',vehcleInputs
for i in range(len(self.isInLink)):
if self.isInLink[i] == 1:
self.previousStatuses[i][1] = round(float(vehcleInputs[j])/float(cycleTime),3)
#print 'self.previousStatuses[1][1] during',self.previousStatuses[1][1]
j = j+1
#print 'vehcleInputs[1]',vehcleInputs[1]
#print 'initFlow self.previousStatuses[5]',self.previousStatuses[5]
# assume evrery inlink get 0.1 flow every second
#need to be assigned according to the detector data for all the inlinks
def initFlow_0(self,vehcleInputs):
j = 0
for i in range(len(self.isInLink)):
if self.isInLink[i] == 1:
#print 'vehcleInputs[j]',vehcleInputs[j]
for k in range(1,len(self.previousStatuses[i])-1):
self.previousStatuses[i][k] = round(float(vehcleInputs[j])/float(cycleTime),3)
#print 'self.currentStatuses[i][1]',self.currentStatuses[i][1]
j = j+1
# assume evrery inlink get 0.1 flow every second
#need to be assigned according to the detector data for all the inlinks
def flowModel(self):
#new empty list for currenflow and status, think about it, which model will be calculated at the very beginning , initialize should be there
currentStatuses = [0]*len(self.currentStatuses)
currentFlows = [0]*len(self.currentFlows)
for nl in range(len(self.currentStatuses)):
currentStatuses[nl] = [0.0]*len(self.currentStatuses[nl])
currentFlows[nl] = [0.0]*len(self.currentFlows[nl])
previousStatuses = self.previousStatuses[:]
previousFlows = self.previousFlows[:]
self.previousFlows = self.currentFlows[:]
self.previousStatuses = self.currentStatuses[:]
linkCapacitys = self.linkCapacitys
linkMaxFlows = self.linkMaxFlows
#check the linkIndexForStatus ?= linkIndexForFlow
for l in range(len(self.currentStatuses)):
capacity = linkCapacitys[l]
maxFlow = linkMaxFlows[l]
previousStatus = previousStatuses[l]
previousFlow = previousFlows[l]
cellNumber = len(currentStatuses[l])
#calculate flows
for c in range(cellNumber-1):
if previousStatus[c]>maxFlow:
sigma = 0.45 # proprotion between backwavespeed and freeflow speed
else:
sigma = 1.0
currentFlows[l][c] = \
round(min(previousStatus[c], maxFlow, \
sigma*(capacity-previousStatus[c+1])),3)
#update
for c2 in range(1,cellNumber-1):
currentStatuses[l][c2] = \
round(previousStatus[c2] + \
currentFlows[l][c2-1] - currentFlows[l][c2],3)
self.currentStatuses = currentStatuses
self.currentFlows = currentFlows
def waitingTimeEvaluation(self):
currentFlows = self.currentFlows
priviousStatuses = self.previousStatuses
self.waitingTime = 0
for i in range(len(currentFlows)):
currentFlow = currentFlows[i]
priviousStatus = priviousStatuses[i]
for j in range(len(currentFlow)):
self.waitingTime = self.waitingTime + abs(priviousStatus[j]-currentFlow[j])
self.overallWaitingTime = self.overallWaitingTime + self.waitingTime
#print 'Waiting time:',self.waitingTime
    def mergeModel(self):
        """Resolve every merge junction by distributing the downstream link's
        receiving capacity among its upstream links.

        For each merge-to link: iteratively give each upstream link a share of
        the remaining receiving capacity proportional to its capacity; links
        whose whole sending volume fits are settled (their virtual exit cell
        is built) and removed; the process repeats until either all remaining
        links are capacity-limited or none are left. The results are written
        into the virtual boundary cells of `self.previousStatuses`.
        """
        previousStatuses = self.previousStatuses
        linkCapacitys = self.linkCapacitys
        linkMaxFlows = self.linkMaxFlows
        linkIDs = self.linkIDs
        mergeFromIxs = self.mergeFromIxs
        mergeFromIxsList = self.mergeFromIxsList
        for ml in range(len(mergeFromIxs)):
            mergeToLinkIx = mergeFromIxs[ml]
            CapacityForMergeTo = linkCapacitys[mergeToLinkIx]
            # Copy: this working list shrinks as upstream links are settled.
            toBeDistributedLinks = mergeFromIxsList[ml][:]
            receivingCapacity = CapacityForMergeTo - previousStatuses[mergeToLinkIx][1]
            receivingMaxflow = linkMaxFlows[mergeToLinkIx]
            restOfReceivingCapacity = max(receivingCapacity,receivingMaxflow)
            # NOTE(review): the iteration bound uses len(mergeFromIxsList)
            # (number of merge junctions), not len(mergeFromIxsList[ml])
            # (number of upstream links at THIS junction) — looks like a bug;
            # confirm the intended bound.
            for step in range(len(mergeFromIxsList)-1):
                # Step 1: capacity-proportional weighting factors for the
                # links still awaiting a share.
                weightFactors = []
                totalCapacity = 0.0 #float take care 0.0, not 0
                for tbl in range(len(toBeDistributedLinks)):
                    toBeDistributedLinkIx = toBeDistributedLinks[tbl]
                    totalCapacity = totalCapacity + linkCapacitys[toBeDistributedLinkIx]
                for tbl in range(len(toBeDistributedLinks)):
                    toBeDistributedLinkIx = toBeDistributedLinks[tbl]
                    weightFactors.append(linkCapacitys[toBeDistributedLinkIx]/totalCapacity)
                # Step 2: settle every upstream link whose entire sending
                # volume fits into its share; the rest stay for another round.
                previousNoLinks = len(toBeDistributedLinks)
                toStayLinks = []
                for tbl in range(previousNoLinks):
                    toBeDistributedLinkIx = toBeDistributedLinks[tbl]
                    toBeDistributedVolume = restOfReceivingCapacity * weightFactors[tbl]
                    # Sending volume sits in the last real cell ([-2]).
                    toBesendVolume = previousStatuses[toBeDistributedLinkIx][-2]
                    if toBeDistributedVolume >= toBesendVolume:
                        # Build the virtual exit cell of this upstream link.
                        previousStatuses[toBeDistributedLinkIx][-1] = \
                            linkCapacitys[toBeDistributedLinkIx] - toBesendVolume
                        restOfReceivingCapacity = restOfReceivingCapacity - toBesendVolume
                    else:
                        toStayLinks.append(toBeDistributedLinks[tbl])
                toBeDistributedLinks = toStayLinks
                currentNoLinks = len(toBeDistributedLinks)
                if currentNoLinks == previousNoLinks:
                    # No link was settled this round: every remaining link is
                    # capacity-limited; give each its proportional share.
                    for tbl in range(len(toBeDistributedLinks)):
                        toBeDistributedLinkIx = toBeDistributedLinks[tbl]
                        toBeDistributedVolume = restOfReceivingCapacity * weightFactors[tbl]
                        previousStatuses[toBeDistributedLinkIx][-1] = \
                            linkCapacitys[toBeDistributedLinkIx] -toBeDistributedVolume
                    previousStatuses[mergeToLinkIx][0] = CapacityForMergeTo - previousStatuses[mergeToLinkIx][1]
                    break
                # Step 3: decide the downstream link's virtual entry cell.
                if toBeDistributedLinks ==[]:
                    # Everything fits: receive the sum of all sending volumes.
                    for tbl in range(len(mergeFromIxsList[ml])):
                        toBeDistributedLinkIx = mergeFromIxsList[ml][tbl]
                        previousStatuses[mergeToLinkIx][0] = previousStatuses[mergeToLinkIx][0] + \
                            previousStatuses[toBeDistributedLinkIx][-2]
                    break
                else:
                    # Still contended: receive at most the remaining space /
                    # max flow of the downstream link.
                    previousStatuses[mergeToLinkIx][0] = min(CapacityForMergeTo - previousStatuses[mergeToLinkIx][1], receivingMaxflow)
        self.previousStatuses = previousStatuses
def divergeModel(self):
previousStatuses = self.previousStatuses
linkMaxFlows = self.linkMaxFlows
linkCapacitys = self.linkCapacitys# still Number of Lanes
linkIDs = self.linkIDs
divergeToIxs = self.divergeToIxs
divergeToIxsList = self.divergeToIxsList
turningRatiosList = self.turningRatiosDiverge
for dl in range(len(divergeToIxs)):
divergeFromLinkIx = divergeToIxs[dl]
CapacityOfDivergeFrom = linkCapacitys[divergeFromLinkIx]
MaximalFlowofDivergeFrom = linkMaxFlows[divergeFromLinkIx]
turningRatios = turningRatiosList[dl]
toBeDistributedLinks = divergeToIxsList[dl]
restrictedSendingByOutgoings = MaximalFlowofDivergeFrom
#define the sending ability,
for ogl in range(len(toBeDistributedLinks)):
divergeToLinkIx = toBeDistributedLinks[ogl]
CapacityOfDivergeTo = linkCapacitys[divergeToLinkIx]
MaximalFlowofDivergeTo = linkMaxFlows[divergeToLinkIx]
restrictedSendingByOutgoings = restrictedSendingByOutgoings - \
max(MaximalFlowofDivergeTo - \
(CapacityOfDivergeTo - previousStatuses[divergeToLinkIx][1])/turningRatios[ogl], 0)
#build the virtual cell for incoming / diverge from links
overallsendingAbility = min(previousStatuses[divergeFromLinkIx][-2], max(restrictedSendingByOutgoings,0))
previousStatuses[divergeFromLinkIx][-1] = CapacityOfDivergeFrom - overallsendingAbility
#build the virtual cell for outgoing / diverge to links
for ogl in range(len(toBeDistributedLinks)):
divergeToLinkIx = toBeDistributedLinks[ogl]
CapacityOfDivergeTo = linkCapacitys[divergeToLinkIx]
MaximalFlowofDivergeTo = linkMaxFlows[divergeToLinkIx]
previousStatuses[divergeToLinkIx][0] = \
min(turningRatios[ogl]*overallsendingAbility,\
MaximalFlowofDivergeTo, \
CapacityOfDivergeTo - previousStatuses[divergeToLinkIx][1])
self.previousStatuses = previousStatuses
def ExtraDivergeModel(self, timeStep):
previousStatuses = self.previousStatuses
linkMaxFlows = self.linkMaxFlows
linkCapacitys = self.linkCapacitys# still Number of Lanes
extraDivergeToIxs = self.extraDivergeToIxs
extraDivergeToIxsList = self.extraDivergeToIxsList
turningRatiosList = self.turningRatiosExtraDiverge
signalPlans = self.signalPlans
timeStepInCycle = timeStep
for dl in range(len(extraDivergeToIxs)):
divergeFromLinkIx = extraDivergeToIxs[dl]
CapacityOfDivergeFrom = linkCapacitys[divergeFromLinkIx]
MaximalFlowofDivergeFrom = linkMaxFlows[divergeFromLinkIx]
turningRatios = turningRatiosList[dl]
toBeDistributedLinks = extraDivergeToIxsList[dl]
restrictedSendingByOutgoings = MaximalFlowofDivergeFrom
if signalPlans[dl][timeStepInCycle] == 0:
previousStatuses[divergeFromLinkIx][-1] = CapacityOfDivergeFrom
for ogl in range(len(toBeDistributedLinks)):
extraDivergeToLinkIx = toBeDistributedLinks[ogl]
previousStatuses[extraDivergeToLinkIx][0] = 0
else:
#define the sending ability,
for ogl in range(len(toBeDistributedLinks)):
extraDivergeToLinkIx = toBeDistributedLinks[ogl]
CapacityOfDivergeTo = linkCapacitys[extraDivergeToLinkIx]
MaximalFlowofDivergeTo = linkMaxFlows[extraDivergeToLinkIx]
restrictedSendingByOutgoings = restrictedSendingByOutgoings - \
max(MaximalFlowofDivergeTo - \
(CapacityOfDivergeTo - previousStatuses[extraDivergeToLinkIx][1])/turningRatios[ogl], 0)
#build the virtual cell for incoming / diverge from links
overallsendingAbility = min(previousStatuses[divergeFromLinkIx][-2], max(restrictedSendingByOutgoings,0))
previousStatuses[divergeFromLinkIx][-1] = CapacityOfDivergeFrom - overallsendingAbility
#build the virtual cell for outgoing / diverge to links
for ogl in range(len(toBeDistributedLinks)):
extraDivergeToLinkIx = toBeDistributedLinks[ogl]
CapacityOfDivergeTo = linkCapacitys[extraDivergeToLinkIx]
MaximalFlowofDivergeTo = linkMaxFlows[extraDivergeToLinkIx]
previousStatuses[extraDivergeToLinkIx][0] = \
min(turningRatios[ogl]*overallsendingAbility,\
MaximalFlowofDivergeTo, \
CapacityOfDivergeTo - previousStatuses[extraDivergeToLinkIx][1])
self.previousStatuses = previousStatuses
class CNSMixModel:
    """Discrete vehicle layer (presumably HGVs) running on top of the
    continuous cell-transmission model.

    Mirrors the CTM's link/cell geometry and, each step, derives per-cell
    movement permissions ("speeds") from the CTM's flow pattern, then moves
    individual vehicles through the same merge/diverge topology. Merge and
    diverge conflicts are resolved stochastically via `random`, so two runs
    are not bitwise reproducible.
    """
    def __init__(self, ctm):
        """Copy geometry, topology and signal metadata from a built CTM and
        allocate zeroed per-cell state mirroring the CTM's layout."""
        self.linkIDs = ctm.linkIDs
        self.linkCapacitys = ctm.linkCapacitys
        self.linkMaxFlows = ctm.linkMaxFlows
        self.SpeedsList = []
        #self.CTM_currentStatuses = ctm.currentStatuses
        self.CTM_previousStatuses = ctm.previousStatuses
        self.CTM_currentFlows = ctm.currentFlows
        #self.currentStatuses = []
        self.previousStatuses = []
        #self.currentFlows = []
        self.previousFlows = []
        # Per-link, per-cell arrays shaped exactly like the CTM state.
        self.SpeedsList = [0]*len(self.CTM_previousStatuses)
        self.currentStatuses = [0]*len(self.CTM_previousStatuses)
        self.currentFlows = [0]*len(self.CTM_currentFlows)
        for nl in range(len(self.currentStatuses)):
            self.SpeedsList[nl] = [0]*len(self.CTM_previousStatuses[nl])
            self.currentStatuses[nl] = [0]*len(self.CTM_previousStatuses[nl])
            self.currentFlows[nl] = [0]*len(self.CTM_currentFlows[nl])
        # Topology by link ID:
        self.mergeFromIds = ctm.mergeFromIds
        self.divergeToIds = ctm.divergeToIds
        self.extraDivergeToIds = ctm.extraDivergeToIds
        self.mergeFromIdsList = ctm.mergeFromIdsList
        self.divergeToIdsList = ctm.divergeToIdsList
        self.extraDivergeToIdsList = ctm.extraDivergeToIdsList
        # Topology by positional index:
        self.mergeFromIxs = ctm.mergeFromIxs
        self.divergeToIxs = ctm.divergeToIxs
        self.extraDivergeToIxs = ctm.extraDivergeToIxs
        self.mergeFromIxsList = ctm.mergeFromIxsList
        self.divergeToIxsList = ctm.divergeToIxsList
        self.extraDivergeToIxsList = ctm.extraDivergeToIxsList
        self.isTurning = ctm.isTurning
        self.isInLink = ctm.isInLink
        self.isOutLink = ctm.isOutLink
        self.isSignalized = ctm.isSignalized
        self.turningRatiosDiverge = ctm.turningRatiosDiverge
        self.turningRatiosExtraDiverge = ctm.turningRatiosExtraDiverge
        # Evaluation counters:
        self.stops = 0
        self.overallStops = 0
    def SpeedDiriving(self,ctm,timeStep):
        """Derive a 0/1 movement permission for every cell from the CTM's
        current flows, and re-initialize this layer's per-step state.

        A cell is passable (1) when the upstream CTM cell fully discharged
        (flow >= 95% of density) or is flowing at max flow (within 0.05);
        signal-controlled exit cells follow the signal plan instead.
        """
        self.CTM_previousStatuses = ctm.previousStatuses
        self.CTM_currentFlows = ctm.currentFlows
        # Rotate buffers, then reallocate fresh zeroed arrays.
        self.previousStatuses = self.currentStatuses[:]
        self.previousFlows = self.currentFlows[:]
        self.SpeedsList = [0]*len(self.CTM_previousStatuses)
        self.currentStatuses = [0]*len(self.CTM_previousStatuses)
        self.currentFlows = [0]*len(self.CTM_currentFlows)
        for nl in range(len(self.currentStatuses)):
            self.SpeedsList[nl] = [0]*len(self.CTM_previousStatuses[nl])
            self.currentStatuses[nl] = [0]*len(self.CTM_previousStatuses[nl])
            self.currentFlows[nl] = [0]*len(self.CTM_currentFlows[nl])
        for nl in range(len(self.SpeedsList)):
            CTM_currentFlow = self.CTM_currentFlows[nl]
            CTM_previousStatus = self.CTM_previousStatuses[nl]
            maxFlow = self.linkMaxFlows[nl]
            for nc in range(1,len(self.SpeedsList[nl])):
                if CTM_currentFlow[nc-1] >= CTM_previousStatus[nc-1]*0.95 or math.fabs(CTM_currentFlow[nc-1]- maxFlow)<0.05:
                    self.SpeedsList[nl][nc] = 1
                else:
                    self.SpeedsList[nl][nc] = 0
        # Signalized diverges: last cell follows the signal plan directly.
        extraDivergeToIxs = ctm.extraDivergeToIxs
        signalPlans = ctm.signalPlans
        timeStepInCycle = timeStep
        for dl in range(len(extraDivergeToIxs)):
            divergeFromLinkIx = extraDivergeToIxs[dl]
            if signalPlans[dl][timeStepInCycle] == 0:
                self.SpeedsList[divergeFromLinkIx][-1] = 0
            else:
                self.SpeedsList[divergeFromLinkIx][-1] = 1
    def HGVPositioning(self,HGVInputs):
        """Drop individual vehicles onto the network.

        `HGVInputs[i]` is a list of relative positions (0..1) along the input
        link whose index is `HGVInputLinkIxs[i]`; each position is mapped to a
        cell and the cell's vehicle count is incremented.
        """
        # NOTE(review): the input-link indices are hard-coded for one specific
        # network — they must match the network files loaded by the CTM.
        HGVInputLinkIxs = [3,5,9,14,15,17,22,25,29,35,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,\
            58,59,60,61,62,63,64,65,66,67,72,74,81,86,88,92,96,99,101,103,104,106,111,113,119,124,125,137,143,145,146,151]#need to check...................................................
        for i in range(len(HGVInputLinkIxs)):
            if len(HGVInputs[i])!=0:
                linkIx = HGVInputLinkIxs[i]
                HGVPositions = [0]*len(HGVInputs[i])
                cellNumber = len(self.previousStatuses[linkIx])
                for j in range(len(HGVPositions)):
                    HGVPositions[j] = int(HGVInputs[i][j]*cellNumber)
                    self.previousStatuses[linkIx][HGVPositions[j]] = self.previousStatuses[linkIx][HGVPositions[j]] + 1
    def flowModel(self):
        """Move vehicles one cell forward wherever the cell's speed flag
        permits, bounded by twice the link's max flow, then apply the
        conservation update to interior cells."""
        for nl in range(len(self.currentFlows)):
            # Capacity of this layer is 2x the CTM max flow (integer vehicles).
            maxFlow = int(self.linkMaxFlows[nl]*2)
            for nc in range(1,len(self.SpeedsList[nl])):
                if self.SpeedsList[nl][nc] == 1:
                    self.currentFlows[nl][nc-1] = max(0,int(min(self.previousStatuses[nl][nc-1], maxFlow, maxFlow-self.previousStatuses[nl][nc])))
                else:
                    self.currentFlows[nl][nc-1] = 0
            for ncc in range(1,len(self.SpeedsList[nl])-1):
                self.currentStatuses[nl][ncc] = max(0,self.previousStatuses[nl][ncc] + self.currentFlows[nl][ncc-1] - self.currentFlows[nl][ncc])
    def stopsEvaluation(self):
        """Count vehicles that could not advance this step (flow dropped
        relative to the previous step) and add them to `overallStops`."""
        self.stops = 0
        for i in range(len(self.previousFlows)):
            for j in range(1,len(self.previousFlows[i])):
                self.stops = self.stops - min((self.currentFlows[i][j]-self.previousFlows[i][j-1]),0)
        self.overallStops = self.stops + self.overallStops
    def initVcell(self):
        """Clear the virtual boundary cells of every link before the
        merge/diverge models rebuild them."""
        for nl in range(len(self.previousStatuses)):
            self.previousStatuses[nl][0] = 0
            self.previousStatuses[nl][-1] = 0
    def mergeModel(self):
        """Resolve merges at the vehicle level.

        If everything fits downstream, all senders pass; otherwise downstream
        space is raffled off one vehicle at a time among the upstream links
        (via `random.randint`), so the outcome is stochastic.
        """
        for nl in range(len(self.mergeFromIxs)):
            mergeToLinkIx = self.mergeFromIxs[nl]
            mergeFromLinksIxList = self.mergeFromIxsList[nl]
            mergeVolume = 0
            maxMergeVolume = 0
            restVolume = 0
            capacity_mtl = int(self.linkMaxFlows[mergeToLinkIx]*2)
            for nn in range(len(mergeFromLinksIxList)):
                mergeFromLinkIx = mergeFromLinksIxList[nn]
                mergeVolume = mergeVolume + self.previousStatuses[mergeFromLinkIx][-2]
            if self.SpeedsList[mergeToLinkIx][1] ==1:
                if mergeVolume <= capacity_mtl - self.previousStatuses[mergeToLinkIx][1]:
                    # Everything fits: all upstream links discharge fully.
                    self.previousStatuses[mergeToLinkIx][0] = mergeVolume
                    for nn in range(len(mergeFromLinksIxList)):
                        mergeFromLinkIx = mergeFromLinksIxList[nn]
                        capacity_mfl = int(self.linkMaxFlows[mergeFromLinkIx]*2)
                        self.previousStatuses[mergeFromLinkIx][-1] = max(0,capacity_mfl - self.previousStatuses[mergeFromLinkIx][-2])
                else:
                    # Contended: raffle the remaining space vehicle by vehicle.
                    maxMergeVolume = capacity_mtl - self.previousStatuses[mergeToLinkIx][1]
                    restVolume = maxMergeVolume
                    receivedVolume = 0
                    itN = 0
                    while restVolume>=1:
                        for nn in range(len(mergeFromLinksIxList)):
                            # NOTE(review): the raw random index is used directly
                            # as a LINK index — this looks like it should be
                            # mergeFromLinksIxList[random.randint(...)]; confirm.
                            mergeFromLinkIx = random.randint(0,len(mergeFromLinksIxList)-1)
                            capacity_mfl = int(self.linkMaxFlows[mergeFromLinkIx]*2)
                            if self.previousStatuses[mergeFromLinkIx][-1] < capacity_mfl:
                                self.previousStatuses[mergeFromLinkIx][-1] = self.previousStatuses[mergeFromLinkIx][-1] + 1
                                restVolume = restVolume -1
                                receivedVolume = restVolume +1
                        itN = itN+1
                        if itN>len(mergeFromLinksIxList):
                            break
                    self.previousStatuses[mergeToLinkIx][0] = receivedVolume
            else:
                # Downstream blocked: nobody moves.
                self.previousStatuses[mergeToLinkIx][0] = 0
                for nn in range(len(mergeFromLinksIxList)):
                    mergeFromLinkIx = mergeFromLinksIxList[nn]
                    capacity_mfl = int(self.linkMaxFlows[mergeFromLinkIx]*2)
                    self.previousStatuses[mergeFromLinkIx][-1] = capacity_mfl
    def divergeModel(self):
        """Resolve diverges at the vehicle level: each waiting vehicle picks
        a branch by sampling the turning-ratio distribution and moves if the
        chosen branch still has room (stochastic)."""
        for nl in range(len(self.divergeToIxs)):
            divergeFromLinkIx = self.divergeToIxs[nl]
            divergeTolinksIxList = self.divergeToIxsList[nl]
            turningRatios = self.turningRatiosDiverge[nl]
            allValue = 0.0
            distributedVehicles = 0
            for nn in range(len(turningRatios)):
                allValue = allValue + turningRatios[nn]
            # Cumulative split points over the normalized turning ratios.
            spiltPoints = [0]*len(turningRatios)
            splitPoint = 0.0
            for nn in range(len(turningRatios)):
                spiltPoints[nn] = splitPoint + turningRatios[nn]/allValue
                splitPoint = splitPoint + turningRatios[nn]/allValue
            NoOfVehicles = 0
            NoOfVehicles = self.previousStatuses[divergeFromLinkIx][-2]
            irN = 0
            while NoOfVehicles>=1:
                # Bail out after a bounded number of failed placements.
                if irN >=len(turningRatios):
                    break
                irN = irN+1
                randomDecision = random.random()
                for nn in range(len(turningRatios)):
                    if randomDecision < spiltPoints[nn]:
                        distributedLinkIx = divergeTolinksIxList[nn]
                        capacity = int(self.linkMaxFlows[distributedLinkIx]*2)
                        if self.previousStatuses[distributedLinkIx][0] < capacity:
                            self.previousStatuses[distributedLinkIx][0] =\
                                self.previousStatuses[distributedLinkIx][0] + 1
                            NoOfVehicles = NoOfVehicles -1
                        break
            distributedVehicles = self.previousStatuses[divergeFromLinkIx][-2] - NoOfVehicles
            self.previousStatuses[divergeFromLinkIx][-1] = int(self.linkMaxFlows[divergeFromLinkIx]*2) - distributedVehicles
    def extraDivergemodel(self):
        """Same as `divergeModel` but gated by the per-cell signal flag set
        in `SpeedDiriving`: on red nothing is distributed."""
        for nl in range(len(self.extraDivergeToIxs)):
            divergeFromLinkIx = self.extraDivergeToIxs[nl]
            divergeTolinksIxList = self.extraDivergeToIxsList[nl]
            turningRatios = self.turningRatiosExtraDiverge[nl]
            allValue = 0.0
            distributedVehicles = 0
            if self.SpeedsList[divergeFromLinkIx][-1] == 1:
                for nn in range(len(turningRatios)):
                    allValue = allValue + turningRatios[nn]
                spiltPoints = [0]*len(turningRatios)
                splitPoint = 0.0
                for nn in range(len(turningRatios)):
                    spiltPoints[nn] = splitPoint + turningRatios[nn]/allValue
                    splitPoint = splitPoint + turningRatios[nn]/allValue
                NoOfVehicles = 0
                NoOfVehicles = self.previousStatuses[divergeFromLinkIx][-2]
                testno = 0
                while NoOfVehicles>=1:
                    if testno >= len(turningRatios):
                        break
                    testno = testno +1
                    randomDecision = random.random()
                    for nn in range(len(turningRatios)):
                        if randomDecision < spiltPoints[nn]:
                            distributedLinkIx = divergeTolinksIxList[nn]
                            capacity = int(self.linkMaxFlows[distributedLinkIx]*2)
                            if self.previousStatuses[distributedLinkIx][0] < capacity:
                                self.previousStatuses[distributedLinkIx][0] =\
                                    self.previousStatuses[distributedLinkIx][0] + 1
                                NoOfVehicles = NoOfVehicles -1
                            break
                distributedVehicles = self.previousStatuses[divergeFromLinkIx][-2] - NoOfVehicles
                self.previousStatuses[divergeFromLinkIx][-1] = int(self.linkMaxFlows[divergeFromLinkIx]*2) - distributedVehicles
            else:
                # Red signal: upstream exit blocked, downstream entries empty.
                self.previousStatuses[divergeFromLinkIx][-1] = int(self.linkMaxFlows[divergeFromLinkIx]*2)
                for nn in range(len(turningRatios)):
                    distributedLinkIx = divergeTolinksIxList[nn]
                    self.previousStatuses[distributedLinkIx][0] = 0
def pretest():
    """Build a cell-transmission model from the network files without
    running a simulation; returns the ready-to-use model."""
    model = CellTransmissionModel()
    model.readNetwork()
    model.buildCTM()
    return model
def maintest(sps,vehcleInputs,HGVInputs):
    """Run one full simulation and return the evaluation metrics.

    Args:
        sps: signal plans (one 0/1 sequence per signalized diverge),
             typically supplied by an optimizer (e.g. a GA).
        vehcleInputs: per-cycle vehicle counts for the entry links.
        HGVInputs: initial relative positions of individual vehicles
             per hard-coded input link (see CNSMixModel.HGVPositioning).

    Returns:
        (total CTM waiting time, total vehicle-layer stop count).
    """
    #os.system('cls')
    ctm = CellTransmissionModel()
    ctm.readNetwork()
    ctm.buildCTM()
    #ctm.readSignalPlans()#get signal plan from GA
    ctm.signalPlans = sps
    mcns = CNSMixModel(ctm)
    # Warm-start the CTM before stepping.
    ctm.initFlow_0(vehcleInputs)
    for timeStep in range(simulationTime):
        # Continuous layer: demand, junctions, propagation, evaluation.
        ctm.initFlow(vehcleInputs) #get traffic demand from vissim
        ctm.mergeModel()
        ctm.divergeModel()
        ctm.ExtraDivergeModel(timeStep)
        ctm.flowModel()
        ctm.waitingTimeEvaluation()
        # Discrete layer: permissions from the CTM, then vehicle movement.
        mcns.SpeedDiriving(ctm,timeStep)
        if timeStep == 0:
            # Vehicles are only seeded once, at the very first step.
            mcns.HGVPositioning(HGVInputs)
        mcns.initVcell()
        mcns.mergeModel()
        mcns.divergeModel()
        mcns.extraDivergemodel()
        mcns.flowModel()
        mcns.stopsEvaluation()
    #mcns.stopsEvaluation()
    return ctm.overallWaitingTime, mcns.overallStops
#(overallWaitingTime, mcns.overallStops) = maintest(sps)
if __name__ == '__main__':
maintest() | [
"johannes@vonoswald.de"
] | johannes@vonoswald.de |
f0d66222bd2e878efcee349fcddf1454f95dbff7 | 1b550fe52e49d49d60caa1b7e8720b132a19d0ff | /sns_topic/sns_topic/lib/python2.7/site-packages/ansible/modules/network/meraki/meraki_network.py | ebc207b6c2f8de3373cdcdce76fceec29779f997 | [] | no_license | manuprathapan/Ansible | c25e44248f73cf1c37af3c3a3b74bb1f49f8211e | 6dcc840cd623ae73ec6f5043f8a35ed2c261df0f | refs/heads/master | 2020-03-22T06:49:26.797231 | 2018-07-12T03:05:31 | 2018-07-12T03:05:31 | 139,660,558 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,487 | py | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2018, Kevin Breit (@kbreit) <kevin.breit@kevinbreit.net>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {
'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'
}
DOCUMENTATION = r'''
---
module: meraki_network
short_description: Manage networks in the Meraki cloud
version_added: "2.6"
description:
- Allows for creation, management, and visibility into networks within Meraki.
options:
auth_key:
description:
- Authentication key provided by the dashboard. Required if environmental variable MERAKI_KEY is not set.
state:
description:
        - Create, modify, remove, or query a network.
choices: [absent, present, query]
default: present
net_name:
description:
- Name of a network.
aliases: [name, network]
net_id:
description:
- ID number of a network.
org_name:
description:
- Name of organization associated to a network.
org_id:
description:
- ID of organization associated to a network.
type:
description:
- Type of network device network manages.
- Required when creating a network.
choices: [appliance, combined, switch, wireless]
aliases: [net_type]
tags:
description:
- Comma delimited list of tags to assign to network.
timezone:
description:
- Timezone associated to network.
- See U(https://en.wikipedia.org/wiki/List_of_tz_database_time_zones) for a list of valid timezones.
author:
- Kevin Breit (@kbreit)
extends_documentation_fragment: meraki
'''
EXAMPLES = r'''
- name: List all networks associated to the YourOrg organization
meraki_network:
auth_key: abc12345
status: query
org_name: YourOrg
delegate_to: localhost
- name: Query network named MyNet in the YourOrg organization
meraki_network:
auth_key: abc12345
    state: query
org_name: YourOrg
net_name: MyNet
delegate_to: localhost
- name: Create network named MyNet in the YourOrg organization
meraki_network:
auth_key: abc12345
    state: present
org_name: YourOrg
net_name: MyNet
type: switch
timezone: America/Chicago
tags: production, chicago
'''
RETURN = r'''
data:
description: Information about the created or manipulated object.
returned: info
type: list
sample:
[
{
"id": "N_12345",
"name": "YourNetwork",
"organizationId": "0987654321",
"tags": " production ",
"timeZone": "America/Chicago",
"type": "switch"
}
]
'''
import os
from ansible.module_utils.basic import AnsibleModule, json, env_fallback
from ansible.module_utils.urls import fetch_url
from ansible.module_utils._text import to_native
from ansible.module_utils.network.meraki.meraki import MerakiModule, meraki_argument_spec
def is_net_valid(meraki, net_name, data):
    """Return True when a network named `net_name` exists in `data`.

    Args:
        meraki: MerakiModule instance; unused here, kept so the helper
            signature matches its sibling helpers.
        net_name: network name to look for.
        data: list of network dicts as returned by the dashboard API
            (each has a 'name' key).
    """
    # any() expresses the linear search more directly than a manual loop.
    return any(n['name'] == net_name for n in data)
def construct_tags(tags):
    """Convert a comma separated tag string to Meraki's space-padded format.

    Example: "prod, chicago" -> " prod chicago " (tags separated and wrapped
    by single spaces, which is how the dashboard stores them). Returns None
    when `tags` is None so callers can omit the payload key entirely.
    """
    if tags is None:
        return None
    # Strip all spaces first (so "a, b" == "a,b"), then space-join and pad.
    return ' ' + ' '.join(tags.replace(' ', '').split(',')) + ' '
def main():
    """Ansible entry point: create, update, delete, or query Meraki networks."""
    # Parameters a user can pass to the module, on top of the shared spec.
    argument_spec = meraki_argument_spec()
    argument_spec.update(
        net_id=dict(type='str'),
        type=dict(type='str', choices=['wireless', 'switch', 'appliance', 'combined'], aliases=['net_type']),
        tags=dict(type='str'),
        timezone=dict(type='str'),
        net_name=dict(type='str', aliases=['name', 'network']),
        state=dict(type='str', choices=['present', 'query', 'absent'], default='present'),
    )

    module = AnsibleModule(argument_spec=argument_spec,
                           supports_check_mode=False,
                           )
    meraki = MerakiModule(module, function='network')

    module.params['follow_redirects'] = 'all'
    payload = None

    # Register the endpoints this module uses in the shared URL catalog.
    create_urls = {'network': '/organizations/{org_id}/networks'}
    update_urls = {'network': '/networks/{net_id}'}
    delete_urls = {'network': '/networks/{net_id}'}
    meraki.url_catalog['create'] = create_urls
    meraki.url_catalog['update'] = update_urls
    meraki.url_catalog['delete'] = delete_urls

    if not meraki.params['org_name'] and not meraki.params['org_id']:
        meraki.fail_json(msg='org_name or org_id parameters are required')
    if meraki.params['state'] != 'query':
        # BUGFIX: the original condition was `not net_name or net_id`, which
        # (due to precedence) failed whenever net_id WAS supplied and passed
        # when neither was. Require at least one of the two identifiers.
        if not meraki.params['net_name'] and not meraki.params['net_id']:
            meraki.fail_json(msg='net_name or net_id is required for present or absent states')
    if meraki.params['net_name'] and meraki.params['net_id']:
        meraki.fail_json(msg='net_name and net_id are mutually exclusive')

    # Defensive only: the module declares supports_check_mode=False above.
    if module.check_mode:
        return meraki.result

    # Build the create/update payload for the dashboard API.
    if meraki.params['state'] == 'present':
        payload = {'name': meraki.params['net_name'],
                   'type': meraki.params['type'],
                   }
        if meraki.params['tags']:
            payload['tags'] = construct_tags(meraki.params['tags'])
        if meraki.params['timezone']:
            payload['timeZone'] = meraki.params['timezone']
        if meraki.params['type'] == 'combined':
            # The dashboard expects the expanded product list, not "combined".
            payload['type'] = 'switch wireless appliance'

    # Fetch the organization's existing networks for lookups below.
    if meraki.params['org_name']:
        nets = meraki.get_nets(org_name=meraki.params['org_name'])
    elif meraki.params['org_id']:
        nets = meraki.get_nets(org_id=meraki.params['org_id'])

    if meraki.params['state'] == 'query':
        if not meraki.params['net_name'] and not meraki.params['net_id']:
            meraki.result['data'] = nets
        elif meraki.params['net_name'] or meraki.params['net_id'] is not None:
            # NOTE(review): the lookup is by name only; a query by net_id
            # alone passes net_name=None through — confirm intended behavior.
            meraki.result['data'] = meraki.get_net(meraki.params['org_name'],
                                                   meraki.params['net_name'],
                                                   nets
                                                   )
    elif meraki.params['state'] == 'present':
        if meraki.params['net_name']:  # FIXME: Idempotency check is ugly here, improve
            if is_net_valid(meraki, meraki.params['net_name'], nets) is False:
                # Network does not exist yet: create it under the org.
                if meraki.params['org_name']:  # FIXME: This can be cleaned up...maybe
                    path = meraki.construct_path('create',
                                                 org_name=meraki.params['org_name']
                                                 )
                elif meraki.params['org_id']:
                    path = meraki.construct_path('create',
                                                 org_id=meraki.params['org_id']
                                                 )
                r = meraki.request(path,
                                   method='POST',
                                   payload=json.dumps(payload)
                                   )
                meraki.result['data'] = r
                meraki.result['changed'] = True
            else:
                # Network exists: only PUT when something actually differs.
                net = meraki.get_net(meraki.params['org_name'], meraki.params['net_name'], data=nets)
                if meraki.is_update_required(net, payload):
                    path = meraki.construct_path('update',
                                                 net_id=meraki.get_net_id(net_name=meraki.params['net_name'], data=nets)
                                                 )
                    r = meraki.request(path,
                                       method='PUT',
                                       payload=json.dumps(payload))
                    meraki.result['data'] = r
                    meraki.result['changed'] = True
    elif meraki.params['state'] == 'absent':
        if is_net_valid(meraki, meraki.params['net_name'], nets) is True:
            net_id = meraki.get_net_id(org_name=meraki.params['org_name'],
                                       net_name=meraki.params['net_name'],
                                       data=nets)
            path = meraki.construct_path('delete', net_id=net_id)
            r = meraki.request(path, method='DELETE')
            # 204 No Content means the dashboard deleted the network.
            if meraki.status == 204:
                meraki.result['changed'] = True

    # Report results back to Ansible.
    meraki.exit_json(**meraki.result)
# Module entry point: executed directly by the Ansible runner.
if __name__ == '__main__':
    main()
| [
"manuprathapan92@gmail.com"
] | manuprathapan92@gmail.com |
006cb2b9f46f18032a261ca6c59b587635b5bb5c | 56209eb057fb7f21b8632ae1138df0d2b6174105 | /rnnt_np.py | a2540d15cbffc22cf14c8f6b788fe1abd334705c | [] | no_license | HawkAaron/RNN-Transducer | 28776079e013d811377b780c2fc65ace9f91e3fe | 786fa75ff65c8ce859183d3c67aa408ff7fdef13 | refs/heads/graves2013 | 2021-06-09T02:42:55.598925 | 2019-04-18T00:24:48 | 2019-04-18T00:24:48 | 128,898,651 | 141 | 36 | null | 2021-06-07T15:39:34 | 2018-04-10T08:20:19 | Python | UTF-8 | Python | false | false | 6,892 | py | import mxnet as mx
import numpy as np
def forward_pass(log_probs, labels, blank):
    """Compute RNN-T forward variables alpha in the log domain.

    Args:
        log_probs: [T, U, V] log-softmaxed joint activations (U = len(labels)+1).
        labels: target label sequence of length U-1.
        blank: index of the blank symbol.

    Returns:
        (alphas, log-likelihood) where alphas[t, u] is the log probability of
        having emitted labels[:u] after consuming t acoustic frames.
    """
    T, U, _ = log_probs.shape
    alphas = np.zeros((T, U))
    # First column: only blank emissions advance time.
    alphas[1:, 0] = np.cumsum(log_probs[:T - 1, 0, blank])
    # First row: only label emissions advance the output position.
    for u in range(1, U):
        alphas[0, u] = alphas[0, u - 1] + log_probs[0, u - 1, labels[u - 1]]
    # Interior: log-sum of "stay" (blank) and "advance" (label) paths.
    for t in range(1, T):
        for u in range(1, U):
            stay = alphas[t - 1, u] + log_probs[t - 1, u, blank]
            advance = alphas[t, u - 1] + log_probs[t, u - 1, labels[u - 1]]
            alphas[t, u] = np.logaddexp(advance, stay)
    # Terminate with the mandatory final blank.
    loglike = alphas[-1, -1] + log_probs[-1, -1, blank]
    return alphas, loglike
def backward_pass(log_probs, labels, blank):
    """Compute RNN-T backward variables beta in the log domain.

    betas[t, u] is the log probability of completing the remaining output
    labels[u:] (plus the final blank) starting from lattice node (t, u).

    Returns:
        (betas, total log-likelihood betas[0, 0]).
    """
    T, U, _ = log_probs.shape
    betas = np.zeros((T, U))
    betas[-1, -1] = log_probs[-1, -1, blank]
    # Last column: only blanks remain to consume time.
    for t in range(T - 2, -1, -1):
        betas[t, -1] = betas[t + 1, -1] + log_probs[t, -1, blank]
    # Last row: only labels remain to emit.
    for u in range(U - 2, -1, -1):
        betas[-1, u] = betas[-1, u + 1] + log_probs[-1, u, labels[u]]
    # Interior: log-sum of "stay" (blank) and "advance" (label) paths.
    for t in range(T - 2, -1, -1):
        for u in range(U - 2, -1, -1):
            stay = betas[t + 1, u] + log_probs[t, u, blank]
            advance = betas[t, u + 1] + log_probs[t, u, labels[u]]
            betas[t, u] = np.logaddexp(advance, stay)
    return betas, betas[0, 0]
def compute_gradient(log_probs, alphas, betas, labels, blank):
    """Gradient of the negative log-likelihood w.r.t. `log_probs`.

    Each emission's gradient is -exp(alpha + beta' + log_prob - log_like);
    transitions not on any valid path keep log-weight -inf and therefore
    get gradient 0.
    """
    T, U, _ = log_probs.shape
    log_like = betas[0, 0]
    # Log occupancy of every (t, u, symbol) transition; -inf = unreachable.
    occupancy = np.full(log_probs.shape, -np.inf)
    # Blank transitions: the terminating blank plus all time-advancing blanks.
    occupancy[-1, -1, blank] = alphas[-1, -1]
    occupancy[:-1, :, blank] = alphas[:-1, :] + betas[1:, :]
    # Label transitions along the output axis.
    for u, label in enumerate(labels):
        occupancy[:, u, label] = alphas[:, u] + betas[:, u + 1]
    return -np.exp(occupancy + log_probs - log_like)
def transduce(log_probs, labels, blank=0):
    """Negative log-likelihood and gradients for one utterance.

    Args:
        log_probs: 3D array with shape [input len, output len + 1, vocab size]
            of log-softmaxed joint activations.
        labels: 1D array with shape [output time steps].
        blank: index of the blank symbol.

    Returns:
        float: the negative log-likelihood.
        3D array: gradients with respect to the unnormalized input activations.
    """
    alphas, forward_ll = forward_pass(log_probs, labels, blank)
    betas, _ = backward_pass(log_probs, labels, blank)
    grads = compute_gradient(log_probs, alphas, betas, labels, blank)
    return -forward_ll, grads
def transduce_batch(log_probs, labels, flen, glen, blank=0):
    """Batched RNN-T loss: run `transduce` per sample on its valid region.

    Args:
        log_probs: [B, T, U+1, V] log-softmaxed joint activations.
        labels: [B, U] padded label sequences.
        flen: per-sample acoustic lengths (<= T).
        glen: per-sample label lengths (<= U).

    Returns:
        (list of per-sample losses, gradient array shaped like `log_probs`).
    """
    grads = np.zeros_like(log_probs)
    costs = []
    # TODO parallel loop
    for b in range(log_probs.shape[0]):
        tValid = int(flen[b])
        uValid = int(glen[b]) + 1
        cost, grad = transduce(log_probs[b, :tValid, :uValid, :],
                               labels[b, :uValid - 1], blank)
        grads[b, :tValid, :uValid, :] = grad
        costs.append(cost)
    return costs, grads
class RNNTransducer(mx.operator.CustomOp):
    """MXNet custom op computing the RNN Transducer loss.

    Design notes:
    1. All forward/backward variables are computed in the log domain for
       numerical stability.
    2. Inputs must already be log-softmaxed; applying softmax here would make
       the gradient calculation prone to overflow.
    The numeric work happens on CPU via `transduce_batch`, so each call
    round-trips the arrays through numpy.
    """
    def __init__(self, blank):
        # Index of the blank symbol in the vocabulary axis.
        self.blank = blank
    def forward(self, is_train, req, in_data, out_data, aux):
        '''
        `log_ytu`: am & pm joint probability, layout 'BTUV'
        `y`: label sequence (blank, y1, ..., yU), layout 'BU'
        `flen`: acoustic model outputs sequence true length <= T
        `glen`: label sequence length <= U
        '''
        log_ytu, y, flen, glen = in_data
        loss, grad = transduce_batch(log_ytu.asnumpy(), y.asnumpy().astype(np.int32), flen.asnumpy(), glen.asnumpy(), self.blank)
        # One-element tuple (note trailing comma): gradients are stashed here
        # so backward() can reuse them without recomputing the lattice.
        self.saved_tensors = mx.nd.array(grad, ctx=log_ytu.context),
        self.assign(out_data[0], req[0], mx.nd.array(loss, ctx=log_ytu.context))
    def backward(self, req, out_grad, in_data, out_data, in_grad, aux):
        # Replay the gradients cached by forward().
        grad, = self.saved_tensors
        self.assign(in_grad[0], req[0], grad)
@mx.operator.register('Transducer')
class RNNTransducerProp(mx.operator.CustomOpProp):
def __init__(self, blank=0):
super(RNNTransducerProp, self).__init__()
self.blank = int(blank)
def list_arguments(self):
return ['log_ytu', 'label', 'flen', 'glen']
def list_outputs(self):
return ['output']
def infer_shape(self, in_shapes):
return in_shapes, ((in_shapes[1][0],),), ()
def create_operator(self, ctx, shapes, dtypes):
return RNNTransducer(self.blank)
class RNNTLoss(mx.gluon.loss.Loss):
def __init__(self, blank=0, weight=None, **kwargs):
batch_axis = 0
self.blank = blank
super(RNNTLoss, self).__init__(weight, batch_axis, **kwargs)
def hybrid_forward(self, F, log_ytu, label, flen, glen):
loss = F.Custom(log_ytu, label, flen, glen, blank=self.blank, op_type='Transducer')
return loss
if __name__ == '__main__':
T = 400; U = 300; B = 8; V = 50
ctx = mx.cpu()
def joint_test():
log_ytu = mx.nd.log_softmax(mx.nd.random_uniform(-10, 10, shape=(B, T, U+1, V), ctx=ctx, dtype=np.float32), axis=3)
y = mx.nd.random_uniform(1, V, shape=(B, U), ctx=ctx).astype('i')
flen = mx.nd.full(B, T, ctx=ctx, dtype='i')
glen = mx.nd.full(B, U, ctx=ctx, dtype='i')
log_ytu.attach_grad()
with mx.autograd.record():
loss = mx.nd.Custom(log_ytu, y, flen, glen, op_type='Transducer')
loss.backward()
print(log_ytu.grad)
print(loss)
def seperate_test():
f = mx.nd.random_uniform(-10, 10, shape=(B, T, V), ctx=ctx, dtype=np.float32)
g = mx.nd.random_uniform(-10, 10, shape=(B, U+1, V), ctx=ctx, dtype=np.float32)
y = mx.nd.random_uniform(1, V, shape=(B, U), ctx=ctx).astype('i')
flen = mx.nd.full(B, T, ctx=ctx, dtype='i')
glen = mx.nd.full(B, U, ctx=ctx, dtype='i')
f.attach_grad()
g.attach_grad()
with mx.autograd.record():
f1 = mx.nd.expand_dims(f, axis=2)
g1 = mx.nd.expand_dims(g, axis=1)
log_ytu = mx.nd.log_softmax(f1 + g1, axis=3)
loss = mx.nd.Custom(log_ytu, y, flen, glen, op_type='Transducer', blank=0)
loss.backward()
print(f.grad)
print(g.grad)
print(loss)
def loss_test():
log_ytu = mx.nd.log_softmax(mx.nd.random_uniform(-10, 10, shape=(B, T, U+1, V), ctx=ctx, dtype=np.float32), axis=3)
y = mx.nd.random_uniform(1, V, shape=(B, U), ctx=ctx).astype('i')
flen = mx.nd.full(B, T, ctx=ctx, dtype='i')
glen = mx.nd.full(B, U, ctx=ctx, dtype='i')
log_ytu.attach_grad()
criterion = RNNTLoss()
with mx.autograd.record():
loss = criterion(log_ytu, y, flen, glen)
loss.backward()
print(log_ytu.grad)
print(loss)
# seperate_test()
loss_test() | [
"mingkunhuang95@gmail.com"
] | mingkunhuang95@gmail.com |
a803d90e6974dbac351285f2b44837f2bf0c9aa2 | 8f733bd95941d11dd46bab9cabb23c002c7e6208 | /test.py | 57c2a5ac616cf4295b6c6f4da3aa974ad18201eb | [] | no_license | tangtang586/demo | 93f56780794b1970965c2b51c0a11c5840f84919 | 31cd8f5c1dc386e0aec5cdee8f968028b50240f0 | refs/heads/master | 2020-04-03T10:06:45.977151 | 2020-03-27T09:16:53 | 2020-03-27T09:16:53 | 62,878,310 | 0 | 0 | null | 2020-03-27T09:17:17 | 2016-07-08T09:47:30 | Python | UTF-8 | Python | false | false | 22 | py | 真的是TMD的神烦
| [
"noreply@github.com"
] | noreply@github.com |
12842639126a0725c5a3f29b76fba07a7c33537c | 2de1d9ed502001d4a8b749c57f6a31a2599a39e5 | /hw1/hw1_3.py | 4bcbf730a70c31f61e27a78e9c2898673d57d396 | [] | no_license | Ekaterina-sol/Python_Basics | 346e6cd5b0492ccb954ded4eea8fd0890be5fef1 | 0c09c6a21402b52958f47718cb596ad3e3f99d56 | refs/heads/main | 2023-02-25T19:20:05.587474 | 2021-01-29T17:41:56 | 2021-01-29T17:42:53 | 323,563,776 | 0 | 0 | null | 2021-01-29T17:44:08 | 2020-12-22T08:17:15 | Python | UTF-8 | Python | false | false | 276 | py | user_number = int(input("Введите число от 1 до 9: "))
tens = user_number*10 + user_number
hundreds = user_number*100 + tens
sum = user_number + tens + hundreds
print("Посчитаем", user_number, "+", tens, "+", hundreds, ". Сумма равна: ", sum) | [
"ekaterina.solovyeva.al@gmail.com"
] | ekaterina.solovyeva.al@gmail.com |
89ef0f289c85d226163ba523bd7b07c15c176acd | f295e88da66d8ffad3fb86e95e16689046c9b712 | /LightsOut.py | 3ccf67ba7110fb4f3c8d65c17610e381aaf7d502 | [] | no_license | detlefgrohs/NeoTrellisM4 | 1567a7f1a6f14921cfdd6eb8d9a14c7f09e8ff4f | bbec5e97bf3949a006156c94e3b4f0d78da58fe5 | refs/heads/master | 2020-04-13T22:16:06.653300 | 2019-01-02T00:07:43 | 2019-01-02T00:07:43 | 163,474,754 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,160 | py | import random
class LightsOutBoard:
lightOffColor = (0, 0, 0)
lighOnColor = (255, 255, 255)
def __init__(self, xSize, ySize, initialValue = False):
self.xSize = xSize
self.ySize = ySize
self.Board = [ [ initialValue for y in range(ySize) ] for x in range(xSize) ]
def displayBoard(self):
print("Board: %s" % ("Won" if self.hasWon() else "!Won"))
for y in range(self.ySize):
line = "%d: " % y
for x in range(self.xSize):
if self.Board[x][y]:
line = line + "X"
else:
line = line + " "
print(line)
def generateRandomBoard(self):
for x in range(self.xSize):
for y in range(self.ySize):
self.Board[x][y] = random.choice([True, False])
def countOfSetCells(self):
count = 0
for x in range(self.xSize):
for y in range(self.ySize):
if self.Board[x][y]:
count = count + 1
return count
def hasWon(self, allSet = False):
count = self.countOfSetCells()
if count == 0 and allSet == False:
return True
if count == (self.xSize * self.ySize) and allSet == True:
return True
return False
def toggleCell(self, x, y, toggleSurrounding = True):
if x in range(self.xSize) and y in range(self.ySize):
self.Board[x][y] = False if self.Board[x][y] else True
if toggleSurrounding:
self.toggleCell(x, y-1, False)
self.toggleCell(x, y+1, False)
self.toggleCell(x-1, y, False)
self.toggleCell(x+1, y, False)
board = LightsOutBoard(8, 4)
board.displayBoard()
board.toggleCell(0,0)
board.displayBoard()
board.toggleCell(1,1)
board.displayBoard()
board.generateRandomBoard()
board.displayBoard()
board.toggleCell(0,0)
board.displayBoard()
board.toggleCell(1,1)
board.displayBoard()
board = LightsOutBoard(8,4,1)
board.displayBoard()
print(board.hasWon(True))
| [
"detlef.grohs@gmail.com"
] | detlef.grohs@gmail.com |
0e2e19efd181694bd82a31e6ea9bd4fd1ccb7faf | 248d20fa6c37afc1501b47398451bf15dc8f0165 | /ryosuke/chapter04/knock38.py | 5e72a2bf383a6cd35a7e272d4ed196b6769cd017 | [] | no_license | tmu-nlp/100knock2016 | 20e9efd4698f59918aa850ba40163906f13dcb7f | d5f7a76286cb95bb374ff98bc0c9db3c796d113d | refs/heads/master | 2021-01-21T02:20:57.121371 | 2016-08-03T01:49:38 | 2016-08-03T01:49:38 | 55,942,482 | 4 | 2 | null | null | null | null | UTF-8 | Python | false | false | 295 | py | from knock30 import get_sentences
from collections import Counter
import matplotlib.pyplot as plt
vocab = Counter()
for sentence in get_sentences():
vocab += Counter(m['surface'] for m in sentence)
names, freqs = zip(*vocab.most_common())
plt.hist(freqs, bins=len(set(freqs)))
plt.show()
| [
"tmcit.miyazaki@gmail.com"
] | tmcit.miyazaki@gmail.com |
94c7b2eb88cd995d7c0f4c091e29db9530a2b6b5 | 151977c8d1f5d92307ca7341ec6c5d0dd5ec1d55 | /pra/p4/server.py | cb939d5c90d40291714ad76eb1278ef4042e7978 | [] | no_license | wenrenzhejie/qq | d748f89733968168222fd286aaca2974f83efb41 | 1fa9a792a0001c43097599247c4929c833705dd9 | refs/heads/master | 2020-05-27T13:26:52.143088 | 2019-05-29T02:37:20 | 2019-05-29T02:37:20 | 188,639,204 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,440 | py | import socket
import threading
import queue
import json
ip=''
port=10086
users=[]#存放所有用户
que=queue.Queue()#存放消息
lock=threading.Lock()#防止多线程并发放置消息错误
def receiveMessage(conn,addr):
print("一个客户端连接进来",conn,addr)
users.append(conn)
try:
while True:
data = conn.recv(1024)
data = data.decode()
deposit(addr, data)
except Exception as e:
deleteUsers(conn)
conn.close()
def sendMessage():
while True:
if not que.empty():
data = que.get()
from1 = data[0]
data = list(data)
data = json.dumps(data)
print(type(data))
for c in users:
c.send(data.encode())
def deposit(addr, data):
try:
lock.acquire()
que.put((addr, data))
finally:
lock.release()
def deleteUsers(conn):
a = 0
for i in users:
if i == conn:
users.pop(a)
print("剩余在线用户:",users)
return
a += 1
def main():
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.bind((ip, port))
s.listen()
send = threading.Thread(target=sendMessage)
send.start()
while True:
conn, addr = s.accept()
r = threading.Thread(target=receiveMessage, args=(conn, addr))
r.start()
main()
| [
"xin@qq.com"
] | xin@qq.com |
319111e049d54da135e6530ac56f21f03c5dffd6 | 9a0731e0f9e9ce47a3c0124cab8a4187c9f0cbfe | /program/script/__init__.py | b15b59965ea5ef0f101dea017bef60f4b8effeb6 | [] | no_license | hinohi/LSBattle | e88c393304e521b094eef5be6213c2092dea3bff | fbde754c5aee3250e42ac3f9ab5a5e4a4379ce92 | refs/heads/master | 2021-11-27T04:38:53.691383 | 2021-10-09T06:09:59 | 2021-10-09T06:09:59 | 36,002,188 | 1 | 2 | null | null | null | null | UTF-8 | Python | false | false | 2,723 | py | #coding: utf8
import os as _os
import glob as _glob
import sys as _sys
# import traceback as _traceback
# import logging as _logging
from program import const
# _logging.basicConfig(filename=_os.path.join(const.path, 'script.log'), filemode='w',
# level=_logging.WARNING,
# format='%(levelname)s:%(message)s')
from .common import Block
from .parser import Parser
from .game import Game
from .ui import UI
from .enemy import Enemy
from .player import Player
from .world import World
def make_default_script():
out_path = _os.path.dirname(_sys.argv[0])
f = open(_os.path.join(out_path, "default.script"), "w")
indent = " "
def pri(block, depth=0):
f.write(indent*depth + block.__class__.__name__ + " {\n")
depth += 1
l = [a for a in dir(block) if not a.startswith("_")]
l.sort(key=lambda a:isinstance(block[a], Block)*1 + isinstance(block[a], list)*1)
width = 40
for name in l:
if "_"+name+"_obj" in block:
f.write(indent*depth + name + " {\n")
for nex in block[name]:
pri(nex, depth+1)
f.write(indent*depth + "}\n")
elif isinstance(block[name], Block):
pri(block[name], depth)
else:
s = indent*depth + name + " = " + str(block[name]).replace("\n", "\\n")
n = len(s)
if "_"+name+"_func" in block:
s += " "*(width-n if n < width else 0) + " # " + block["_"+name+"_func"].__name__
else:
s += " "*(width-n if n < width else 0) + " # eval"
f.write(s + "\n")
depth -= 1
f.write(indent*depth + "}\n")
for block in _kws.values():
pri(block)
# print "make default script: OK"
game = Game()
ui = UI()
player = Player()
enemy = Enemy()
world = World()
_kws = {"Game":game,
"UI":ui,
"Player":player,
"Enemy":enemy,
"World":world}
_parser = Parser()
for _name in _glob.iglob(_os.path.join(const.SCRIPT_DIR, "*.script")):
# _logging.info("parse '%s' file", _name)
try:
_parser.parse(open(_name), **_kws)
except:pass
# _s = _traceback.format_exc()
# _logging.error(_s)
for _name in _glob.iglob(_os.path.join(_os.path.dirname(_sys.argv[0]), "*.script")):
# _logging.info("parse '%s' file", _name)
try:
_parser.parse(open(_name), **_kws)
except:pass
# _s = _traceback.format_exc()
# _logging.error(_s)
for _key in _kws:
if hasattr(_kws[_key], "_check"):
_kws[_key]._check()
if game.output_script:
make_default_script()
| [
"42.daiju@gmail.com"
] | 42.daiju@gmail.com |
55e1e7a588baa73d05b3389603a8340cd56c645d | 0b16c5abb90e90f84faa646416147ce791a3c32c | /users/views.py | f7d254faa334e7956240ec56692004ac3131d1fd | [] | no_license | meicx/test | 36760deeeb2cde3d6889f9b5f48d582a8a667ea8 | c540349a31a1e87feef9837424b8ebdecaf38354 | refs/heads/master | 2022-12-10T12:12:51.112368 | 2018-12-19T04:03:32 | 2018-12-19T04:03:32 | 161,119,161 | 0 | 0 | null | 2022-12-08T02:27:59 | 2018-12-10T04:58:37 | CSS | UTF-8 | Python | false | false | 7,749 | py | # -*- coding:utf-8 -*-
from django.shortcuts import render, render_to_response
from django.contrib.auth import authenticate, login, logout
from django.contrib.auth.backends import ModelBackend
from .models import UserProfile, EmailRevifyRecord
from django.db.models import Q
from django.views.generic.base import View
from .forms import LoginForm, RegisterForm, ModifyPwdForm, UserInfoForm
from django.contrib.auth.hashers import make_password
from utils.send_mail import send_register_email
from django.http import HttpResponse, HttpResponseRedirect
import json
title = "在线考试系统"
phoneNumber = "15068895421"
def user_index(request):
"""
用户首页
:param request:
:return:
"""
leftbar = False
return render_to_response('index.html', locals())
class LoginView(View):
"""用户登录"""
def get(self, request):
login_form = LoginForm()
return render(request, "login.html", {"login_form": login_form, "title": title, "phoneNumber": phoneNumber})
def post(self, request):
login_form = LoginForm(request.POST)
if login_form.is_valid():
user_name = request.POST.get("username", "")
user_password = request.POST.get("password", "")
user = authenticate(username=user_name, password=user_password)
if user is not None:
if user.is_active:
login(request, user)
return HttpResponseRedirect('/')
else:
return render(request, "login.html", {"login_form": login_form, "msg": "用户名或密码错误"})
else:
return render(request, "login.html", {"login_form": login_form, "msg": "用户名或密码错误"})
else:
return render(request, "login.html", {"login_form": login_form})
class LogoutView(View):
"""用户注销"""
def get(self, request):
logout(request)
login_form = LoginForm()
return render(request, "login.html", {"login_form": login_form, "title": title, "phoneNumber": phoneNumber})
class RegisterView(View):
"""用户注册"""
def get(self, request):
register_form = RegisterForm()
return render(request, "register.html", {"register_form": register_form, "title": title})
def post(self, request):
register_form = RegisterForm(request.POST)
if register_form.is_valid():
# user_email = request.POST.get("email", "")
user_name = request.POST.get("username", "")
role = request.POST.get("role", 0)
if UserProfile.objects.filter(username=user_name):
return render(request, "register.html",
{"title": title, "register_form": register_form, "msg": u"该学号已经被注册"});
user_password = request.POST.get("password", "")
user_profile = UserProfile()
user_profile.username = user_name
user_profile.user_class = ''
# user_profile.email = user_email
user_profile.is_active = True
user_profile.role = role
user_profile.password = make_password(user_password)
# print("name:",user_name,",email",user_email,",role:",role,",password:",user_password,",password",make_password(user_password))
user_profile.save()
# send_register_email(user_email, "register")
return render(request, "register.html", {"title": title, "msg": u"注册成功,请登录"})
else:
return render(request, "register.html", {"register_form": register_form, "title": title})
class UserCenterView(View):
"""用户中心"""
def get(self, request):
user = request.user
user_form = UserInfoForm()
user_form.nick_name = user.nick_name
user_form.birthday = user.birthday
user_form.gender = user.gender
user_form.mobile = user.mobile
return render(request, "user-center.html",
{"user_form": user_form, "title": title, "sysuser": user, "phoneNumber": phoneNumber, "birthday": str(user.birthday)})
def post(self, request):
username = request.user
user = UserProfile.objects.get(username=username)
print request.POST
if user.role == 0:
user.nick_name = request.POST.get("nick_name")
else:
user.nick_name = request.POST.get("nick_name")
user.user_class = request.POST.get("user_class")
user.save()
return render(request, "alert.html", {"score": 0, "title": "修改成功", "msg": "修改个人信息成功", "to": "/user_center/"})
# 调试完成
class CustomBackend(ModelBackend):
"""
#使用自定义邮箱登陆
到settings中配置
AUTHENTICATION_BACKENDS =(
'users.views.CustomBackend',#元组中只有一个元素要加逗号!!!!
)
"""
def authenticate(self, username=None, password=None, **kwargs):
try:
user = UserProfile.objects.get(Q(username=username)|Q(email=username))
if user.check_password(password):
return user
except Exception as e:
return None
# 调试完成
class ActiveUserView(View):
def get(self, request, active_code):
all_code = EmailRevifyRecord.objects.filter(code=active_code)
if all_code:
for record in all_code:
if record.is_alive:
email = record.email
user = UserProfile.objects.get(email=email)
username = user.username
user.is_active = True #激活用户
record.is_alive = False #设置当前验证码失效
user.save()
return render(request, "active_succcess.html", {"title": title, "username": username})
return render(request, "active_fail.html", {"title": title})
else:
return render(request, "active_fail.html", {"title": title})
class ModifyPwd(View):
def post(self,request):
modify_form = ModifyPwdForm(request.POST)
if modify_form.is_valid():
pwd1 = modify_form.cleaned_data.get('password1', '')
pwd2 = modify_form.cleaned_data.get('password2', '')
if pwd1 != pwd2:
return HttpResponse(json.dumps({'status':'failed', 'msg':u'两次密码不一致'}))
else:
request.user.password = make_password(pwd1)
request.user.save()
return HttpResponse(json.dumps({'status':'success'}))
else:
return HttpResponse(json.dumps(modify_form.errors))
# 404调试完成
def page_not_found(request):
# 全局404处理函数
from django.shortcuts import render_to_response
response = render_to_response('404.html', {"title": title})
response.status_code = 404
return response
# 500调试完成
def page_error(request):
# 全局500处理函数
from django.shortcuts import render_to_response
response=render_to_response('500.html', {"title":title})
response.status_code = 500
return response
def createstudent(request,num):
num=int(num)
for i in range(0,num):
user_profile = UserProfile()
username=1000000600+i
print(username)
user_profile.username = username
user_profile.email = username.__str__()+"@qq.com"
user_profile.is_active = True
user_profile.role = '1' #学生1,老师0
user_profile.password = make_password("123456")
user_profile.save()
send_register_email(user_profile.email, "register")
return HttpResponse("创建成功") | [
"meichenxi@baidu.com"
] | meichenxi@baidu.com |
9d4ac7b14c1eeaa1ac8a0bcafd7f4750f587e06a | d04790f13e1c465ea108536780da58416b1754b1 | /scripts/switch_panel.py | d48b9aa6a8ea4f93e2ac8f57292f4cececddf2ae | [] | no_license | lordofbuckwheat/devenv | 8d6c2157bca5c9b9e22cf80cdb1edb0cc82bbacf | 494134ce65dffa948f50b651739be3c236be6ee1 | refs/heads/master | 2021-07-10T03:19:50.854771 | 2021-03-07T07:40:25 | 2021-03-07T07:40:25 | 227,970,686 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 823 | py | import sys
admin = 8 if len(sys.argv) < 2 else sys.argv[1]
with open('/root/app/supertvbit/public/panel/src/panel_config.json', 'w') as f:
f.write(f'''{{
"SITE_URL": "http://public.tvbit.local:10080/",
"PUBLIC_HOST": "http://public.tvbit.local:10080/",
"API_URL": "http://go.tvbit.local:182{admin}5/",
"WEBSOCKET_URL": "ws://go.tvbit.local:182{admin}5/ws",
"WEBSOCKET_ADMIN_URL": "ws://go.tvbit.local:182{admin}5/ws-admin",
"SECURE_PUBLIC_HOST": "https://public.tvbit.local:10443/",
"SECURE_API_URL": "https://go.tvbit.local:182{admin}6/",
"SECURE_WEBSOCKET_URL": "wss://go.tvbit.local:182{admin}6/ws",
"SECURE_WEBSOCKET_ADMIN_URL": "wss://go.tvbit.local:182{admin}6/ws-admin",
"DOCS_URL": "docs",
"PANEL_FEATURES": []
}}''')
| [
"lord.of.buckwheat@gmail.com"
] | lord.of.buckwheat@gmail.com |
ecd47b4026118e945daed5202c65fd1cf9e782d7 | 1f84effff99c38c7e2cc37d8651f4789ff508f85 | /btbai2.6.py | 1e3dacbd61bce039825dd71b9dd97edb890610af | [] | no_license | CaoVanHieu/hoanghieu1999 | 6059f20e39bed7eb9b1aca275bef005f31661d03 | 36cbc769fea8ba9b27f5606b30bf79c3edb20eae | refs/heads/master | 2020-04-28T05:57:39.937037 | 2020-02-05T15:14:12 | 2020-02-05T15:14:12 | 175,038,401 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 111 | py | j=[]
for i in range (2000,3201):
if (i%7==0) and (i%5!=0):
j.append(str(i))
print (','.join(j)) | [
"noreply@github.com"
] | noreply@github.com |
d55bae828df6d68ea9093e2c30465db7626bc148 | 304a64ab15dc445de42cfcb57f955ad6699291c1 | /FlaskBlogly3/app.py | 54e0a504782c99afcefc49ec89c9f3288d1a9f50 | [] | no_license | hahabib51/blogly3 | 16388834cb9d72eede6166ba5110c714fecf01b2 | ef7132e45e4011be9d046093a481976de35c059a | refs/heads/main | 2023-04-21T16:40:47.754707 | 2021-05-11T01:32:05 | 2021-05-11T01:32:05 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,074 | py | from flask import Flask, request, redirect, render_template, flash
from flask_debugtoolbar import DebugToolbarExtension
from models import db, connect_db, User, Post, Tag
app = Flask(__name__)
app.config['SQLALCHEMY_DATABASE_URI'] = "postgres:///blogly"
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
app.config['SECRET_KEY'] = 'tellnoone'
toolbar = DebugToolbarExtension(app)
connect_db(app)
db.create_all()
@app.route('/')
def root():
"""Show recent list of posts, most-recent first."""
posts = Post.query.order_by(Post.created_at.desc()).limit(5).all()
return render_template("posts/homepage.html", posts=posts)
@app.errorhandler(404)
def page_not_found(e):
"""Show 404 NOT FOUND page."""
return render_template('404.html'), 404
# User route
@app.route('/users')
def users_index():
"""Show a page with info on all users"""
users = User.query.order_by(User.last_name, User.first_name).all()
return render_template('users/index.html', users=users)
@app.route('/users/new', methods=["GET"])
def users_new_form():
"""Show a form to create a new user"""
return render_template('users/new.html')
@app.route("/users/new", methods=["POST"])
def users_new():
"""Handle form submission for creating a new user"""
new_user = User(
first_name=request.form['first_name'],
last_name=request.form['last_name'],
image_url=request.form['image_url'] or None)
db.session.add(new_user)
db.session.commit()
flash(f"User {new_user.full_name} added.")
return redirect("/users")
@app.route('/users/<int:user_id>')
def users_show(user_id):
"""Show a page with info on a specific user"""
user = User.query.get_or_404(user_id)
return render_template('users/show.html', user=user)
@app.route('/users/<int:user_id>/edit')
def users_edit(user_id):
"""Show a form to edit an existing user"""
user = User.query.get_or_404(user_id)
return render_template('users/edit.html', user=user)
@app.route('/users/<int:user_id>/edit', methods=["POST"])
def users_update(user_id):
"""Handle form submission for updating an existing user"""
user = User.query.get_or_404(user_id)
user.first_name = request.form['first_name']
user.last_name = request.form['last_name']
user.image_url = request.form['image_url']
db.session.add(user)
db.session.commit()
flash(f"User {user.full_name} edited.")
return redirect("/users")
@app.route('/users/<int:user_id>/delete', methods=["POST"])
def users_destroy(user_id):
"""Handle form submission for deleting an existing user"""
user = User.query.get_or_404(user_id)
db.session.delete(user)
db.session.commit()
flash(f"User {user.full_name} deleted.")
return redirect("/users")
# Posts Route
@app.route('/users/<int:user_id>/posts/new')
def posts_new_form(user_id):
"""Show a form to create a new post for a specific user"""
user = User.query.get_or_404(user_id)
tags = Tag.query.all()
return render_template('posts/new.html', user=user, tags=tags)
@app.route('/users/<int:user_id>/posts/new', methods=["POST"])
def posts_new(user_id):
"""Handle form submission for creating a new post for a specific user"""
user = User.query.get_or_404(user_id)
tag_ids = [int(num) for num in request.form.getlist("tags")]
tags = Tag.query.filter(Tag.id.in_(tag_ids)).all()
new_post = Post(title=request.form['title'],
content=request.form['content'],
user=user,
tags=tags)
db.session.add(new_post)
db.session.commit()
flash(f"Post '{new_post.title}' added.")
return redirect(f"/users/{user_id}")
@app.route('/posts/<int:post_id>')
def posts_show(post_id):
"""Show a page with info on a specific post"""
post = Post.query.get_or_404(post_id)
return render_template('posts/show.html', post=post)
@app.route('/posts/<int:post_id>/edit')
def posts_edit(post_id):
"""Show a form to edit an existing post"""
post = Post.query.get_or_404(post_id)
tags = Tag.query.all()
return render_template('posts/edit.html', post=post, tags=tags)
@app.route('/posts/<int:post_id>/edit', methods=["POST"])
def posts_update(post_id):
"""Handle form submission for updating an existing post"""
post = Post.query.get_or_404(post_id)
post.title = request.form['title']
post.content = request.form['content']
tag_ids = [int(num) for num in request.form.getlist("tags")]
post.tags = Tag.query.filter(Tag.id.in_(tag_ids)).all()
db.session.add(post)
db.session.commit()
flash(f"Post '{post.title}' edited.")
return redirect(f"/users/{post.user_id}")
@app.route('/posts/<int:post_id>/delete', methods=["POST"])
def posts_destroy(post_id):
"""Handle form submission for deleting an existing post"""
post = Post.query.get_or_404(post_id)
db.session.delete(post)
db.session.commit()
flash(f"Post '{post.title} deleted.")
return redirect(f"/users/{post.user_id}")
# Tags Route
@app.route('/tags')
def tags_index():
"""Show a page with info on all tags"""
tags = Tag.query.all()
return render_template('tags/index.html', tags=tags)
@app.route('/tags/new')
def tags_new_form():
"""Show a form to create a new tag"""
posts = Post.query.all()
return render_template('tags/new.html', posts=posts)
@app.route("/tags/new", methods=["POST"])
def tags_new():
"""Handle form submission for creating a new tag"""
post_ids = [int(num) for num in request.form.getlist("posts")]
posts = Post.query.filter(Post.id.in_(post_ids)).all()
new_tag = Tag(name=request.form['name'], posts=posts)
db.session.add(new_tag)
db.session.commit()
flash(f"Tag '{new_tag.name}' added.")
return redirect("/tags")
@app.route('/tags/<int:tag_id>')
def tags_show(tag_id):
"""Show a page with info on a specific tag"""
tag = Tag.query.get_or_404(tag_id)
return render_template('tags/show.html', tag=tag)
@app.route('/tags/<int:tag_id>/edit')
def tags_edit_form(tag_id):
"""Show a form to edit an existing tag"""
tag = Tag.query.get_or_404(tag_id)
posts = Post.query.all()
return render_template('tags/edit.html', tag=tag, posts=posts)
@app.route('/tags/<int:tag_id>/edit', methods=["POST"])
def tags_edit(tag_id):
"""Handle form submission for updating an existing tag"""
tag = Tag.query.get_or_404(tag_id)
tag.name = request.form['name']
post_ids = [int(num) for num in request.form.getlist("posts")]
tag.posts = Post.query.filter(Post.id.in_(post_ids)).all()
db.session.add(tag)
db.session.commit()
flash(f"Tag '{tag.name}' edited.")
return redirect("/tags")
@app.route('/tags/<int:tag_id>/delete', methods=["POST"])
def tags_destroy(tag_id):
"""Handle form submission for deleting an existing tag"""
tag = Tag.query.get_or_404(tag_id)
db.session.delete(tag)
db.session.commit()
flash(f"Tag '{tag.name}' deleted.")
return redirect("/tags")
| [
"79046805+hahabib51@users.noreply.github.com"
] | 79046805+hahabib51@users.noreply.github.com |
66f614fc294e9d8c94babbbce4963368e0136402 | 35b460a5e72e3cb40681861c38dc6d5df1ae9b92 | /CodeFights/Arcade/Intro/islandOfKnowledge/minesweeper.py | 7a778a54e8fe37740efbc970e191ddc6ef1ca2ae | [] | no_license | robgoyal/CodingChallenges | 9c5f3457a213cf54193a78058f74fcf085ef25bc | 0aa99d1aa7b566a754471501945de26644558d7c | refs/heads/master | 2021-06-23T09:09:17.085873 | 2019-03-04T04:04:59 | 2019-03-04T04:04:59 | 94,391,412 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,653 | py | # Name: minesweeper.py
# Author: Robin Goyal
# Last-Modified: July 12, 2017
# Purpose: Give an array of true and false values with true indicating a mine
# return an array of the same length indicating number of surrounding
# mines at each position
# Note: Could've optimized the solution but a pure brute force implementation
def minesweeper(matrix):
grid = []
for row in range(len(matrix)):
gridRow = []
for col in range(len(matrix[0])):
count = 0
# Top Row
if (row == 0):
if (col == 0): # Top-Left corner
count = [matrix[row][col+1], matrix[row+1][col], matrix[row+1][col+1]].count(True)
elif (col == len(matrix[0]) - 1): # Top-Right corner
count = [matrix[row][col-1], matrix[row+1][col], matrix[row+1][col-1]].count(True)
else: # Middle Columns in top Row
count = [matrix[row][col-1], matrix[row][col+1]].count(True) \
+ matrix[row+1][col-1:col+2].count(True)
# Bottom Row
elif (row == len(matrix) -1):
if (col == 0): # Bottom-Left corner
count = [matrix[row][col+1], matrix[row-1][col], matrix[row-1][col+1]].count(True)
elif (col == len(matrix[0]) - 1): # Bottom-Right corner
count = [matrix[row][col-1], matrix[row-1][col], matrix[row-1][col-1]].count(True)
else: # Middle Columns in bottom Row
count = [matrix[row][col-1], matrix[row][col+1]].count(True) \
+ matrix[row-1][col-1:col+2].count(True)
# Middle Rows
else:
if (col == 0): # Left most column
count = matrix[row-1][col:col+2].count(True) + [matrix[row][col+1]].count(True) \
+ matrix[row+1][col:col+2].count(True)
elif (col == len(matrix[0]) -1): # Right most column
count = matrix[row-1][col-1:col+1].count(True) + [matrix[row][col-1]].count(True) \
+ matrix[row+1][col-1:col+1].count(True)
else: # Middle columns
count = matrix[row-1][col-1:col+2].count(True) + matrix[row+1][col-1:col+2].count(True) + \
[matrix[row][col-1], matrix[row][col+1]].count(True)
gridRow.append(count)
grid.append(tempRow)
return grid | [
"goyal.rob@gmail.com"
] | goyal.rob@gmail.com |
9c701a415e29b4a82c0017f3810bdb84dfa1e3aa | 2968989d94468d4d0a15c748690ec72664f61e80 | /utils/file_locker.py | 346a12ca3760bfa39a2c6cdd40afabbd69a34bd2 | [
"Apache-2.0"
] | permissive | vtppplusgo/fastsync | 40547706bc13f2685b8f688fde9ee4afe73582d6 | 625045d0bbc2a8d86a07d72078eb30423f4d970d | refs/heads/master | 2020-05-18T23:36:38.489934 | 2017-11-07T03:46:25 | 2017-11-07T03:46:25 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,583 | py | #!/usr/bin/python2.7
#-*- coding: utf-8 -*-
# file_locker.py
#
# Author: zhangliang@shinezone.com
#
# Refer:
# https://github.com/HiSPARC/station-software/blob/master/user/pythonshared/plock.py
#
# init created: 2016-05-25
# last updated: 2016-07-13
#
#######################################################################
import portalocker as plock
def close_file_nothrow(fd):
if not fd is None:
try:
fd.close()
except:
pass
def remove_file_nothrow(fname):
import os
if file_exists(fname):
try:
os.remove(fname)
except OSError:
pass
except:
pass
class FileLocker(object):
def __init__(self, filename):
self.locked = False
self.stream_lock = None
try:
# Prevent multiple extensions on the lock file (Only handles the normal "*.log" case.)
if filename.endswith(".log"):
self.lock_file = filename[:-4] + ".lock"
else:
self.lock_file = filename + ".lock"
self.stream_lock = open(self.lock_file, "w")
self.filename = filename
except:
stream_lock = self.stream_lock
self.stream_lock = None
close_file_nothrow(stream_lock)
finally:
if self.stream_lock:
self.locked = False
pass
def __del__(self):
self.cleanup()
pass
def cleanup(self):
self.unlock()
stream_lock = self.stream_lock
self.stream_lock = None
if stream_lock:
close_file_nothrow(stream_lock)
remove_file_nothrow(self.lock_file)
pass
def lock(self, nonblock = True):
if self.stream_lock:
try:
if not self.locked:
if nonblock:
plock.lock(self.stream_lock, plock.LOCK_EX | plock.LOCK_NB)
else:
plock.lock(self.stream_lock, plock.LOCK_EX)
self.locked = True
return self.locked
except Exception as ex:
# failed to lock file
return False
else:
return False
def unlock(self):
if self.stream_lock:
try:
if self.locked:
plock.unlock(self.stream_lock)
self.locked = False
except:
pass
#######################################################################
| [
"350137278@qq.com"
] | 350137278@qq.com |
d4769b30907c3a34945bf0e943d4d457b9e32c58 | 0f5568c11053123c45570fc1e519d5a43d72a73b | /scripts/gui6w_dronepublishers.py | cdd401dd1bff04abe003c20cb85efc3063094790 | [] | no_license | AnandaNN/LT_tracker_w_gui | 71d77244553d4c7ae050811faf65fe83e9e18df1 | d4b9b3b690145b1fc01c76daafe2f7961d5a7678 | refs/heads/master | 2023-01-07T22:06:49.879972 | 2020-10-26T09:33:49 | 2020-10-26T09:33:49 | 291,665,462 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,799 | py | #!/usr/bin/env python2
from Tkinter import Tk, Label, Button, Frame
import tkMessageBox
import rospy
from PIL import Image
from PIL import ImageTk
import cv2
from geometry_msgs.msg import Pose, Twist
from sensor_msgs.msg import Image as SensorImage
from std_msgs.msg import Bool, Empty, UInt8
import base64
from cv_bridge import CvBridge, CvBridgeError
import numpy
from PIL import ImageFile
import os
import numpy as np
"""
Code for the gui, which enables the user to view the image from the drone, control the drone,
and allows the user to select a target point on the live image.
"""
pos_x = -1
pos_y = -1
def save_pos(event):
## updating the position of the target point from position of mouse click on image
global pos_x
global pos_y
pos_x = event.x
pos_y = event.y
def display_message_box(message):
return tkMessageBox.askyesno("Information", message)
class DroneGUI:
def __init__(self, master):
self.master = master
master.title("Drone GUI")
## Initialising framework of GUI
self.frame1 = Frame(master, height = 480, width = 200, bd = 2, relief = "sunken")
self.frame1.grid(row = 3, column = 1, rowspan = 15)
self.frame1.grid_propagate(False)
explanation_label = Label(master, justify = 'left', text = "How you control the drone \nW - move forwards \nS - move down \nA - move let \nD - move right \nI - move up \nK - move down \nJ - rotate counterclockwise \nL - rotate clockwise \nENTER - Takeoff/Land")
explanation_label.grid(row = 4, column = 1, rowspan = 15)
self.abort_button = Button(master, text = "ABORT", command = self.abort_auto_flight, bg = "grey", fg = "lightgrey", state = "disabled")
self.abort_button.grid(row = 17, column = 1)
frame2 = Frame(master, height = 480, width = 200, bd = 2, relief = "sunken")
frame2.grid(row = 3, column = 3, rowspan = 15)
frame2.grid_propagate(False)
self.select_target_button = Button(master, text="Select target", command=self.select_target)
self.select_target_button.grid(row = 4, column = 3)
self.distance_label = Label(master, text = "Distance: NA")
self.distance_label.grid(row = 6, column = 3)
self.battery_label = Label(master, text = "Battery level: NA")
self.battery_label.grid(row = 8, column = 3)
header_label = Label(master, text="Choosing target for drone")
header_label.grid(row = 1, column = 8)
self.close_button = Button(master, text="Close", command=master.quit)
self.close_button.grid(row = 1, column = 20)
self.image_label = Label(text = "", height = 480, width = 640)
self.image_label.grid(row = 3, column = 6, columnspan = 15, rowspan = 15)
## Initialising variables for selecting target
self.imgClick = False
self.bridge = CvBridge()
self.enable_video_stream = None
self.prev_img = None
self.select_target_bool = False
self.circle_center = [None, None]
## Initialising variables for autonoumous flight
self.flying = False
self.auto_flying = False
self.abort_bool = False
## Enabling keyboard control of drone
self.master.bind("<Key>", self.move_drone)
self.master.bind("<KeyRelease>", self.key_release)
self.master.bind("<Return>", self.return_key_pressed)
## Initialising of publishers and subscribers
self.distance_sub = rospy.Subscriber('distance', Pose, self.update_distance_label)
self.pid_enable_sub = rospy.Subscriber('pid_enable', Bool, self.pid_enabled)
self.battery_sub = rospy.Subscriber('bebop/CommonStates/BatteryLevelChanged', UInt8, self.update_battery_label)
self.target_sub = rospy.Subscriber("target", Pose, self.draw_target)
self.image_sub = rospy.Subscriber('/webcam/image_raw', SensorImage, self.image_subscriber_callback)
self.gui_target_pub = rospy.Publisher('gui_target', Pose , queue_size=10)
self.abort_pub = rospy.Publisher('abort', Bool, queue_size=10)
self.drone_vel_pub = rospy.Publisher('bebop/cmd_vel', Twist,queue_size=10)
self.takeoff_pub = rospy.Publisher('bebop/takeoff', Empty,queue_size=10)
self.land_pub = rospy.Publisher('bebop/land', Empty,queue_size=10)
rospy.init_node('gui', anonymous=True)
self.rate = rospy.Rate(10)
rospy.loginfo("GUI initialised")
self.abort_pub.publish(self.abort_bool)
def image_subscriber_callback(self, image):
cv_image = CvBridge().imgmsg_to_cv2(image, "rgb8")
# cv_image = cv2.cvtColor(cv_image, cv2.COLOR_BGR2RGB)
if self.circle_center[0] != None:
cv2.circle(cv_image, (int(self.circle_center[0]), int(self.circle_center[1])), 3, (0, 255, 0), 10)
self.img = Image.fromarray(cv_image)
# print("got image")
def draw_target(self,data):
self.circle_center = [data.position.x, data.position.y]
def update_image(self):
## Updating the image from the 'drone_cam_sub.py', if it's new. The update is automatic with a frequency 20 Hz (50 ms)
frequency = 20
try:
if self.img != self.prev_img:
self.imgtk = ImageTk.PhotoImage(self.img)
self.image_label.pic = self.imgtk
self.image_label.configure(image=self.imgtk)
self.prev_img = self.img
except:
print("Image not updated")
self.enable_video_stream = self.image_label.after(int(1000/frequency), self.update_image)
def select_target(self):
## Allows the user to select target, and interrupt selection if wanted
if not self.select_target_bool:
rospy.loginfo( "User is selecting target")
self.select_target_bool = True
self.imgClick = True
self.select_target_button.configure(text = "Cancel")
self.image_label.bind("<Button-1>", self.target_selected)
self.image_label.configure(cursor="dotbox")
else:
rospy.loginfo("User cancelled selection")
self.select_target_bool = False
self.imgClick = False
self.select_target_button.configure(text="Select target")
self.image_label.unbind("<Button-1>")
self.image_label.configure(cursor="")
def target_selected(self, event):
## Once target has been selected, variables and functions need to be reset.
## By un-commenting line 158 control will be disabled, once autonomous flight is enabled
## (For now it is possible to interfere with the drone by using the keyboard)
self.select_target_bool = False
rospy.loginfo("User selected target")
self.imgClick = False
save_pos(event)
self.publish_pos()
self.update_image()
self.select_target_button.configure(text="Select target")
self.image_label.unbind("<Button-1>")
self.image_label.configure(cursor="")
#self.auto_flying = True
def move_drone(self, event):
## if auto_flying = True no other option than pressing 'g' is possible
if self.flying and not self.auto_flying:
cmd = Twist()
factor = 0.5
rospy.loginfo( "User pressed " + repr(event.char))
if event.char == 'a':
cmd.linear.y = factor
elif event.char == 'd':
cmd.linear.y = -factor
elif event.char == 'w':
cmd.linear.x = factor
elif event.char == 's':
cmd.linear.x = -factor
elif event.char == 'j':
cmd.angular.z = factor
elif event.char == 'l':
cmd.angular.z = -factor
elif event.char == 'i':
cmd.linear.z = factor
elif event.char == 'k':
cmd.linear.z = -factor
elif event.char == 'g':
if not self.abort_bool:
self.abort_auto_flight()
cmd.linear.x= - factor
self.drone_vel_pub.publish(cmd)
elif self.flying:
if event.char == 'g':
if not self.abort_bool:
self.abort_auto_flight()
cmd.linear.x= - factor
self.drone_vel_pub.publish(cmd)
def key_release(self,event):
cmd = Twist()
cmd.linear.x = 0
cmd.linear.y = 0
cmd.linear.z = 0
cmd.angular.z = 0
self.drone_vel_pub.publish(cmd)
def return_key_pressed(self,event):
## enabling takeoff and landing
if not self.flying:
self.flying = True
e = Empty()
self.takeoff_pub.publish(e)
else:
self.abort_pub.publish(True)
self.flying = False
e = Empty()
self.land_pub.publish(e)
def abort_auto_flight(self):
## aborting autonousmous flight and allowing the user the have full control of the drone again
rospy.loginfo("Aborting")
self.abort_bool = True
self.abort_pub.publish(self.abort_bool)
cmd = Twist()
cmd.linear.x = 0
cmd.linear.y = 0
cmd.linear.z = 0
cmd.angular.z = 0
self.drone_vel_pub.publish(cmd)
def pid_enabled(self, data):
## cheching whether the move_to_target program is running
data = str(data)
if data == 'data: True':
self.abort_button.configure(state = "active", bg = "grey", fg = "black")
if data == False and self.abort_bool == True:
self.abort_bool = False
self.abort_pub.publish(self.abort_bool)
def update_distance_label(self, data):
self.distance_label.configure( text = 'Distance: {:02.2f} m'.format(data.position.x) )
def update_battery_label(self, data):
self.battery_label.configure( text = 'Battery level: {}'.format(data))
def publish_pos(self):
#publishing the position of the target position in pixels
if not rospy.is_shutdown():
p = Pose()
p.position.x = pos_x
p.position.y = pos_y
self.gui_target_pub.publish(p)
self.rate.sleep()
## sizing the gui window and initialising
ImageFile.LOAD_TRUNCATED_IMAGES = True
root = Tk()
root.geometry('1800x600')
gui = DroneGUI(root)
gui.update_image()
col_count, row_count = root.grid_size()
for col in xrange(col_count):
root.grid_columnconfigure(col, minsize=40)
for row in xrange(row_count):
root.grid_rowconfigure(row, minsize=20)
root.mainloop()
| [
"anhon@elektro.dtu.dk"
] | anhon@elektro.dtu.dk |
633e313b2e3d9671331d27aed231d2656fe2b5c5 | b9fed5ae3fbb22317d29fc20af4336f2e31489a6 | /main.py | 323c7f37baa0e798abd1ca4351e5b56b64fc2fe0 | [] | no_license | Raashid710/flask-blog | e484538a8df61e8b333b82b261a14114e5b0395a | a343e0cdaec16e2e5a45860427a282e8c221c766 | refs/heads/master | 2023-06-28T19:44:54.234499 | 2021-08-10T08:52:36 | 2021-08-10T08:52:36 | 393,982,315 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,961 | py | from flask import Flask, render_template, request,session,redirect
from flask_sqlalchemy import SQLAlchemy
from werkzeug.utils import secure_filename
from flask_mail import Mail
import json
import os
import math
from datetime import datetime
with open('config.json', 'r') as c:
params = json.load(c)["params"]
local_server = True
app = Flask(__name__)
app.secret_key = 'super_secret_key'
app.config['UPLOAD_FOLDER']= params['upload_location']
# app.config.update(
# MAIL_SERVER = 'smtp.gmail.com',
# MAIL_PORT = '465',
# MAIL_USE_SSL = True,
# MAIL_USERNAME = params['gmail-user'],
# MAIL_PASSWORD= params['gmail-password']
# )
# mail = Mail(app)
if(local_server):
app.config['SQLALCHEMY_DATABASE_URI'] = params['local_uri']
else:
app.config['SQLALCHEMY_DATABASE_URI'] = params['prod_uri']
db = SQLAlchemy(app)
class Contacts(db.Model):
sno = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(80), nullable=False)
phn_num = db.Column(db.String(12), nullable=False)
msg = db.Column(db.String(120), nullable=False)
date = db.Column(db.String(12), nullable=True)
email = db.Column(db.String(20), nullable=False)
class Posts(db.Model):
sno = db.Column(db.Integer, primary_key=True)
title = db.Column(db.String(80), nullable=False)
slug = db.Column(db.String(21), nullable=False)
content = db.Column(db.String(120), nullable=False)
tagline = db.Column(db.String(120), nullable=False)
date = db.Column(db.String(12), nullable=True)
img_file = db.Column(db.String(12), nullable=True)
@app.route("/")
def home():
posts = Posts.query.filter_by().all()
last=math.ceil(len(posts))/int(params['no_of_posts'])
page=request.args.get('page')
if(not str(page).isnumeric()):
page=1
page=int(page)
posts=posts[(page-1)*int(params['no_of_posts']): (page-1)*int(params['no_of_posts'])+int(params['no_of_posts'])]
if (page==1):
prev = "#"
next = "/?page="+ str(page+1)
elif(page==last):
prev = "/?page=" + str(page-1)
next= "#"
else:
prev="/?page=" + str(page-1)
next="/?page" + str(page+1)
return render_template('index.html', params=params,posts=posts,prev=prev,next=next)
@app.route("/post/<string:post_slug>", methods=['GET'])
def post_route(post_slug):
post = Posts.query.filter_by(slug=post_slug).first()
return render_template('post.html', params=params, post=post)
@app.route("/about")
def about():
return render_template('about.html', params=params)
@app.route("/dashboard", methods= ['GET','POST'])
def dashboard():
if ('user' in session and session['user'] == params['admin_user']):
posts = Posts.query.all()
return render_template('dashboard.html', params=params, posts = posts)
if request.method=='POST':
username=request.form.get('uname')
userpass=request.form.get('pass')
if (username == params ['admin_user'] and userpass == params ['admin_password']):
session['user'] = username
posts = Posts.query.all()
return render_template('dashboard.html', params=params, posts = posts)
return render_template('login.html', params=params)
@app.route("/edit/<string:sno>", methods=['GET', 'POST'])
def edit(sno):
if ('user' in session and session['user'] == params['admin_user']):
if request.method == "POST":
box_title = request.form.get('title')
tagline = request.form.get('tagline')
slug = request.form.get('slug')
content = request.form.get('content')
img_file = request.form.get('img_file')
date = datetime.now()
if sno == '0':
post = Posts(title= box_title, slug=slug, content=content, tagline=tagline, img_file=img_file, date=date)
db.session.add(post)
db.session.commit()
else:
post = Posts.query.filter_by(sno=sno).first()
post.title = box_title
post.tagline = tagline
post.slug = slug
post.content = content
post.img_file = img_file
post.date = date
db.session.commit()
return redirect('/edit/' + sno)
post = Posts.query.filter_by(sno=sno).first()
return render_template('edit.html', params=params, post=post,sno=sno)
@app.route("/uploader", methods = ['GET', 'POST'])
def uploader():
if "user" in session and session['user'] == params['admin_user']:
if(request.method=='POST'):
f = request.files['file1']
f.save(os.path.join(app.config['UPLOAD_FOLDER'], secure_filename(f.filename)))
return "uploaded successfully.."
@app.route("/logout")
def logout():
session.pop('user')
return redirect('/dashboard')
@app.route("/delete/<string:sno>", methods=['GET', 'POST'])
def delete(sno):
if "user" in session and session['user'] == params['admin_user']:
post= Posts.query.filter_by(sno=sno).first()
db.session.delete(post)
db.session.commit()
return redirect('/dashboard')
@app.route("/contact", methods = ['GET', 'POST'])
def contact():
if(request.method=='POST'):
name = request.form.get('name')
email = request.form.get('email')
phn_num = request.form.get('phone')
message = request.form.get('message')
entry = Contacts(name=name, phn_num = phn_num, msg = message, date= datetime.now(),email = email )
db.session.add(entry)
db.session.commit()
# mail.send_message('New message from ' + name,
# sender=email,
# recipients = [params['gmail-user']],
# body = message + "\n" + phn_num
# )
return render_template('contact.html', params=params)
app.run(debug=True)
| [
"Raashid710"
] | Raashid710 |
b10f1d015e39167aece2ad6df9958a447a5a7f45 | ff73cf9a9b1c924e46e9e088243991a184a8a668 | /UMass/2019Research/synthesis/minimal_box/softbox_model.py | 071de3ad937c9345961ca5481b953dce6441bbc0 | [] | no_license | Lorraine333/minimal_box | c844b14ca2b5a6a898be9dec48ce89473887a325 | 472e1de6087c4183fe706edc0efc4e917b7518a3 | refs/heads/master | 2020-06-11T14:31:39.612547 | 2019-06-27T01:11:26 | 2019-06-27T01:11:26 | 193,998,999 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,992 | py | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import utils
from box import MyBox
import tensorflow as tf
my_seed = 20180112
tf.set_random_seed(my_seed)
def model_fn(features, labels, mode, params):
"""
Creates model_fn for Tensorflow estimator. This function takes features and input, and
is responsible for the creation and processing of the Tensorflow graph for training, prediction and evaluation.
Expected feature: {'image': image tensor }
:param features: dictionary of input features
:param labels: dictionary of ground truth labels
:param mode: graph mode
:param params: params to configure model
:return: Estimator spec dependent on mode
"""
learning_rate = params['learning_rate']
"""Initiate box embeddings"""
mybox = softbox_model_fn(features, labels, mode, params)
log_prob = mybox.log_prob
if mode == tf.estimator.ModeKeys.PREDICT:
return get_prediction_spec(log_prob)
total_loss = mybox.get_loss(log_prob, labels, params)
if mode == tf.estimator.ModeKeys.TRAIN:
return get_training_spec(learning_rate, total_loss)
else:
return get_eval_spec(log_prob, labels, total_loss)
def get_prediction_spec(log_cond_prob):
"""
Creates estimator spec for prediction
:param log_cond_prob: log prob for conditionals
:param log_marg_prob: log prob for marginals
:return: Estimator spec
"""
predictions = {
"probability": tf.exp(log_cond_prob)
}
return tf.estimator.EstimatorSpec(mode=tf.estimator.ModeKeys.PREDICT, predictions=predictions)
def get_training_spec(learning_rate, loss):
"""
Creates training estimator spec
:param learning rate for optimizer
:param joint_loss: loss op
:return: Training estimator spec
"""
# optimizer = tf.train.AdamOptimizer(learning_rate=learning_rate)
# train_op = optimizer.minimize(
# loss=loss,
# global_step=tf.train.get_global_step())
return tf.estimator.EstimatorSpec(mode=tf.estimator.ModeKeys.TRAIN, loss=loss, train_op=train_op)
def get_eval_spec(log_cond_prob, labels, loss):
"""
Creates eval spec for tensorflow estimator
:param log_cond_prob: log prob for conditionals
:param log_marg_prob: log prob for marginals
:param labels: ground truth labels for conditional and marginal
:param loss: loss op
:return: Eval estimator spec
"""
eval_metric_ops = {
"pearson_correlation":tf.contrib.metrics.streaming_pearson_correlation(
predictions=tf.exp(log_cond_prob), labels=labels['prob'])
}
return tf.estimator.EstimatorSpec(
mode=tf.estimator.ModeKeys.EVAL, loss=loss, eval_metric_ops=eval_metric_ops)
class softbox_model_fn(object):
def __init__(self, features, labels, mode, params):
self.label_size = params['label_size']
self.embed_dim = params['embed_dim']
self.prob_label = labels['prob']
self.cond_weight = params['cond_weight']
self.marg_weight = params['marg_weight']
self.reg_weight = params['reg_weight']
self.regularization_method = params['regularization_method']
self.temperature = 1.0
self.min_embed, self.delta_embed = init_word_embedding(self.label_size, self.embed_dim)
self.log_prob = tf.cond(tf.equal(tf.shape(self.prob_label)[1], tf.constant(self.label_size)),
true_fn=lambda: self.softbox_marg(features, params, mode),
false_fn=lambda: self.softbox_cond(features, params, mode))
self.log_prob = tf.Print(self.log_prob, [tf.equal(tf.shape(self.prob_label)[1], tf.constant(self.label_size))], '0 for marginal, 1 for conditional')
def softbox_cond(self, features, params, mode):
embed_dim = params['embed_dim']
t1x = features['term1']
t2x = features['term2']
# t1x = tf.Print(t1x, [t1x, t2x], 't1x shape')
"""cond log probability"""
t1_box = get_word_embedding(t1x, self.min_embed, self.delta_embed)
t2_box = get_word_embedding(t2x, self.min_embed, self.delta_embed)
evaluation_logits = get_conditional_probability(t1_box, t2_box, embed_dim, self.temperature)
return evaluation_logits
def softbox_marg(self, features, params, mode):
"""marg log probability"""
max_embed = self.min_embed + tf.exp(self.delta_embed)
universe_min = tf.reduce_min(self.min_embed, axis=0, keepdims=True)
universe_max = tf.reduce_max(max_embed, axis=0, keepdims=True)
universe_volume = volume_calculation(MyBox(universe_min, universe_max), self.temperature)
box_volume = volume_calculation(MyBox(self.min_embed, max_embed), self.temperature)
predicted_marginal_logits = tf.log(box_volume) - tf.log(universe_volume)
return predicted_marginal_logits
def get_cond_loss(self, cond_log_prob):
"""get conditional probability loss"""
cond_pos_loss = tf.multiply(cond_log_prob, self.prob_label)
cond_neg_loss = tf.multiply(tf.log(1-tf.exp(cond_log_prob)+1e-10), 1-self.prob_label)
cond_loss = -tf.reduce_mean(cond_pos_loss+ cond_neg_loss)
cond_loss = self.cond_weight * cond_loss
return cond_loss
def get_marg_loss(self, marg_log_prob):
"""get marginal probability loss"""
marg_pos_loss = tf.multiply(marg_log_prob, self.prob_label)
marg_neg_loss = tf.multiply(tf.log(1-tf.exp(marg_log_prob)+1e-10), 1-self.prob_label)
marg_loss = -tf.reduce_mean(marg_pos_loss+marg_neg_loss)
marg_loss = self.marg_weight * marg_loss
return marg_loss
def get_loss(self, log_prob, labels, params):
prob_loss = tf.cond(tf.equal(tf.shape(self.prob_label)[0], tf.constant(self.label_size)),
true_fn=lambda: self.get_marg_loss(log_prob),
false_fn=lambda: self.get_cond_loss(log_prob))
"""get regularization loss"""
if self.regularization_method == 'universe_edge':
max_embed = self.min_embed + tf.exp(self.delta_embed)
universe_min = tf.reduce_min(self.min_embed, axis=0, keepdims=True)
universe_max = tf.reduce_max(max_embed, axis=0, keepdims=True)
regularization = tf.reduce_mean(
tf.nn.softplus(universe_max - universe_min))
elif self.regularization_method == 'delta':
regularization = tf.reduce_mean(
tf.square(tf.exp(self.delta_embed)))
else:
raise ValueError('Wrong regularization method')
total_loss = prob_loss + self.reg_weight * regularization
total_loss = tf.Print(total_loss, [prob_loss, self.reg_weight * regularization], 'loss')
return total_loss
# def softbox(features, params, mode):
# label_size = params['label_size']
# embed_dim = params['embed_dim']
# temperature = 1.0
#
# t1x = features['term1']
# t2x = features['term2']
#
# t1x = tf.Print(t1x, [tf.shape(t1x)], 't1x shape')
#
# """Initiate box embeddings"""
# min_embed, delta_embed = init_word_embedding(label_size, embed_dim)
# """cond log probability"""
# t1_box = get_word_embedding(t1x, min_embed, delta_embed)
# t2_box = get_word_embedding(t2x, min_embed, delta_embed)
# evaluation_logits = get_conditional_probability(t1_box, t2_box, embed_dim, temperature)
#
# """marg log probability"""
# max_embed = min_embed + tf.exp(delta_embed)
# universe_min = tf.reduce_min(min_embed, axis=0, keepdims=True)
# universe_max = tf.reduce_max(max_embed, axis=0, keepdims=True)
# universe_volume = volume_calculation(MyBox(universe_min, universe_max), temperature)
# box_volume = volume_calculation(MyBox(min_embed, max_embed), temperature)
# predicted_marginal_logits = tf.log(box_volume) - tf.log(universe_volume)
#
# return evaluation_logits, predicted_marginal_logits, min_embed, delta_embed
def volume_calculation(mybox, temperature):
return tf.reduce_prod(tf.nn.softplus((mybox.max_embed - mybox.min_embed)/
temperature)*temperature, axis=-1)
def init_embedding_scale():
# softbox delta log init
# min_lower_scale, min_higher_scale = 1e-4, 0.9
# delta_lower_scale, delta_higher_scale = -1.0, -0.1
min_lower_scale, min_higher_scale = 1e-4, 0.9
delta_lower_scale, delta_higher_scale = -0.1, 0
return min_lower_scale, min_higher_scale, delta_lower_scale, delta_higher_scale
def init_word_embedding(label_size, embed_dim):
min_lower_scale, min_higher_scale, delta_lower_scale, delta_higher_scale = init_embedding_scale()
min_embed = tf.Variable(
tf.random_uniform([label_size, embed_dim],
min_lower_scale, min_higher_scale, seed=my_seed), trainable=True, name='word_embed')
delta_embed = tf.Variable(
tf.random_uniform([label_size, embed_dim],
delta_lower_scale, delta_higher_scale, seed=my_seed), trainable=True, name='delta_embed')
return min_embed, delta_embed
def get_word_embedding(idx, min_embed, delta_embed):
"""read word embedding from embedding table, get unit cube embeddings"""
min_embed = tf.nn.embedding_lookup(min_embed, idx)
delta_embed = tf.nn.embedding_lookup(delta_embed, idx) # [batch_size, embed_size]
max_embed = min_embed + tf.exp(delta_embed)
t1_box = MyBox(min_embed, max_embed)
return t1_box
def get_conditional_probability(t1_box, t2_box, embed_dim, temperature):
_, meet_box, disjoint = utils.calc_join_and_meet(t1_box, t2_box)
nested = utils.calc_nested(t1_box, t2_box, embed_dim)
"""get conditional probabilities"""
overlap_volume = volume_calculation(meet_box, temperature)
rhs_volume = volume_calculation(t1_box, temperature)
conditional_logits = tf.log(overlap_volume+1e-10) - tf.log(rhs_volume+1e-10)
return conditional_logits
| [
"abbeyli92@gmail.com"
] | abbeyli92@gmail.com |
3aefc1186a88845c16d658de39ccb722a760a83f | e922f5dac332fbf4de910ade55f07cb75d900d1b | /templates/influxdb/actions.py | 22fc5f7c27e2e8b057d5a9a71db43c9800bbaa34 | [
"Apache-2.0"
] | permissive | hossnys/0-orchestrator | 441970f0bd784b72c40f6da4fa44ca2c70b9ea8c | cce7cc1e1f957e0eb691b863502fa6c3f4620d52 | refs/heads/master | 2021-01-01T18:46:27.123614 | 2017-07-26T13:59:30 | 2017-07-26T13:59:30 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,070 | py | def get_container(service, force=True):
containers = service.producers.get('container')
if not containers:
if force:
raise RuntimeError('Service didn\'t consume any containers')
else:
return
return containers[0]
def init(job):
from zeroos.orchestrator.configuration import get_configuration
service = job.service
container_actor = service.aysrepo.actorGet('container')
config = get_configuration(service.aysrepo)
args = {
'node': service.model.data.node,
'flist': config.get(
'influxdb-flist', 'https://hub.gig.tech/gig-official-apps/influxdb.flist'),
'hostNetworking': True
}
cont_service = container_actor.serviceCreate(instance='{}_influxdb'.format(service.name), args=args)
service.consume(cont_service)
def install(job):
j.tools.async.wrappers.sync(job.service.executeAction('start', context=job.context))
def start(job):
from zeroos.orchestrator.sal.Container import Container
from zeroos.orchestrator.sal.influxdb.influxdb import InfluxDB
service = job.service
container = get_container(service)
j.tools.async.wrappers.sync(container.executeAction('start', context=job.context))
container_ays = Container.from_ays(container, job.context['token'])
influx = InfluxDB(
container_ays, service.parent.model.data.redisAddr, service.model.data.port)
influx.start()
service.model.data.status = 'running'
influx.create_databases(service.model.data.databases)
service.saveAll()
def stop(job):
from zeroos.orchestrator.sal.Container import Container
from zeroos.orchestrator.sal.influxdb.influxdb import InfluxDB
service = job.service
container = get_container(service)
container_ays = Container.from_ays(container, job.context['token'])
if container_ays.is_running():
influx = InfluxDB(
container_ays, service.parent.model.data.redisAddr, service.model.data.port)
influx.stop()
j.tools.async.wrappers.sync(container.executeAction('stop', context=job.context))
service.model.data.status = 'halted'
service.saveAll()
def uninstall(job):
service = job.service
container = get_container(service, False)
if container:
j.tools.async.wrappers.sync(service.executeAction('stop', context=job.context))
j.tools.async.wrappers.sync(container.delete())
j.tools.async.wrappers.sync(service.delete())
def processChange(job):
from zeroos.orchestrator.sal.Container import Container
from zeroos.orchestrator.sal.influxdb.influxdb import InfluxDB
from zeroos.orchestrator.configuration import get_jwt_token_from_job
service = job.service
args = job.model.args
if args.pop('changeCategory') != 'dataschema' or service.model.actionsState['install'] in ['new', 'scheduled']:
return
container_service = get_container(service)
container = Container.from_ays(container_service, get_jwt_token_from_job(job))
influx = InfluxDB(
container, service.parent.model.data.redisAddr, service.model.data.port)
if args.get('port'):
if container.is_running() and influx.is_running()[0]:
influx.stop()
service.model.data.status = 'halted'
influx.port = args['port']
influx.start()
service.model.data.status = 'running'
service.model.data.port = args['port']
if args.get('databases'):
if container.is_running() and influx.is_running()[0]:
create_dbs = set(args['databases']) - set(service.model.data.databases)
drop_dbs = set(service.model.data.databases) - set(args['databases'])
influx.create_databases(create_dbs)
influx.drop_databases(drop_dbs)
service.model.data.databases = args['databases']
service.saveAll()
def init_actions_(service, args):
return {
'init': [],
'install': ['init'],
'monitor': ['start'],
'delete': ['uninstall'],
'uninstall': [],
} | [
"deboeck.jo@gmail.com"
] | deboeck.jo@gmail.com |
9dd6a4dec2e3bae975e974846e5a24ca372e9610 | 62d305dc260c7d5ef49d44a8d61f29896fe612b4 | /scqubits/tests/test_spectrumlookup.py | 09f8bea073f14352c8aa085865dbd5b194d715fe | [
"BSD-3-Clause"
] | permissive | ooovector/scqubits | 1185ca8bc4eb5a88638d1638c92fde789dc6036b | d3f0108eda9e9fa216e3efe1a8feaee220b29444 | refs/heads/master | 2023-01-05T06:13:55.862278 | 2020-10-30T07:54:57 | 2020-10-30T07:54:57 | 268,481,492 | 0 | 2 | BSD-3-Clause | 2020-08-28T12:34:43 | 2020-06-01T09:40:41 | Python | UTF-8 | Python | false | false | 16,587 | py | # test_spectrumlookup.py
# meant to be run with 'pytest'
#
# This file is part of scqubits.
#
# Copyright (c) 2019, Jens Koch and Peter Groszkowski
# All rights reserved.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree.
############################################################################
import numpy as np
import scqubits as qubit
from scqubits.core.hilbert_space import HilbertSpace, InteractionTerm
from scqubits.core.param_sweep import ParameterSweep
class TestSpectrumLookup:
    """Tests for HilbertSpace.generate_lookup() and the resulting lookup table
    mapping dressed indices to bare product-state labels, energies and states."""
    def initialize_hilbertspace(self):
        """Build the shared fixture: two transmons and a resonator, each
        charge-coupled to the resonator, wrapped in a HilbertSpace."""
        CPB1 = qubit.Transmon(
            EJ=40.0,
            EC=0.2,
            ng=0.3,
            ncut=40,
            truncated_dim=3  # after diagonalization, we will keep 3 levels
        )
        CPB2 = qubit.Transmon(
            EJ=30.0,
            EC=0.15,
            ng=0.0,
            ncut=10,
            truncated_dim=4
        )
        resonator = qubit.Oscillator(
            E_osc=6.0,
            truncated_dim=4  # up to 3 photons (0,1,2,3)
        )
        # Form a list of all components making up the Hilbert space.
        hilbertspace = HilbertSpace([CPB1, CPB2, resonator])
        g1 = 0.1  # coupling resonator-CPB1 (without charge matrix elements)
        g2 = 0.2  # coupling resonator-CPB2 (without charge matrix elements)
        interaction1 = InteractionTerm(
            g_strength=g1,
            op1=CPB1.n_operator(),
            subsys1=CPB1,
            op2=resonator.creation_operator() + resonator.annihilation_operator(),
            subsys2=resonator
        )
        interaction2 = InteractionTerm(
            g_strength=g2,
            op1=CPB2.n_operator(),
            subsys1=CPB2,
            op2=resonator.creation_operator() + resonator.annihilation_operator(),
            subsys2=resonator
        )
        interaction_list = [interaction1, interaction2]
        hilbertspace.interaction_list = interaction_list
        return hilbertspace
    def test_hilbertspace_generate_lookup(self):
        """Smoke test: lookup generation completes without raising."""
        hilbertspace = self.initialize_hilbertspace()
        hilbertspace.generate_lookup()
    def test_hilbertspace_lookup_bare_eigenenergies(self):
        """Bare eigenenergies of the first transmon match reference values."""
        hilbertspace = self.initialize_hilbertspace()
        hilbertspace.generate_lookup()
        CPB = hilbertspace[0]
        reference = np.asarray([-36.05064983, -28.25601136, -20.67410141])
        assert np.allclose(hilbertspace.lookup.bare_eigenenergies(CPB), reference)
    def test_hilbertspace_lookup_bare_index(self):
        """Dressed state 8 maps back to bare product label (1, 0, 1)."""
        hilbertspace = self.initialize_hilbertspace()
        hilbertspace.generate_lookup()
        reference = (1, 0, 1)
        assert hilbertspace.lookup.bare_index(8) == reference
    def test_hilbertspace_lookup_dressed_index(self):
        """Bare product label (1, 2, 1) maps to dressed index 21."""
        hilbertspace = self.initialize_hilbertspace()
        hilbertspace.generate_lookup()
        reference = 21
        assert hilbertspace.lookup.dressed_index((1, 2, 1)) == reference
    def test_hilbertspace_lookup_bare_eigenstates(self):
        """Bare eigenstates of the first transmon (ncut=40 -> 81 charge-basis
        amplitudes per eigenvector, 3 kept levels) match the reference matrix."""
        hilbertspace = self.initialize_hilbertspace()
        hilbertspace.generate_lookup()
        CPB = hilbertspace[0]
        reference = np.asarray(
            [[9.81355277e-48, 9.30854381e-47, 6.24667247e-46],
             [6.43691609e-46, 6.16876506e-45, 4.12289788e-44],
             [4.08432918e-44, 3.89716449e-43, 2.58910351e-42],
             [2.46946241e-42, 2.34112552e-41, 1.54552340e-40],
             [1.41840000e-40, 1.33555952e-39, 8.75825742e-39],
             [7.72924773e-39, 7.22576051e-38, 4.70525632e-37],
             [3.99043912e-37, 3.70232919e-36, 2.39303178e-35],
             [1.94904073e-35, 1.79388526e-34, 1.15041755e-33],
             [8.99241757e-34, 8.20662359e-33, 5.21927229e-32],
             [3.91282194e-32, 3.53890467e-31, 2.23088730e-30],
             [1.60297165e-30, 1.43598881e-29, 8.96770059e-29],
             [6.17171701e-29, 5.47280554e-28, 3.38373118e-27],
             [2.22898717e-27, 1.95522337e-26, 1.19604078e-25],
             [7.53630460e-26, 6.53443664e-25, 3.95184002e-24],
             [2.38030833e-24, 2.03838345e-23, 1.21776107e-22],
             [7.00726389e-23, 5.92116628e-22, 3.49118136e-21],
             [1.91803972e-21, 1.59764762e-20, 9.28738868e-20],
             [4.86905872e-20, 3.99337435e-19, 2.28615708e-18],
             [1.14319364e-18, 9.22005967e-18, 5.19156110e-17],
             [2.47519037e-17, 1.96028507e-16, 1.08406384e-15],
             [4.92660565e-16, 3.82517606e-15, 2.07418437e-14],
             [8.98410446e-15, 6.82608546e-14, 3.62257139e-13],
             [1.49561042e-13, 1.10967323e-12, 5.75121167e-12],
             [2.26406443e-12, 1.63642248e-11, 8.26237305e-11],
             [3.10359624e-11, 2.17915666e-10, 1.06879561e-09],
             [3.83517061e-10, 2.60742769e-09, 1.23809279e-08],
             [4.25136635e-09, 2.78807695e-08, 1.27659489e-07],
             [4.20541851e-08, 2.64836082e-07, 1.16378489e-06],
             [3.69111261e-07, 2.22015398e-06, 9.30982723e-06],
             [2.85699456e-06, 1.63072825e-05, 6.47991812e-05],
             [1.93730997e-05, 1.04109860e-04, 3.88641713e-04],
             [1.14275333e-04, 5.72579791e-04, 1.98618170e-03],
             [5.81958843e-04, 2.68572824e-03, 8.53587859e-03],
             [2.53837018e-03, 1.06226113e-02, 3.03588663e-02],
             [9.40432575e-03, 3.49650614e-02, 8.75591950e-02],
             [2.93435401e-02, 9.42865786e-02, 1.99160495e-01],
             [7.64587603e-02, 2.04183470e-01, 3.42090752e-01],
             [1.65024759e-01, 3.45198039e-01, 4.07470769e-01],
             [2.92888514e-01, 4.33880781e-01, 2.56608116e-01],
             [4.24891514e-01, 3.59598148e-01, -8.79153816e-02],
             [5.01714912e-01, 9.84685217e-02, -3.53429771e-01],
             [4.81272090e-01, -2.20127278e-01, -2.78699112e-01],
             [3.75226600e-01, -4.13778959e-01, 5.98745832e-02],
             [2.38462243e-01, -4.12292719e-01, 3.47513274e-01],
             [1.24144932e-01, -2.88932985e-01, 4.00686521e-01],
             [5.32947942e-02, -1.54131669e-01, 2.86094353e-01],
             [1.90119468e-02, -6.50150668e-02, 1.47843633e-01],
             [5.68278365e-03, -2.22152354e-02, 5.88699461e-02],
             [1.43546171e-03, -6.26030710e-03, 1.87174037e-02],
             [3.09023712e-04, -1.47627434e-03, 4.86852349e-03],
             [5.71637638e-05, -2.94942330e-04, 1.05515547e-03],
             [9.15737091e-06, -5.04652995e-05, 1.93379175e-04],
             [1.27979057e-06, -7.46595985e-06, 3.03408392e-05],
             [1.57114391e-07, -9.63222539e-07, 4.11860306e-06],
             [1.70524328e-08, -1.09208038e-07, 4.88161208e-07],
             [1.64601782e-09, -1.09569023e-08, 5.09307414e-08],
             [1.42087441e-10, -9.78961139e-10, 4.71111386e-09],
             [1.10247361e-11, -7.83397639e-11, 3.88859043e-10],
             [7.72550576e-13, -5.64439383e-12, 2.88076527e-11],
             [4.91062053e-14, -3.67924669e-13, 1.92555426e-12],
             [2.84291230e-15, -2.17934446e-14, 1.16685222e-13],
             [1.50469805e-16, -1.17783922e-15, 6.43856605e-15],
             [7.30668786e-18, -5.83004745e-17, 3.24802569e-16],
             [3.26587050e-19, -2.65213639e-18, 1.50352885e-17],
             [1.34775547e-20, -1.11239813e-19, 6.40837496e-19],
             [5.14987234e-22, -4.31489395e-21, 2.52289937e-20],
             [1.82690101e-23, -1.55217860e-22, 9.20113815e-22],
             [6.03189844e-25, -5.19171482e-24, 3.11715376e-23],
             [1.85795057e-26, -1.61860341e-25, 9.83457910e-25],
             [5.35075470e-28, -4.71440666e-27, 2.89646327e-26],
             [1.44377736e-29, -1.28560173e-28, 7.98111283e-28],
             [3.65714082e-31, -3.28896361e-30, 2.06182159e-29],
             [8.71252159e-33, -7.90887032e-32, 5.00363611e-31],
             [1.95554312e-34, -1.79083458e-33, 1.14280608e-32],
             [4.14221247e-36, -3.82491690e-35, 2.46077129e-34],
             [8.29318581e-38, -7.71816934e-37, 5.00379776e-36],
             [1.57174582e-39, -1.47365562e-38, 9.62359716e-38],
             [2.82377905e-41, -2.66622536e-40, 1.75318677e-39],
             [4.81562064e-43, -4.57736079e-42, 3.02958416e-41],
             [7.80686036e-45, -7.46654748e-44, 4.97258814e-43],
             [1.20604294e-46, -1.15822587e-45, 7.76029780e-45]]
        )
        assert np.allclose(hilbertspace.lookup.bare_eigenstates(CPB), reference)
class TestParameterSweep:
    """Tests for ParameterSweep: lookup of bare energies/states at a given
    index of the swept external parameter ('flux')."""
    def initialize(self):
        """Build a ParameterSweep over 300 flux values for a two-transmon +
        resonator HilbertSpace; the transmon EJs vary with the flux value."""
        # Set up the components / subspaces of our Hilbert space
        CPB1 = qubit.Transmon(
            EJ=40.0,
            EC=0.2,
            ng=0.3,
            ncut=40,
            truncated_dim=3  # after diagonalization, we will keep 3 levels
        )
        CPB2 = qubit.Transmon(
            EJ=30.0,
            EC=0.15,
            ng=0.0,
            ncut=10,
            truncated_dim=4
        )
        resonator = qubit.Oscillator(
            E_osc=6.0,
            truncated_dim=4  # up to 3 photons (0,1,2,3)
        )
        # Form a list of all components making up the Hilbert space.
        hilbertspace = HilbertSpace([CPB1, CPB2, resonator])
        g1 = 0.1  # coupling resonator-CPB1 (without charge matrix elements)
        g2 = 0.2  # coupling resonator-CPB2 (without charge matrix elements)
        interaction1 = InteractionTerm(
            g_strength=g1,
            op1=CPB1.n_operator(),
            subsys1=CPB1,
            op2=resonator.creation_operator() + resonator.annihilation_operator(),
            subsys2=resonator
        )
        interaction2 = InteractionTerm(
            g_strength=g2,
            op1=CPB2.n_operator(),
            subsys1=CPB2,
            op2=resonator.creation_operator() + resonator.annihilation_operator(),
            subsys2=resonator
        )
        interaction_list = [interaction1, interaction2]
        hilbertspace.interaction_list = interaction_list
        param_name = 'flux'  # name of varying external parameter
        param_vals = np.linspace(0., 2.0, 300)  # parameter values
        subsys_update_list = [CPB1,
                              CPB2]  # list of HilbertSpace subsys_list which are affected by parameter changes
        def update_hilbertspace(param_val):  # function that shows how Hilbert space components are updated
            CPB1.EJ = 20 * np.abs(np.cos(np.pi * param_val))
            CPB2.EJ = 15 * np.abs(np.cos(np.pi * param_val * 0.65))
        sweep = ParameterSweep(
            param_name=param_name,
            param_vals=param_vals,
            evals_count=20,
            hilbertspace=hilbertspace,
            subsys_update_list=subsys_update_list,
            update_hilbertspace=update_hilbertspace
        )
        return sweep
    def test_sweep_bare_eigenenergies(self):
        """Bare eigenenergies of the second transmon at parameter index 15
        match the reference values."""
        sweep = self.initialize()
        reference = np.asarray([-12.6254519, -8.58335482, -4.70576686, -1.00508497])
        CPB2 = sweep.get_subsys(1)
        calculated = sweep.lookup.bare_eigenenergies(CPB2, 15)
        assert np.allclose(reference, calculated)
    def test_sweep_bare_eigenstates(self):
        """Bare eigenstates of the first transmon at parameter index 21
        (81 charge-basis amplitudes x 3 kept levels) match the reference."""
        sweep = self.initialize()
        reference = np.asarray(
            [[-4.36541328e-50, -2.75880308e-54, 8.97850818e-52],
             [-3.11868192e-49, -2.79006815e-54, -8.74802339e-53],
             [-8.58246250e-50, -1.00876179e-53, -1.01005272e-51],
             [1.07036276e-49, -7.49520810e-52, 3.40138146e-51],
             [2.22076735e-50, -9.29825414e-50, 5.29476367e-49],
             [9.31798507e-49, -1.09455894e-47, 6.20160456e-47],
             [1.48748935e-46, -1.21908616e-45, 6.87331138e-45],
             [1.57335293e-44, -1.28270423e-43, 7.19436330e-43],
             [1.57038899e-42, -1.27291721e-41, 7.10007771e-41],
             [1.47623385e-40, -1.18931710e-39, 6.59489623e-39],
             [1.30464570e-38, -1.04427630e-37, 5.75451766e-37],
             [1.08188824e-36, -8.60002157e-36, 4.70755913e-35],
             [8.40106916e-35, -6.62889364e-34, 3.60280729e-33],
             [6.09538520e-33, -4.77167090e-32, 2.57367705e-31],
             [4.12259568e-31, -3.20001276e-30, 1.71188450e-29],
             [2.59275754e-29, -1.99422862e-28, 1.05746143e-27],
             [1.51222344e-27, -1.15172482e-26, 6.04923527e-26],
             [8.15621522e-26, -6.14597428e-25, 3.19493711e-24],
             [4.05546577e-24, -3.02076696e-23, 1.55282385e-22],
             [1.85279307e-22, -1.36279552e-21, 6.92039152e-21],
             [7.74967547e-21, -5.62219212e-20, 2.81708905e-19],
             [2.95606461e-19, -2.11237965e-18, 1.04300880e-17],
             [1.02393371e-17, -7.19606307e-17, 3.49598322e-16],
             [3.20583976e-16, -2.21183282e-15, 1.05539288e-14],
             [9.02643915e-15, -6.10107117e-14, 2.85332763e-13],
             [2.27284550e-13, -1.50130691e-12, 6.86483062e-12],
             [5.08658910e-12, -3.27390598e-11, 1.45936793e-10],
             [1.00490495e-10, -6.28031136e-10, 2.71942263e-09],
             [1.73929913e-09, -1.05099253e-08, 4.40157221e-08],
             [2.61520163e-08, -1.51995670e-07, 6.12372338e-07],
             [3.38385801e-07, -1.87937831e-06, 7.23484440e-06],
             [3.72810682e-06, -1.96242274e-05, 7.15569990e-05],
             [3.45584871e-05, -1.70590444e-04, 5.82474864e-04],
             [2.65942237e-04, -1.21399020e-03, 3.82167032e-03],
             [1.67360896e-03, -6.93272504e-03, 1.96865734e-02],
             [8.46996689e-03, -3.10088949e-02, 7.69019540e-02],
             [3.38450374e-02, -1.05369628e-01, 2.16719161e-01],
             [1.04711605e-01, -2.61101801e-01, 4.05243750e-01],
             [2.45912280e-01, -4.42899710e-01, 4.12353303e-01],
             [4.30449532e-01, -4.49470571e-01, 3.05581613e-02],
             [5.53739884e-01, -1.35052186e-01, -3.89790634e-01],
             [5.19852531e-01, 2.94931877e-01, -2.63181317e-01],
             [3.56804110e-01, 4.82977168e-01, 2.23412560e-01],
             [1.80869825e-01, 3.77382768e-01, 4.51858334e-01],
             [6.88151152e-02, 1.89260831e-01, 3.34098107e-01],
             [2.00308382e-02, 6.69188913e-02, 1.49539620e-01],
             [4.55033333e-03, 1.75694853e-02, 4.62204064e-02],
             [8.22318568e-04, 3.55076664e-03, 1.05407454e-02],
             [1.20336099e-04, 5.67882562e-04, 1.85083059e-03],
             [1.44906152e-05, 7.34992558e-05, 2.58056443e-04],
             [1.45665042e-06, 7.84270617e-06, 2.92596095e-05],
             [1.23810941e-07, 7.00861842e-07, 2.75014737e-06],
             [8.99993323e-09, 5.31654232e-08, 2.17699883e-07],
             [5.65189610e-10, 3.46352201e-09, 1.47086607e-08],
             [3.09419265e-11, 1.95757580e-10, 8.57946263e-10],
             [1.48871045e-12, 9.68547769e-12, 4.36327789e-11],
             [6.34068704e-14, 4.22836088e-13, 1.95158208e-12],
             [2.40640133e-15, 1.64038032e-14, 7.73561260e-14],
             [8.18610028e-17, 5.69106676e-16, 2.73577781e-15],
             [2.50956428e-18, 1.77583368e-17, 8.68518693e-17],
             [6.96721961e-20, 5.00977686e-19, 2.48862809e-18],
             [1.75956716e-21, 1.28376682e-20, 6.46796092e-20],
             [4.05902398e-23, 3.00104047e-22, 1.53161646e-21],
             [8.58515967e-25, 6.42518490e-24, 3.31807834e-23],
             [1.67070903e-26, 1.26444488e-25, 6.60096121e-25],
             [3.00109957e-28, 2.29490687e-27, 1.21007053e-26],
             [4.99099559e-30, 3.85321228e-29, 2.05059703e-28],
             [7.70601966e-32, 6.00230825e-31, 3.22178302e-30],
             [1.10748033e-33, 8.69778971e-33, 4.70594474e-32],
             [1.48510624e-35, 1.17536887e-34, 6.40674933e-34],
             [1.86242981e-37, 1.48464566e-36, 8.14893098e-36],
             [2.18890702e-39, 1.75670712e-38, 9.70510353e-38],
             [2.41583947e-41, 1.95114772e-40, 1.08452672e-39],
             [2.50853833e-43, 2.03811379e-42, 1.13938276e-41],
             [2.45571628e-45, 2.00585220e-44, 1.12742273e-43],
             [2.30038269e-47, 1.86312496e-46, 1.05255328e-45],
             [2.97703610e-49, 1.63590361e-48, 9.28557377e-48],
             [-2.98788454e-49, 1.35968736e-50, 7.75238654e-50],
             [-2.85901234e-50, 1.01001731e-52, 6.08803345e-52],
             [-3.46227453e-49, 2.24305288e-55, 3.05453863e-52],
             [-5.44193943e-50, -2.99920038e-54, -5.73539237e-53]]
        )
        CPB1 = sweep.get_subsys(0)
        assert np.allclose(reference, sweep.lookup.bare_eigenstates(CPB1, 21))
| [
"jens-koch@northwestern.edu"
] | jens-koch@northwestern.edu |
de23996569a965273f6b640c2706fdee4576e88d | bf154ac3c8b1f9aaa50115f7004b8ff7235e730a | /ask_buevich/ask_buevich/migrations/0003_question_created_at.py | e25003fe47ce7ce3729ff49b9980500a48004a4e | [] | no_license | JohnKeats97/AskBuevich | 69d486acd358ce321eaabd1b990b8ffa5b29df73 | 410e2f558f3d08f15214dc15e1dbe020aeeb6b46 | refs/heads/master | 2021-01-19T08:41:42.026059 | 2018-01-31T21:35:39 | 2018-01-31T21:35:39 | 87,663,854 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 436 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import datetime
class Migration(migrations.Migration):
    """Adds a `created_at` timestamp field to the Question model."""
    dependencies = [
        ('ask_buevich', '0002_question'),
    ]
    operations = [
        migrations.AddField(
            model_name='question',
            name='created_at',
            # Callable default: evaluated per-row at insertion time, not once
            # at import time.
            field=models.DateTimeField(default=datetime.datetime.now),
        ),
    ]
| [
"johnbuevich@yahoo.com"
] | johnbuevich@yahoo.com |
a6c4aa0e8380fd9ff7002a247ccdb28e8f77b1b8 | 6cbe9db58d3952c3a201dafb7ce4dc40ca865117 | /payment/urls.py | b8eb16da15294e55e39d9a47063dc253981c0dd2 | [] | no_license | Mundia-Pizo/ecom | 782b0ab57566f503c412b6114dd4b59e72fdd632 | 4ca6ba313cbdf3aef86304d34bf6bfc1a1ade1cf | refs/heads/main | 2023-03-19T06:54:56.414179 | 2022-10-24T22:52:08 | 2022-10-24T22:52:08 | 252,992,143 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 320 | py | from .views import payment_process, payment_done, payment_cancelled
from django.urls import path
# URL namespace: reverse these routes as 'payment:<name>'.
app_name = 'payment'
urlpatterns = [
    path('payment/', payment_process, name='payment_process'),
    path('done/', payment_done, name ='done'),
    path('payment_cancelled/', payment_cancelled, name='cancelled')
] | [
"stacypizom@gmail.com"
] | stacypizom@gmail.com |
d5397373ff70a6206913d19b80955ec6fe0df354 | 6d1747234e372917032452fddb94636c28b0122b | /Summary/scripts_becky/data_storage.py | 54da45ec3c4d4686e5cd7c13aae9a5f540d7cdb0 | [] | no_license | amitamisra/Summary_Dialogs | 869e66f26c83fc627546c85d2517245f85e742c3 | e0257bac31067a78f3225cc81b6b02e65e2bdd89 | refs/heads/master | 2020-04-06T04:32:31.736617 | 2015-10-28T19:51:37 | 2015-10-28T19:51:37 | 19,559,994 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,738 | py | import pymongo
from pymongo import Connection
from bson.binary import Binary
import pickle
import glob
import nltk
from train_on_conll2000 import *
import xml.etree.ElementTree as ET
def all_lower(list_of_tokens):
    """Return a new list with every token lowercased."""
    lowered = []
    for token in list_of_tokens:
        lowered.append(token.lower())
    return lowered
def get_sents(raw):
    # returns a list of word-tokenized sents
    # Sentence-split the raw text, then word-tokenize each sentence,
    # lowercasing every token and dropping standalone punctuation marks.
    untok = nltk.tokenize.sent_tokenize(raw)
    word_tok_sents = []
    punct = ['.',',','?',';',"'",'-',':','...','!','(',')','[',']']
    for s in untok:
        word_tok_sents.append(all_lower([i for i in nltk.tokenize.word_tokenize(s) if i not in punct]))
    # Debug output (this module is Python 2: print is a statement).
    print word_tok_sents
    return word_tok_sents
def shallow_parse(sent_list):
    # currently returns a list of Tree objects
    # if subtree = Tree('NP', [('energy', 'NN')])
    # subtree[0] returns ('energy', 'NN')
    chunks = []
    # Load the chunker pickled by train_on_conll2000.py from the working dir.
    with open('conll2000_trigram_uni_backoff','rb') as f:
        chunker = pickle.load(f)
    for sent in sent_list:
        tree = chunker.parse(sent)
        # Collect every non-root subtree, i.e. each chunk (NP/VP/...) found.
        for sub in tree.subtrees():
            if sub.node != 'S':
                chunks.append(sub)
    # Debug output (Python 2 print statement).
    print chunks
    return chunks
def words(sent):
    """Return *sent* with standalone punctuation tokens removed."""
    punct = {'.', ',', '?', ';', "'", '-', ':', '...', '!', '(', ')', '[', ']'}
    kept = []
    for token in sent:
        if token not in punct:
            kept.append(token)
    return kept
def get_scus(filename):
    """Parse the pyramid XML file and wrap each <scu> element in a Scu."""
    root = ET.parse(filename).getroot()
    scus = []
    for element in root.iter('scu'):
        scus.append(Scu(element))
    return scus
class Summary(object):
    """Pipeline wrapper for one summary text: sentence/word tokenization,
    POS tagging, and shallow (chunk) parsing."""
    def __init__(self,raw):
        # Lowercased, punctuation-stripped word-token lists, one per sentence.
        self.sents = get_sents(raw)
        self.pos = nltk.tag.batch_pos_tag(self.sents)
        # create another field to store stemmed, spellchecked sents with punct removed
        # import the parser that had the best performance on your sample
        self.chunks = shallow_parse(self.pos)
class Pyramid(object):
    """Container for all Summary Content Units parsed from a pyramid XML file."""
    def __init__(self,filename):
        self.scus = get_scus(filename)
class Scu(object):
    """One Summary Content Unit built from a pyramid <scu> XML element."""
    def __init__(self,element):
        # Unique identifier of the SCU within the pyramid.
        self.id = int(element.get('uid'))
        # SCU label as a lowercased, punctuation-stripped token list.
        self.label = words(all_lower(nltk.tokenize.word_tokenize(element.get('label'))))
        # One token list per <contributor> phrase.
        self.contrib = [words(all_lower(nltk.tokenize.word_tokenize(c.get('label')))) for c in element.iter('contributor')]
        # SCU weight = number of contributors (standard pyramid convention).
        self.weight = len(self.contrib)
def main():
    """Rebuild the MongoDB collections: pickle each summary text into
    `writingsamples` and store each pyramid SCU as a `pyramid` document."""
    # create mongo connection
    conn = Connection()
    db = conn.tc_storage
    coll = db.writingsamples
    # Start from an empty collection on each run.
    coll.remove()
    print "coll.count()"
    print coll.count()
    for path in glob.glob('/Users/EmilyChen/Dropbox/pyramids/data/target/summaries/[0-9]*.txt'):
        # Document id: the 9 characters before ".txt" (paths end in <id>.txt).
        id = path[-13:-4]
        with open(path,'r') as f:
            raw = f.read()
        s = Summary(raw)
        # Store the pickled Summary as BSON binary; subtype 128 marks
        # user-defined binary data.
        coll.save({'id':id,'summary':Binary(pickle.dumps(s),subtype=128)})
    pyr_coll = db.pyramid
    pyr_coll.remove()
    matter_pyr = Pyramid('../pyr/12_10_09_MATTER.pyr')
    for scu in matter_pyr.scus:
        doc = {'id':scu.id,'weight':scu.weight,'label':scu.label,'contrib':scu.contrib}
        pyr_coll.save(doc)
    print "pyrcoll.count()"
    print pyr_coll.count()
if __name__ == '__main__':
main() | [
"amitamisra1@gmail.com"
] | amitamisra1@gmail.com |
9f44906ee88378f8a8718133ab9aab13900371e0 | 221751f033d97c073eeab6b98b44df4526c45687 | /crud/forms.py | fd7de17ea4ab1200947009893982d86ce6c70d8a | [] | no_license | RomHartmann/todo_tracker | acb6ff1a974670df2b55542824811455284f4c77 | 70c785c55f4708ec88f43f6ba9b15df6b020255d | refs/heads/master | 2021-06-12T20:59:51.442768 | 2018-11-21T05:35:09 | 2018-11-21T05:35:09 | 158,182,484 | 0 | 0 | null | 2021-06-10T21:03:35 | 2018-11-19T07:53:14 | Python | UTF-8 | Python | false | false | 2,260 | py | from datetime import datetime
import calendar
from django.forms import ModelForm, widgets, Textarea
from crud.models import Todo
from django.utils import dates as dj_dates
class DateTimeWidget(widgets.MultiWidget):
    """A date-time widget for year, month, day, hour, minute (15 min intervals).
    Uses a set of 5 Select widgets, one for each range.
    """
    def __init__(self, attrs=None):
        now_year = datetime.now().year
        # Offer the current year plus the next four.
        years = [(year, year) for year in range(now_year, now_year + 5)]
        # Django's localized month names, keyed by month number (1-12).
        months = dj_dates.MONTHS_AP.items()
        days = [(i, i) for i in range(1, 32)]
        hours = [(i, i) for i in range(0, 24)]
        minutes = [(i, i) for i in (0, 15, 30, 45)]
        # Order matters: decompress() and value_from_datadict() below index
        # the sub-widgets as [day, month, year, hour, minute].
        _widgets = (
            widgets.Select(attrs=attrs, choices=days),
            widgets.Select(attrs=attrs, choices=months),
            widgets.Select(attrs=attrs, choices=years),
            widgets.Select(attrs=attrs, choices=hours),
            widgets.Select(attrs=attrs, choices=minutes),
        )
        super().__init__(_widgets, attrs)
    def decompress(self, value):
        """Split a datetime into [day, month, year, hour, minute] values for
        the sub-widgets; all None when there is no initial value."""
        if value:
            return [value.day, value.month, value.year, value.hour, value.minute]
        return [None, None, None, None, None]
    def value_from_datadict(self, data, files, name):
        """Parses a valid datetime. Sets default value to current datetime."""
        datelist = [
            widget.value_from_datadict(data, files, name + '_%s' % i)
            for i, widget in enumerate(self.widgets)]
        try:
            year = int(datelist[2])
            month = int(datelist[1])
            # Deals with day is out of range for month
            day = min(int(datelist[0]), calendar.monthrange(year, month)[1])
            dt = datetime(
                year=year,
                month=month,
                day=day,
                hour=int(datelist[3]),
                minute=int(datelist[4]),
            )
            return dt
        except TypeError:
            # int(None) raises TypeError when a sub-field is missing.
            # NOTE(review): non-numeric strings raise ValueError, which is NOT
            # caught here -- confirm such input cannot reach this widget.
            return datetime.now()
class TodoForm(ModelForm):
    """Model form for Todo items; due_at uses the custom DateTimeWidget."""
    class Meta:
        model = Todo
        fields = ['text', 'state', 'due_at']
        widgets = {
            'text': Textarea(attrs={'cols': 50, 'rows': 2}),
            'due_at': DateTimeWidget()
        }
| [
"romhartmann@gmail.com"
] | romhartmann@gmail.com |
6050cf57bbb806c30de0a77a82955bf4cc53dd93 | 213349ea60979af4c830645fd30539a2bbd977c9 | /course/migrations/0010_auto_20191010_1825.py | d52169320d36f0704b4fb6135f9b1032b64f344f | [] | no_license | minhajulislam56/WhiteBoard | 435477671cc911aa36f47b430116ecd82d3bb49d | c5a00d7428edabaa9defd1af2b8461c36e4dcc7c | refs/heads/master | 2022-12-17T12:52:44.218529 | 2019-12-30T19:35:11 | 2019-12-30T19:35:11 | 229,997,443 | 0 | 0 | null | 2022-11-22T04:19:28 | 2019-12-24T19:55:59 | Python | UTF-8 | Python | false | false | 384 | py | # Generated by Django 2.2.5 on 2019-10-10 12:25
from django.db import migrations, models
class Migration(migrations.Migration):
    """Changes Course.rating to a FloatField defaulting to 0.0."""
    dependencies = [
        ('course', '0009_auto_20191010_1823'),
    ]
    operations = [
        migrations.AlterField(
            model_name='course',
            name='rating',
            field=models.FloatField(default=0.0),
        ),
    ]
| [
"35999036+minhajulislam56@users.noreply.github.com"
] | 35999036+minhajulislam56@users.noreply.github.com |
c769e4bd93525540e181e3300e654a6fc7f2e039 | 5f61858f4768991743cc6195f6f36bb38843e97c | /EOSWebApp/EOSWebApp/imageProcessing/processingFunc/template_matching.py | fe8a5725f4c180aab0831c149d351b0ca8e269a1 | [] | no_license | quynhkha/EOS | 525acc61e25ad495e32abdad14373cb32e36b1f0 | 035cdbf2f3fd0fbd9542a4591ca4fafca7ebfe67 | refs/heads/master | 2020-03-29T09:29:32.785465 | 2018-03-09T03:19:27 | 2018-03-09T03:19:27 | 149,760,744 | 0 | 0 | null | 2018-09-21T12:32:05 | 2018-09-21T12:32:05 | null | UTF-8 | Python | false | false | 2,437 | py | import cv2 as cv
import numpy as np
from skimage import io
from matplotlib import pyplot as plt
# Load the search image and the template as grayscale (flag 0).
img = cv.imread('/home/long/PycharmProjects/EOS/ImageProcessing/data/1947-1_plg1.tif',0)
img2 = img.copy()
template = cv.imread('/home/long/PycharmProjects/EOS/ImageProcessing/data/template3.tif',0)
# Template size, reversed from (rows, cols) to (width, height).
w, h = template.shape[::-1]
# All the 6 methods for comparison in a list
# (kept for reference: the loop below tried every cv.TM_* method in turn)
# methods = ['cv.TM_CCOEFF', 'cv.TM_CCOEFF_NORMED', 'cv.TM_CCORR',
#             'cv.TM_CCORR_NORMED', 'cv.TM_SQDIFF', 'cv.TM_SQDIFF_NORMED']
# for meth in methods:
#     img = img2.copy()
#     method = eval(meth)
#     # Apply template Matching
#     res = cv.matchTemplate(img,template,method)
#     min_val, max_val, min_loc, max_loc = cv.minMaxLoc(res)
#     # If the method is TM_SQDIFF or TM_SQDIFF_NORMED, take minimum
#     if method in [cv.TM_SQDIFF, cv.TM_SQDIFF_NORMED]:
#         top_left = min_loc
#     else:
#         top_left = max_loc
#     bottom_right = (top_left[0] + w, top_left[1] + h)
#     cv.rectangle(img,top_left, bottom_right, 255, 2)
#
#     # res = cv.matchTemplate(img,template,cv.TM_SQDIFF)
#     # threshold = 0.8
#     # loc = np.where(res >= threshold)
#     # for pt in zip(*loc[::-1]):
#     #     cv.rectangle(img, pt, (pt[0] + w, pt[1] + h), 255, 2)
#     plt.subplot(121),plt.imshow(res,cmap = 'gray')
#     plt.title('Matching Result'), plt.xticks([]), plt.yticks([])
#     plt.subplot(122),plt.imshow(img,cmap = 'gray')
#     plt.title('Detected Point'), plt.xticks([]), plt.yticks([])
#     plt.suptitle(meth)
#     plt.show()
# Single-method run using squared-difference matching.
meth = 'cv.TM_SQDIFF'
img = img2.copy()
# NOTE(review): eval of a constant string; cv.TM_SQDIFF could be used directly.
method = eval(meth)
# Apply template Matching
res = cv.matchTemplate(img,template,method)
min_val, max_val, min_loc, max_loc = cv.minMaxLoc(res)
# If the method is TM_SQDIFF or TM_SQDIFF_NORMED, take minimum
# (lower score = better match for squared-difference methods).
top_left = min_loc
bottom_right = (top_left[0] + w, top_left[1] + h)
# Draw the best-match rectangle (value 255, thickness 2) on the image.
cv.rectangle(img,top_left, bottom_right, 255, 2)
# res = cv.matchTemplate(img,template,cv.TM_SQDIFF)
# threshold = 0.8
# loc = np.where(res >= threshold)
# for pt in zip(*loc[::-1]):
#     cv.rectangle(img, pt, (pt[0] + w, pt[1] + h), 255, 2)
# res = np.uint8(res)
# Rescale the score map to 0..255 in place for display.
cv.normalize(res,res,0,255,cv.NORM_MINMAX)
io.imshow(np.uint8(res))
res = np.uint8(res)
# Side-by-side display: score map on the left, detection on the right.
plt.subplot(121),plt.imshow(res,cmap = 'gray')
plt.title('Matching Result'), plt.xticks([]), plt.yticks([])
plt.subplot(122),plt.imshow(img,cmap = 'gray')
plt.title('Detected Point'), plt.xticks([]), plt.yticks([])
plt.suptitle(meth)
plt.show() | [
"ndlong95@gmail.com"
] | ndlong95@gmail.com |
ddc161b7e46516dd3785e6dba80385cf69326f1e | f3f01d98f2f924b7f2ce9c682b63ef68a0b943d7 | /Type_conversion.py | 67e8274a805561624b385bea780d5a3d1ffc4e07 | [] | no_license | nihalgaurav/pythonprep | 0d935244f4c20b2ba660a1bc192352654d4a9366 | d3023e1b58d9d5333e909f71d9c3fa7c54c420f5 | refs/heads/master | 2023-03-27T06:09:38.757433 | 2021-03-16T05:22:07 | 2021-03-16T05:22:07 | 344,804,996 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 694 | py | number = 17
# Column width: binary needs the most digits of the four bases, plus 2 padding.
width = len(str(bin(number)[2:])) + 2
print("INT".rjust(width) + "OCT".rjust(width) + "HEX".rjust(width) + "BIN".rjust(width))
# One row per value 1..number: decimal, octal, uppercase hex and binary forms,
# each right-aligned to the shared column width.
for x in range(1, number+1):
    print(str(int(x)).rjust(width, " ") + str(oct(x))[2:].rjust(width, " ") + str(hex(x))[2:].upper().rjust(width, " ")
          + str(bin(x)[2:]).rjust(width, " "))
# Second exercise: an "alphabet rangoli"-style diamond of height 2*num-1 using
# the first `num` lowercase letters, dash-padded; 'a' sits at the centre of the
# middle row and the outermost letter appears on each row's edges.
num = 5
n = 97 + num  # ord('a') == 97, so the letters used are chr(97)..chr(n-1)
# Top half (including the middle row): row i shows letters chr(n-1)..chr(n-i-1)
# descending to the centre, then mirrored back out.
for i in range(num):
    p = ''
    for j in range(i):
        p = p + "-" + chr(n-i+j)
    print(p[::-1].rjust(num*2-2, "-") + chr(n-i-1) + p.ljust(num*2-2, "-"))
# Bottom half: repeat the rows above in reverse order (excluding the middle).
for i in range(num-2,-1, -1):
    p = ''
    for j in range(i):
        p = p + "-" + chr(n-i+j)
print(p[::-1].rjust(num*2-2, "-") + chr(n-i-1) + p.ljust(num*2-2, "-")) | [
"nihalgaurav85@gmail.com"
] | nihalgaurav85@gmail.com |
2c20be9ac07ccbe8056cc8e2fc2e7b6b026b9ef2 | ab39a09f75663a291368eb3b5bf3ecb7fcdce25c | /06.Lists.py | 342085c2aa0a9684ccf0422eec1a0e3ad3de96bc | [] | no_license | Iseries2090/Complete_Python_Bootcamp | 8df0cea5eb97c98202c3353369a06bb533cbf8c9 | 33279695c6ee9d0bab4ceae7676928790fc08281 | refs/heads/master | 2022-08-08T19:14:07.734615 | 2020-05-18T17:49:47 | 2020-05-18T17:49:47 | 264,511,564 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 107 | py | """
Lists
Create a list that contains at least one string, one integer and one float
"""
[1,'hello',3.14] | [
"khayden2090@gmail.com"
] | khayden2090@gmail.com |
c2f894d59b1e97ea33c13e4f63461c1e4d7d9315 | c7da66b0e719686dd2a36a99602fd1e6b933445d | /examples/python/helloworld/async_greeter_server_with_graceful_shutdown.py | 6dedc2262af5bfd73a67b71196e2a3273efc6c9c | [
"Apache-2.0"
] | permissive | yulin-liang/grpc | 5b5cf2a050dd9a0f43090464583781a2eebfbb53 | c52005c161281a4dd429a19aed380af2308dc885 | refs/heads/master | 2023-07-03T11:27:58.122532 | 2021-07-12T05:10:04 | 2021-07-12T05:10:04 | 263,710,997 | 0 | 1 | Apache-2.0 | 2020-05-13T18:24:23 | 2020-05-13T18:24:21 | null | UTF-8 | Python | false | false | 2,242 | py | # Copyright 2021 The gRPC Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""The graceful shutdown example for the asyncio Greeter server."""
import logging
import asyncio
import grpc
import helloworld_pb2
import helloworld_pb2_grpc
# Coroutines to be invoked when the event loop is shutting down.
_cleanup_coroutines = []
class Greeter(helloworld_pb2_grpc.GreeterServicer):
    """Asyncio implementation of the Greeter service."""

    async def SayHello(self, request: helloworld_pb2.HelloRequest,
                       context: grpc.aio.ServicerContext) -> helloworld_pb2.HelloReply:
        """Answer with a greeting after simulating four seconds of work."""
        logging.info('Received request, sleeping for 4 seconds...')
        await asyncio.sleep(4)
        logging.info('Sleep completed, responding')
        greeting = 'Hello, %s!' % request.name
        return helloworld_pb2.HelloReply(message=greeting)
async def serve() -> None:
    """Start the asyncio gRPC server, register its graceful-shutdown
    coroutine, and block until the server terminates."""
    server = grpc.aio.server()
    helloworld_pb2_grpc.add_GreeterServicer_to_server(Greeter(), server)
    listen_addr = '[::]:50051'
    server.add_insecure_port(listen_addr)
    logging.info("Starting server on %s", listen_addr)
    await server.start()
    async def server_graceful_shutdown():
        logging.info("Starting graceful shutdown...")
        # Shuts down the server with a 5-second grace period. During the
        # grace period, the server won't accept new connections and allows
        # existing RPCs to continue within the grace period.
        await server.stop(5)
    # Appended here so the __main__ block can await it after the main task
    # finishes (e.g. on KeyboardInterrupt), ensuring shutdown runs once.
    _cleanup_coroutines.append(server_graceful_shutdown())
    await server.wait_for_termination()
if __name__ == '__main__':
    logging.basicConfig(level=logging.INFO)
    loop = asyncio.get_event_loop()
    try:
        loop.run_until_complete(serve())
    finally:
        # Await every registered cleanup coroutine. The previous
        # `loop.run_until_complete(*_cleanup_coroutines)` only worked by
        # accident while the list held exactly one item (run_until_complete
        # takes a single awaitable); gather() handles any number.
        loop.run_until_complete(asyncio.gather(*_cleanup_coroutines))
        loop.close()
| [
"noreply@github.com"
] | noreply@github.com |
cee24ad2b9015a0358c23faf46c7db3e63048385 | b40a661aa78c10ea8413b349f1efe288149f4ab0 | /App/migrations/0004_address.py | 20a5f2bea93a0a8e0e15352f1439fbf6e1dd1c5b | [] | no_license | 0helloword/DjangoSum | daed4ab9488c5d53518623eb5d35c3a32a826129 | 72b528415edd2a76a7a19da708d4046de2a014ac | refs/heads/master | 2022-11-25T15:15:30.843401 | 2020-08-02T03:18:07 | 2020-08-02T03:18:07 | 275,606,302 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 712 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.28 on 2020-06-27 13:58
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Creates the Address model: an address string tied to a Customer."""
    dependencies = [
        ('App', '0003_cart'),
    ]
    operations = [
        migrations.CreateModel(
            name='Address',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('a_add', models.CharField(max_length=128)),
                # Deleting a Customer cascades to their addresses.
                ('a_customer', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='App.Customer')),
            ],
        ),
    ]
| [
"532720298@qq.com"
] | 532720298@qq.com |
12a9b9befcf7af332c3ea172149268070aea9c7c | deb740e5086386a68d155b2482f9a9ec2095012c | /jdcloud_sdk/services/live/apis/DescribeLivePublishStreamNumRequest.py | 212277a2947473efb924dd9775e2df6ca9c01142 | [
"Apache-2.0"
] | permissive | aluode99/jdcloud-sdk-python | 843afdd2855a55ecd7cd90fe255df213a8f56e28 | 3da9ae9c0f08e2c20a73dde04f6453d3eb9db16a | refs/heads/master | 2020-05-26T09:26:24.307434 | 2019-05-29T02:35:23 | 2019-05-29T02:35:23 | 188,186,313 | 0 | 0 | null | 2019-05-23T07:46:01 | 2019-05-23T07:46:00 | null | UTF-8 | Python | false | false | 2,692 | py | # coding=utf8
# Copyright 2018 JDCLOUD.COM
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# NOTE: This class is auto generated by the jdcloud code generator program.
from jdcloud_sdk.core.jdcloudrequest import JDCloudRequest
class DescribeLivePublishStreamNumRequest(JDCloudRequest):
    """
    Query the number of live publish (ingest) streams.
    """
    def __init__(self, parameters, header=None, version="v1"):
        super(DescribeLivePublishStreamNumRequest, self).__init__(
            '/describeLivePublishStreamNum', 'GET', header, version)
        self.parameters = parameters
class DescribeLivePublishStreamNumParameters(object):
    """Query parameters for DescribeLivePublishStreamNum; optional filters
    default to None and are populated via the setter methods."""
    def __init__(self, startTime, ):
        """
        :param startTime: start time
          - UTC time
            format: yyyy-MM-dd'T'HH:mm:ss'Z'
            example: 2018-10-21T10:00:00Z
        """
        self.domainName = None
        self.appName = None
        self.protocolType = None
        self.period = None
        self.startTime = startTime
        self.endTime = None
    def setDomainName(self, domainName):
        """
        :param domainName: (Optional) playback domain name
        """
        self.domainName = domainName
    def setAppName(self, appName):
        """
        :param appName: (Optional) application name
        """
        self.appName = appName
    def setProtocolType(self, protocolType):
        """
        :param protocolType: (Optional) stream protocol types to query; valid values "rtmp,hdl,hls", comma-separated when passing several
        """
        self.protocolType = protocolType
    def setPeriod(self, period):
        """
        :param period: (Optional) query granularity; current valid values: "oneMin,fiveMin,halfHour,hour,twoHour,sixHour,day,followTime",
          meaning 1 min, 5 min, half an hour, 1 hour, 2 hours, 6 hours, 1 day, or "follow the time range". Defaults to empty,
          which means fiveMin. Passing followTime uses the whole endTime-startTime span as a single period, so only one data point is returned.
        """
        self.period = period
    def setEndTime(self, endTime):
        """
        :param endTime: (Optional) end time:
          - UTC time
            format: yyyy-MM-dd'T'HH:mm:ss'Z'
            example: 2018-10-21T10:00:00Z
          - empty defaults to the current time
        """
        self.endTime = endTime
| [
"tancong@jd.com"
] | tancong@jd.com |
e33bf7188bb39a15eab44ec863cb21e1daa47b3e | acf15961c47fb947a407a4318214110b9597d9e6 | /env/bin/jupyter-kernel | 667c26308122816ab476f0645e256bfc37e040c0 | [] | no_license | qu4ku/dshub-website | 43e378352246357db83da9b9b0acd760aebbc83a | 792d94e41fa666093eda2b5511bbcab27e0bb287 | refs/heads/master | 2021-06-02T19:27:39.708138 | 2020-05-19T09:18:42 | 2020-05-19T09:18:42 | 135,715,159 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 286 | #!/Users/kamilwroniewicz/_code/_github/180601-datahub-website/env/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from jupyter_client.kernelapp import main
if __name__ == '__main__':
    # Strip setuptools' "-script.py"/".exe" wrapper suffix from argv[0] so
    # usage/help output shows the plain command name.
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
| [
"qu4ku@hotmail.com"
] | qu4ku@hotmail.com | |
4be6d5e31dfde57db6b7b75cc1887ab488914e4d | 8228257edfaef99cc37f0e7bdbd1c1b0175d08dc | /Person/Person/wsgi.py | b6986e2d516b19dd2b79dd30fd9126ba64c05ab2 | [] | no_license | OMGzy/Novel | 472d11634a947196fb7453797964e37ed39d5318 | cc62a74184ccc56d2a22ca68ae433f436fd8a7de | refs/heads/master | 2021-01-22T03:18:23.068904 | 2017-05-25T06:28:30 | 2017-05-25T06:28:30 | 92,370,623 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 390 | py | """
WSGI config for Person project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
# Point Django at the project settings module before building the app.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "Person.settings")
# Module-level WSGI callable that servers (gunicorn, mod_wsgi, ...) import.
application = get_wsgi_application()
| [
"wangzy_job@163.com"
] | wangzy_job@163.com |
21439bcac6cdd546eeab5d2c26363fe72b79eb43 | ea549f5974db822d0733e5417d313997de9ca2bb | /craig_list_site/migrations/0001_initial.py | 57e111b3bffaa4b9a0892c8fca78c81caa2d727c | [] | no_license | Bibin22/craiglist | fe5a641cf4b8c03557c1775605a5e8b4da9b43de | 853b377f4951ee3ac9072bc22d486f520e18b1bc | refs/heads/master | 2023-02-05T11:32:24.911491 | 2021-01-02T05:30:34 | 2021-01-02T05:30:34 | 326,116,319 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 554 | py | # Generated by Django 3.1.4 on 2020-12-24 10:56
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Search',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('search', models.CharField(max_length=500)),
('created', models.DateTimeField(auto_now=True)),
],
),
]
| [
"bibinjoy82@gmail.com"
] | bibinjoy82@gmail.com |
a3fac0df2496aea555fb951e1641e2c5d9b07391 | 53fab060fa262e5d5026e0807d93c75fb81e67b9 | /backup/user_233/ch45_2020_03_09_13_17_35_961786.py | 7cf83bf5b09f45069fdb4e7c49f9a6d644c64307 | [] | no_license | gabriellaec/desoft-analise-exercicios | b77c6999424c5ce7e44086a12589a0ad43d6adca | 01940ab0897aa6005764fc220b900e4d6161d36b | refs/heads/main | 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 202 | py | lista = []
while True:
num = int(input())
if num <= 0: break
lista.append(num)
lista_inv = range(len(lista))
for i in range(len(lista)):
lista_inv[-i + 1] = lista[i]
print(lista_inv) | [
"you@example.com"
] | you@example.com |
e8a53b99c0ddd63bf2407ce9aae0b46ed807b353 | 53a8f7f1cfb2681cc38971a2b33e0af5c5a47e34 | /pythonNet/day07/thread_lock.py | 0bebc62c38686ca370876169a6d49579f24d070c | [] | no_license | AnnaHun/Python_Study | 00a01297eb8ba7fcf4bd8d85bade1eb7db9b8a63 | 878439a13f3613727d56df722595b30a557e81dd | refs/heads/master | 2022-09-02T04:57:04.162037 | 2020-05-28T09:30:11 | 2020-05-28T09:30:11 | 261,661,559 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 703 | py | #!/usr/bin/python
# -*- encoding: utf-8 -*-
"""
@Author : 朱昭明 -- apple
@Project : Python_Study
@File : thread_lock.py
@Software : PyCharm
@Create Time : 2020/5/11 10:41 上午
@Contact : 18327510516@163.com
@License : (C)Copyright 2020-2022, ZhuGroup-ZB-CASIA
@version : v1.0
@Desciption :
"""
from threading import Thread, Lock
a = b = 0
lock = Lock()
def value():
while True:
lock.acquire() # 上锁
if a != b:
print("a=%d,b=%d" % (a, b))
lock.release() # 解锁
t = Thread(target=value)
t.start()
while True:
with lock:
a += 1
b += 1
t.join()
| [
"18327510516@163.com"
] | 18327510516@163.com |
275286b9b57618dd4edb5a140673fbc3c2c3b955 | 0a2baf73724d033360df9350aeaaac6c24b85593 | /9.DouBan_teleplay/doubanTV.py | 8d3e939a4cebed0f22ef3b000aad01037a146bb8 | [] | no_license | zhongmeimei123/SpiderCollection | 2928b0a3b56d76192dd1432a731f4a9f04ed4503 | 6f68e0fb7d83242fe672fc7e412c1ef749a7e0c2 | refs/heads/master | 2020-04-05T14:45:45.768222 | 2018-11-08T12:17:42 | 2018-11-08T12:17:42 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,101 | py | import json
from utils.get_classify import get_classify
from utils.parse_url import parse_url
class Douban:
def __init__(self):
self.url = 'https://m.douban.com/rexxar/api/v2/subject_collection/filter_tv_american_hot/items?os=android&start=0&count=18&loc_id=108288'
self.headers = {
'User-Agent': 'Mozilla/5.0 (Linux; Android 6.0; Nexus 5 Build/MRA58N) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.9 Mobile Safari/537.36',
'Referer': 'https://m.douban.com/tv/american'
}
self.classify_url = 'https://m.douban.com/tv/'
def get_url(self, classify, i=0):
self.url = 'https://m.douban.com/rexxar/api/v2/subject_collection/filter_tv_' + classify + '_hot/items?os=android&start=' + str(
i) + '&count=18&loc_id=108288'
def parse_content(self, response_str):
response_dict = json.loads(response_str)
total = response_dict['total']
return response_dict, total
def download(self, response_dict):
with open('data/douban.txt', 'a', encoding='utf-8.36kr') as f:
json.dump(response_dict, f, ensure_ascii=False, indent=2)
def run(self):
classify_list, original_list = get_classify(self.classify_url, self.headers)
print(classify_list)
for classify in classify_list:
print('开始爬取%s分类的数据' % classify)
self.get_url(classify)
print(self.url)
response_str = parse_url(self.url, self.headers)
response_dict, total = self.parse_content(response_str)
self.download(response_dict)
print("第1页爬完..")
i = 18
while i < total:
self.get_url(classify, i)
response_str = parse_url(self.url, self.headers)
response_dict, total = self.parse_content(response_str)
self.download(response_dict)
i += 18
j = i / 18
print("第%d页爬完" % j)
if __name__ == '__main__':
douban = Douban()
douban.run()
| [
"2374097101@qq.com"
] | 2374097101@qq.com |
d13c4fd48ae4636a11c0d6e98be4c6c7de0af28d | 0e2446d4cd27c90155cde011e825479382468403 | /py/foobar/IPython_support.py | 169b66bb9a57b03e882dd78931c1af1828175229 | [
"BSD-3-Clause"
] | permissive | mrcslws/ipython-web-graphics-example | fb6ee1371d412afbc11a5ce4f651170220f0c661 | 989dcb9f540317a57549e5668b4dce1d5c2b7e54 | refs/heads/master | 2021-01-11T19:28:43.053578 | 2017-03-11T02:04:24 | 2017-03-11T02:04:24 | 79,377,511 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,409 | py | import json
import numbers
import os
import uuid
from pkg_resources import resource_string
from IPython.display import HTML, display
def get_foobar_js():
path = os.path.join('package_data', 'foobar-bundle.js')
foobar_js = resource_string('foobar', path).decode('utf-8')
return foobar_js
def init_notebook_mode():
# Insert your own CSS here.
style_inject = """
<style>
.bar rect {
fill: steelblue;
}
.bar text {
fill: #fff;
font: 10px sans-serif;
}
div.foobar-output svg {
max-width: initial;
}
</style>
"""
script_inject = u"""
<script type='text/javascript'>
if(!window.foobar) {{
define('foobar', function(require, exports, module) {{
{script}
}});
require(['foobar'], function(foobar) {{
window.foobar = foobar;
}});
}}
</script>
""".format(script=get_foobar_js())
display(HTML(style_inject + script_inject))
def print_simple_d3_histogram(number_list):
numberListJson = json.dumps(number_list)
elementId = str(uuid.uuid1())
addChart = """
<div class="foobar-output" id="%s"></div>
<script>
require(['foobar'], function(foobar) {
foobar.insertSimpleD3Histogram(document.getElementById('%s'), '%s');
});
</script>
""" % (elementId, elementId, numberListJson)
display(HTML(addChart))
| [
"mrcslws@gmail.com"
] | mrcslws@gmail.com |
2eb9a26bdde17a586ad5280059024d4004382a91 | f0d713996eb095bcdc701f3fab0a8110b8541cbb | /ayQTiQAcFJhtauhe3_17.py | 1dca03f7cb44ef5e8ee9f421dc82cdd88d7fd01c | [] | no_license | daniel-reich/turbo-robot | feda6c0523bb83ab8954b6d06302bfec5b16ebdf | a7a25c63097674c0a81675eed7e6b763785f1c41 | refs/heads/main | 2023-03-26T01:55:14.210264 | 2021-03-23T16:08:01 | 2021-03-23T16:08:01 | 350,773,815 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 521 | py | """
Given a list of integers, determine whether the sum of its elements is even or
odd.
The output should be a string (`"odd"` or `"even"`).
If the input list is empty, consider it as a list with a zero (`[0]`).
### Examples
even_or_odd([0]) ➞ "even"
even_or_odd([1]) ➞ "odd"
even_or_odd([]) ➞ "even"
even_or_odd([0, 1, 5]) ➞ "even"
### Notes
N/A
"""
def even_or_odd(lst):
summ=int(sum(lst))
if summ % 2 == 0:
return "even"
if summ % 2 == 1:
return "odd"
| [
"daniel.reich@danielreichs-MacBook-Pro.local"
] | daniel.reich@danielreichs-MacBook-Pro.local |
384072f8fe4c2216949622c07323ea78a8d9581f | 94b5a189c03e0a6321665fef297dde7a092b7053 | /src/tunnel_server.py | 092636d6b6fd177374ac2f28eb57f1354da36a84 | [] | no_license | voidbar/tcp-over-icmp | ee72059543ec2c9ee8c1436e5facae0af3295ff0 | f782ec541257ffb73a09d2fa21ded656d839fb3f | refs/heads/master | 2022-12-19T02:16:54.636915 | 2020-09-22T11:40:58 | 2020-09-22T11:40:58 | 290,265,391 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,206 | py |
import sys
import argparse
import icmp
import select
import socket
import logging
logging.basicConfig(stream=sys.stdout, level=logging.DEBUG, format="%(asctime)s - %(levelname)s - %(message)s")
logger = logging.getLogger(__name__)
TCP_BUFFER_SIZE = 2 ** 10
ICMP_BUFFER_SIZE = 65565
class Tunnel(object):
"""
A Tunnel Server class used to handle commincation with the target server and the client
"""
def __init__(self):
self.tcp_socket = None
self.icmp_server_socket = self.create_icmp_server_socket()
self.icmp_send_socket = self.create_icmp_send_socket()
self.source, self.dest = None, None
@staticmethod
def create_icmp_server_socket():
"""
Creating a socket for listening for ICMP packets from the client
"""
sock = socket.socket(socket.AF_INET, socket.SOCK_RAW, socket.IPPROTO_ICMP)
sock.bind(("0.0.0.0", 0))
sock.setsockopt(socket.SOL_IP, socket.IP_HDRINCL, 1)
return sock
@staticmethod
def create_icmp_send_socket():
"""
Creating a socket for sending ICMP packets to the client
"""
sock = socket.socket(socket.AF_INET, socket.SOCK_RAW, socket.IPPROTO_ICMP)
return sock
@staticmethod
def create_tcp_socket(dest):
"""
Creating a socket for sending TCP packets to the target
"""
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
sock.connect(dest)
return sock
def client_to_target(self):
"""
Sending ICMP packets that were received from the client, to the server. Unwraping the ICMP and forwarding TCP to target
"""
logger.info("Receiving ICMP packets from the client's server. Unwraping ICMP and forwarding TCP to the target")
packet, addr = self.icmp_server_socket.recvfrom(ICMP_BUFFER_SIZE)
try:
packet = icmp.parse_icmp_buffer(packet)
except ValueError:
return
if packet.type == icmp.ICMP_ECHO_REPLY and packet.code == 0:
logger.debug("Received our packet, Ignoring.")
return
self.source = addr[0]
self.dest = packet.dest
if packet.type == icmp.ICMP_ECHO_REQUEST and packet.code == 1:
if self.tcp_socket in self.sockets:
self.sockets.remove(self.tcp_socket)
if self.tcp_socket:
self.tcp_socket.close()
self.tcp_socket = None
return
else:
if not self.tcp_socket:
logger.debug(f"Creating a new tcp socket to communicate with {self.dest}")
self.tcp_socket = self.create_tcp_socket(self.dest)
self.sockets.append(self.tcp_socket)
self.tcp_socket.send(packet.data)
def target_to_client(self, sock):
"""
Receiving TCP packets from the target server. Wraping them in ICMP and forwarding them to the client
"""
logger.debug("Receiving TCP packets from the target. Wraping them in ICMP and forwarding to the client server")
try:
sdata = sock.recv(TCP_BUFFER_SIZE)
except OSError:
return
new_packet = icmp.ICMPPacket(icmp.ICMP_ECHO_REPLY, 0,
sdata, self.dest)
packet = new_packet.build_raw_icmp()
self.icmp_send_socket.sendto(packet, (self.source, 0))
def run(self):
"""
Starting the tunnel which listens for ICMP packets from the client and forwards them to the target,
And listening on TCP packets from the target to forward to the client
"""
logger.info("Started listening from incoming ICMP packets...")
self.sockets = [self.icmp_server_socket]
while True:
sread, _, _ = select.select(self.sockets, [], [])
for sock in sread:
if sock.proto == socket.IPPROTO_ICMP:
self.client_to_target()
else:
self.target_to_client(sock)
if __name__ == "__main__":
tunnel = Tunnel()
tunnel.run()
| [
"tomer.lev@argus-sec.com"
] | tomer.lev@argus-sec.com |
594d970cdd44aea6dab5db8d8b2eae35f88175eb | 57f09ef87421729b2565a6fdccd35132f1acc924 | /app/__init__.py | 0e9e2f20081e599be412ac7f636fcd7ea8e02394 | [
"MIT"
] | permissive | oseme-techguy/python-pdf-annotation-api-demo | 848f51b516196aecbc45482f601f00a1489a67a4 | b86dd4e20e9cc13237eacc9a32bb142d4bb28755 | refs/heads/master | 2022-12-09T08:35:57.241082 | 2019-06-20T17:55:17 | 2019-06-20T17:55:17 | 191,609,896 | 1 | 0 | MIT | 2022-12-08T05:48:51 | 2019-06-12T16:44:02 | Python | UTF-8 | Python | false | false | 153 | py | """PDF Annotation API"""
from app.config.application import Application
from app.config.di import Controllers
__all__ = ('Application', 'Controllers')
| [
"osemeodigie@yahoo.com"
] | osemeodigie@yahoo.com |
6b8b504e3891b59e364016530a31d145e5c4c723 | 543857479feb5d2001a6471f1258c1cb8eac9cf5 | /Non_stationary learning/main_change_learning.py | 5684a91c67ccd1f247c0f75711e585817173f4fd | [] | no_license | Jun-PyoHong/Cache_replacement_DRL | 11228f66f9a43b64c2c2de6e4c06d3709c7212a5 | 2f808a0acae42618fc104517c2f8c9c5f9288667 | refs/heads/main | 2023-06-30T23:56:08.827251 | 2021-08-09T05:15:10 | 2021-08-09T05:15:10 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,070 | py | import random as rd
import collections
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
import wireless_cache_change_network as cache
import numpy as np
from conventional_method import *
import DNN_model
import time
use_cuda = torch.cuda.is_available()
device = torch.device('cuda:0' if use_cuda else "cpu")
start_time = time.time()
learning_rate = 0.001
gamma = 0.99
batch_size = 4096
max_episode = 20000
pop = 5
env = cache.cache_replacement(pop)
node = 400
w_node = 1
input_size = 5 * env.F_packet + 4
output_size = 4 * env.F_packet
y_layer = []
z_layer = []
def Train(Q, Q_target, memory, optimizer):
for i in range(30):
state, action, reward, next_state, done = memory.sample(batch_size)
state = state.cuda(device)
action = action.cuda(device)
reward = reward.cuda(device)
next_state = next_state.cuda(device)
done = done.cuda(device)
# DQN
Q_out = Q(state)
Q_value = Q_out.gather(1, action)
Q_prime = Q_target(next_state).max(1)[0].unsqueeze(1)
target = reward + gamma * Q_prime * done
loss = F.smooth_l1_loss(Q_value, target)
optimizer.zero_grad()
loss.backward()
optimizer.step()
def main():
main_DQN = Q_model.Qnet_v6(env.Num_packet, env.Num_file, env.F_packet, node, output_size).to(device)
target_DQN = Q_model.Qnet_v6(env.Num_packet, env.Num_file, env.F_packet, node, output_size).to(device)
target_DQN.load_state_dict(main_DQN.state_dict())
target_DQN.eval()
memory = Q_model.ReplayBuffer()
env.Zip_funtion()
interval = 20
request = 1000
cost, hit_rate = 0.0, 0.0
optimizer = optim.Adam(main_DQN.parameters(), lr=learning_rate)
pro = 0
for episode in range(max_episode):
if episode % 500 == 0 and episode != 0:
env.change_pop()
state = env.reset()
file = env.file_request[0]
user = env.user_location
for i in range(request * env.Num_packet):
s = torch.from_numpy(state).float().unsqueeze(0)
with torch.no_grad():
aa = Q_model.Predict_Qnet_v6(main_DQN.eval().cpu(), s).detach().numpy()
sigma = max(10.0 / ((episode / 200) + 1), 0.316) # sigma^2 = 0.1
Noise = np.random.normal(0, sigma, size=4 * env.F_packet) / 10
action = env.action_select(aa, Noise)
if episode % 100 == 99:
if i == 0:
print(np.max(aa))
print(np.min(aa))
next_state, reward, done, file, user = env.step(action, file, user)
done_mask = 0.0 if done else 1.0
if reward <= -500:
reward = -500
memory.put((state, action, reward / 20.0, next_state, done_mask))
state = next_state
cost += env.cost
hit_rate += env.hit
if episode % interval == (interval - 1):
main_DQN.to(device)
y_layer.append(cost / interval)
z_layer.append(hit_rate / interval)
print("Episode: {} cost: {}".format(episode, (cost / interval)))
Train(main_DQN, target_DQN, memory, optimizer)
target_DQN.load_state_dict(main_DQN.state_dict())
target_DQN.eval()
cost, hit_rate = 0.0, 0.0
if episode % 2500 == 0 and episode != 0:
pro += 1
savePath = "test_model_conv0" + str(pro) + ".pth"
torch.save(main_DQN.state_dict(), savePath)
np.save("acc_delay", y_layer)
np.save("cache_hit", z_layer)
savePath = "final_model.pth"
torch.save(main_DQN.state_dict(), savePath)
print("start_time", start_time)
print("--- %s seconds ---" % (time.time() - start_time))
np.save("final_acc_delay", y_layer)
np.save("final_cache_hit", z_layer)
if __name__ == '__main__':
main()
| [
"noreply@github.com"
] | noreply@github.com |
bb70c1c8ba3ba84bc0cd7614765d95521e71d50e | 7fe646e7d425bc3e1e0f2bac03c4df0f33dcf4ec | /duvidas/migrations/0002_auto_20180419_0522.py | b1280491a26a50c35bb585c334c78f46f5d22029 | [] | no_license | equeirozdenoronha/tickets | d77d13da08cf2f96ef6d6d2a28c8919719981b69 | 560b0f3dce2e2058b6875ba5f93ef175ab7afcb7 | refs/heads/master | 2022-12-11T19:05:18.367194 | 2018-04-22T19:25:19 | 2018-04-22T19:25:19 | 130,600,070 | 1 | 0 | null | 2022-12-07T23:51:09 | 2018-04-22T19:22:27 | Python | UTF-8 | Python | false | false | 587 | py | # Generated by Django 2.0.4 on 2018-04-19 05:22
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('duvidas', '0001_initial'),
]
operations = [
migrations.RemoveField(
model_name='categoria',
name='perguntas',
),
migrations.AddField(
model_name='pergunta',
name='categorias',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='duvidas.Categoria'),
),
]
| [
"equeirozdenoronha@gmail.com"
] | equeirozdenoronha@gmail.com |
d1821f7228d33e30543f23db4a6405cb5422c161 | 1f9487f03c765f30ab3ae90feb2fb5beb55a4302 | /kaggle_main_day3.py | 6224aa8df713734f634f023d4085aed129b53ae1 | [] | no_license | chr15stevens/avito-demand-prediction-analysis | 6858b2c80c5aacbb86f52587b30d487db793f926 | 25f1513355fe09bb33c5aecffd9fe3a3ca252a2a | refs/heads/master | 2020-03-17T10:41:31.407644 | 2018-05-18T20:55:20 | 2018-05-18T20:55:20 | 133,521,741 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,686 | py | import math
import pandas as pd
import numpy as np
from sklearn.model_selection import cross_val_predict
from sklearn import linear_model
train = pd.read_csv('train.csv').fillna(0)
test = pd.read_csv('test.csv').fillna(0)
print(train.head())
print(train.axes)
x_train = train[['price', 'item_seq_number', 'image_top_1']]
x_test = test[['price', 'item_seq_number', 'image_top_1']]
y_train = train.deal_probability
lrFull = linear_model.LinearRegression()
lrFull.fit(x_train, y_train)
predictions_full = lrFull.predict(x_test)
# mean squared error on full dataset logistic regression
meanSquaredError = np.sum(np.square(y_test - predictions_full))/predictions_full.size
rootMeanSquaredError = math.sqrt(meanSquaredError)
print('Full dataset root mean squared error: ', rootMeanSquaredError)
groupableColumnLabels = ['region', 'city', 'parent_category_name', 'category_name', 'user_type']
groupModelsDict = {}
for groupableColumnLabel in groupableColumnLabels:
print('Building regression models ' + groupableColumnLabel)
# split our dataframe into a set of dataframes for each parent_category
train_grouping = train.groupby(groupableColumnLabel)
train_groups_dict = {}
[train_groups_dict.__setitem__(x,train_grouping.get_group(x)) for x in train_grouping.groups]
# build our set of regression models one for each parent_category_name
regression_models_dict = {}
for key, train_group in train_groups_dict.items():
lr = linear_model.LinearRegression()
lr.fit(train_group[['price', 'item_seq_number', 'image_top_1']], train_group.deal_probability)
regression_models_dict[key] = lr
groupModelsDict[groupableColumnLabel] = regression_models_dict
# iterate over all rows in our test data and build a new row of predictions, one for each category column
print('Making predictions')
predictions = []
for index, row in test.iterrows():
row_data = np.reshape([row['price'], row['item_seq_number'], row['image_top_1']], (1,-1))
rowPredictions = []
for groupableColumnLabel in groupableColumnLabels:
groupableColumnValue = row[groupableColumnLabel]
if groupableColumnValue in regression_models_dict:
prediction = groupModelsDict[groupableColumnLabel][groupableColumnValue].predict(row_data)
else:
prediction = lrFull.predict(row_data)
rowPredictions.append(min(max(0,prediction[0]),1))
predictions.append(rowPredictions)
meanPredictions = [sum(p)/5 for p in predictions]
submission = pd.DataFrame({'item_id': test.item_id, 'deal_probability': meanPredictions})
submission.to_csv('submission.csv', index=False, columns=['item_id', 'deal_probability']) | [
"chrisstevens901@gmail.com"
] | chrisstevens901@gmail.com |
f9e1ca44905679e39f7b725bab3e049bd3cf44d3 | 10ddfb2d43a8ec5d47ce35dc0b8acf4fd58dea94 | /Python/number-of-senior-citizens.py | 50b65c0c4bd9f9324ebc57219dbfd33cea759e81 | [
"MIT"
] | permissive | kamyu104/LeetCode-Solutions | f54822059405ef4df737d2e9898b024f051fd525 | 4dc4e6642dc92f1983c13564cc0fd99917cab358 | refs/heads/master | 2023-09-02T13:48:26.830566 | 2023-08-28T10:11:12 | 2023-08-28T10:11:12 | 152,631,182 | 4,549 | 1,651 | MIT | 2023-05-31T06:10:33 | 2018-10-11T17:38:35 | C++ | UTF-8 | Python | false | false | 229 | py | # Time: O(n)
# Space: O(1)
# string
class Solution(object):
def countSeniors(self, details):
"""
:type details: List[str]
:rtype: int
"""
return sum(x[-4:-2] > "60" for x in details)
| [
"noreply@github.com"
] | noreply@github.com |
24276880351ec6c7a471aa39ededb24bdde21891 | c7fe29e98fc797200cfc1a6013463ac7cf00d5cb | /openprocurement/auction/esco/journal.py | 7d9cdaed5b5621fd3fbc0824f2e72bc8fa2ebe2a | [
"Apache-2.0"
] | permissive | ProzorroUKR/openprocurement.auction.esco | e740afdf4c7f5978a2121790c47f2b8dfd95a291 | 16a127ac7fc47cacaaf5f2eb708ea8b273e57e56 | refs/heads/master | 2021-06-09T16:00:27.450648 | 2021-04-08T13:15:45 | 2021-04-08T13:15:45 | 138,143,214 | 0 | 0 | Apache-2.0 | 2021-04-08T13:15:45 | 2018-06-21T08:38:20 | Python | UTF-8 | Python | false | false | 2,493 | py | import uuid
#log ID for auction_worker
# TODO: changes ids for esco indentity
AUCTION_WORKER_DB_GET_DOC = uuid.UUID('e992106cf844460ab85419a5aa53dd76')
AUCTION_WORKER_DB_GET_DOC_ERROR = uuid.UUID('5ba19e99963149f9bcdbb89f978f8c31')
AUCTION_WORKER_DB_SAVE_DOC = uuid.UUID('48b5f9fef4124cf39be6d2e4722a71c3')
AUCTION_WORKER_DB_SAVE_DOC_ERROR = uuid.UUID('b219ee898b834628be23424d0c27a8b8')
AUCTION_WORKER_DB_GET_DOC_UNHANDLED_ERROR = uuid.UUID('2188fca7e99a4d409817567f894421cd')
AUCTION_WORKER_DB_SAVE_DOC_UNHANDLED_ERROR = uuid.UUID('4b650dea8eb84412a4d630265587dbcb')
AUCTION_WORKER_SERVICE_PREPARE_SERVER = uuid.UUID('7ddc92a966f7492e8dbf59f7916831c4')
AUCTION_WORKER_SERVICE_STOP_AUCTION_WORKER = uuid.UUID('e7c0a6eb8ec441e2a7cf32bad5ffa57a')
AUCTION_WORKER_SERVICE_START_AUCTION = uuid.UUID('79385d0af3e94fd2bcada5857b38214a')
AUCTION_WORKER_SERVICE_END_FIRST_PAUSE = uuid.UUID('2411cf51e6a24f62bcd9c501a862e48f')
AUCTION_WORKER_SERVICE_END_BID_STAGE = uuid.UUID('27f118f3a8014152bd55223126c5d163')
AUCTION_WORKER_SERVICE_START_STAGE = uuid.UUID('5daf4199824a4c3da4c773240a8558ce')
AUCTION_WORKER_SERVICE_START_NEXT_STAGE = uuid.UUID('b8b1a84d923a48d4beb619362d5be300')
AUCTION_WORKER_SERVICE_END_AUCTION = uuid.UUID('c6f61659fe1d4f62ab178e92949b68a4')
AUCTION_WORKER_SERVICE_NUMBER_OF_BIDS = uuid.UUID('a68c51cfdfbb451a8fd518ecaf99090c')
AUCTION_WORKER_SERVICE_AUCTION_CANCELED = uuid.UUID('77568c4543e24b70a7633d8715383fa7')
AUCTION_WORKER_SERVICE_AUCTION_STATUS_CANCELED = uuid.UUID('38b2145fa25d41198493526085168bd2')
AUCTION_WORKER_SERVICE_AUCTION_RESCHEDULE = uuid.UUID('f11bba4b55d547f1aa2e8cb2e13e4485')
AUCTION_WORKER_SERVICE_AUCTION_NOT_FOUND = uuid.UUID('ff4a1d5cf0134bf48a458b65805c9a6e')
AUCTION_WORKER_BIDS_LATEST_BID_CANCELLATION = uuid.UUID('c558309b45004ce2bd52ec4845e43b48')
AUCTION_WORKER_API_AUDIT_LOG_APPROVED = uuid.UUID('569dced149e5409d85f3078b8a3dbf9b')
AUCTION_WORKER_API_AUDIT_LOG_NOT_APPROVED = uuid.UUID('16e7263c1bcb413a8e3ec3c42aa8bee4')
AUCTION_WORKER_API_AUCTION_RESULT_APPROVED = uuid.UUID('62a92384b4ad423fbbc4647ad1d06e91')
AUCTION_WORKER_API_AUCTION_RESULT_NOT_APPROVED = uuid.UUID('f5cf9731d66b4cd4a5b0bc92e93d9435')
AUCTION_WORKER_API_APPROVED_DATA = uuid.UUID('cb4c744b6d5843ec8d324c4226ebe7c1')
AUCTION_WORKER_API_AUCTION_CANCEL = uuid.UUID('fbb8360f72234fc19fb3e37bb15e47f7')
AUCTION_WORKER_API_AUCTION_NOT_EXIST = uuid.UUID('590bcf8f604742ebb7cbf7377e573f26')
AUCTION_WORKER_SET_AUCTION_URLS = uuid.UUID('92a1e5a9a509434190d171bc143ed5cb')
| [
"yshalenyk@quinatagroup.org"
] | yshalenyk@quinatagroup.org |
66c48db3d472e9cbef6459a534d94dd8fe60f1ce | 94f156b362fbce8f89c8e15cd7687f8af267ef08 | /week3/main/models.py | 267cfd064ac83b9e6a1feed9dae6e559d5dabd77 | [] | no_license | DastanB/AdvancedDjango | 6eee5477cd5a00423972c9cc3d2b5f1e4a501841 | 2b5d4c22b278c6d0e08ab7e84161163fe42e9a3f | refs/heads/master | 2020-07-17T19:21:16.271964 | 2019-12-03T21:58:51 | 2019-12-03T21:58:51 | 206,081,522 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,424 | py | from django.db import models
from users.models import MainUser
from main.constants import PROJECT_STATUSES, PROJECT_IN_PROCESS, PROJECT_FROZEN, PROJECT_DONE, BLOCK_STATUSES, TASKS_DONE, TASKS_FROZEN, TASKS_IN_PROCESS
import datetime
# Create your models here.
class Project(models.Model):
name = models.CharField(max_length=255)
description = models.CharField(max_length=1000)
status = models.PositiveSmallIntegerField(choises=PROJECT_STATUSES, default=PROJECT_IN_PROCESS)
creator = models.ForeignKey(MainUser, on_delete=models.CASCADE, related_name='projects')
def is_owner(self, request):
return self.creator.id == request.user.id
def __str__(self):
return self.name
class Block(models.Model):
name = models.CharField(max_length=255)
type_of = models.PositiveSmallIntegerField(choises=BLOCK_STATUSES, default=TASKS_IN_PROCESS)
project = models.ForeignKey(Project, on_delete=models.CASCADE, related_name='blocks')
def __str__(self):
return self.name
class Task(models.Model):
name = models.CharField(max_length=255)
description = models.CharField(max_length=1000)
priority = models.IntegerField()
creator = models.ForeignKey(MainUser, on_delete=models.CASCADE, related_name='created_tasks')
executor = models.ForeignKey(MainUser, on_delete=models.CASCADE, related_name='tasks', null=True)
block = models.ForeignKey(Block, on_delete=models.CASCADE, related_name='tasks')
order = models.IntegerField()
def is_owner(self, request):
return self.creator.id == request.user.id
def __str__(self):
return self.name
class TaskDocument(models.Model):
document = models.FileField()
creator = models.ForeignKey(MainUser, on_delete=models.CASCADE, related_name='docs')
task = models.ForeignKey(Task, on_delete=models.CASCADE, related_name='docs')
def is_owner(self, request):
return self.creator.id == request.user.id
class TaskComment(models.Model):
body = models.CharField(max_length=10000)
task = models.ForeignKey(Task, on_delete=models.CASCADE, related_name='comments')
creator = models.ForeignKey(MainUser, on_delete=models.CASCADE, related_name='comments')
created_at = models.DateTimeField(default=datetime.datetime.now)
def is_owner(self, request):
return self.creator.id == request.user.id
def __str__(self):
return self.body | [
"dastan211298@gmail.com"
] | dastan211298@gmail.com |
c646d5714413964656c20a34b700806958456ef0 | 42fa0f6b67128be9cb0fb3beb77ac97a76d89af6 | /src/oscar/apps/customer/forms.py | 5bb0257189abd768a4a5a9c248f7e7e99df5bccb | [] | no_license | sandip3120/private | 198a937b8ce2967624debb2c5e9634dd85efec5f | 6a4abc46e25e220bdc0c2dcfc7aaea56d4ae2410 | refs/heads/master | 2022-12-22T08:17:19.472039 | 2019-10-20T17:07:17 | 2019-10-20T17:07:17 | 216,395,361 | 0 | 1 | null | 2022-12-10T06:12:19 | 2019-10-20T16:53:14 | Python | UTF-8 | Python | false | false | 15,799 | py | import string
from django import forms
from django.conf import settings
from django.contrib.auth import forms as auth_forms
from django.contrib.auth.forms import AuthenticationForm
from django.contrib.auth.password_validation import validate_password
from django.contrib.sites.shortcuts import get_current_site
from django.core.exceptions import ValidationError
from django.utils.crypto import get_random_string
from django.utils.http import is_safe_url
from django.utils.translation import gettext_lazy as _
from django.utils.translation import pgettext_lazy
from oscar.apps.customer.utils import get_password_reset_url, normalise_email
from oscar.core.compat import existing_user_fields, get_user_model
from oscar.core.loading import get_class, get_model, get_profile_class
from oscar.forms import widgets
# Resolve these at import time through Oscar's dynamic loader so that a
# project can substitute its own implementations for the defaults.
Dispatcher = get_class('customer.utils', 'Dispatcher')
CommunicationEventType = get_model('customer', 'communicationeventtype')
ProductAlert = get_model('customer', 'ProductAlert')
# The active user model (may be a custom model, not django.contrib.auth.User).
User = get_user_model()
def generate_username():
    """
    Generate a random 30-character username that is not already taken.

    Candidates are drawn from ASCII letters, digits and underscore and are
    re-drawn until one is free.  The retry is iterative (the original used
    recursion, which could in principle exhaust the stack on a run of
    collisions) and uses an ``exists()`` query so no row is fetched just to
    test for a clash.
    """
    allowed_chars = string.ascii_letters + string.digits + '_'
    while True:
        uname = get_random_string(length=30, allowed_chars=allowed_chars)
        # `_default_manager` matches the query style used elsewhere in this
        # module (e.g. PasswordResetForm.save, EmailUserCreationForm).
        if not User._default_manager.filter(username=uname).exists():
            return uname
class PasswordResetForm(auth_forms.PasswordResetForm):
    """
    Password-reset form with the same structure as its parent from
    :py:mod:`django.contrib.auth`, but dispatching the email through Oscar's
    communication-event machinery instead of Django's templated email.
    """
    communication_type_code = "PASSWORD_RESET"

    def save(self, domain_override=None, use_https=False, request=None,
             **kwargs):
        """
        Generates a one-use only link for resetting password and sends to the
        user.
        """
        site = get_current_site(request)
        if domain_override is not None:
            site.domain = site.name = domain_override

        email = self.cleaned_data['email']
        matching_users = User._default_manager.filter(
            email__iexact=email, is_active=True)
        for user in matching_users:
            context = {
                'user': user,
                'site': site,
                'reset_url': self.get_reset_url(site, request, user,
                                                use_https),
            }
            rendered_messages = CommunicationEventType.objects.get_and_render(
                code=self.communication_type_code, context=context)
            Dispatcher().dispatch_user_messages(user, rendered_messages)

    def get_reset_url(self, site, request, user, use_https):
        """Build the absolute password-reset URL for the given user."""
        # the request argument isn't used currently, but implementors might
        # need it to determine the correct subdomain
        scheme = 'https' if use_https else 'http'
        return "%s://%s%s" % (scheme, site.domain,
                              get_password_reset_url(user))
class EmailAuthenticationForm(AuthenticationForm):
    """
    Extends the standard django AuthenticationForm, to support 75 character
    usernames. 75 character usernames are needed to support the EmailOrUsername
    authentication backend.
    """
    username = forms.EmailField(label=_('Email address'))
    redirect_url = forms.CharField(widget=forms.HiddenInput, required=False)

    def __init__(self, host, *args, **kwargs):
        self.host = host
        super().__init__(*args, **kwargs)

    def clean_redirect_url(self):
        """Return the redirect URL only when it is safe for this host."""
        url = self.cleaned_data['redirect_url'].strip()
        if not url:
            return None
        if not is_safe_url(url, self.host):
            return None
        return url
class ConfirmPasswordForm(forms.Form):
    """
    Asks the user to re-enter their current password and validates the entry
    against the stored credentials of the user passed to the constructor.
    """
    password = forms.CharField(label=_("Password"), widget=forms.PasswordInput)

    def __init__(self, user, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.user = user

    def clean_password(self):
        """Reject the submission when the entered password doesn't match."""
        password = self.cleaned_data['password']
        if self.user.check_password(password):
            return password
        raise forms.ValidationError(
            _("The entered password is not valid!"))
class EmailUserCreationForm(forms.ModelForm):
    """
    Registration form keyed on email address rather than username.

    Validates that the email is not already in use, that the two password
    entries match and satisfy the configured password validators, and that
    any post-registration redirect URL is safe for the current host.
    """
    email = forms.EmailField(label=_('Email address'))
    password1 = forms.CharField(
        label=_('Password'), widget=forms.PasswordInput)
    password2 = forms.CharField(
        label=_('Confirm password'), widget=forms.PasswordInput)
    redirect_url = forms.CharField(
        widget=forms.HiddenInput, required=False)

    class Meta:
        model = User
        fields = ('email',)

    def __init__(self, host=None, *args, **kwargs):
        self.host = host
        super().__init__(*args, **kwargs)

    def _post_clean(self):
        super()._post_clean()
        # Run the password validators only after self.instance has been
        # updated with the form data, so that validators can access the
        # email address (see django.contrib.auth.forms.UserCreationForm).
        password = self.cleaned_data.get('password2')
        if not password:
            return
        try:
            validate_password(password, self.instance)
        except forms.ValidationError as error:
            self.add_error('password2', error)

    def clean_email(self):
        """
        Checks for existing users with the supplied email address.
        """
        email = normalise_email(self.cleaned_data['email'])
        already_taken = User._default_manager.filter(
            email__iexact=email).exists()
        if already_taken:
            raise forms.ValidationError(
                _("A user with that email address already exists"))
        return email

    def clean_password2(self):
        first = self.cleaned_data.get('password1', '')
        second = self.cleaned_data.get('password2', '')
        if first != second:
            raise forms.ValidationError(
                _("The two password fields didn't match."))
        return second

    def clean_redirect_url(self):
        url = self.cleaned_data['redirect_url'].strip()
        if not url or not is_safe_url(url, self.host):
            return settings.LOGIN_REDIRECT_URL
        return url

    def save(self, commit=True):
        user = super().save(commit=False)
        user.set_password(self.cleaned_data['password1'])

        # Some user models have a (required) username column; since sign-up
        # is email-based, fill it with a random unique value.
        if 'username' in (f.name for f in User._meta.fields):
            user.username = generate_username()
        if commit:
            user.save()
        return user
class OrderSearchForm(forms.Form):
    """Search orders by date range and/or an order-number fragment."""
    date_from = forms.DateField(
        required=False, label=pgettext_lazy("start date", "From"),
        widget=widgets.DatePickerInput())
    date_to = forms.DateField(
        required=False, label=pgettext_lazy("end date", "To"),
        widget=widgets.DatePickerInput())
    order_number = forms.CharField(required=False, label=_("Order number"))
    def clean(self):
        # Reject a completely empty search: at least one criterion is needed.
        if self.is_valid() and not any([self.cleaned_data['date_from'],
                                        self.cleaned_data['date_to'],
                                        self.cleaned_data['order_number']]):
            raise forms.ValidationError(_("At least one field is required."))
        return super().clean()
    def description(self):
        """
        Uses the form's data to build a useful description of what orders
        are listed.
        """
        if not self.is_bound or not self.is_valid():
            return _('All orders')
        else:
            date_from = self.cleaned_data['date_from']
            date_to = self.cleaned_data['date_to']
            order_number = self.cleaned_data['order_number']
            return self._orders_description(date_from, date_to, order_number)
    def _orders_description(self, date_from, date_to, order_number):
        """Pick the message template matching the supplied criteria and
        interpolate the values into it."""
        if date_from and date_to:
            if order_number:
                desc = _('Orders placed between %(date_from)s and '
                         '%(date_to)s and order number containing '
                         '%(order_number)s')
            else:
                desc = _('Orders placed between %(date_from)s and '
                         '%(date_to)s')
        elif date_from:
            if order_number:
                desc = _('Orders placed since %(date_from)s and '
                         'order number containing %(order_number)s')
            else:
                desc = _('Orders placed since %(date_from)s')
        elif date_to:
            if order_number:
                desc = _('Orders placed until %(date_to)s and '
                         'order number containing %(order_number)s')
            else:
                desc = _('Orders placed until %(date_to)s')
        elif order_number:
            desc = _('Orders with order number containing %(order_number)s')
        else:
            # All criteria empty; clean() normally prevents this, kept as a
            # defensive fallback.
            return None
        params = {
            'date_from': date_from,
            'date_to': date_to,
            'order_number': order_number,
        }
        return desc % params
    def get_filters(self):
        """Translate the cleaned form data into ORM filter kwargs.

        NOTE(review): the two-bound case uses the inclusive ``__range``
        lookup while single-bound cases use exclusive ``__gt``/``__lt`` —
        confirm this asymmetry is intended.
        """
        date_from = self.cleaned_data['date_from']
        date_to = self.cleaned_data['date_to']
        order_number = self.cleaned_data['order_number']
        kwargs = {}
        if date_from and date_to:
            kwargs['date_placed__range'] = [date_from, date_to]
        elif date_from and not date_to:
            kwargs['date_placed__gt'] = date_from
        elif not date_from and date_to:
            kwargs['date_placed__lt'] = date_to
        if order_number:
            kwargs['number__contains'] = order_number
        return kwargs
class UserForm(forms.ModelForm):
    """Edit the basic account fields (name, email) of an existing user."""

    def __init__(self, user, *args, **kwargs):
        self.user = user
        kwargs['instance'] = user
        super().__init__(*args, **kwargs)
        email_field = self.fields.get('email')
        if email_field is not None:
            email_field.required = True

    def clean_email(self):
        """Reject an email address already used by a *different* account.

        The check lives here because uniqueness of email addresses is not
        enforced at the model level in ``django.contrib.auth.models.User``,
        and the email stands in for the username.
        """
        email = normalise_email(self.cleaned_data['email'])
        other_accounts = User._default_manager.filter(email__iexact=email)
        other_accounts = other_accounts.exclude(id=self.user.id)
        if other_accounts.exists():
            raise ValidationError(
                _("A user with this email address already exists"))
        # Return the email unaltered.
        return email

    class Meta:
        model = User
        fields = existing_user_fields(['first_name', 'last_name', 'email'])
# ProfileForm is resolved once at import time: a combined user+profile form
# when the project configures a profile class, otherwise the plain UserForm.
Profile = get_profile_class()
if Profile:  # noqa (too complex (12))
    class UserAndProfileForm(forms.ModelForm):
        """Edit a user's profile record together with the core user fields."""
        def __init__(self, user, *args, **kwargs):
            try:
                instance = Profile.objects.get(user=user)
            except Profile.DoesNotExist:
                # User has no profile, try a blank one
                instance = Profile(user=user)
            kwargs['instance'] = instance
            super().__init__(*args, **kwargs)
            # Get profile field names to help with ordering later
            profile_field_names = list(self.fields.keys())
            # Get user field names (we look for core user fields first)
            core_field_names = set([f.name for f in User._meta.fields])
            user_field_names = ['email']
            for field_name in ('first_name', 'last_name'):
                if field_name in core_field_names:
                    user_field_names.append(field_name)
            user_field_names.extend(User._meta.additional_fields)
            # Store user fields so we know what to save later
            self.user_field_names = user_field_names
            # Add additional user form fields
            additional_fields = forms.fields_for_model(
                User, fields=user_field_names)
            self.fields.update(additional_fields)
            # Ensure email is required and initialised correctly
            self.fields['email'].required = True
            # Set initial values
            for field_name in user_field_names:
                self.fields[field_name].initial = getattr(user, field_name)
            # Ensure order of fields is email, user fields then profile fields
            self.fields.keyOrder = user_field_names + profile_field_names
        class Meta:
            model = Profile
            exclude = ('user',)
        def clean_email(self):
            """Reject an email address already used by a different account."""
            email = normalise_email(self.cleaned_data['email'])
            users_with_email = User._default_manager.filter(
                email__iexact=email).exclude(id=self.instance.user.id)
            if users_with_email.exists():
                raise ValidationError(
                    _("A user with this email address already exists"))
            return email
        def save(self, *args, **kwargs):
            """Persist the user fields onto the User row, then save the profile."""
            user = self.instance.user
            # Save user also
            for field_name in self.user_field_names:
                setattr(user, field_name, self.cleaned_data[field_name])
            user.save()
            return super().save(*args, **kwargs)
    ProfileForm = UserAndProfileForm
else:
    ProfileForm = UserForm
class ProductAlertForm(forms.ModelForm):
    """Sign a user (or a bare email address) up for a product stock alert."""
    email = forms.EmailField(required=True, label=_('Send notification to'),
                             widget=forms.TextInput(attrs={
                                 'placeholder': _('Enter your email')
                             }))
    def __init__(self, user, product, *args, **kwargs):
        self.user = user
        self.product = product
        super().__init__(*args, **kwargs)
        # Only show email field to unauthenticated users
        if user and user.is_authenticated:
            self.fields['email'].widget = forms.HiddenInput()
            self.fields['email'].required = False
    def save(self, commit=True):
        """Attach the product (and the user, when authenticated) to the alert."""
        alert = super().save(commit=False)
        if self.user.is_authenticated:
            alert.user = self.user
        alert.product = self.product
        if commit:
            alert.save()
        return alert
    def clean(self):
        """Prevent duplicate or unconfirmed-spam alerts for this product."""
        cleaned_data = self.cleaned_data
        email = cleaned_data.get('email')
        if email:
            # Anonymous path: the alert is keyed by email address.
            try:
                ProductAlert.objects.get(
                    product=self.product, email__iexact=email,
                    status=ProductAlert.ACTIVE)
            except ProductAlert.DoesNotExist:
                pass
            else:
                raise forms.ValidationError(_(
                    "There is already an active stock alert for %s") % email)
            # Check that the email address hasn't got other unconfirmed alerts.
            # If they do then we don't want to spam them with more until they
            # have confirmed or cancelled the existing alert.
            if ProductAlert.objects.filter(email__iexact=email,
                                           status=ProductAlert.UNCONFIRMED).count():
                raise forms.ValidationError(_(
                    "%s has been sent a confirmation email for another product "
                    "alert on this site. Please confirm or cancel that request "
                    "before signing up for more alerts.") % email)
        elif self.user.is_authenticated:
            # Authenticated path: the alert is keyed by user.
            try:
                ProductAlert.objects.get(product=self.product,
                                         user=self.user,
                                         status=ProductAlert.ACTIVE)
            except ProductAlert.DoesNotExist:
                pass
            else:
                raise forms.ValidationError(_(
                    "You already have an active alert for this product"))
        return cleaned_data
    class Meta:
        model = ProductAlert
        fields = ['email']
| [
"sandipsing45@gmail.com"
] | sandipsing45@gmail.com |
579a8846030030a1b4f846da2163172703055c1e | 3592ef6ceb0e7654dc68fa9879b8c6fe31bcf6d1 | /reveries/tools/modeldiffer/lib.py | aa794920283f2358703cbb6ef0aad11ced9d157f | [
"MIT"
] | permissive | all-in-one-of/reveries-config | a83a8208680d857a155e0a05297bde111d8c6845 | b47a5a6ce05376dffcb893e0823fecbcf1d08e67 | refs/heads/master | 2021-01-04T07:44:45.383431 | 2020-02-13T09:00:51 | 2020-02-13T09:00:51 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,245 | py |
import logging
from avalon import io
main_logger = logging.getLogger("modeldiffer")
def profile_from_database(version_id):
    """Load the stored model profile of a published version.

    Returns a dict mapping each mesh's hierarchy path to its comparison
    data (with the owning Avalon id attached as ``avalonId``), or None
    when the representation or its profile is missing.
    """
    query = {"type": "representation",
             "name": "mayaBinary",
             "parent": version_id}
    representation = io.find_one(query)
    if representation is None:
        main_logger.critical("Representation not found. This is a bug.")
        return
    model_profile = representation["data"].get("modelProfile")
    if model_profile is None:
        # (sic: the original message has no space before "This".)
        main_logger.critical("'data.modelProfile' not found.This is a bug.")
        return
    profile = dict()
    for avalon_id, meshes_data in model_profile.items():
        for mesh_data in meshes_data:
            hierarchy = mesh_data.pop("hierarchy")
            mesh_data.pop("normals")  # normals are not compared
            mesh_data["avalonId"] = avalon_id
            profile[hierarchy] = mesh_data
    return profile
# Placeholders: presumably replaced by host-specific implementations at
# runtime (TODO confirm against the host integration modules).
profile_from_host = NotImplemented
select_from_host = NotImplemented
def is_supported_loader(name):
    """Return True when `name` is a loader this tool can diff."""
    supported = ("ModelLoader",)  # "RigLoader" intentionally disabled
    return name in supported
def is_supported_subset(name):
    """Return True when the subset name belongs to a supported family."""
    # str.startswith accepts a tuple of prefixes; "rig" intentionally disabled.
    return name.startswith(("model",))
| [
"david962041@gmail.com"
] | david962041@gmail.com |
6f7249b4229753972631b1615fde741b81a4edd3 | 1a5819e1ca80c4cd76322e47c8983691bfcf5447 | /week2/c.fix start.py | 5a525d34ff3348bde541d4cac28505432be55c20 | [] | no_license | EzhilarasiBaladhandapani/python-exercise | 77b4fbd4d72c5b86012333f30f9a593eaa39116a | 924997fe6d994b0c518755ac1df54d2031f33c1b | refs/heads/main | 2023-04-07T00:48:43.452252 | 2021-03-30T06:10:18 | 2021-03-30T06:10:18 | 352,519,940 | 0 | 0 | null | 2021-03-30T05:31:50 | 2021-03-29T04:52:54 | Python | UTF-8 | Python | false | false | 124 | py | string = "babble yields babble"
# Replace every copy of the first character with '*', then put the
# original first character back at position 0.
first_char = string[0]
masked = string.replace(first_char, '*')
string = first_char + masked[1:]
print(string)
"ezhilbaladhandapani@gmail.com"
] | ezhilbaladhandapani@gmail.com |
2ab7ea3dc1714640bff9cac3035fb7aff6fdb289 | 44cedd06b04a46fdfb191500e27755d926b4261d | /Lecture 1 Functions/Functions MultiplyEvensbyOdds.py | 8ff65ecf7bab10c766abff39f05415a57df91ebb | [] | no_license | twoandahalf2/PythonBasics2 | 899bb56881da861bf6d57c43c4a031b56b6b5451 | b4160442a9275873b75e1c877beaff2ac58b5268 | refs/heads/master | 2020-07-06T04:07:30.552093 | 2019-08-17T13:39:25 | 2019-08-17T13:39:25 | 202,886,000 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 554 | py |
class Zadacha():
    """Split an integer's digits into even/odd sums and multiply them."""

    def __init__(self):
        # BUGFIX: the original read input() in the class body, which runs at
        # class-definition (import) time and blocks any importer on stdin.
        # Reading here keeps the script's behaviour (one prompt) but makes
        # the module importable.
        self.my_number = input()

    def calculate(self):
        """Return [sum of even digits, sum of odd digits] of my_number."""
        sum_even = 0
        sum_odd = 0
        for ch in self.my_number:
            if ch == '-':  # skip the sign of negative numbers
                continue
            digit = int(ch)
            if digit % 2 == 0:
                sum_even += digit
            else:
                sum_odd += digit
        return [sum_even, sum_odd]

    def multiple(self):
        """Return (even-digit sum) * (odd-digit sum)."""
        # BUGFIX: reuse this instance instead of constructing a fresh
        # Zadacha(), which would trigger a second input() prompt.
        even_sum, odd_sum = self.calculate()
        return even_sum * odd_sum


if __name__ == '__main__':
    print(Zadacha().multiple())
| [
"vladimir.kolev@hotmail.com"
] | vladimir.kolev@hotmail.com |
87279f6dae5afa6e6c26657b9c58b42e66be2d5f | 2940f5416082dadd9c646cd9a46d2d0a99883efb | /venv/Lib/site-packages/networkx/algorithms/community/quality.py | 2ffe40786d0df3fd548db7595029d83819f2e47a | [
"MIT"
] | permissive | tpike3/SugarScape | 4813e4fefbfb0a701f5913d74f045fd0eaed1942 | 39efe4007fba2b12b75c72f7795827a1f74d640b | refs/heads/main | 2021-06-20T03:55:46.288721 | 2021-01-20T17:06:35 | 2021-01-20T17:06:35 | 168,583,530 | 11 | 3 | MIT | 2021-01-20T17:19:53 | 2019-01-31T19:29:40 | Jupyter Notebook | UTF-8 | Python | false | false | 10,608 | py | """Functions for measuring the quality of a partition (into
communities).
"""
from functools import wraps
from itertools import product
import networkx as nx
from networkx import NetworkXError
from networkx.utils import not_implemented_for
from networkx.algorithms.community.community_utils import is_partition
__all__ = ["coverage", "modularity", "performance"]
class NotAPartition(NetworkXError):
    """Raised if a given collection is not a partition of a graph's nodes."""

    def __init__(self, G, collection):
        # BUGFIX: the failed *collection* is the partition candidate, so it
        # belongs first in the message; the original had G and collection
        # swapped ("{G} is not a valid partition of the graph {collection}").
        msg = f"{collection} is not a valid partition of the graph {G}"
        super().__init__(msg)
def require_partition(func):
    """Decorator to check that a valid partition is input to a function

    Raises :exc:`networkx.NetworkXError` if the partition is not valid.

    This decorator should be used on functions whose first two arguments
    are a graph and a partition of the nodes of that graph (in that
    order)::

        >>> @require_partition
        ... def foo(G, partition):
        ...     print("partition is valid!")
        ...
        >>> G = nx.complete_graph(5)
        >>> partition = [{0, 1}, {2, 3}, {4}]
        >>> foo(G, partition)
        partition is valid!
        >>> partition = [{0}, {2, 3}, {4}]
        >>> foo(G, partition)
        Traceback (most recent call last):
        ...
        networkx.exception.NetworkXError: `partition` is not a valid partition of the nodes of G
        >>> partition = [{0, 1}, {1, 2, 3}, {4}]
        >>> foo(G, partition)
        Traceback (most recent call last):
        ...
        networkx.exception.NetworkXError: `partition` is not a valid partition of the nodes of G
    """
    # functools.wraps preserves the wrapped function's name and docstring.
    @wraps(func)
    def new_func(*args, **kw):
        # Here we assume that the first two arguments are (G, partition).
        if not is_partition(*args[:2]):
            raise nx.NetworkXError(
                "`partition` is not a valid partition of" " the nodes of G"
            )
        return func(*args, **kw)
    return new_func
def intra_community_edges(G, partition):
    """Returns the number of intra-community edges for a partition of `G`.

    Parameters
    ----------
    G : NetworkX graph.

    partition : iterable of sets of nodes
        This must be a partition of the nodes of `G`.

    The "intra-community edges" are those edges joining a pair of nodes
    in the same block of the partition.
    """
    total = 0
    for block in partition:
        # size() of the induced subgraph counts only edges inside the block.
        total += G.subgraph(block).size()
    return total
def inter_community_edges(G, partition):
    """Returns the number of inter-community edges for a partition of `G`.

    Parameters
    ----------
    G : NetworkX graph.

    partition : iterable of sets of nodes
        This must be a partition of the nodes of `G`.

    The *inter-community edges* are those edges joining a pair of nodes
    in different blocks of the partition.

    Implementation note: this function creates an intermediate graph
    that may require the same amount of memory as that of `G`.
    """
    # Collapsing each block of the partition to a single node turns every
    # inter-community edge of `G` into an edge of the quotient graph; a
    # *multi*graph keeps the parallel edges, so size() counts them all.
    quotient_cls = nx.MultiDiGraph if G.is_directed() else nx.MultiGraph
    quotient = nx.quotient_graph(G, partition, create_using=quotient_cls)
    return quotient.size()
def inter_community_non_edges(G, partition):
    """Returns the number of inter-community non-edges according to the
    given partition of the nodes of `G`.

    `G` must be a NetworkX graph.

    `partition` must be a partition of the nodes of `G`.

    A *non-edge* is a pair of nodes (undirected if `G` is undirected)
    that are not adjacent in `G`. The *inter-community non-edges* are
    those non-edges on a pair of nodes in different blocks of the
    partition.

    Implementation note: this function creates two intermediate graphs,
    which may require up to twice the amount of memory as required to
    store `G`.
    """
    # Alternate implementation that does not require constructing two
    # new graph objects (but does require constructing an affiliation
    # dictionary):
    #
    # aff = dict(chain.from_iterable(((v, block) for v in block)
    #                                for block in partition))
    # return sum(1 for u, v in nx.non_edges(G) if aff[u] != aff[v])
    #
    # Every non-edge of G is an edge of the complement, so the inter-community
    # edge count of the complement is exactly what we want.
    return inter_community_edges(nx.complement(G), partition)
@not_implemented_for("multigraph")
@require_partition
def performance(G, partition):
    """Returns the performance of a partition.

    The *performance* of a partition is the ratio of the number of
    intra-community edges plus inter-community non-edges with the total
    number of potential edges.

    Parameters
    ----------
    G : NetworkX graph
        A simple graph (directed or undirected).

    partition : sequence
        Partition of the nodes of `G`, represented as a sequence of
        sets of nodes. Each block of the partition represents a
        community.

    Returns
    -------
    float
        The performance of the partition, as defined above.

    Raises
    ------
    NetworkXError
        If `partition` is not a valid partition of the nodes of `G`.

    References
    ----------
    .. [1] Santo Fortunato.
           "Community Detection in Graphs".
           *Physical Reports*, Volume 486, Issue 3--5 pp. 75--174
           <https://arxiv.org/abs/0906.0612>
    """
    # "Correctly classified" pairs: edges within a community plus
    # non-edges between communities.
    good_pairs = intra_community_edges(G, partition)
    good_pairs += inter_community_non_edges(G, partition)
    # Number of potential edges on n nodes (directed counts both orders).
    # For undirected graphs each pair was double-counted; the product of two
    # consecutive integers is even, so integer division is exact.
    node_count = len(G)
    possible_pairs = node_count * (node_count - 1)
    if not G.is_directed():
        possible_pairs //= 2
    return good_pairs / possible_pairs
@require_partition
def coverage(G, partition):
    """Returns the coverage of a partition.

    The *coverage* of a partition is the ratio of the number of
    intra-community edges to the total number of edges in the graph.

    Parameters
    ----------
    G : NetworkX graph

    partition : sequence
        Partition of the nodes of `G`, represented as a sequence of
        sets of nodes. Each block of the partition represents a
        community.

    Returns
    -------
    float
        The coverage of the partition, as defined above.

    Raises
    ------
    NetworkXError
        If `partition` is not a valid partition of the nodes of `G`.

    Notes
    -----
    If `G` is a multigraph, the multiplicity of edges is counted.

    References
    ----------
    .. [1] Santo Fortunato.
           "Community Detection in Graphs".
           *Physical Reports*, Volume 486, Issue 3--5 pp. 75--174
           <https://arxiv.org/abs/0906.0612>
    """
    return intra_community_edges(G, partition) / G.number_of_edges()
def modularity(G, communities, weight="weight"):
    r"""Returns the modularity of the given partition of the graph.

    Modularity is defined in [1]_ as

    .. math::
        Q = \frac{1}{2m} \sum_{ij} \left( A_{ij} - \frac{k_ik_j}{2m}\right)
            \delta(c_i,c_j)

    where $m$ is the number of edges, $A$ is the adjacency matrix of
    `G`, $k_i$ is the degree of $i$ and $\delta(c_i, c_j)$
    is 1 if $i$ and $j$ are in the same community and 0 otherwise.

    According to [2]_ (and verified by some algebra) this can be reduced to

    .. math::
       Q = \sum_{c=1}^{n}
           \left[ \frac{L_c}{m} - \left( \frac{k_c}{2m} \right) ^2 \right]

    where the sum iterates over all communities $c$, $m$ is the number of edges,
    $L_c$ is the number of intra-community links for community $c$,
    $k_c$ is the sum of degrees of the nodes in community $c$.

    The second formula is the one actually used in calculation of the modularity.

    Parameters
    ----------
    G : NetworkX Graph

    communities : list or iterable of set of nodes
        These node sets must represent a partition of G's nodes.

    weight : string or None, optional (default="weight")
        The edge attribute that holds the numerical value used
        as a weight. If None or an edge does not have that attribute,
        then that edge has weight 1.

    Returns
    -------
    Q : float
        The modularity of the partition.

    Raises
    ------
    NotAPartition
        If `communities` is not a partition of the nodes of `G`.

    Examples
    --------
    >>> import networkx.algorithms.community as nx_comm
    >>> G = nx.barbell_graph(3, 0)
    >>> nx_comm.modularity(G, [{0, 1, 2}, {3, 4, 5}])
    0.35714285714285715
    >>> nx_comm.modularity(G, nx_comm.label_propagation_communities(G))
    0.35714285714285715

    References
    ----------
    .. [1] M. E. J. Newman *Networks: An Introduction*, page 224.
       Oxford University Press, 2011.
    .. [2] Clauset, Aaron, Mark EJ Newman, and Cristopher Moore.
       "Finding community structure in very large networks."
       Physical review E 70.6 (2004). <https://arxiv.org/abs/cond-mat/0408187>
    """
    if not isinstance(communities, list):
        communities = list(communities)
    if not is_partition(G, communities):
        raise NotAPartition(G, communities)
    directed = G.is_directed()
    # `m` is the (weighted) edge count; `norm` is the 1/(2m)^2 style factor of
    # the degree-product term, precomputed once.
    if directed:
        out_degree = dict(G.out_degree(weight=weight))
        in_degree = dict(G.in_degree(weight=weight))
        m = sum(out_degree.values())
        norm = 1 / m ** 2
    else:
        out_degree = in_degree = dict(G.degree(weight=weight))
        deg_sum = sum(out_degree.values())
        m = deg_sum / 2
        norm = 1 / deg_sum ** 2
    def community_contribution(community):
        # L_c: total weight of edges with both endpoints inside the community.
        comm = set(community)
        L_c = sum(wt for u, v, wt in G.edges(comm, data=weight, default=1) if v in comm)
        out_degree_sum = sum(out_degree[u] for u in comm)
        in_degree_sum = sum(in_degree[u] for u in comm) if directed else out_degree_sum
        return L_c / m - out_degree_sum * in_degree_sum * norm
    return sum(map(community_contribution, communities))
"tpike3@gmu.edu"
] | tpike3@gmu.edu |
f00e619a8fc8bb52b9375a2f389669e88e9419c9 | 832443441e034e96939df8322481eab571e0a2a9 | /shopper/migrations/0003_shipping.py | 02380c3c2866194c7d49cafbbd574aad08c73285 | [] | no_license | AmalprashobM/Django-project | 5dfc50ddf361eeb0458efce7543c2587bc111f8a | 14a8b1cb027c1da5595370b6aa5187970b4a169c | refs/heads/main | 2023-04-12T06:28:57.988040 | 2021-05-01T20:35:28 | 2021-05-01T20:35:28 | 363,467,641 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 921 | py | # Generated by Django 3.1.5 on 2021-01-30 20:53
from django.db import migrations, models
# NOTE: auto-generated Django migration; avoid hand-editing field definitions.
class Migration(migrations.Migration):
    dependencies = [
        ('shopper', '0002_cartitem'),
    ]
    # Creates the `Shipping` table of shipping-address records.
    operations = [
        migrations.CreateModel(
            name='Shipping',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('firstname', models.CharField(max_length=100)),
                ('lastname', models.CharField(max_length=100)),
                ('email', models.CharField(max_length=100)),
                ('mobile', models.IntegerField()),
                ('address', models.CharField(max_length=255)),
                ('state', models.CharField(max_length=50)),
                ('city', models.CharField(max_length=50)),
                ('pincode', models.IntegerField()),
            ],
        ),
    ]
| [
"alone456.aa@gmail.com"
] | alone456.aa@gmail.com |
039edd18fd3e878624c2de8607511b5b9ad8a545 | d554b1aa8b70fddf81da8988b4aaa43788fede88 | /5 - Notebooks e Data/1 - Análises numéricas/Arquivos David/Atualizados/logDicas-master/data/2019-1/223/users/4170/codes/1594_1800.py | 4bfac1cb471a1d30c906e35552843d6922186bbd | [] | no_license | JosephLevinthal/Research-projects | a3bc3ca3b09faad16f5cce5949a2279cf14742ba | 60d5fd6eb864a5181f4321e7a992812f3c2139f9 | refs/heads/master | 2022-07-31T06:43:02.686109 | 2020-05-23T00:24:26 | 2020-05-23T00:24:26 | 266,199,309 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 209 | py | a = int(input("Insira o valor da variavel a: "))
b = int(input("Insira o valor da variavel b: "))
c = int(input("Insira o valor da variavel c: "))
x = ((a**2) + (b**2) + (c**2)) / (a + b + c)
print(round(x,7)) | [
"jvlo@icomp.ufam.edu.br"
] | jvlo@icomp.ufam.edu.br |
a0b913ca7ff9e0d575058da4170bbd0f75e9022f | bbc1182c4b865315975efb85e5ada8fa0fec535b | /organization/urls.py | c8a7023dba8f995e9f81c6ec787b5e4d6c0b5c8e | [] | no_license | G2-9/BCR-INTEX | e088dac317a5aaba2ec01514df83d32db2ecd83a | 7133fd2668a9a7c555790083b20cc1686f5fc4c4 | refs/heads/master | 2023-02-03T17:45:17.253418 | 2020-12-10T17:12:51 | 2020-12-10T17:12:51 | 319,370,891 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,248 | py | from django.urls import path
from .views import indexPageView, searchOrganizationPageView, alterOrganizationPageView, updateOrganizationPageView, deleteOrganizationPageView, removeOrganizationPageView, displayOrganizationPageView, addOrganizationPageView, registerOrganizationPageView, postListingPageView, addListingPageView
urlpatterns = [
path('alterOrganization/', alterOrganizationPageView, name='alter_organization'),
path('updateOrganization/', updateOrganizationPageView, name='update_organization'),
path('removeOrganization/', removeOrganizationPageView, name='remove_organization'),
path('deleteOrganization/', deleteOrganizationPageView, name='delete_organization'),
path('viewListing/', addListingPageView, name='view_listing'),
path('postListing/', postListingPageView, name='post_listing'),
path('registerOrganization/', registerOrganizationPageView, name='register_organization'),
path('searchOrganization/', searchOrganizationPageView, name='search_organization'),
path('displayOrganization/', displayOrganizationPageView, name='display_organization'),
path('addOrganization/', addOrganizationPageView, name='add_organization'),
path("", indexPageView, name="organization_index")
] | [
"mitchellmel@outlook.com"
] | mitchellmel@outlook.com |
62437b90ea3393cf590c2bf16a858046597430af | 1f633e04dd62dee43197990d49751720dfef3667 | /mysite/settings.py | c2dd6a7a1b86fe32673b896151b9cced3491e1d1 | [] | no_license | addix001/my-first-blog | 24fb80234311d2c2ced752f9319d49640aa9f23e | baac019d4f387637e3662ea063ff79a70a1603a0 | refs/heads/master | 2021-01-23T02:48:31.278600 | 2017-03-24T04:06:46 | 2017-03-24T04:06:46 | 86,026,046 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,702 | py | """
Django settings for mysite project.
Generated by 'django-admin startproject' using Django 1.8.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.8/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.8/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): this key is committed to source control; rotate it and load
# it from the environment before any production deployment.
SECRET_KEY = '*50myu@u(s_)$k9_&_4byi9l^%gxu%c7mo2sb@_mqfhk8kmt&-'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
# NOTE(review): must list the served hostnames once DEBUG is False.
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'blog',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'django.middleware.security.SecurityMiddleware',
)
ROOT_URLCONF = 'mysite.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'mysite.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.8/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.8/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'Asia/Seoul'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.8/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(BASE_DIR, 'static')
| [
"jhlee@addix.co.kr"
] | jhlee@addix.co.kr |
7cdb8a4440f750b1cfb71bcf6fa3ad0296a03953 | 55fed201f7fc986deb2006cc6f65af8630b36945 | /shopping-list-alt.py | 8a26089dd27564c348cfe266895316916abaaa3c | [] | no_license | krystinashveda/Learning-Python | a3ddbdab5be8c4d7ea10fa4f58ca899d68e8a48c | 04cac916c151bf4d8d87a03107000cad05d8807b | refs/heads/master | 2021-01-10T07:59:31.364246 | 2016-03-18T17:26:40 | 2016-03-18T17:26:40 | 54,211,434 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 872 | py | # Run the script to start using it
# Put new things into the list, one at a time
# Enter the word DONe - in all caps - to quit the program
# And, once I quit, I want the app to show me everythign that's on my list
shopping_list = []
def print_list():
    """Print every shopping-list entry as a '* item' bullet line."""
    for entry in shopping_list:
        print("* {}".format(entry))
def quit():
    """Show the final shopping list, then terminate the program.

    NOTE(review): this shadows the builtin ``quit``; ``exit()`` below is
    the site builtin, so the program still terminates.
    """
    print_list()
    exit()
def get_user_input():
    """Prompt repeatedly for shopping-list commands until the user quits.

    Commands (all caps): DONE shows the list and exits, SHOW prints the
    list, HELP prints usage, 'DEL <item>' removes an item; any other
    entry is added to the list.
    """
    # BUGFIX: the original recursed into itself after every command; Python
    # has no tail-call optimisation, so a long session would eventually hit
    # the recursion limit. A loop has the same observable behaviour.
    while True:
        user_input = input("Enter a new item: ")
        if user_input == "DONE":
            quit()
        elif user_input == "SHOW":
            print_list()
        elif user_input == "HELP":
            print("instructions for idiots:")
            print("SHOW: Shows the shit (duhh)")
        elif user_input.startswith("DEL "):
            item_to_delete = user_input[4:]
            # BUGFIX: deleting an item that is not on the list used to raise
            # ValueError and crash the app, losing the whole list.
            try:
                shopping_list.remove(item_to_delete)
            except ValueError:
                print("* {} is not on the list".format(item_to_delete))
        else:
            shopping_list.append(user_input)
get_user_input()
| [
"krystinashveda@users.noreply.github.com"
] | krystinashveda@users.noreply.github.com |
79daee4cfbd4add82ae6126da5caf43695320f38 | 4c7729a025c6add065c67b2f975e8f3b2594809e | /HW1/homework1.py | b1f97d5d0946840ed818f0c1129c0402bfe54d9d | [] | no_license | tallestms/Computational_Investing | 1cfd783319a107a5b0f6a984c147094e850d9a14 | 2b1852091826f496d11674da5f49a2c9bfb5fcb5 | refs/heads/master | 2021-03-12T21:48:10.286254 | 2013-11-03T01:06:12 | 2013-11-03T01:06:12 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,513 | py | import QSTK.qstkutil.qsdateutil as du
import QSTK.qstkutil.tsutil as tsu
import QSTK.qstkutil.DataAccess as da
import datetime as dt
import matplotlib.pyplot as plt
import pandas as pd
import numpy as np
dt_start = dt.datetime(2011, 1, 1)
dt_end = dt.datetime(2011, 12, 31)
ls_symbols = ['AAPL', 'GOOG', 'IBM', 'MSFT']
def simulate(startDate, endDate, symbolsEq, allocationEq):
    """Simulate a daily-rebalanced portfolio over NYSE trading days.

    Parameters: startDate/endDate (datetime bounds of the period),
    symbolsEq (list of ticker symbols), allocationEq (list of portfolio
    weights, one per symbol, summing to 1.0).

    Returns a tuple: (std dev of daily returns, average daily return,
    annualised Sharpe ratio, cumulative return).
    """
    # Sample prices at the NYSE close (16:00) of each trading day.
    dt_timeofday = dt.timedelta(hours=16)
    # BUGFIX: the original ignored its own arguments and read the module
    # globals dt_start/dt_end/ls_symbols instead; use the parameters so the
    # function works for any caller.
    ldt_timestamps = du.getNYSEdays(startDate, endDate, dt_timeofday)
    c_dataobj = da.DataAccess('Yahoo')
    ls_keys = ['open', 'high', 'low', 'close', 'volume', 'actual_close']
    ldf_data = c_dataobj.get_data(ldt_timestamps, symbolsEq, ls_keys)
    d_data = dict(zip(ls_keys, ldf_data))
    na_price = d_data['close'].values
    # Normalise every series to 1.0 on the first day.
    na_normalized_price = na_price / na_price[0, :]
    # Daily portfolio value: weighted sum across symbols.
    # (The original also fetched $SPX and computed excess-return statistics
    # that were never used or returned; that dead code has been removed.)
    na_weighted = na_normalized_price * allocationEq
    na_portfolio_value = na_weighted.sum(axis=1)
    dailyReturn = na_portfolio_value.copy()
    tsu.returnize0(dailyReturn)
    avgDailyReturn = np.average(dailyReturn)
    dailyReturnStdDev = np.std(dailyReturn)
    # Annualise with 252 trading days per year.
    sharpeRatio = np.sqrt(252) * avgDailyReturn / dailyReturnStdDev
    cumulativeReturn = na_portfolio_value[-1]
    return dailyReturnStdDev, avgDailyReturn, sharpeRatio, cumulativeReturn
# Grid-search all allocations in 10% steps that sum to 100%, keeping the one
# with the best Sharpe ratio.
# NOTE(review): range(0, 10) stops at 9, so allocations putting 100% in a
# single asset (a == 10) are never tried — confirm this is intended.
# NOTE(review): the optimum is computed but never printed or returned.
if __name__ == '__main__':
    optimalSharpeRatio = 0.0
    optimalAllocation = [0, 0, 0, 0]
    for a in range (0, 10):
        for b in range (0, 10):
            for c in range (0, 10):
                for d in range (0, 10):
                    if(a + b + c + d == 10):
                        allocation = [float(a)/10, float(b)/10, float(c)/10, float(d)/10]
                        volatility, dailyReturn, sharpeRatio, cumulativeReturn = simulate(dt_start, dt_end, ls_symbols, allocation)
                        if(sharpeRatio > optimalSharpeRatio):
                            optimalSharpeRatio = sharpeRatio
                            optimalAllocation = allocation
| [
"talles@gmail.com"
] | talles@gmail.com |
b0512643eccee3ba6cc8d8b017717a3832b9bf32 | e323d8e7afe989a2daad65b8f0a288e024e69e84 | /classifier.py | 2fc5f404b13109cc1783868345edacfbf27d9c02 | [] | no_license | graca-dcc/graca-2.0 | 700fbcc991af67b518ad0540dc3ee12348e8778f | 37c430e08e937bfc86f95436fbbc86352464f048 | refs/heads/master | 2021-01-21T17:53:35.187044 | 2017-06-22T11:17:13 | 2017-06-22T11:17:13 | 91,997,824 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,835 | py | # -*- coding: utf-8 -*-
import random
from nltk import FreqDist
from nltk import NaiveBayesClassifier as nb
from nltk import DecisionTreeClassifier as dt
from nltk.classify import apply_features
from nltk.metrics.distance import edit_distance
from reader import read
from preprocess import preprocess
from preprocess import get_sub_dict
import pickle
class Classifier():
    """Naive-Bayes FAQ classifier for a Portuguese university chatbot.

    Loads question/answer sheets from Google Spreadsheets (via the project's
    `reader` module), preprocesses the questions with substitution
    dictionaries, and trains an NLTK NaiveBayesClassifier that maps a user
    sentence to the id of the best-matching canned answer.
    """
    def __init__(self):
        # Global word frequencies over all FAQ questions (lowercased tokens).
        self.word_frequency = FreqDist()
        # Answer ids are namespaced per spreadsheet: sheet k uses ids in
        # [k*1000, (k+1)*1000); get_data() bumps this offset after each sheet.
        self.offset = 1000
        # List of (preprocessed_question, answer_id) training pairs.
        self.faq = []
        self.sub_dict = dict()
        self.answers = dict()
        # Accumulate the substitution dictionaries (acronyms, abbreviations,
        # synonym sets, ...) used by preprocess().
        self.sub_dict = get_sub_dict(self.sub_dict, 'siglas')
        self.sub_dict = get_sub_dict(self.sub_dict, 'academico')
        self.sub_dict = get_sub_dict(self.sub_dict, 'abreviacoes')
        self.sub_dict = get_sub_dict(self.sub_dict, 'conjuntos')
        self.sub_dict = get_sub_dict(self.sub_dict, 'sinonimos')
        self.read_faq()
        random.shuffle(self.faq)
        self.get_word_frequency()
        # Lazy feature extraction over the training pairs, then train.
        train_set = apply_features(self.extract_feature, self.faq)
        self.classifier = nb.train(train_set)
    def get_word_frequency(self):
        """Count word occurrences over all FAQ questions.

        Words from very short questions (<= 2 distinct words) are weighted
        10x so they are not drowned out by longer questions.
        """
        for t in self.faq:
            question = t[0]
            words = set(question.split(' '))
            freq = 1
            if len(words) <= 2:
                freq = 10
            for word in words:
                self.word_frequency[word.lower()] += freq
    def extract_feature(self, sentence):
        """Build a fuzzy bag-of-words feature dict for `sentence`.

        A known frequent word becomes a feature when any sentence word is
        within edit distance 2 of it (tolerates typos/inflection).
        """
        bow = set(sentence.lower().split(' '))
        features = {}
        #for word in self.word_frequency.keys():
        #    features[word] = (word in bow)
        freq = 1
        if len(bow) <= 2:
            freq = 10
        for freq_word in self.word_frequency.keys():
            for word in bow:
                if edit_distance(freq_word,word) <= 2:
                    # NOTE(review): because of the break below, each
                    # freq_word is matched at most once per sentence, so the
                    # '+=' branch appears unreachable -- confirm intent.
                    if freq_word in features:
                        features[freq_word] += freq
                        break
                    else:
                        features[freq_word] = freq
                        break
                #break
        return features
    def read_faq(self):
        """Load every FAQ spreadsheet (ids are Google Sheets document keys)."""
        # colegiado -- departmental board sheet
        self.get_data('1fqDkqnZ1Zws5yrAa7cZryJKZO2hQDrqU2kW64SA8zAo')
        # apresentacao -- introduction sheet
        self.get_data('1IxnEQxrArzEJvoCzdISERzkCEkzM6heVO58FN3F7c9Y')
        # biblioteca -- library sheet
        self.get_data('1U8t-blzZHM9m1K9H6O1eLYEv_EhuwVUGmrkzcU7STDQ')
        # informacoes_gerais -- general information sheet
        self.get_data('1VXLnbmBo-OBtbFu9JfBSC0v8ufUBTT3sIwpXj5mz8Ec')
        # creditos -- course credits sheet
        self.get_data('1FwuOvzxT9pcvuYHIYoYQbByTwNwZWI0NW_WV8_YvPP8')
        # sobre_cursos -- about the courses sheet
        self.get_data('1z_U7mDvru1dOkhjo62SInosYMzSpXPXBCUVobB7jgFk')
        # sobre -- about sheet
        self.get_data('1AGkOlKeuGK8BtB92PTOlFYOiIdXvxQ9RRqJcf1_5vHo')
        # formacao complementar -- complementary education sheet
        self.get_data('1IA2rFHmD5VESpzzWSG76wwqFvbU8f0IjczPgJumEMjQ')
        # matricula -- enrollment sheet
        self.get_data('1T1FfsVpLdBntyAfW_Rv4CNgoD8imDOt52GTg5nsKnSM')
    def get_data(self, spreadsheetId):
        """Load one spreadsheet: 'pergunta' (question) and 'resposta'
        (answer) tabs.  Answer ids are shifted by the current offset so ids
        from different sheets never collide; the offset advances afterwards.
        """
        q = read(spreadsheetId,'pergunta')
        a = read(spreadsheetId,'resposta')
        for row in a:
            self.answers[int(row[0])+self.offset] = row[1]
        for row in q:
            t = (preprocess(row[0],self.sub_dict),int(row[1])+self.offset)
            self.faq += [t]
        self.offset += 1000
    def get_answer(self, sentence):
        """Classify `sentence` and return (answer_text, confidence).

        Confidence is the classifier's probability for the chosen label.
        """
        sentence = preprocess(sentence,self.sub_dict)
        p = self.classifier.prob_classify(self.extract_feature(sentence))
        #if p.prob(p.max()) <= 0.7:
        #    return 'Me desculpe, não consegui entender :(', p.prob(p.max())
        #ans = self.classifier.classify(self.extract_feature(sentence))
        ans = p.max()
        return self.answers[ans], p.prob(p.max())
    def save(self):
        """Pickle the whole classifier to 'classifier.pickle' (cwd)."""
        fc = open('classifier.pickle', 'wb')
        pickle.dump(self, fc)
        fc.close()
| [
"gracabot.dcc@gmail.com"
] | gracabot.dcc@gmail.com |
01f2c3c84e5a212093e01c4e4dbbf82b5026e90e | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p02621/s553012004.py | 34870c7adb3722196b85d445063e4fd1201d1d96 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 46 | py | a = int(input())
# a + a^2 + a^3, evaluated in Horner form (identical for integer a).
res = a * (1 + a * (1 + a))
print(res)
| [
"66529651+Aastha2104@users.noreply.github.com"
] | 66529651+Aastha2104@users.noreply.github.com |
6d816df5012606bc69d35c03b4aac39b3a25c6dd | 0ec4defa6f83ec044b9e1235cc45964a8145b4d1 | /venv/lib/python3.6/site-packages/pybrain3/rl/experiments/continuous.py | 72df9483cfa96feab6da58c6c9be10525203864b | [] | no_license | nnarziev/MyWeek_Server | e6f6c10ce813cf3dc3aa644958c31a4d01567b4d | 7c51e79224ba48cd1a230536c27f3bd8cec73a21 | refs/heads/master | 2021-08-19T13:46:56.450003 | 2017-11-25T16:48:07 | 2017-11-25T16:48:07 | 112,080,782 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 583 | py | __author__ = 'Thomas Rueckstiess, ruecksti@in.tum.de'
from .experiment import Experiment
class ContinuousExperiment(Experiment):
    """Extension of Experiment for continuous tasks (no episode resets)."""
    def doInteractionsAndLearn(self, number = 1):
        """Run `number` interaction steps, invoking the agent's learning
        step after every single interaction.  The task is never reset, so
        consecutive calls continue seamlessly from the current state.
        Returns the current step id.
        """
        step = 0
        while step < number:
            self._oneInteraction()
            self.agent.learn()
            step += 1
        return self.stepid
| [
"qobiljon.toshnazarov@gmail.com"
] | qobiljon.toshnazarov@gmail.com |
69a4ea6faf0cd8c55536edefeb94e92e9b3620d4 | d1025cd05371af3cd659ebce1e3b7be4a718f574 | /examlpes/diff(*args,**kwargs).py | 549ac0182aa3beeeb4f46c2ad4896de3d558438a | [] | no_license | Enhydra-xun/Book-problems | ea5a3e44e918e94f37aa97f842437ac7853bab19 | 04bb64075e2299f2d0e826687797297683a087ce | refs/heads/master | 2021-01-23T00:15:46.355647 | 2017-07-12T11:03:55 | 2017-07-12T11:03:55 | 85,711,667 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 243 | py | def foo(*args,**kwargs):
print 'args=',kwargs
print'-------------------------------------'
if __name__ == '__main__':
foo(1,2,3,4)
foo(a=1,b=2,c=3)
foo(1,2,3,4,a=1,b=2,c=3)
foo('a', 1, None, a=1, b='2', c=3)
| [
"1092821680@qq.com"
] | 1092821680@qq.com |
684cab33328c39d32af6aef7b6c8af73a6181c68 | 383d34b34e5f73690ea851f78ac31571ed8600f2 | /python/log.py | fddb0405e79b0736cc2ce55694b271ff7a5a4032 | [
"BSD-2-Clause"
] | permissive | wchgit/infrastructure | 6b1c68f3825eca105ccc25da8ddbb3166e7f2dfb | c6299db1877443654a6510c67d127eaf75070206 | refs/heads/master | 2021-01-15T11:11:50.116348 | 2014-09-11T16:09:12 | 2014-09-11T16:09:12 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,701 | py | #!/usr/bin/env python
#coding:utf-8
import logging
import curses
class PrettyFormatter(logging.Formatter):
    """Log formatter that colorizes records with terminal escape codes.

    A per-level color sequence is injected through the %(color)s and
    %(end_color)s placeholders of the format string.
    """
    DEFAULT_FMT = '%(color)s[%(levelname)s %(asctime)s '\
                  '%(module)s:%(lineno)d]%(end_color)s %(message)s'
    DEFAULT_DATEFMT = '%Y%m%d %H:%M:%S'
    # level -> ANSI color index (4=blue, 2=green, 3=yellow, 1=red)
    DEFAULT_COLORS = {logging.DEBUG:4, logging.INFO:2,
                      logging.WARNING:3, logging.ERROR:1}
    def __init__(self, fmt=DEFAULT_FMT, datefmt=DEFAULT_DATEFMT,
                 colors=DEFAULT_COLORS):
        logging.Formatter.__init__(self, datefmt=datefmt)
        self._fmt = fmt
        self._colors = {}
        curses.setupterm()
        # Robustness fix: tigetstr() returns None when the terminal lacks
        # the capability; fall back to an empty sequence.
        fg_color = curses.tigetstr('setaf') or b''
        for levelno, code in colors.items():
            self._colors[levelno] = self._as_text(curses.tparm(fg_color, code))
        self._normal = self._as_text(curses.tigetstr('sgr0') or b'')
    @staticmethod
    def _as_text(seq):
        # Bug fix: on Python 3 the curses functions return bytes; splicing
        # bytes into the text format string rendered as "b'\\x1b[...'".
        if isinstance(seq, bytes) and not isinstance(seq, str):
            return seq.decode('ascii')
        return seq
    def format(self, record):
        """Render `record` with the color assigned to its level."""
        record.message = record.getMessage()
        record.asctime = self.formatTime(record, self.datefmt)
        record.color = self._colors[record.levelno]
        record.end_color = self._normal
        formatted = self._fmt % record.__dict__
        return formatted
def get_logger(name='default.log', level=logging.DEBUG):
    """
    Return a Logger with colorized console output.

    Bug fix: the original attached a fresh StreamHandler on every call, so
    repeated get_logger() calls for the same name produced duplicated log
    lines.  The handler is now only installed once per logger.
    """
    logger = logging.getLogger(name)
    logger.setLevel(level)
    if not logger.handlers:
        hdlr = logging.StreamHandler()
        hdlr.setLevel(level)
        fmt = PrettyFormatter()
        hdlr.setFormatter(fmt)
        logger.addHandler(hdlr)
    return logger
def test_get_logger():
    """Smoke test: emit one message at each standard level."""
    logger = get_logger()
    for emit in (logger.debug, logger.info, logger.warning, logger.error):
        emit('message')
def test():
    # Run all module self-tests (prints one colored line per log level).
    test_get_logger()
if __name__ == '__main__':
    test()
| [
"wchgeek@gmail.com"
] | wchgeek@gmail.com |
704c55342dd6895f4e4aecf488437722bca88ada | cc1c7d59921a3d68b2980f8ad4ae24eb2322570e | /main.py | d4c7b1b08861f1233ec75a81a2db4ec9596f2bd7 | [] | no_license | davidwebster48/pythonproxy | 3345b5a7c422d1db6eb8c1264747bb23ee11ae15 | c6bf8d00b2ffb913a5da047bf8f9d4f9c2337468 | refs/heads/master | 2021-01-21T12:31:37.464204 | 2017-09-01T06:36:49 | 2017-09-01T06:36:49 | 102,080,978 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,543 | py | #!/usr/bin/python
from http.server import BaseHTTPRequestHandler, HTTPServer
import http.client
import zlib
import sys
class RequestHandler(BaseHTTPRequestHandler):
    """Reverse-proxies incoming HTTP requests to REMOTE_SERVER over HTTPS,
    logging request/response headers and bodies to stdout."""
    def do_GET(self):
        """Forward the current request upstream and relay the response."""
        conn = http.client.HTTPSConnection(REMOTE_SERVER)
        request_headers = dict()
        for header in self.headers:
            print(header, ':', self.headers[header])
            value = self.headers[header]
            # Bug fix: header names keep their original case ("Host"), so
            # the old exact-match against "host" never rewrote the header.
            if header.lower() == "host":
                value = REMOTE_SERVER
            request_headers[header] = value
        if self.command == "GET":
            conn.request(self.command, self.path, headers=request_headers)
        else:
            # Bug fix: Message.getheader() is the Python 2 API and raises
            # AttributeError here; Python 3 uses mapping-style access.
            # Default to 0 when the client sent no Content-Length.
            content_length = int(self.headers.get('content-length', 0))
            request_body = self.rfile.read(content_length)
            print(request_body)
            conn.request(self.command, self.path, request_body, request_headers)
        res = conn.getresponse()
        self.send_response(res.status)
        for header in res.getheaders():
            print(header)
            self.send_header(header[0], header[1])
        self.end_headers()
        response_body = res.read()
        decompressed_body = response_body
        if res.getheader('content-encoding') == 'gzip':
            # wbits=31 selects the gzip container with max window size.
            decompressed_body = zlib.decompress(response_body, 31)
        print(decompressed_body)
        # Relay the body verbatim (still compressed if it came compressed).
        self.wfile.write(response_body)
        conn.close()
    def do_POST(self):
        """POST (and any non-GET verb) is proxied by the same path as GET;
        the request body is handled in do_GET's else-branch."""
        self.do_GET()
# Script bootstrap.  NOTE(review): these statements run at import time; the
# upstream host is taken from the first CLI argument (crashes with
# IndexError when missing) -- consider a __main__ guard and argv check.
REMOTE_SERVER = sys.argv[1]
SERVER_ADDRESS = ('', 28021)  # listen on all interfaces, port 28021
HTTPD = HTTPServer(SERVER_ADDRESS, RequestHandler)
HTTPD.serve_forever()  # blocks forever; stop with Ctrl-C
| [
"david.webster@eatnow.com.au"
] | david.webster@eatnow.com.au |
b532f132682d481fbb804c4ecb261e5bc17deb2f | b6917327bb1b2bdc06904cd0e0b60f693d0aabf1 | /src/Testing/Python/pyBlocks.py | 8b6f970f06998c2bda194f5ef4196ba12cd8a59f | [] | no_license | embprg2000/SoftDefinedBlocks | 9789ffc54473ff08369f95ea51c7d26c1f51b16b | 3881b62a15f1ba7d3ab9116f6da6038132731172 | refs/heads/master | 2020-03-27T07:27:16.590182 | 2018-08-26T16:50:29 | 2018-08-26T16:50:29 | 146,192,058 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 24,465 | py | # -*- coding: utf-8 -*-
"""
Created on Sat Sep 17 16:15:51 2016
@author: evert
"""
"""
Test lab for FM decoding algorithms.
Use as follows:
>>> graw = pyfm.lazyRawSamples('rtlsdr.dat', 1000000)
>>> gtune = pyfm.freqShiftIQ(graw, 0.25)
>>> bfir = scipy.signal.firwin(20, 0.2, window='nuttall')
>>> gfilt = pyfm.firFilter(gtune, bfir)
>>> gbase = pyfm.quadratureDetector(gfilt, fs=1.0e6)
>>> fs,qs = pyfm.spectrum(gbase, fs=1.0e6)
"""
import sys
import datetime
import types
import numpy
import numpy.fft
import numpy.linalg
import numpy.random
import scipy.signal
def sincw(n):
    """Return a Sinc (Lanczos) window of length n."""
    def _lanczos(i):
        # Exact center of an odd-length window: sinc(0) is defined as 1.
        if 2 * i == n + 1:
            return 1.0
        t = 2 * i / float(n + 1) - 1
        x = numpy.pi * t
        return numpy.sin(x) / x
    return numpy.array([_lanczos(i) for i in range(n)])
def readRawSamples(fname):
    """Read an interleaved float32 I/Q sample file (rtl_sdr capture).

    Even-indexed values are the I component, odd-indexed ones the Q
    component; the result is a single complex array.
    """
    raw = numpy.fromfile(fname, dtype=numpy.float32)
    i_part = raw[0::2]
    q_part = raw[1::2]
    return i_part + 1j * q_part
def lazyRawSamples(fname, blocklen):
    """Yield blocks of `blocklen` complex samples from a raw rtl_sdr capture.

    The file holds interleaved unsigned 8-bit I/Q pairs; values are
    normalized to [-1, 1).  A trailing partial block is discarded.

    Fixes vs. the original: open() instead of the Python-2-only file()
    builtin, numpy.frombuffer() instead of the deprecated fromstring(),
    and the file handle is closed when the generator finishes.
    """
    with open(fname, 'rb') as f:
        while True:
            d = f.read(2 * blocklen)
            if len(d) < 2 * blocklen:
                break
            # frombuffer gives a read-only uint8 view; astype() copies.
            d = numpy.frombuffer(d, dtype=numpy.uint8).astype(numpy.float64)
            d = (d - 128) / 128.0
            yield d[::2] + 1j * d[1::2]
def freqShiftIQ(d, freqshift):
    """Shift the signal in frequency by multiplying with a complex phasor.

    d         :: sample array, or generator of sample blocks
    freqshift :: shift as a fraction of the sample rate
    """
    def shifted_blocks(blocks):
        offset = 0  # running sample index, keeps phase continuous
        for blk in blocks:
            count = len(blk)
            phasor = numpy.exp((numpy.arange(count) + offset) *
                               (2j * numpy.pi * freqshift))
            offset += count
            yield blk * phasor
    if isinstance(d, types.GeneratorType):
        return shifted_blocks(d)
    phasor = numpy.exp(numpy.arange(len(d)) * (2j * numpy.pi * freqshift))
    return d * phasor
def firFilter(d, coeff):
    """Apply FIR filter to sample stream.

    d     :: sample array, or generator of sample blocks
    coeff :: FIR coefficient array (applied via scipy.signal.lfilter)

    In the lazy (generator) case, history samples from the previous block
    are prepended before filtering so the output is seamless across block
    boundaries.
    """
    # lazy version
    def g(d, coeff):
        prev = None
        for b in d:
            if prev is None:
                # First block: no history yet, keep the startup transient.
                yield scipy.signal.lfilter(coeff, 1, b)
                prev = b
            else:
                # Prepend k history samples, filter, then drop the first k
                # outputs (they belong to the previous block's output).
                k = min(len(prev), len(coeff))
                x = numpy.concatenate((prev[-k:], b))
                y = scipy.signal.lfilter(coeff, 1, x)
                yield y[k:]
                if len(coeff) > len(b):
                    # Block shorter than the filter: keep the extended
                    # buffer so enough history survives to the next block.
                    prev = x
                else:
                    prev = b
    if isinstance(d, types.GeneratorType):
        return g(d, coeff)
    else:
        return scipy.signal.lfilter(coeff, 1, d)
def quadratureDetector(d, fs):
    """FM frequency detector based on quadrature demodulation.

    Computes the phase difference between consecutive samples and scales
    it to Hz.  Returns an array (or, lazily, blocks) of real frequencies;
    the output is one sample shorter than the input overall.
    """
    scale = fs / (2 * numpy.pi)
    def detected(blocks):
        held = None
        for blk in blocks:
            if held is not None:
                # Bridge the block boundary with the first new sample.
                glue = numpy.concatenate((held[1:], blk[:1]))
                yield numpy.angle(glue * held.conj()) * scale
            held = blk
        yield numpy.angle(held[1:] * held[:-1].conj()) * scale
    if isinstance(d, types.GeneratorType):
        return detected(d)
    return numpy.angle(d[1:] * d[:-1].conj()) * scale
def modulateFm(sig, fs, fcenter=0):
    """Create an FM modulated IQ signal.

    sig     :: modulation signal, values in Hz
    fs      :: sample rate in Hz
    fcenter :: center frequency in Hz
    """
    # Instantaneous frequency integrated to phase (in cycles).
    phase = (sig + fcenter).cumsum() / fs
    return numpy.exp(2j * numpy.pi * phase)
def spectrum(d, fs=1, nfft=None, sortfreq=False):
    """Calculate Welch-style power spectral density.
    fs :: sample rate, default to 1
    nfft :: FFT length, default to block length
    sortfreq :: True to put negative freqs in front of positive freqs
    Use Hann window with 50% overlap.
    Return (freq, Pxx)."""
    if not isinstance(d, types.GeneratorType):
        d = [ d ]
    prev = None
    if nfft is not None:
        assert nfft > 0
        w = numpy.hanning(nfft)
        q = numpy.zeros(nfft)
    pos = 0
    i = 0
    for b in d:
        if nfft is None:
            # Default FFT length: length of the first block.
            nfft = len(b)
            assert nfft > 0
            w = numpy.hanning(nfft)
            q = numpy.zeros(nfft)
        while pos + nfft <= len(b):
            if pos < 0:
                # Window straddles the previous block boundary.
                t = numpy.concatenate((prev[pos:], b[:pos+nfft]))
            else:
                # Bug fix: the slice is a numpy view, so the in-place
                # multiply below used to clobber the caller's array and
                # corrupt every overlapping window; take a copy instead.
                t = b[pos:pos+nfft].copy()
            t *= w
            tq = numpy.fft.fft(t)
            tq *= numpy.conj(tq)
            q += numpy.real(tq)
            del t
            del tq
            # Advance ~50% of the window (alternate rounding keeps the
            # average hop at exactly nfft/2 for odd nfft).
            pos += (nfft+(i%2)) // 2
            i += 1
        pos -= len(b)
        if pos + len(b) > 0:
            prev = b
        else:
            prev = numpy.concatenate((prev[pos+len(b):], b))
    if i > 0:
        # Normalize by window count, window energy and sample rate.
        q /= (i * numpy.sum(numpy.square(w)) * fs)
    f = numpy.arange(nfft) * (fs / float(nfft))
    f[nfft//2:] -= fs
    if sortfreq:
        f = numpy.concatenate((f[nfft//2:], f[:nfft//2]))
        q = numpy.concatenate((q[nfft//2:], q[:nfft//2]))
    return f, q
def pll(d, centerfreq, bandwidth):
    """Simulate the stereo pilot PLL.

    d          :: real input samples
    centerfreq :: nominal pilot frequency as a fraction of the sample rate
    bandwidth  :: loop bandwidth as a fraction of the sample rate

    Returns (y, phasei, phaseq, phaseerr, freq, phase) where y is the
    recovered carrier (cosine of the tracked phase) and the other arrays
    expose the loop's internal state per sample for debugging.
    NOTE: written for Python 2 (uses xrange).
    """
    # Frequency clamp limits, in radians per sample.
    minfreq = (centerfreq - bandwidth) * 2 * numpy.pi
    maxfreq = (centerfreq + bandwidth) * 2 * numpy.pi
    w = bandwidth * 2 * numpy.pi
    # 2nd-order IIR low-pass for the I/Q phase detector arms; numpy.poly
    # builds denominator coefficients from the two real poles.
    phasor_a = numpy.poly([ numpy.exp(-1.146*w), numpy.exp(-5.331*w) ])
    phasor_b = numpy.array([ sum(phasor_a) ])
    # 1st-order loop filter; gain scaled by the loop bandwidth.
    loopfilter_b = numpy.poly([ numpy.exp(-0.1153*w) ])
    loopfilter_b *= 0.62 * w
    n = len(d)
    y = numpy.zeros(n)
    phasei = numpy.zeros(n)
    phaseq = numpy.zeros(n)
    phaseerr = numpy.zeros(n)
    freq = numpy.zeros(n)
    phase = numpy.zeros(n)
    freq[0] = centerfreq * 2 * numpy.pi
    # IIR delay-line state for the I and Q arms, and for the loop filter.
    phasor_i1 = phasor_i2 = 0
    phasor_q1 = phasor_q2 = 0
    loopfilter_x1 = 0
    for i in xrange(n):
        psin = numpy.sin(phase[i])
        pcos = numpy.cos(phase[i])
        y[i] = pcos
        # Mix the input with the local oscillator (I and Q arms).
        pi = pcos * d[i]
        pq = psin * d[i]
        # Direct-form-II-transposed update of the 2nd-order smoother.
        pi = phasor_b[0] * pi - phasor_a[1] * phasor_i1 - phasor_a[2] * phasor_i2
        pq = phasor_b[0] * pq - phasor_a[1] * phasor_q1 - phasor_a[2] * phasor_q2
        phasor_i2 = phasor_i1
        phasor_i1 = pi
        phasor_q2 = phasor_q1
        phasor_q1 = pq
        phasei[i] = pi
        phaseq[i] = pq
        # Phase error ~ Q/I, saturated to +-1 when the loop is far off.
        if pi > abs(pq):
            perr = pq / pi
        elif pq > 0:
            perr = 1
        else:
            perr = -1
        phaseerr[i] = perr
        # First-order loop filter drives the frequency correction.
        dfreq = loopfilter_b[0] * perr + loopfilter_b[1] * loopfilter_x1
        loopfilter_x1 = perr
        if i + 1 < n:
            # Clamp the tracked frequency inside the capture range.
            freq[i+1] = min(maxfreq, max(minfreq, freq[i] - dfreq))
            p = phase[i] + freq[i+1]
            # Keep the phase accumulator within +-2*pi.
            if p > 2 * numpy.pi: p -= 2 * numpy.pi
            if p < -2 * numpy.pi: p += 2 * numpy.pi
            phase[i+1] = p
    return y, phasei, phaseq, phaseerr, freq, phase
def pilotLevel(d, fs, freqshift, nfft=None, bw=150.0e3):
    """Calculate level of the 19 kHz pilot vs noise floor in the guard band.
    d    :: block of raw I/Q samples or lazy I/Q sample stream
    fs   :: sample frequency in Hz
    nfft :: FFT length
    freqshift :: frequency offset in Hz
    bw   :: half-bandwidth of IF signal in Hz
    Return (pilot_power, guard_floor, noise)
    where pilot_power is the power of the pilot tone in dB
          guard_floor is the noise floor in the guard band in dB/Hz
          noise is guard_floor - pilot_power in dB/Hz
    """
    # Shift frequency
    if freqshift != 0:
        d = freqShiftIQ(d, freqshift / float(fs))
    # Filter
    b = scipy.signal.firwin(31, 2.0 * bw / fs, window='nuttall')
    d = firFilter(d, b)
    # Demodulate FM.
    d = quadratureDetector(d, fs)
    # Power spectral density.
    f, q = spectrum(d, fs=fs, nfft=nfft, sortfreq=False)
    # Locate 19 kHz bin.  Search +-(100 Hz + 5 bins) around the nominal
    # bin for the actual peak (tuning error tolerance).
    k19 = int(19.0e3 * len(q) / fs)
    kw = 5 + int(100.0 * len(q) / fs)
    k19 = k19 - kw + numpy.argmax(q[k19-kw:k19+kw])
    # Calculate pilot power.  Integrate over 3 bins around the peak;
    # fs/len(q) converts density to power, 1.5 compensates the Hann window.
    p19 = numpy.sum(q[k19-1:k19+2]) * fs * 1.5 / len(q)
    # Calculate noise floor in guard band (17..18 kHz, pilot-free region).
    k17 = int(17.0e3 * len(q) / fs)
    k18 = int(18.0e3 * len(q) / fs)
    guard = numpy.mean(q[k17:k18])
    p19db = 10 * numpy.log10(p19)
    guarddb = 10 * numpy.log10(guard)
    return (p19db, guarddb, guarddb - p19db)
def modulateAndReconstruct(sigfreq, sigampl, nsampl, fs, noisebw=None, ifbw=None, ifnoise=0, ifdownsamp=1):
    """Create a pure sine wave, modulate to FM, add noise, filter, demodulate.
    sigfreq    :: frequency of sine wave in Hz
    sigampl    :: amplitude of sine wave in Hz (carrier swing)
    nsampl     :: number of samples
    fs         :: sample rate in Hz
    noisebw    :: calculate noise after demodulation over this bandwidth
    ifbw       :: IF filter bandwidth in Hz, or None for no filtering
    ifnoise    :: IF noise level
    ifdownsamp :: downsample factor before demodulation
    Return (ampl, phase, noise)
    where ampl is the amplitude of the reconstructed sine wave (~ sigampl)
          phase is the phase shift after reconstruction
          noise is the standard deviation of noise in the reconstructed signal
    """
    # Make sine wave.
    sig0 = sigampl * numpy.sin(2*numpy.pi*sigfreq/fs * numpy.arange(nsampl))
    # Modulate to IF.
    fm = modulateFm(sig0, fs=fs, fcenter=0)
    # Add noise.  Complex white noise, split evenly between I and Q.
    # NOTE: uses numpy.random's global state; results are not reproducible
    # unless the caller seeds it.
    if ifnoise:
        fm += numpy.sqrt(0.5) * numpy.random.normal(0, ifnoise, nsampl)
        fm += 1j * numpy.sqrt(0.5) * numpy.random.normal(0, ifnoise, nsampl)
    # Filter IF.
    if ifbw is not None:
        b = scipy.signal.firwin(101, 2.0 * ifbw / fs, window='nuttall')
        fm = scipy.signal.lfilter(b, 1, fm)
        # Drop the filter's startup transient.  NOTE(review): 61 looks
        # tied to the 101-tap filter's group delay -- confirm derivation.
        fm = fm[61:]
    # Downsample IF.
    fs1 = fs
    if ifdownsamp != 1:
        fm = fm[::ifdownsamp]
        fs1 = fs / ifdownsamp
    # Demodulate.
    sig1 = quadratureDetector(fm, fs=fs1)
    # Fit original sine wave.  Least-squares against sin/cos/constant
    # basis; the (nsampl - k) offset aligns the basis phase with the
    # samples dropped by the IF filter and the detector.
    k = len(sig1)
    m = numpy.zeros((k, 3))
    m[:,0] = numpy.sin(2*numpy.pi*sigfreq/fs1 * (numpy.arange(k) + nsampl - k))
    m[:,1] = numpy.cos(2*numpy.pi*sigfreq/fs1 * (numpy.arange(k) + nsampl - k))
    m[:,2] = 1
    fit = numpy.linalg.lstsq(m, sig1)
    csin, ccos, coffset = fit[0]
    del fit
    # Calculate amplitude, phase.
    ampl1 = numpy.sqrt(csin**2 + ccos**2)
    phase1 = numpy.arctan2(-ccos, csin)
    # Calculate residual noise (input minus fitted sine).
    res1 = sig1 - m[:,0] * csin - m[:,1] * ccos
    if noisebw is not None:
        b = scipy.signal.firwin(101, 2.0 * noisebw / fs1, window='nuttall')
        res1 = scipy.signal.lfilter(b, 1, res1)
    noise1 = numpy.sqrt(numpy.mean(res1 ** 2))
    return ampl1, phase1, noise1
def rdsDemodulate(d, fs):
    """Demodulate RDS bit stream.
    d  :: block of baseband samples or lazy baseband sample stream
    fs :: sample frequency in Hz
    Return (bits, levels)
    where bits is a list of RDS data bits
          levels is a list of squared RDS carrier amplitudes

    Mixes the 57 kHz RDS subcarrier down with a free-running phasor,
    applies a matched filter per biphase impulse and differentially
    decodes the symbols, with a simple early/late timing adjustment.
    NOTE(review): written for Python 2 -- xrange, and bitsteps (a float
    from round()) is used directly as a slice index; Python 3 would need
    int() conversions.
    """
    # RDS carrier in Hz
    carrier = 57000.0
    # RDS bit rate in bit/s
    bitrate = 1187.5
    # Approximate nr of samples per bit.
    bitsteps = round(fs / bitrate)
    # Prepare FIR coefficients for matched filter.
    #
    # The filter is a root-raised-cosine with hard cutoff at f = 2/bitrate.
    # H(f) = cos(pi * f / (4*bitrate)) if f < 2*bitrate
    # H(f) = 0 if f >= 2*bitrate
    #
    # Impulse response:
    # h(t) = ampl * cos(pi*4*bitrate*t) / (1 - 4 * (4*bitrate*t)**2)
    #
    wlen = int(1.5 * fs / bitrate)
    w = numpy.zeros(wlen)
    for i in xrange(wlen):
        t = (i - 0.5 * (wlen - 1)) * 4.0 * bitrate / fs
        if abs(abs(t) - 0.5) < 1.0e-4:
            # lim {t->0.5} {cos(pi*t) / (1 - 4*t**2)} = 0.25 * pi
            w[i] = 0.25 * numpy.pi - 0.25 * numpy.pi * (abs(t) - 0.5)
        else:
            w[i] = numpy.cos(numpy.pi * t) / (1 - 4.0 * t * t)
    # Use Sinc window to reduce leakage.
    w *= sincw(wlen)
    # Scale filter such that peak output of filter equals original amplitude.
    w /= numpy.sum(w**2)
    demod_phase = 0.0   # running phase of the 57 kHz mixer, in cycles
    prev_a1 = 0.0       # matched-filter output of the previous symbol
    prevb = numpy.array([])
    pos = 0
    bits = [ ]
    levels = [ ]
    if not isinstance(d, types.GeneratorType):
        d = [ d ]
    for b in d:
        n = len(b)
        # I/Q demodulate with fixed 57 kHz phasor
        ps = numpy.arange(n) * (carrier / float(fs)) + demod_phase
        dem = b * numpy.exp(-2j * numpy.pi * ps)
        demod_phase = (demod_phase + n * carrier / float(fs)) % 1.0
        # Merge with remaining data from previous block
        prevb = numpy.concatenate((prevb[pos:], dem))
        pos = 0
        # Detect bits.
        while pos + bitsteps + wlen < len(prevb):
            # Measure average phase of first impulse of symbol.
            a1 = numpy.sum(prevb[pos:pos+wlen] * w)
            # Measure average phase of second impulse of symbol.
            a2 = numpy.sum(prevb[pos+bitsteps//2:pos+wlen+bitsteps//2] * w)
            # Measure average phase in middle of symbol.
            a3 = numpy.sum(prevb[pos+bitsteps//4:pos+wlen+bitsteps//4] * w)
            # Calculate inner product of first impulse and previous symbol.
            sym = a1.real * prev_a1.real + a1.imag * prev_a1.imag
            prev_a1 = a1
            if sym < 0:
                # Consecutive symbols have opposite phase; this is a 1 bit.
                bits.append(1)
            else:
                # Consecutive symbols are in phase; this is a 0 bit.
                bits.append(0)
            # Calculate inner product of first and second impulse.
            a1a2 = a1.real * a2.real + a1.imag * a2.imag
            # Calculate inner product of first impulse and middle phasor.
            a1a3 = a1.real * a3.real + a1.imag * a3.imag
            levels.append(-a1a2)
            if a1a2 >= 0:
                # First and second impulse are in phase;
                # we must be woefully misaligned.
                pos += 5 * bitsteps // 8
            elif a1a3 > -0.02 * a1a2:
                # Middle phasor is in phase with first impulse;
                # we are sampling slightly too early.
                pos += (102 * bitsteps) // 100
            elif a1a3 > -0.01 * a1a2:
                pos += (101 * bitsteps) // 100
            elif a1a3 < 0.02 * a1a2:
                # Middle phasor is opposite to first impulse;
                # we are sampling slightly too late.
                pos += (98 * bitsteps) // 100
            elif a1a3 < 0.01 * a1a2:
                pos += (99 * bitsteps) // 100
            else:
                # Middle phasor is zero; we are sampling just right.
                pos += bitsteps
    return (bits, levels)
def rdsDecodeBlock(bits, typ):
    """Decode a single 26-bit RDS block.

    bits :: list of 26 bits (0/1)
    typ  :: expected block type: "A", "B", "C", "C'", "D" or "E"

    Returns (block, ber):
      block -- the 16-bit information word, or None when undecodable;
      ber   -- 0 for a clean block, 1 when a single-bit error was
               corrected, 2 when decoding failed.
    """
    # TODO : there are still problems with bit alignment on weak stations
    # TODO : try to pin down the problem
    # Offset word added to the checkword of each block type.
    offsets = { "A": 0x0fc, "B": 0x198,
                "C": 0x168, "C'": 0x350,
                "D": 0x1B4, "E": 0 }
    # RDS checkword generator polynomial.
    gpoly = 0x5B9
    assert len(bits) == 26
    # Pack the bit list into one 26-bit integer, MSB first.
    word = 0
    for bit in bits:
        word = (word << 1) | bit
    # Strip the expected offset word.
    word ^= offsets[typ]
    # Long division by the generator polynomial yields the syndrome.
    syn = word
    for i in range(16):
        if syn & (1 << (25 - i)):
            syn ^= gpoly << (15 - i)
    if syn == 0:
        # Clean block: the top 16 bits are the information word.
        return (word >> 10, 0)
    # Compare the syndrome against every single-bit error pattern.
    pattern = 1
    for k in range(26):
        if pattern == syn:
            # Single-bit error at position k: flip it and accept.
            word ^= (1 << k)
            return (word >> 10, 1)
        pattern <<= 1
        if pattern & 0x400:
            pattern ^= gpoly
    # No single-bit error explains this syndrome.
    return (None, 2)
class RdsData(object):
    """Structure to hold common RDS data fields.

    All fields default to None/0 at class level and are filled in
    incrementally by rdsDecode() as groups arrive.
    """
    pi = None        # programme identification code
    pty = None       # programme type index (see ptyTable)
    tp = None        # traffic programme flag
    ta = None        # traffic announcement flag
    ms = None        # music/speech flag
    af = None        # list of alternate frequencies, in Hz
    di = None        # decoder information bits
    pin = None       # programme item number (day, hour, minute)
    pserv = None     # 8-byte programme service name buffer
    ptyn = None      # 8-byte programme type name buffer
    ptynab = None    # PTYN A/B toggle flag
    rtext = None     # 64-byte radio text buffer
    rtextab = None   # radio text A/B toggle flag
    time = None      # (mjd_day, hour, minute, utc_offset)
    # Scratch state for assembling the alternate-frequency list.
    tmp_afs = None
    tmp_aflen = 0
    # Consistency fix: rdsDecode() reads/writes 'tmp_aflfmode'; the old
    # default here was named 'tmp_afmode' and thus never used.
    tmp_aflfmode = 0
    ptyTable = [
        'None', 'News',
        'Current Affairs', 'Information',
        'Sport', 'Education',
        'Drama', 'Cultures',
        'Science', 'Varied Speech',
        'Pop Music', 'Rock Music',
        'Easy Listening', 'Light Classics M',
        'Serious Classics', 'Other Music',
        'Weather & Metr', 'Finance',
        "Children's Progs", 'Social Affairs',
        'Religion', 'Phone In',
        'Travel & Touring', 'Leisure & Hobby',
        'Jazz Music', 'Country Music',
        'National Music', 'Oldies Music',
        'Folk Music', 'Documentary',
        'Alarm Test', 'Alarm - Alarm !' ]
    def __str__(self):
        """Multi-line human-readable dump of the decoded fields."""
        if self.pi is None:
            return str(None)
        s = 'RDS PI=%-5d' % self.pi
        s += ' TP=%d' % self.tp
        if self.ta is not None:
            s += ' TA=%d' % self.ta
        else:
            s += '     '
        if self.ms is not None:
            s += ' MS=%d' % self.ms
        else:
            s += '     '
        s += ' PTY=%-2d %-20s' % (self.pty, '(' + self.ptyTable[self.pty] + ')')
        if self.ptyn is not None:
            # Bug fix: the original concatenated the literal '%r' instead
            # of %-formatting the value, printing "PTYN=%r<name>".
            s += ' PTYN=%r' % str(self.ptyn).strip('\x00')
        if self.di is not None or self.pserv is not None:
            s += '\n    '
            if self.di is not None:
                distr = '('
                distr += 'stereo' if self.di & 1 else 'mono'
                if self.di & 2:
                    distr += ',artificial'
                if self.di & 4:
                    distr += ',compressed'
                if self.di & 8:
                    distr += ',dynpty'
                distr += ')'
                s += ' DI=%-2d %-37s' % (self.di, distr)
            else:
                s += 45 * ' '
            if self.pserv is not None:
                s += ' SERV=%r' % str(self.pserv).strip('\x00')
        if self.time is not None or self.pin is not None:
            s += '\n    '
            if self.time is not None:
                (day, hour, mt, off) = self.time
                # RDS transmits Modified Julian Date; MJD day 0 is
                # 1858-11-17 in the proleptic Gregorian calendar.
                dt = datetime.date.fromordinal(day + datetime.date(1858, 11, 17).toordinal())
                s += ' TIME=%04d-%02d-%02d %02d:%02d UTC ' % (dt.year, dt.month, dt.day, hour, mt)
            else:
                s += 27 * ' '
            if self.pin is not None:
                (day, hour, mt) = self.pin
                s += ' PIN=d%02d %02d:%02d' % (day, hour, mt)
            else:
                s += 14 * ' '
        if self.af is not None:
            s += '\n    AF='
            for f in self.af:
                if f > 1.0e6:
                    s += '%.1fMHz ' % (f * 1.0e-6)
                else:
                    s += '%.0fkHz ' % (f * 1.0e-3)
        if self.rtext is not None:
            s += '\n    RT=%r' % str(self.rtext).strip('\x00')
        return s
def rdsDecode(bits, rdsdata=None):
    """Decode RDS data stream.
    bits :: list of RDS data bits
    rdsdata :: optional RdsData object to store RDS information
    Return (rdsdata, ngroups, errsoft, errhard)
    where rdsdata is the updated RdsData object
          ngroup is the number of correctly decoded RDS groups
          errsoft is the number of correctable bit errors
          errhard is the number of uncorrectable bit errors

    Scans for aligned groups of four 26-bit blocks (A, B, C/C', D); on any
    block failure the window slides by one bit to re-acquire alignment.
    """
    if rdsdata is None:
        rdsdata = RdsData()
    ngroup = 0
    errsoft = 0
    errhard = 0
    p = 0
    n = len(bits)
    while p + 4 * 26 <= n:
        (wa, ea) = rdsDecodeBlock(bits[p:p+26], "A")
        if wa is None:
            errhard += 1
            p += 1
            continue
        (wb, eb) = rdsDecodeBlock(bits[p+26:p+2*26], "B")
        if wb is None:
            errhard += 1
            p += 1
            continue
        # Bit 11 of block B selects the C' offset (version B groups).
        if (wb >> 11) & 1:
            (wc, ec) = rdsDecodeBlock(bits[p+2*26:p+3*26], "C'")
        else:
            (wc, ec) = rdsDecodeBlock(bits[p+2*26:p+3*26], "C")
        if wc is None:
            errhard += 1
            p += 1
            continue
        (wd, ed) = rdsDecodeBlock(bits[p+3*26:p+4*26], "D")
        if wd is None:
            errhard += 1
            p += 1
            continue
        errsoft += ea + eb + ec + ed
        ngroup += 1
        # Found an RDS group; decode it.
        typ = (wb >> 12)
        typb = (wb >> 11) & 1
        # PI, TP, PTY are present in all groups
        rdsdata.pi = wa
        rdsdata.tp = (wb >> 10) & 1
        rdsdata.pty = (wb >> 5) & 0x1f
        if typ == 0:
            # group type 0: TA, MS, DI, program service name
            rdsdata.ta = (wb >> 4) & 1
            rdsdata.ms = (wb >> 3) & 1
            dseg = wb & 3
            if rdsdata.di is None:
                rdsdata.di = 0
            # Each segment carries one DI bit at its own position.
            rdsdata.di &= ~(1 << dseg)
            rdsdata.di |= (((wb >> 2) & 1) << dseg)
            if rdsdata.pserv is None:
                rdsdata.pserv = bytearray(8)
            rdsdata.pserv[2*dseg] = wd >> 8
            rdsdata.pserv[2*dseg+1] = wd & 0xff
        if typ == 0 and not typb:
            # group type 0A: alternate frequencies
            for f in ((wc >> 8), wc & 0xff):
                if f >= 224 and f <= 249:
                    # Header code: announces how many AFs will follow.
                    rdsdata.tmp_aflen = f - 224
                    rdsdata.tmp_aflfmode = 0
                    rdsdata.tmp_afs = [ ]
                elif f == 250 and rdsdata.tmp_aflen > 0 and len(rdsdata.tmp_afs) < rdsdata.tmp_aflen:
                    # LF/MF filler: following code is an LF/MF frequency.
                    rdsdata.tmp_aflfmode = 1
                elif f >= 1 and f <= 204 and rdsdata.tmp_aflen > 0 and len(rdsdata.tmp_afs) < rdsdata.tmp_aflen:
                    if rdsdata.tmp_aflfmode:
                        rdsdata.tmp_afs.append(144.0e3 + f * 9.0e3)
                    else:
                        rdsdata.tmp_afs.append(87.5e6 + f * 0.1e6)
                    if len(rdsdata.tmp_afs) == rdsdata.tmp_aflen:
                        # List complete; publish and reset scratch state.
                        rdsdata.af = rdsdata.tmp_afs
                        rdsdata.tmp_aflen = 0
                        rdsdata.tmp_afs = [ ]
                        rdsdata.tmp_aflfmode = 0
        if typ == 1:
            # group type 1: program item number
            rdsdata.pin = (wd >> 11, (wd >> 6) & 0x1f, wd & 0x3f)
        if typ == 2:
            # group type 2: radio text
            dseg = wb & 0xf
            # A/B flag flip means a brand-new text; clear the buffer.
            if rdsdata.rtext is None or ((wb >> 4) & 1) != rdsdata.rtextab:
                rdsdata.rtext = bytearray(64)
                rdsdata.rtextab = (wb >> 4) & 1
            if typb:
                rdsdata.rtext[2*dseg] = (wd >> 8)
                rdsdata.rtext[2*dseg+1] = wd & 0xff
            else:
                rdsdata.rtext[4*dseg] = (wc >> 8)
                rdsdata.rtext[4*dseg+1] = wc & 0xff
                rdsdata.rtext[4*dseg+2] = (wd >> 8)
                rdsdata.rtext[4*dseg+3] = wd & 0xff
        if typ == 4 and not typb:
            # group type 4A: clock-time and date
            rdsdata.time = (((wb & 3) << 15) | (wc >> 1),
                            ((wc & 1) << 4) | (wd >> 12),
                            (wd >> 6) & 0x3f, (wd & 0x1f) - (wd & 0x20))
        if typ == 10 and not typb:
            # group type 10A: program type name
            dseg = wb & 1
            if rdsdata.ptyn is None or ((wb >> 4) & 1) != rdsdata.ptynab:
                rdsdata.ptyn = bytearray(8)
                rdsdata.ptynab = (wb >> 4) & 1
            rdsdata.ptyn[4*dseg] = (wc >> 8)
            # Bug fix: the next two lines referenced the misspelled name
            # 'dsseg', raising NameError whenever a 10A group arrived.
            rdsdata.ptyn[4*dseg+1] = wc & 0xff
            rdsdata.ptyn[4*dseg+2] = (wd >> 8)
            rdsdata.ptyn[4*dseg+3] = wd & 0xff
        # Go to next group.
        p += 4 * 26
    return (rdsdata, ngroup, errsoft, errhard)
# NOTE(review): module-level side effect -- this runs on import and requires
# 'IQdata.bin' in the working directory; consider a __main__ guard.
data = readRawSamples('IQdata.bin')
a=69  # scratch value left behind by the author; purpose unclear
| [
"noreply@github.com"
] | noreply@github.com |
3f9563afb634994641bf0cef0a0fbe0ead53ee7f | 3076fc0c3ceb285cc2db6f0d19f045c274adcceb | /[Samarinda]_Eramart/eramart_antasari_info.py | ed95b6a7f3b265937d96d1aa1fb67b332876aa0a | [] | no_license | evosys/CSVConverter | 121b0c23e18cf59b939352b751be5e8ce12b4999 | 6026870975001f34608fc1303ed922d0f8e474ef | refs/heads/master | 2020-03-17T12:40:14.406236 | 2019-02-15T08:41:51 | 2019-02-15T08:41:51 | 133,597,807 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 265 | py | # -*- coding: utf-8 -*-
# @Author: ichadhr
# @Date: 2018-10-08 16:44:24
# @Last Modified by: richard.hari@live.com
# @Last Modified time: 2018-10-12 16:39:10
# Application metadata consumed by the CSV converter.
_appname = "CSV Converter [ERAMART ANTASARI]"  # program display name
_version = "1.0.1.0"  # release version string
_by = "richard.hari@live.com"  # maintainer contact
| [
"ichadhr@gmail.com"
] | ichadhr@gmail.com |
717a50a4fc240d9760bfeec41f435549c6296ef7 | 50fc02b2d56860ed47987ad79b71da5fcabc4865 | /tools/detector_v2.py | f038242d70f791f46f88eab788ecd632cbe5a77a | [] | no_license | jxi24/TrackML | b63e92a394511b9cb70f71e054b90aa2970d8d23 | 5cd63048fcbc630392ec2588e4fe2066025249d0 | refs/heads/master | 2021-06-25T18:07:04.149287 | 2020-11-04T01:06:22 | 2020-11-04T01:06:22 | 134,465,252 | 0 | 2 | null | null | null | null | UTF-8 | Python | false | false | 5,899 | py | import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
class Detector:
def __init__(self,detector_file):
self.detector = pd.read_csv(detector_file)
def _load_element_info(self,volume_id,layer_id,module_id):
volume = self.detector['volume_id'] == volume_id
layer = self.detector['layer_id'] == layer_id
module = self.detector['module_id'] == module_id
detector_element = self.detector[volume & layer & module]
self.cshift = np.array([detector_element['cx'].item(),
detector_element['cy'].item(),
detector_element['cz'].item()])
self.rotation_matrix = np.matrix([
[detector_element['rot_xu'].item(),detector_element['rot_xv'].item(),detector_element['rot_xw'].item()],
[detector_element['rot_yu'].item(),detector_element['rot_yv'].item(),detector_element['rot_yw'].item()],
[detector_element['rot_zu'].item(),detector_element['rot_zv'].item(),detector_element['rot_zw'].item()]
])
self.pitch_u = detector_element['pitch_u'].item()
self.pitch_v = detector_element['pitch_v'].item()
self.module_t = detector_element['module_t'].item()
self.module_minhu = detector_element['module_minhu'].item()
self.module_maxhu = detector_element['module_maxhu'].item()
self.module_hv = detector_element['module_hv'].item()
def _position(self,ch,pitch,h):
return (ch + 0.5) * pitch - h
def _positon_inv(self,val,pitch,h):
return (val + h)/pitch - 0.5
def _calc_hu(self,v):
return (self.module_minhu * (self.module_hv + v)
+ self.module_maxhu * (self.module_hv - v)) / (2.0 * self.module_hv)
def GlobalToLocal(self,x,y,z,volume_id,layer_id,module_id):
"""
Purpose: Converts the position in global coordinates to the local coordinates.
Input: x, y, z -> Global Position
volume_id, layer_id, module_id -> identifications for detector lookup
Output: Local coordinates (u, v, w)
"""
self._load_element_info(volume_id,layer_id,module_id)
u, v, w = np.array(np.transpose(self.rotation_matrix).dot(np.array([x,y,z]) - self.cshift)).flatten()
return self._positon_inv(u,self.pitch_u,self._calc_hu(v)), \
self._positon_inv(v,self.pitch_v,self.module_hv)
def LocalToGlobal(self,ch0,ch1,volume_id,layer_id,module_id):
"""
Purpose: Converts the position in local coordinates to the global coordinates.
Input: ch0, ch1 -> channel location
volume_id, layer_id, module_id -> volume, layer, and module identifications for transformation
Output: Local coordinates (u, v, w)
"""
self._load_element_info(volume_id,layer_id,module_id)
v = self._position(ch1,self.pitch_v,self.module_hv)
u = self._position(ch0,self.pitch_u,self._calc_hu(v))
return np.array(self.rotation_matrix.dot(np.array([u,v,0.])) + self.cshift).flatten()
    def LocaluvwToGlobal(self,u,v,w,volume_id,layer_id,module_id):
        """
        Purpose: Converts a position in local (u, v, w) coordinates to global coordinates.
        Input: u, v, w -> local position on the module
               volume_id, layer_id, module_id -> volume, layer, and module identifications for transformation
        Output: Global coordinates (x, y, z) as a flat numpy array.
        NOTE(review): unlike GlobalToLocal/LocalToGlobal this method does not
        call _load_element_info, so it presumably relies on the element info
        already being loaded -- confirm against callers.
        """
        return np.array(self.rotation_matrix.dot(np.array([u,v,w])) + self.cshift).flatten()
    def LocaluvwToGlobal_vector(self,uvw,volume_id,layer_id,module_id):
        """Vector form of LocaluvwToGlobal: `uvw` is a length-3 sequence (u, v, w).

        The volume/layer/module ids are accepted for interface symmetry; as in
        LocaluvwToGlobal, no _load_element_info call is made here.
        """
        return np.array(self.rotation_matrix.dot(np.array(uvw)) + self.cshift).flatten()
def HitsToImage(self, cell_hits, volume_id, layer_id, module_id):
self._load_element_info(volume_id,layer_id,module_id)
nCellsU = int(2*self.module_maxhu/self.pitch_u)
nCellsV = int(2*self.module_hv/self.pitch_v)
module_img = np.zeros((nCellsU,nCellsV))
return module_img
central_u = 0
central_v = 0
count = 0
for index, row in cell_hits.iterrows():
module_img[int(row['ch0']-1)][int(row['ch1']-1)] = row['value']
central_u += int(row['ch0']-1)
central_v += int(row['ch1']-1)
count += 1.
# fig = plt.figure()
# ax = fig.add_subplot(121)
# im = plt.imshow(module_img, interpolation='nearest', origin='low',
# extent=[0,nCellsU-1,0,nCellsV-1])
center = (int(central_u/count),int(central_v/count))
aspect_ratio = self.module_hv/self.module_maxhu
nU = 50
nV = int(50*aspect_ratio)
centered_img = np.zeros((nU,nV))
for i in xrange(nU):
for j in xrange(nV):
centered_img[i][j] = module_img[center[0]-nU/2+i][center[1]-nV/2+j]
# ax = fig.add_subplot(122)
# im = plt.imshow(centered_img, interpolation='nearest', origin='low',
# extent=[0, nU, 0, nV])
# plt.show()
return module_img, centered_img
if __name__ == '__main__':
    # Manual smoke test: load one TrackML event and rasterise the cells of a
    # single hit. NOTE(review): both paths below are hard-coded to one
    # developer's machine and will fail elsewhere -- consider CLI arguments.
    from trackml.dataset import load_event
    hits, cells, particles, truth = load_event('/media/isaacson/DataStorage/kaggle/competitions/trackml-particle-identification/train_100_events/event000001008')
    location = '/media/isaacson/DataStorage/kaggle/competitions/trackml-particle-identification/detectors.csv'
    hit_id = 17667  # arbitrary hit chosen for the smoke test
    detector = Detector(location)
    # Select all cell rows belonging to the chosen hit and drop the id column.
    cell_rows = cells['hit_id'] == hit_id
    cell_hits = cells[cell_rows].drop('hit_id',axis=1)
    # Look up the detector element the hit belongs to.
    hit_row = hits['hit_id'] == hit_id
    volume_id = hits[hit_row]['volume_id'].item()
    layer_id = hits[hit_row]['layer_id'].item()
    module_id = hits[hit_row]['module_id'].item()
    full_img = detector.HitsToImage(cell_hits,volume_id,layer_id,module_id)
| [
"jackadsa@gmail.com"
] | jackadsa@gmail.com |
607e3e345e09801c6c8e70fac26134402c8085e2 | ae4106aaf7b9b1f2ec80f592f2224f3b1d73661a | /seminar01-localization/sdc/cycloid_movement_model.py | d89a98c0f8cd2100d0f578416cb42bc0a97d8f25 | [
"MIT"
] | permissive | yandexdataschool/sdc_course | 992a6571245b2b4457c87e8e8950630dcc350dba | 26908bdfabae3e4d798785ac6f971641540a2956 | refs/heads/spring2022 | 2022-05-05T00:34:20.827953 | 2022-05-04T02:28:13 | 2022-05-04T02:28:13 | 127,633,189 | 128 | 63 | MIT | 2022-04-06T09:58:43 | 2018-04-01T13:46:35 | Jupyter Notebook | UTF-8 | Python | false | false | 1,639 | py | # -*- coding: utf-8 -*-
import numpy as np
from .timestamp import Timestamp
from .movement_model_base import MovementModelBase
class CycloidMovementModel(MovementModelBase):
    """Implements movement of the car along a cycloid."""
    def __init__(self, x_vel=0, y_vel=0, omega=0, *args, **kwargs):
        """
        :param x_vel: Velocity of the rotation centre along the X axis
        :param y_vel: Velocity of the rotation centre along the Y axis
        :param omega: Angular velocity (rad/s) of the movement along the cycloid
        """
        super(CycloidMovementModel, self).__init__(*args, **kwargs)
        self.x_vel = x_vel
        self.y_vel = y_vel
        self.omega = omega
    def _move(self, dt):
        """Advance the car state by the time step `dt` (one explicit Euler step)."""
        assert isinstance(dt, Timestamp)
        car = self._car
        dt_sec = dt.to_seconds()
        # Current pose and speed of the car.
        x = car._position_x
        y = car._position_y
        vel = car._velocity
        yaw = car._yaw
        # Decompose the scalar speed into Cartesian velocity components.
        vel_x = vel * np.cos(yaw)
        vel_y = vel * np.sin(yaw)
        new_x = x + vel_x * dt_sec
        new_y = y + vel_y * dt_sec
        # Rotate the velocity about the moving rotation centre (x_vel, y_vel).
        new_vel_x = vel_x - self.omega * (vel_y - self.y_vel) * dt_sec
        new_vel_y = vel_y + self.omega * (vel_x - self.x_vel) * dt_sec
        # Advance the time and write back the new state.
        car.time += dt
        car._position_x = new_x
        car._position_y = new_y
        car._velocity = np.sqrt(new_vel_x**2 + new_vel_y**2)
        car._yaw = np.arctan2(new_vel_y, new_vel_x)
| [
"siri3us@yandex-team.ru"
] | siri3us@yandex-team.ru |
4279dc7b559d42c29c6b47aaccc87d38aaed86ca | 456894fcfba11a23fcd0d252c73b1cfea703d27a | /anynet.py | 1d56cb9726dbe072c85b40e67f5328089c661d5b | [] | no_license | ikbumheo/Stereo-3D-Detection | 4230851b41bfbe68776052bd3ca617f6fbd8fd0b | f70c5f49aea02c09debcaa01967d622815d94dcf | refs/heads/master | 2023-05-03T22:31:11.512728 | 2021-05-26T18:35:02 | 2021-05-26T18:35:02 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,786 | py | import argparse
import os
import torch
import torch.nn as nn
import torch.nn.parallel
import torch.optim as optim
import torch.utils.data
import torch.nn.functional as F
import time
import Models.AnyNet.utils.logger as logger
import torch.backends.cudnn as cudnn
import numpy as np
import Models.AnyNet.models.anynet as anynet
import tqdm
# Command-line interface: hyper-parameters for fine-tuning AnyNet on KITTI.
parser = argparse.ArgumentParser(description='Anynet fintune on KITTI')
parser.add_argument('--maxdisp', type=int, default=192,
                    help='maxium disparity')
parser.add_argument('--loss_weights', type=float, nargs='+', default=[0.5, 0.7, 1, 1])
parser.add_argument('--max_disparity', type=int, default=192)
parser.add_argument('--maxdisplist', type=int, nargs='+', default=[12, 3, 3])
# Dataset selection ('2015', '2012' or 'other') and file locations.
parser.add_argument('--datatype', default='2015',
                    help='datapath')
parser.add_argument('--datapath', default=None, help='datapath')
parser.add_argument('--epochs', type=int, default=110,
                    help='number of epochs to train')
parser.add_argument('--train_bsize', type=int, default=16,
                    help='batch size for training (default: 6)')
parser.add_argument('--test_bsize', type=int, default=32,
                    help='batch size for testing (default: 8)')
parser.add_argument('--save_path', type=str, default='results/finetune_anynet',
                    help='the path of saving checkpoints and log')
parser.add_argument('--resume', type=str, default=None,
                    help='resume path')
parser.add_argument('--lr', type=float, default=1e-4,
                    help='learning rate')
# Network-architecture options (2D/3D feature extractors and optional SPN).
parser.add_argument('--with_spn', action='store_true', help='with spn network or not')
parser.add_argument('--print_freq', type=int, default=25, help='print frequence')
parser.add_argument('--init_channels', type=int, default=1, help='initial channels for 2d feature extractor')
parser.add_argument('--nblocks', type=int, default=2, help='number of layers in each stage')
parser.add_argument('--channels_3d', type=int, default=4, help='number of initial channels 3d feature extractor ')
parser.add_argument('--layers_3d', type=int, default=4, help='number of initial layers in 3d network')
parser.add_argument('--growth_rate', type=int, nargs='+', default=[4,1,1], help='growth rate in the 3d network')
parser.add_argument('--spn_init_channels', type=int, default=8, help='initial channels for spnet')
parser.add_argument('--start_epoch_for_spn', type=int, default=5)
parser.add_argument('--pretrained', type=str, default='results/pretrained_anynet/checkpoint.tar',
                    help='pretrained model path')
parser.add_argument('--train_file', type=str, default=None)
parser.add_argument('--validation_file', type=str, default=None)
parser.add_argument('--load_npy', action='store_true')
parser.add_argument('--evaluate', action='store_true')
parser.add_argument('--split_file', type=str, default=None)
args = parser.parse_args()
from Models.AnyNet.dataloader import KITTILoader as DA
if args.datatype == '2015':
from Models.AnyNet.dataloader import KITTIloader2015 as ls
elif args.datatype == '2012':
from Models.AnyNet.dataloader import KITTIloader2012 as ls
elif args.datatype == 'other':
from Models.AnyNet.dataloader import KITTI_dataset as ls
def main():
    """Entry point: build the data loaders and model, optionally restore a
    checkpoint or pretrained weights, then train (or just evaluate) AnyNet.

    Side effects: creates `args.save_path`, writes a training log and one
    checkpoint file per epoch. Requires a CUDA device (model is moved to GPU).
    """
    global args
    log = logger.setup_logger(args.save_path + '/training.log')
    # The 'other' datatype uses explicit train/validation file lists; the
    # KITTI loaders take a split file instead.
    if args.datatype == 'other':
        train_left_img, train_right_img, train_left_disp, test_left_img, test_right_img, test_left_disp = ls.dataloader(
            args.datapath, args.train_file, args.validation_file)
    else:
        train_left_img, train_right_img, train_left_disp, test_left_img, test_right_img, test_left_disp = ls.dataloader(
            args.datapath, log, args.split_file)
    TrainImgLoader = torch.utils.data.DataLoader(
        DA.myImageFloder(train_left_img, train_right_img, train_left_disp, True),
        batch_size=args.train_bsize, shuffle=True, num_workers=4, drop_last=False)
    TestImgLoader = torch.utils.data.DataLoader(
        DA.myImageFloder(test_left_img, test_right_img, test_left_disp, False),
        batch_size=args.test_bsize, shuffle=False, num_workers=4, drop_last=False)
    if not os.path.isdir(args.save_path):
        os.makedirs(args.save_path)
    # Log the full configuration for reproducibility.
    for key, value in sorted(vars(args).items()):
        log.info(str(key) + ': ' + str(value))
    model = anynet.AnyNet(args)
    model = nn.DataParallel(model).cuda()
    optimizer = optim.Adam(model.parameters(), lr=args.lr, betas=(0.9, 0.999))
    log.info('Number of model parameters: {}'.format(sum([p.data.nelement() for p in model.parameters()])))
    args.start_epoch = 0
    cudnn.benchmark = True
    # Checkpoint restore has priority over pretrained weights; a resume
    # checkpoint also restores the optimizer state and the starting epoch.
    if args.resume:
        if os.path.isfile(args.resume):
            log.info("=> loading checkpoint '{}'".format(args.resume))
            checkpoint = torch.load(args.resume)
            args.start_epoch = checkpoint['epoch'] + 1
            model.load_state_dict(checkpoint['state_dict'])
            optimizer.load_state_dict(checkpoint['optimizer'])
            log.info("=> loaded checkpoint '{}' (epoch {})".format(args.resume, checkpoint['epoch']))
            # test(TestImgLoader, model, log, checkpoint['epoch'])
        else:
            log.info("=> no checkpoint found at '{}'".format(args.resume))
            log.info("=> Will start from scratch.")
    elif args.pretrained:
        if os.path.isfile(args.pretrained):
            checkpoint = torch.load(args.pretrained)
            # strict=False: the pretrained net may lack the SPN layers.
            model.load_state_dict(checkpoint['state_dict'], strict=False)
            log.info("=> loaded pretrained model '{}'".format(args.pretrained))
        else:
            log.info("=> no pretrained model found at '{}'".format(
                args.pretrained))
            log.info("=> Will start from scratch.")
    else:
        log.info("=> Will start from scratch.")
    start_full_time = time.time()
    if args.evaluate:
        test(TestImgLoader, model, log)
        return
    for epoch in range(args.start_epoch, args.epochs):
        # log.info('This is {}-th epoch'.format(epoch))
        adjust_learning_rate(optimizer, epoch)
        train(TrainImgLoader, model, optimizer, log, epoch)
        # Save a checkpoint after every epoch, then evaluate.
        savefilename = args.save_path + '/checkpoint' + str(epoch) + '.tar'
        torch.save({
            'epoch': epoch,
            'state_dict': model.state_dict(),
            'optimizer': optimizer.state_dict(),
        }, savefilename)
        # if epoch > 10 and (epoch % 3):
        test(TestImgLoader, model, log, epoch)
    test(TestImgLoader, model, log)
    log.info('full training time = {:.2f} Hours'.format((time.time() - start_full_time) / 3600))
def train(dataloader, model, optimizer, log, epoch=0):
    """Run one training epoch.

    Computes a weighted smooth-L1 disparity loss per AnyNet stage (plus the
    optional SPN refinement stage once `args.start_epoch_for_spn` is reached)
    and logs the per-stage average losses at the end of the epoch.
    """
    stages = 3 + args.with_spn
    losses = [AverageMeter() for _ in range(stages)]
    length_loader = len(dataloader)
    model.train()
    for batch_idx, (imgL, imgR, disp_L) in tqdm.tqdm(enumerate(dataloader), ascii=True, desc=("training epoch " + str(epoch)), total=(length_loader), unit='iteration'):
        imgL = imgL.cuda().float()
        imgR = imgR.cuda().float()
        disp_L = disp_L.cuda().float()
        optimizer.zero_grad()
        # Only pixels with a positive ground-truth disparity contribute.
        mask = disp_L > 0
        mask.detach_()
        outputs = model(imgL, imgR)
        # The SPN stage is warmed up: its loss only joins after
        # start_epoch_for_spn epochs.
        if args.with_spn:
            if epoch >= args.start_epoch_for_spn:
                num_out = len(outputs)
            else:
                num_out = len(outputs) - 1
        else:
            num_out = len(outputs)
        outputs = [torch.squeeze(output, 1) for output in outputs]
        loss = [args.loss_weights[x] * F.smooth_l1_loss(outputs[x][mask], disp_L[mask], reduction='mean')
                for x in range(num_out)]
        # One backward pass over the summed stage losses.
        sum(loss).backward()
        optimizer.step()
        for idx in range(num_out):
            losses[idx].update(loss[idx].item())
        # if (batch_idx % args.print_freq) == 0:
        #     info_str = ['Stage {} = {:.2f}({:.2f})'.format(x, losses[x].val, losses[x].avg) for x in range(num_out)]
        #     info_str = '\t'.join(info_str)
        #     log.info('Epoch{} [{}/{}] {}'.format(
        #         epoch, batch_idx, length_loader, info_str))
        torch.cuda.empty_cache()
    info_str = '\t'.join(['Stage {} = {:.4f}'.format(x, losses[x].avg) for x in range(stages)])
    log.info('Average train loss at {}: '.format(epoch) + info_str)
def test(dataloader, model, log, epoch=-1):
    """Evaluate the model: average 3-pixel disparity error per AnyNet stage.

    `epoch == -1` marks a standalone/final evaluation (only changes the log
    message). Requires CUDA; gradients are disabled during inference.
    """
    stages = 3 + args.with_spn
    D1s = [AverageMeter() for _ in range(stages)]
    length_loader = len(dataloader)
    # Per-stage history of batch error values, keyed by stage index as string.
    # NOTE(review): Error["3"] stays empty when --with_spn is off (stages == 3),
    # so np.max below would raise on an empty array -- verify intended usage.
    Error = {
        "0": [],
        "1": [],
        "2": [],
        "3": [],
    }
    model.eval()
    for batch_idx, (imgL, imgR, disp_L) in tqdm.tqdm(enumerate(dataloader), ascii=True, desc="Testing", total=(length_loader), unit='iteration'):
        imgL = imgL.cuda().float()
        imgR = imgR.cuda().float()
        disp_L = disp_L.cuda().float()
        with torch.no_grad():
            outputs = model(imgL, imgR)
            for x in range(stages):
                output = torch.squeeze(outputs[x], 1)
                D1s[x].update(error_estimating(output, disp_L).item())
                Error[str(x)].append(D1s[x].val)
        # info_str = '\t'.join(['Stage {} = {:.4f}({:.4f})'.format(x, D1s[x].val, D1s[x].avg) for x in range(stages)])
        # log.info('[{}/{}] {}'.format(
        #     batch_idx, length_loader, info_str))
    info_str = ', '.join(['Stage {}={:.3f}%'.format(x, D1s[x].avg * 100) for x in range(stages)])
    Error3 = np.asarray(Error["3"], dtype=np.float32)
    log.info("Max Error is {}, while Min Error is {}".format(np.max(Error3), np.min(Error3)))
    if epoch > -1:
        log.info('Average test 3-Pixel Error at Epoch {}: '.format(epoch) + info_str)
    else:
        log.info('Average test 3-Pixel Error: ' + info_str)
    torch.cuda.empty_cache()
def error_estimating(disp, ground_truth, maxdisp=192):
    """Fraction of valid ground-truth pixels whose disparity error exceeds
    both 3 px and 5% of the true disparity (KITTI 3-pixel / D1 outlier rate).

    Pixels with gt <= 0 or gt >= maxdisp are excluded from the evaluation.
    """
    gt = ground_truth
    valid = (gt > 0) * (gt < maxdisp)
    abs_err = torch.abs(disp - gt)
    err_sel = abs_err[valid]
    gt_sel = gt[valid]
    outliers = (err_sel > 3.) & (err_sel / gt_sel > 0.05)
    return outliers.sum().float() / valid.sum().float()
def adjust_learning_rate(optimizer, epoch):
    """Step-decay LR schedule: base LR through epoch 110, x0.1 until 150,
    x0.01 afterwards. Writes the new rate into every optimizer param group."""
    if epoch > 150:
        scale = 0.01
    elif epoch > 110:
        scale = 0.1
    else:
        scale = 1.0
    new_lr = args.lr * scale
    for group in optimizer.param_groups:
        group['lr'] = new_lr
class AverageMeter(object):
    """Keeps the most recent value and a count-weighted running average."""

    def __init__(self):
        self.reset()

    def reset(self):
        """Drop all accumulated statistics."""
        self.val = 0
        self.avg = 0
        self.sum = 0
        self.count = 0

    def update(self, val, n=1):
        """Fold in `val`, observed `n` times, and refresh the running average."""
        self.val = val
        self.sum += n * val
        self.count += n
        self.avg = self.sum / self.count
if __name__ == '__main__':
main()
| [
"mamoanwar97@gmail.com"
] | mamoanwar97@gmail.com |
4ff9ebdceafbe22d330788d06fdea227a8394ee1 | 544c02ae44595f8ad8cd33ff71d93bddc01f8e49 | /src/GUI/ControlMainWindow.py | 4c3b7c05548a54e1a57c062697ddafb7618c9656 | [] | no_license | Thoross/PS2-Stats | ca4e9a8e00cbf1acf8df01e19312227ae0b89d94 | c89007f34c5b51da7585fcd50d0b500ec2575620 | refs/heads/master | 2021-01-21T07:39:42.689357 | 2013-10-25T16:48:08 | 2013-10-25T16:48:08 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 13,725 | py | '''
PS2-Stats : Python module for PlanetSide 2 Stat tracking.
Copyright (C) 2013 Brendan Betts (brendan.betts@live.com)
License: GNU LGPL
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
Created on Jul 2, 2013
'''
from PySide.QtGui import QMainWindow
from src.GUI.mainwindow import Ui_MainWindow
from src.controllers.CharacterController import CharacterController
from datetime import timedelta
from src.Utils.utils import *
import sys
from src.GUI.ControlErrorDialog import ControlErrorDialog
class ControlMainWindow(QMainWindow):
    """Main window of the stat tracker: search a PlanetSide 2 character by
    name and display its general info and kill statistics.

    The per-faction widget wiring is table-driven; the previous version
    repeated near-identical blocks per faction (and cleared
    lbl_TR_kills_value twice).
    """

    # Full faction name (as reported by the API) -> short code used both as
    # key into the player's per-faction dicts and inside the widget names.
    # Anything unrecognised (i.e. Vanu Sovereignty) maps to 'VS'.
    _FACTION_CODES = {'New Conglomerate': 'NC', 'Terran Republic': 'TR'}

    # Every per-faction label (captions and value widgets). The generated .ui
    # file is inconsistently named ('domination' vs 'dominations'), so the
    # exact widget names are listed explicitly rather than derived.
    _FACTION_LABELS = {
        'NC': ('lbl_NC_kills', 'lbl_NC_kills_value',
               'lbl_NC_deaths', 'lbl_NC_deaths_value',
               'lbl_NC_domination', 'lbl_NC_domination_value',
               'lbl_NC_revenge', 'lbl_NC_revenge_value'),
        'TR': ('lbl_TR_kills', 'lbl_TR_kills_value',
               'lbl_TR_deaths', 'lbl_TR_deaths_value',
               'lbl_TR_dominations', 'lbl_TR_dominations_value',
               'lbl_TR_revenge', 'lbl_TR_revenge_value'),
        'VS': ('lbl_VS_kills', 'lbl_VS_kills_value',
               'lbl_VS_deaths', 'lbl_VS_deaths_value',
               'lbl_VS_dominations', 'lbl_VS_dominations_value',
               'lbl_VS_revenge', 'lbl_VS_revenge_value'),
    }

    # Value widgets that receive the per-faction numbers.
    _FACTION_VALUE_LABELS = {
        'NC': {'kills': 'lbl_NC_kills_value',
               'deaths': 'lbl_NC_deaths_value',
               'dominations': 'lbl_NC_domination_value',
               'revenge': 'lbl_NC_revenge_value'},
        'TR': {'kills': 'lbl_TR_kills_value',
               'deaths': 'lbl_TR_deaths_value',
               'dominations': 'lbl_TR_dominations_value',
               'revenge': 'lbl_TR_revenge_value'},
        'VS': {'kills': 'lbl_VS_kills_value',
               'deaths': 'lbl_VS_deaths_value',
               'dominations': 'lbl_VS_dominations_value',
               'revenge': 'lbl_VS_revenge_value'},
    }

    # Non-faction value widgets blanked by the clear_* methods.
    _CHARACTER_INFO_LABELS = (
        'lbl_name_value', 'lbl_available_certs_value', 'lbl_earned_certs_value',
        'lbl_rank_value', 'lbl_cph_value', 'lbl_spent_certs_value',
        'lbl_total_certs_value', 'lbl_server_value', 'lbl_faction_value',
        'lbl_timeplayed_value', 'lbl_gifted_certs_value',
        'lbl_percent_to_next_cert_value', 'lbl_cpm_value', 'lbl_outfit_value',
        'lbl_percent_to_next_rank_value', 'lbl_score_value', 'lbl_spm_value',
        'lbl_sph_value', 'lbl_facility_defended_value',
        'lbl_facility_captured_value', 'lbl_medals_earned_value',
        'lbl_ribbons_earned_value')
    _KILLS_INFO_LABELS = (
        'lbl_kills_value', 'lbl_kph_value', 'lbl_kpm_value', 'lbl_death_value',
        'lbl_dpm_value', 'lbl_dph_value', 'lbl_kdr_value', 'lbl_assists_value',
        'lbl_revenge_value', 'lbl_dominations_value', 'lbl_aph_value',
        'lbl_apm_value')

    def __init__(self, parent=None):
        super(ControlMainWindow, self).__init__(parent)
        self.ui = Ui_MainWindow()
        self.ui.setupUi(self)
        # Wire the search/clear controls and the Quit menu action.
        self.ui.btn_search.clicked.connect(self.on_search)
        self.ui.txt_name.returnPressed.connect(self.on_search)
        self.ui.btn_clear.clicked.connect(self.on_clear)
        self.ui.actionQuit.triggered.connect(sys.exit)

    def _faction_code(self, faction):
        """Return the short code ('NC'/'TR'/'VS') for a full faction name."""
        return self._FACTION_CODES.get(faction, 'VS')

    def _label(self, name):
        """Return the ui label widget with the given attribute name."""
        return getattr(self.ui, name)

    def on_search(self):
        """Look up the character typed into the name box and show its stats."""
        controller = CharacterController()
        if self.ui.txt_name.text() != "":
            name = self.ui.txt_name.text()
            player = controller.get_character(name)
            if player != "Not found.":
                self.set_character_info(player)
                self.set_kills_info(player)
            else:
                error = ControlErrorDialog(None, "No Player")
                error.show()

    def on_clear(self):
        """Reset the search box and blank both stat tabs."""
        self.ui.txt_name.clear()
        self.ui.txt_name.hasFocus()
        self.clear_character_info_tab()
        self.clear_kills_info_tab()

    def set_character_info(self, player):
        """Fill the character-info tab from a player object."""
        self.ui.lbl_name_value.setText(player.name)
        self.ui.lbl_rank_value.setText(player.level)
        self.ui.lbl_score_value.setText(comma_thousands(player.score))
        self.ui.lbl_timeplayed_value.setText(get_playtime(int(player.time_played)))
        self.ui.lbl_spm_value.setText(round_to_two_decimal(player.score_per_minute))
        self.ui.lbl_sph_value.setText(round_to_two_decimal(player.score_per_hour))
        self.ui.lbl_faction_value.setText(player.faction)
        self.ui.lbl_earned_certs_value.setText(comma_thousands(player.earned_certs))
        self.ui.lbl_available_certs_value.setText(comma_thousands(player.available_certs))
        self.ui.lbl_total_certs_value.setText(comma_thousands(player.total_certs))
        self.ui.lbl_spent_certs_value.setText(comma_thousands(player.spent_certs))
        self.ui.lbl_gifted_certs_value.setText(comma_thousands(player.gifted_certs))
        self.ui.lbl_cpm_value.setText(round_to_two_decimal(player.certs_per_minute))
        self.ui.lbl_cph_value.setText(round_to_two_decimal(player.certs_per_hour))
        self.ui.lbl_percent_to_next_cert_value.setText(round_to_two_decimal(float(player.percentage_to_next)*100)+"%")
        self.ui.lbl_server_value.setText(player.server)
        self.ui.lbl_outfit_value.setText(player.outfit)
        self.ui.lbl_percent_to_next_rank_value.setText(player.percentage_to_next_level+"%")
        self.ui.lbl_facility_defended_value.setText(comma_thousands(player.facilities_defended))
        self.ui.lbl_facility_captured_value.setText(comma_thousands(player.facilities_captured))
        self.ui.lbl_medals_earned_value.setText(comma_thousands(player.medals))
        self.ui.lbl_ribbons_earned_value.setText(comma_thousands(player.ribbons))

    def set_kills_info(self, player):
        """Fill the kill-statistics tab from a player object."""
        self.ui.lbl_kills_value.setText(comma_thousands(player.kills))
        self.ui.lbl_kph_value.setText(round_to_two_decimal(player.kills_per_hour))
        self.ui.lbl_kpm_value.setText(round_to_two_decimal(player.kills_per_minute))
        self.ui.lbl_death_value.setText(comma_thousands(player.deaths))
        self.ui.lbl_dpm_value.setText(round_to_two_decimal(player.deaths_per_minute))
        self.ui.lbl_dph_value.setText(round_to_two_decimal(player.deaths_per_hour))
        self.ui.lbl_kdr_value.setText(round_to_two_decimal(player.kill_death_ratio))
        self.ui.lbl_assists_value.setText(comma_thousands(player.assists))
        self.display_faction_stats(player)
        self.ui.lbl_revenge_value.setText(comma_thousands(player.revenge_count))
        self.ui.lbl_dominations_value.setText(comma_thousands(player.dominations))
        self.ui.lbl_aph_value.setText(round_to_two_decimal(player.assists_per_hour))
        self.ui.lbl_apm_value.setText(round_to_two_decimal(player.assists_per_minute))

    def clear_character_info_tab(self):
        """Blank every value label on the character-info tab."""
        for name in self._CHARACTER_INFO_LABELS:
            self._label(name).clear()

    def clear_kills_info_tab(self):
        """Blank every value label on the kills tab, including the
        per-faction value widgets."""
        for name in self._KILLS_INFO_LABELS:
            self._label(name).clear()
        for labels in self._FACTION_VALUE_LABELS.values():
            for name in labels.values():
                self._label(name).clear()

    def display_faction_stats(self, player):
        """Show kills/deaths/dominations/revenge against each enemy faction.

        The player's own faction labels are hidden via toggle_faction_labels;
        only the two enemy factions get numbers.
        """
        own = self._faction_code(player.faction)
        self.toggle_faction_labels(player.faction)
        for code in ('NC', 'TR', 'VS'):
            if code == own:
                continue
            widgets = self._FACTION_VALUE_LABELS[code]
            self._label(widgets['kills']).setText(
                comma_thousands(player.kills_per_faction[code]))
            self._label(widgets['deaths']).setText(
                comma_thousands(player.killed_by_faction[code]))
            self._label(widgets['dominations']).setText(
                comma_thousands(player.dominations_per_faction[code]))
            self._label(widgets['revenge']).setText(
                comma_thousands(player.revenge_count_per_faction[code]))

    def toggle_faction_labels(self, faction):
        """Hide the labels of the player's own faction and show the other two."""
        own = self._faction_code(faction)
        for code, names in self._FACTION_LABELS.items():
            visible = code != own
            for name in names:
                self._label(name).setVisible(visible)
"brendan.betts@live.com"
] | brendan.betts@live.com |
f49c0d8dfdd6361a5f7158798a5989104e06803c | bfcf59a22724d4b8bafe5674ff7a9ccde8882d62 | /added_8_height_ctr.py | 13b38a24a14a09552dd9c1806162a1601c419915 | [] | no_license | rwqzcq/basemap_storm_plot | cebb74e1edb0c4acb4abe7cb0db59c92c062c571 | bc40ef888d7ef4a3903e810b417a802f9183a5b3 | refs/heads/main | 2023-08-21T07:51:06.712484 | 2021-10-10T10:15:15 | 2021-10-10T10:15:15 | 415,549,072 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,918 | py | from matplotlib import pyplot as plt
import matplotlib.cm as cm
import pandas as pd
import numpy as np
import seaborn as sns
from tqdm import tqdm
from utils import *
from math import ceil
import os
df = read_file()
def get_qujian(num, _max=1):
    """Map a height value to its histogram bin.

    Bins cover [0, 10) in steps of ``_max`` km; anything at or above 10 goes
    into the '>= 10' overflow bin.

    Fix: the previous version never produced the final bin ending at 10
    (e.g. with the default step, 9.5 was mislabelled '>= 10'); the edge list
    now always ends at 10.

    :param num: the value to bin
    :param _max: bin width (step of the edge sequence), default 1
    :return: tuple (label, index) where `index` preserves bin order so the
             caller can sort bins for plotting.
    """
    # e.g. _max=1 -> [0, 1, ..., 9, 10]; _max=3 -> [0, 3, 6, 9, 10]
    edges = list(range(0, 10, _max)) + [10]
    for idx in range(len(edges) - 1):
        lo, hi = edges[idx], edges[idx + 1]
        if lo <= num < hi:
            return (f'{lo}-{hi}', idx)
    # num >= 10 (negative inputs also fall through, as before)
    return ('>= 10', len(edges) - 1)
def main(city):
    """Plot the Height_ctr frequency distribution for one city.

    Produces one bar chart for all months plus one per month (June-August),
    saved under ./images/. `city` must be a boolean column of the global `df`.
    Note: the Chinese column names below ('区间' interval, '数量' count,
    '占比' percentage, '索引' sort index) are runtime data used by the
    groupby/plot calls and are left unchanged.
    """
    city_df = df[df[city] == True]
    def _plot(city_df, path):
        # Bin each height, count storms per bin and convert to percentages.
        city_df['qujian'] = city_df['Height_ctr'].apply(get_qujian)
        data = city_df.groupby('qujian')['Index_stm'].count().to_frame()
        data = data.reset_index()
        data.columns = ['区间', '数量']
        data['占比'] = data['数量'].apply(lambda x: round(x / data['数量'].sum(), 4) * 100)
        # Split the (label, index) tuples and sort bins into natural order.
        data['索引'] = data['区间'].apply(lambda x: x[1])
        data['区间'] = data['区间'].apply(lambda x: x[0])
        data = data.sort_values(['索引'])
        plt.figure(figsize=(12, 10))
        ax = sns.barplot(data=data, x='区间', y='占比', color='#96bad6')
        ax.set_xlabel('Height_stm(km)', fontsize=16) # set the x-axis title
        ax.set_ylabel('Frequency(%)', fontsize=16) # set the y-axis title
        plt.savefig(path, dpi=200)
        # data.to_excel(path.replace('png', 'xlsx'), index=False)
    base_path = './images/新增_8_高度_ctr'
    if not os.path.exists(base_path):
        os.mkdir(base_path)
    _plot(city_df, f'{base_path}/{city}_all.png')
    for month in range(6, 9):
        _city_df = city_df[city_df['Month'] == month]
        _plot(_city_df, f'{base_path}/{city}_{month}.png')
if __name__ == '__main__':
for city in ['tianshui', 'lanzhou', 'zhangye']:
print(city)
main(city) | [
"rwqCCNUimd@163.com"
] | rwqCCNUimd@163.com |
751c4f954046428b61efaafd22a8356d4489ddcf | 7e9c0243c48bbf0ddca9779ef03fc13bb9ac0496 | /t20.py | 12d2b72ba0d2fda0af04cbc0ed30cab0ad37b4ce | [] | no_license | suchismitarout/tt | c47f1f59659d2678392e2f0c3aaee8cfaa147ff4 | 54a5b625a82dab854b679050d67e340e74d71edd | refs/heads/master | 2020-09-16T20:25:34.146741 | 2019-11-25T06:52:07 | 2019-11-25T06:52:07 | 223,880,569 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 93 | py |
# Demonstrates the file context manager: inspect a few attributes of the
# file object while it is open; `with` guarantees the file is closed on exit.
with open("foo.txt", "r") as fr:
    print(fr.name)
    print(fr.closed)  # False here: the file is still open inside the block
    print(fr.mode)    # 'r' (read-only text mode)
| [
"suchismitarout47@gmail.com"
] | suchismitarout47@gmail.com |
a3d924be75abc189e38a765eea034cee51e33435 | 4a1a319ef5040a5b417f0ba63a8751b424b82bc3 | /programming_101/find_number_2.py | 2c66efd4c6542233e8025b4482080c8839738add | [] | no_license | saratrickett/python-kata | b574795bd17f5a563b1fb44cc0614cbf91fc478e | 3397ee42f94d8e5b88446a179ad8b93d95c5dc4c | refs/heads/master | 2021-05-05T23:55:16.978691 | 2018-02-27T01:23:53 | 2018-02-27T01:23:53 | 116,843,015 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,326 | py | # DATA TYPES
#typing.io - typing practice for programmers
# str - string
_str = 'test'
_str2 = "tst"
# str.format substitutes the named placeholder; the f-string below is the
# modern equivalent (Python 3.6+).
_str3 = '{_str2} test'.format(_str2=_str2)
_str4 = f'{_str} test'
# bool - boolean
_bool = True
_bool_false = False
# int - integer
_int = 1
_int_negative = -1
# float
_float = 1.0
_float_negative = -1.0
# list
# dict
_list = ['alex', 'grande', 'sara',
         'danie', 'laura', 'jen']
for name in _list:
    print(name)
_list2 = list()
# casting
output = int('1') + 1
output2 = '1' + str(1)
print(output)
print(output2)
# Truthiness demos: 0 is falsy, any non-zero number is truthy.
print(bool(0))
print(bool(1))
print(bool(.1))
print('---')
# Empty strings/containers and None are falsy; non-empty strings (even
# 'false') are truthy.
print(len(''))
print(len('false'))
print(bool('false'))
print(bool(None))
print(bool([]))
# for {variable_name} in <collection>:
#     <action>
name = 'name'
for character in name:
    print(character)
"""
practice: create a function that takes an input, then prints each character of the input
"""
def print_character(input):
    """Print every character of `input` on its own line.

    Note: the parameter keeps its original name (which shadows the built-in
    `input`) so keyword callers are unaffected.
    """
    for ch in input:
        print(ch)
print_character('supercalifragilistic')
"""practice: create a function that takes two inputs, then prints True/False whether or not
the first input is contained within the second input """
# == compares
def search_character(search, find):
    """Print (and return) whether `search` occurs anywhere in `find`.

    Fix: the previous version printed True once per matching character and
    never printed False, contrary to the stated exercise ("prints True/False
    whether or not the first input is contained within the second").
    The boolean is also returned so callers can use the result.
    """
    found = search in find
    print(found)
    return found
search_character('a','Sara') | [
"strickett4@gmail.com"
] | strickett4@gmail.com |
6376c84e434aa382680dcd27d2d6633c72ff2c51 | 8c681ef14f5447c83c96c28f4b458e823afd91a9 | /notes/models.py | 38998f03e18a654311e184bf375c99a9c5a50375 | [] | no_license | saurabht16/Note_App | 03d811016bdc3813a8327fa5356044c6cb7dd008 | e69ff4e7a8dfea2b49971705ce098e8c7da4d3f4 | refs/heads/master | 2020-03-30T06:34:14.417415 | 2019-01-15T15:19:18 | 2019-01-15T15:19:18 | 150,871,372 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 451 | py | from django.db import models
# Create your models here.
from django.db import models
from django.contrib.auth.models import User
class Notes(models.Model):
    """A user-created note with a title, free-form body and comma-style labels."""

    title = models.CharField(max_length=200)
    description = models.TextField()
    labels = models.CharField(max_length=400)
    # on_delete made explicit: CASCADE was the implicit default before
    # Django 2.0 made the argument mandatory, so behaviour is unchanged.
    created_by = models.ForeignKey(User, related_name='notes',
                                   on_delete=models.CASCADE)

    def __str__(self):
        """A string representation of the model."""
        return self.title
"tewary.saurabh@gmail.com"
] | tewary.saurabh@gmail.com |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.