blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 2 616 | content_id stringlengths 40 40 | detected_licenses listlengths 0 69 | license_type stringclasses 2 values | repo_name stringlengths 5 118 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringlengths 4 63 | visit_date timestamp[us] | revision_date timestamp[us] | committer_date timestamp[us] | github_id int64 2.91k 686M ⌀ | star_events_count int64 0 209k | fork_events_count int64 0 110k | gha_license_id stringclasses 23 values | gha_event_created_at timestamp[us] | gha_created_at timestamp[us] | gha_language stringclasses 213 values | src_encoding stringclasses 30 values | language stringclasses 1 value | is_vendor bool 2 classes | is_generated bool 2 classes | length_bytes int64 2 10.3M | extension stringclasses 246 values | content stringlengths 2 10.3M | authors listlengths 1 1 | author_id stringlengths 0 212 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
de783c0810adfd10d99336636111dc5f0ba13b34 | 32e62ec742ac1d8ae8e368813f6a6b48a414ecc5 | /raw/virtual_assistant_version.3.0.py | 53210347074cc4e7a37ee302dc6691908f34f02f | [] | no_license | anurag-shakya/Virtual-Assistant | 2841171635ab5a31cfc90985891edaa84ea7d169 | c42ef710c72d3bbde6e90d7cfa12c84b87c067e3 | refs/heads/master | 2020-03-09T19:42:48.946714 | 2018-04-16T17:43:08 | 2018-04-16T17:43:08 | 128,963,664 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,247 | py | # -*- coding: utf-8 -*-
"""
Created on Sat Apr 7 18:43:57 2018
virtual_assistant_version.3.py
@author: s.anurag
"""
#importing Libraries
import speech_recognition as sr
import webbrowser
import os
import csv
from datetime import datetime
# to save every commands in directory "D:/VA/history/"
def save_cmd(audio, history_dir="D:/VA/history/"):
    """Persist a recorded voice command as a numbered WAV file.

    The file is named "microphone record (<n>).wav" where <n> is one more
    than the number of files already in *history_dir*.

    Args:
        audio: speech_recognition AudioData-like object; only its
            get_wav_data() method is used here (assumed interface).
        history_dir: directory recordings are written to; defaults to the
            original hard-coded history folder, so existing callers are
            unaffected.
    """
    # next(os.walk(...))[2] lists only regular files (not subdirectories),
    # which gives the next free recording index.
    next_index = 1 + len(next(os.walk(history_dir))[2])
    file_name = os.path.join(history_dir, "microphone record (" + str(next_index) + ").wav")
    with open(file_name, "wb") as f:
        f.write(audio.get_wav_data())
# to manage a log file in directory "D:/VA/history/log.csv"
def save_log(text, log_path='D:/VA/history/log.csv'):
    """Append a [date, time, text] row to the command log CSV.

    Args:
        text: the recognized command text to record.
        log_path: CSV file appended to; defaults to the original
            hard-coded log location, so existing callers are unaffected.
    """
    t = datetime.now()
    # e.g. "07 April 2018 Saturday" and "18:43:57"
    date, time = t.strftime("%d %B %Y %A"), t.strftime("%H:%M:%S")
    # newline='' stops the csv module from emitting blank lines on Windows.
    with open(log_path, "a", newline='') as log_data:
        write_object = csv.writer(log_data)
        write_object.writerow([date, time, text])
# Main interactive loop: listen for a command, transcribe it with Google's
# speech API, and dispatch on keywords (WEB / MUSIC / OPEN / CLOSE / search).
showmenu = True
while(showmenu):
    # Fresh recognizers each pass: r captures audio, r2 transcribes it.
    r2 = sr.Recognizer()
    r = sr.Recognizer()
    with sr.Microphone() as source:
        # Adapt to noise
        r.adjust_for_ambient_noise(source)
        # Menu
        print("\n OPTIONS: \n---------- \n [ URL | Search Web | Search Music | Open/Close Apps]")
        print("\nSay Something!")
        audio = r.listen(source)
        print("wait...")
        save_cmd(audio)
    try:
        # Upper-cased so the keyword checks below are case-insensitive.
        stmnt = r2.recognize_google(audio).upper()
        save_log(stmnt)
    except:
        # NOTE(review): bare except also swallows network/quota errors, not
        # just unrecognized speech -- consider sr.UnknownValueError/RequestError.
        print("inaudible!!! \n Try again")
        continue
    if "WEB" in stmnt:
        # Obtain audio when word 'web' is heard -- look the keyword up on Wikipedia.
        url = "https://en.wikipedia.org/wiki/"
        with sr.Microphone() as source:
            print("please say a keyword to be searched..")
            audio = r.listen(source)
            print("searching...")
            save_cmd(audio)
        try :
            get = r2.recognize_google(audio)
            print("You said : " + get)
            save_log(get)
            webbrowser.open_new(url+get)
        except sr.UnknownValueError:
            print("\n Sorry! coudn't Understand ")
        except sr.RequestError as e:
            # NOTE(review): the string has no {} placeholder, so .format(e)
            # silently drops the error detail.
            print("\n Failed to retrieve results".format(e))
    elif "MUSIC" in stmnt:
        # Obtain audio when word 'music' is heard -- search the song on YouTube.
        url2 = "https://www.youtube.com/results?search_query="
        with sr.Microphone() as source:
            print("which song you want to play..")
            audio = r.listen(source)
            print("playing...")
            save_cmd(audio)
        try :
            text = r2.recognize_google(audio)
            print("You said : " + text)
            save_log(text)
            webbrowser.open_new(url2+text)
        except sr.UnknownValueError:
            print("\n Sorry! coudn't Understand ")
        except sr.RequestError as e:
            print("\n Failed to retrieve results".format(e))
    elif "OPEN" in stmnt:
        # Obtain audio when word 'open' is heard -- launch a known application.
        with sr.Microphone() as source:
            print("please name app to be opened..")
            audio = r.listen(source)
            print("got it")
            save_cmd(audio)
        try :
            text = r2.recognize_google(audio).upper()
            print("You said : " + text)
            save_log(text)
            if "CHROME" in text or "GOOGLE" in text:
                try:
                    print("opening google chrome...")
                    # Windows-only: hard-coded Chrome install path.
                    os.startfile('C:/Program Files (x86)/Google/Chrome/Application/chrome.exe')
                except Exception as e:
                    print(str(e))
            elif "CALCULATOR" in text:
                try:
                    print("opening Calculator...")
                    os.system('start calc.exe')
                except Exception as e:
                    print(str(e))
            else:
                print("couldn't find the app \n or Try opening : 'chrome' 'calculator' 'google' ")
        except sr.UnknownValueError:
            print("\n Sorry! coudn't Understand ")
        except sr.RequestError as e:
            print("\n Failed to retrieve results".format(e))
    elif "CLOSE" in stmnt:
        # Obtain audio when word 'close' is heard -- kill a known application.
        with sr.Microphone() as source:
            print("please name app to be closed..")
            audio = r.listen(source)
            print("got it")
            save_cmd(audio)
        try :
            text = r2.recognize_google(audio).upper()
            print("You said : " + text)
            save_log(text)
            if "CHROME" in text or "GOOGLE" in text:
                try:
                    print("close google chrome...")
                    os.system('TASKKILL /F /IM chrome.exe')
                except Exception as e:
                    print(str(e))
            elif "CALCULATOR" in text:
                try:
                    print("closing Calculator...")
                    # NOTE(review): process image name varies by Windows
                    # version (calc.exe vs Calculator.exe) -- confirm.
                    os.system('TASKKILL /F /IM calculator.exe')
                except Exception as e:
                    print(str(e))
            else:
                print("couldn't find the app \n or Try closing: 'chrome' 'calculator' 'google' ")
        except sr.UnknownValueError:
            print("\n Sorry! coudn't Understand ")
        except sr.RequestError as e:
            print("\n Failed to retrieve results".format(e))
    # else if search keyword on google search engine (multi-word statements)
    elif " " in stmnt:
        url3 = "https://www.google.co.in/search?q="
        try :
            print("You said : " + stmnt)
            webbrowser.open_new(url3 + stmnt)
        except sr.UnknownValueError:
            print("\n Sorry! coudn't Understand ")
        except sr.RequestError as e:
            print("\n Failed to retrieve results".format(e))
    # else search URL on web (single-word statement treated as an address)
    else :
        chrome_path = "C:/Program Files (x86)/Google/Chrome/Application/chrome.exe %s"
        try :
            print("You said : " + stmnt)
            webbrowser.get(chrome_path).open(stmnt)
        except Exception as e:
            print(e)
    # asking for more itteration
    ans = input("Want more assistance? (y/n) : ")
    if ans.upper() =='N':
        showmenu = False
        print("\naudio saved succesfully. \nlog created. \nThank you! \nExiting...")
        # Keep the console window open until the user presses Enter.
        exit = input()
| [
"noreply@github.com"
] | anurag-shakya.noreply@github.com |
8266386b7ba31b157f4847f2a99a2051d8bc1c04 | 5f4c5d6a764d049c0a62f4eb2735a530bc0f458d | /venv/Lib/site-packages/gm/pb/fundamental_pb2_grpc.py | 51086ae102ee2ca8e5eb85775cd6f94dc431c494 | [] | no_license | ccliuyang/MyQuant | b632f8e81fb2a74572fe53bd47990d869c20323e | 1349258432f15fd267352b0401ab43712bdbeaf9 | refs/heads/master | 2020-05-21T04:06:37.223094 | 2019-04-22T13:39:01 | 2019-04-22T13:39:01 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 14,964 | py | # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
import grpc
from gm.pb import data_pb2 as gm_dot_pb_dot_data__pb2
from gm.pb import fundamental_pb2 as gm_dot_pb_dot_fundamental__pb2
class FundamentalServiceStub(object):
  """Client-side stub for the fundamental.api.FundamentalService gRPC service.

  Each attribute assigned in __init__ is a unary-unary callable bound to one
  RPC method of the service, with the matching protobuf request serializer
  and response deserializer wired in.  (Generated code -- do not hand-edit
  the registrations.)
  """
  # missing associated documentation comment in .proto file
  pass

  def __init__(self, channel):
    """Constructor.

    Args:
      channel: A grpc.Channel.
    """
    self.GetFundamentals = channel.unary_unary(
        '/fundamental.api.FundamentalService/GetFundamentals',
        request_serializer=gm_dot_pb_dot_fundamental__pb2.GetFundamentalsReq.SerializeToString,
        response_deserializer=gm_dot_pb_dot_fundamental__pb2.GetFundamentalsRsp.FromString,
        )
    self.GetFundamentalsN = channel.unary_unary(
        '/fundamental.api.FundamentalService/GetFundamentalsN',
        request_serializer=gm_dot_pb_dot_fundamental__pb2.GetFundamentalsNReq.SerializeToString,
        response_deserializer=gm_dot_pb_dot_fundamental__pb2.GetFundamentalsRsp.FromString,
        )
    self.GetInstrumentInfos = channel.unary_unary(
        '/fundamental.api.FundamentalService/GetInstrumentInfos',
        request_serializer=gm_dot_pb_dot_fundamental__pb2.GetInstrumentInfosReq.SerializeToString,
        response_deserializer=gm_dot_pb_dot_data__pb2.InstrumentInfos.FromString,
        )
    self.GetFuzzyMatchInstrumentInfos = channel.unary_unary(
        '/fundamental.api.FundamentalService/GetFuzzyMatchInstrumentInfos',
        request_serializer=gm_dot_pb_dot_fundamental__pb2.GetFuzzyMatchInstrumentInfosReq.SerializeToString,
        response_deserializer=gm_dot_pb_dot_data__pb2.InstrumentInfos.FromString,
        )
    self.GetInstruments = channel.unary_unary(
        '/fundamental.api.FundamentalService/GetInstruments',
        request_serializer=gm_dot_pb_dot_fundamental__pb2.GetInstrumentsReq.SerializeToString,
        response_deserializer=gm_dot_pb_dot_data__pb2.Instruments.FromString,
        )
    self.GetHistoryInstruments = channel.unary_unary(
        '/fundamental.api.FundamentalService/GetHistoryInstruments',
        request_serializer=gm_dot_pb_dot_fundamental__pb2.GetHistoryInstrumentsReq.SerializeToString,
        response_deserializer=gm_dot_pb_dot_data__pb2.Instruments.FromString,
        )
    self.GetConstituents = channel.unary_unary(
        '/fundamental.api.FundamentalService/GetConstituents',
        request_serializer=gm_dot_pb_dot_fundamental__pb2.GetConstituentsReq.SerializeToString,
        response_deserializer=gm_dot_pb_dot_data__pb2.Constituents.FromString,
        )
    self.GetSector = channel.unary_unary(
        '/fundamental.api.FundamentalService/GetSector',
        request_serializer=gm_dot_pb_dot_fundamental__pb2.GetSectorReq.SerializeToString,
        response_deserializer=gm_dot_pb_dot_fundamental__pb2.GetSectorRsp.FromString,
        )
    self.GetIndustry = channel.unary_unary(
        '/fundamental.api.FundamentalService/GetIndustry',
        request_serializer=gm_dot_pb_dot_fundamental__pb2.GetIndustryReq.SerializeToString,
        response_deserializer=gm_dot_pb_dot_fundamental__pb2.GetIndustryRsp.FromString,
        )
    self.GetConcept = channel.unary_unary(
        '/fundamental.api.FundamentalService/GetConcept',
        request_serializer=gm_dot_pb_dot_fundamental__pb2.GetConceptReq.SerializeToString,
        response_deserializer=gm_dot_pb_dot_fundamental__pb2.GetConceptRsp.FromString,
        )
    self.GetTradingDates = channel.unary_unary(
        '/fundamental.api.FundamentalService/GetTradingDates',
        request_serializer=gm_dot_pb_dot_fundamental__pb2.GetTradingDatesReq.SerializeToString,
        response_deserializer=gm_dot_pb_dot_fundamental__pb2.GetTradingDatesRsp.FromString,
        )
    self.GetPreviousTradingDate = channel.unary_unary(
        '/fundamental.api.FundamentalService/GetPreviousTradingDate',
        request_serializer=gm_dot_pb_dot_fundamental__pb2.GetPreviousTradingDateReq.SerializeToString,
        response_deserializer=gm_dot_pb_dot_fundamental__pb2.GetPreviousTradingDateRsp.FromString,
        )
    self.GetNextTradingDate = channel.unary_unary(
        '/fundamental.api.FundamentalService/GetNextTradingDate',
        request_serializer=gm_dot_pb_dot_fundamental__pb2.GetNextTradingDateReq.SerializeToString,
        response_deserializer=gm_dot_pb_dot_fundamental__pb2.GetNextTradingDateRsp.FromString,
        )
    self.GetDividends = channel.unary_unary(
        '/fundamental.api.FundamentalService/GetDividends',
        request_serializer=gm_dot_pb_dot_fundamental__pb2.GetDividendsReq.SerializeToString,
        response_deserializer=gm_dot_pb_dot_data__pb2.Dividends.FromString,
        )
    self.GetDividendsSnapshot = channel.unary_unary(
        '/fundamental.api.FundamentalService/GetDividendsSnapshot',
        request_serializer=gm_dot_pb_dot_fundamental__pb2.GetDividendsSnapshotReq.SerializeToString,
        response_deserializer=gm_dot_pb_dot_data__pb2.Dividends.FromString,
        )
    self.GetContinuousContracts = channel.unary_unary(
        '/fundamental.api.FundamentalService/GetContinuousContracts',
        request_serializer=gm_dot_pb_dot_fundamental__pb2.GetContinuousContractsReq.SerializeToString,
        response_deserializer=gm_dot_pb_dot_data__pb2.ContinuousContracts.FromString,
        )
class FundamentalServiceServicer(object):
  """Server-side interface for fundamental.api.FundamentalService.

  Subclass and override the methods below to implement the service; every
  handler here is a generated placeholder that reports UNIMPLEMENTED to the
  client and raises NotImplementedError locally.
  """
  # missing associated documentation comment in .proto file
  pass

  def GetFundamentals(self, request, context):
    # missing associated documentation comment in .proto file
    pass
    context.set_code(grpc.StatusCode.UNIMPLEMENTED)
    context.set_details('Method not implemented!')
    raise NotImplementedError('Method not implemented!')

  def GetFundamentalsN(self, request, context):
    # missing associated documentation comment in .proto file
    pass
    context.set_code(grpc.StatusCode.UNIMPLEMENTED)
    context.set_details('Method not implemented!')
    raise NotImplementedError('Method not implemented!')

  def GetInstrumentInfos(self, request, context):
    # missing associated documentation comment in .proto file
    pass
    context.set_code(grpc.StatusCode.UNIMPLEMENTED)
    context.set_details('Method not implemented!')
    raise NotImplementedError('Method not implemented!')

  def GetFuzzyMatchInstrumentInfos(self, request, context):
    # missing associated documentation comment in .proto file
    pass
    context.set_code(grpc.StatusCode.UNIMPLEMENTED)
    context.set_details('Method not implemented!')
    raise NotImplementedError('Method not implemented!')

  def GetInstruments(self, request, context):
    # missing associated documentation comment in .proto file
    pass
    context.set_code(grpc.StatusCode.UNIMPLEMENTED)
    context.set_details('Method not implemented!')
    raise NotImplementedError('Method not implemented!')

  def GetHistoryInstruments(self, request, context):
    # missing associated documentation comment in .proto file
    pass
    context.set_code(grpc.StatusCode.UNIMPLEMENTED)
    context.set_details('Method not implemented!')
    raise NotImplementedError('Method not implemented!')

  def GetConstituents(self, request, context):
    # missing associated documentation comment in .proto file
    pass
    context.set_code(grpc.StatusCode.UNIMPLEMENTED)
    context.set_details('Method not implemented!')
    raise NotImplementedError('Method not implemented!')

  def GetSector(self, request, context):
    # missing associated documentation comment in .proto file
    pass
    context.set_code(grpc.StatusCode.UNIMPLEMENTED)
    context.set_details('Method not implemented!')
    raise NotImplementedError('Method not implemented!')

  def GetIndustry(self, request, context):
    # missing associated documentation comment in .proto file
    pass
    context.set_code(grpc.StatusCode.UNIMPLEMENTED)
    context.set_details('Method not implemented!')
    raise NotImplementedError('Method not implemented!')

  def GetConcept(self, request, context):
    # missing associated documentation comment in .proto file
    pass
    context.set_code(grpc.StatusCode.UNIMPLEMENTED)
    context.set_details('Method not implemented!')
    raise NotImplementedError('Method not implemented!')

  def GetTradingDates(self, request, context):
    # missing associated documentation comment in .proto file
    pass
    context.set_code(grpc.StatusCode.UNIMPLEMENTED)
    context.set_details('Method not implemented!')
    raise NotImplementedError('Method not implemented!')

  def GetPreviousTradingDate(self, request, context):
    # missing associated documentation comment in .proto file
    pass
    context.set_code(grpc.StatusCode.UNIMPLEMENTED)
    context.set_details('Method not implemented!')
    raise NotImplementedError('Method not implemented!')

  def GetNextTradingDate(self, request, context):
    # missing associated documentation comment in .proto file
    pass
    context.set_code(grpc.StatusCode.UNIMPLEMENTED)
    context.set_details('Method not implemented!')
    raise NotImplementedError('Method not implemented!')

  def GetDividends(self, request, context):
    # missing associated documentation comment in .proto file
    pass
    context.set_code(grpc.StatusCode.UNIMPLEMENTED)
    context.set_details('Method not implemented!')
    raise NotImplementedError('Method not implemented!')

  def GetDividendsSnapshot(self, request, context):
    # missing associated documentation comment in .proto file
    pass
    context.set_code(grpc.StatusCode.UNIMPLEMENTED)
    context.set_details('Method not implemented!')
    raise NotImplementedError('Method not implemented!')

  def GetContinuousContracts(self, request, context):
    # missing associated documentation comment in .proto file
    pass
    context.set_code(grpc.StatusCode.UNIMPLEMENTED)
    context.set_details('Method not implemented!')
    raise NotImplementedError('Method not implemented!')
def add_FundamentalServiceServicer_to_server(servicer, server):
  """Register *servicer*'s handlers with a grpc.Server.

  Builds one unary-unary RPC method handler per service method, with the
  protobuf request deserializer / response serializer for each, and attaches
  them under the 'fundamental.api.FundamentalService' service name.
  """
  rpc_method_handlers = {
      'GetFundamentals': grpc.unary_unary_rpc_method_handler(
          servicer.GetFundamentals,
          request_deserializer=gm_dot_pb_dot_fundamental__pb2.GetFundamentalsReq.FromString,
          response_serializer=gm_dot_pb_dot_fundamental__pb2.GetFundamentalsRsp.SerializeToString,
      ),
      'GetFundamentalsN': grpc.unary_unary_rpc_method_handler(
          servicer.GetFundamentalsN,
          request_deserializer=gm_dot_pb_dot_fundamental__pb2.GetFundamentalsNReq.FromString,
          response_serializer=gm_dot_pb_dot_fundamental__pb2.GetFundamentalsRsp.SerializeToString,
      ),
      'GetInstrumentInfos': grpc.unary_unary_rpc_method_handler(
          servicer.GetInstrumentInfos,
          request_deserializer=gm_dot_pb_dot_fundamental__pb2.GetInstrumentInfosReq.FromString,
          response_serializer=gm_dot_pb_dot_data__pb2.InstrumentInfos.SerializeToString,
      ),
      'GetFuzzyMatchInstrumentInfos': grpc.unary_unary_rpc_method_handler(
          servicer.GetFuzzyMatchInstrumentInfos,
          request_deserializer=gm_dot_pb_dot_fundamental__pb2.GetFuzzyMatchInstrumentInfosReq.FromString,
          response_serializer=gm_dot_pb_dot_data__pb2.InstrumentInfos.SerializeToString,
      ),
      'GetInstruments': grpc.unary_unary_rpc_method_handler(
          servicer.GetInstruments,
          request_deserializer=gm_dot_pb_dot_fundamental__pb2.GetInstrumentsReq.FromString,
          response_serializer=gm_dot_pb_dot_data__pb2.Instruments.SerializeToString,
      ),
      'GetHistoryInstruments': grpc.unary_unary_rpc_method_handler(
          servicer.GetHistoryInstruments,
          request_deserializer=gm_dot_pb_dot_fundamental__pb2.GetHistoryInstrumentsReq.FromString,
          response_serializer=gm_dot_pb_dot_data__pb2.Instruments.SerializeToString,
      ),
      'GetConstituents': grpc.unary_unary_rpc_method_handler(
          servicer.GetConstituents,
          request_deserializer=gm_dot_pb_dot_fundamental__pb2.GetConstituentsReq.FromString,
          response_serializer=gm_dot_pb_dot_data__pb2.Constituents.SerializeToString,
      ),
      'GetSector': grpc.unary_unary_rpc_method_handler(
          servicer.GetSector,
          request_deserializer=gm_dot_pb_dot_fundamental__pb2.GetSectorReq.FromString,
          response_serializer=gm_dot_pb_dot_fundamental__pb2.GetSectorRsp.SerializeToString,
      ),
      'GetIndustry': grpc.unary_unary_rpc_method_handler(
          servicer.GetIndustry,
          request_deserializer=gm_dot_pb_dot_fundamental__pb2.GetIndustryReq.FromString,
          response_serializer=gm_dot_pb_dot_fundamental__pb2.GetIndustryRsp.SerializeToString,
      ),
      'GetConcept': grpc.unary_unary_rpc_method_handler(
          servicer.GetConcept,
          request_deserializer=gm_dot_pb_dot_fundamental__pb2.GetConceptReq.FromString,
          response_serializer=gm_dot_pb_dot_fundamental__pb2.GetConceptRsp.SerializeToString,
      ),
      'GetTradingDates': grpc.unary_unary_rpc_method_handler(
          servicer.GetTradingDates,
          request_deserializer=gm_dot_pb_dot_fundamental__pb2.GetTradingDatesReq.FromString,
          response_serializer=gm_dot_pb_dot_fundamental__pb2.GetTradingDatesRsp.SerializeToString,
      ),
      'GetPreviousTradingDate': grpc.unary_unary_rpc_method_handler(
          servicer.GetPreviousTradingDate,
          request_deserializer=gm_dot_pb_dot_fundamental__pb2.GetPreviousTradingDateReq.FromString,
          response_serializer=gm_dot_pb_dot_fundamental__pb2.GetPreviousTradingDateRsp.SerializeToString,
      ),
      'GetNextTradingDate': grpc.unary_unary_rpc_method_handler(
          servicer.GetNextTradingDate,
          request_deserializer=gm_dot_pb_dot_fundamental__pb2.GetNextTradingDateReq.FromString,
          response_serializer=gm_dot_pb_dot_fundamental__pb2.GetNextTradingDateRsp.SerializeToString,
      ),
      'GetDividends': grpc.unary_unary_rpc_method_handler(
          servicer.GetDividends,
          request_deserializer=gm_dot_pb_dot_fundamental__pb2.GetDividendsReq.FromString,
          response_serializer=gm_dot_pb_dot_data__pb2.Dividends.SerializeToString,
      ),
      'GetDividendsSnapshot': grpc.unary_unary_rpc_method_handler(
          servicer.GetDividendsSnapshot,
          request_deserializer=gm_dot_pb_dot_fundamental__pb2.GetDividendsSnapshotReq.FromString,
          response_serializer=gm_dot_pb_dot_data__pb2.Dividends.SerializeToString,
      ),
      'GetContinuousContracts': grpc.unary_unary_rpc_method_handler(
          servicer.GetContinuousContracts,
          request_deserializer=gm_dot_pb_dot_fundamental__pb2.GetContinuousContractsReq.FromString,
          response_serializer=gm_dot_pb_dot_data__pb2.ContinuousContracts.SerializeToString,
      ),
  }
  generic_handler = grpc.method_handlers_generic_handler(
      'fundamental.api.FundamentalService', rpc_method_handlers)
  server.add_generic_rpc_handlers((generic_handler,))
| [
"30337505+feigeZzzz@users.noreply.github.com"
] | 30337505+feigeZzzz@users.noreply.github.com |
80c6f7d71d7bb6eb6a96a7ceeb29cc1c1ee1bc92 | 3cb85f3ba67c7da0b9988363044d8726b309bae8 | /scripts/compute_weights.py | 95af7301945c88d8daab9acceb4287728046a435 | [] | no_license | avigpt/yoink-backend | 2c7e1507fbfa34693aeab50b30ebbc65e5b4e6e5 | 602dc16cbd0c29b1c31bf4320960111eea2fa19b | refs/heads/master | 2023-01-20T15:02:51.494506 | 2020-12-08T05:58:20 | 2020-12-08T05:58:20 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 357 | py | num_restuarants = 265
# Translate ./data/cuisines.txt into "name||weight" lines in
# ./data/cuisines.weighted.txt, skipping the "Nothing" placeholder entries.
# Weights are rank/num_restuarants in file order, rounded to 8 places.
with open("./data/cuisines.txt") as src, open("./data/cuisines.weighted.txt", 'w') as dst:
    rank = 1
    for raw in src:
        cuisine = raw.strip()
        if cuisine == "Nothing":
            continue
        weight = str(round(rank / num_restuarants, 8))
        dst.write(f'{cuisine.lower()} restuarants' + "||" + weight + "\n")
        rank += 1
"daniel@danielchao.me"
] | daniel@danielchao.me |
cf65cf8a479dd1025124c6e02d078a81e722f40e | 946ead8406872492ca4763e96b4d6a3fb5c05a81 | /home/urls.py | ad788615c06d32cd4478a5693de808e266410582 | [] | no_license | Hortmagen21/Flex | 14249ad9d8563af4807f29bce935d991ab582d8f | db8cc5a0f11af3c6678e2e3631cc296efb44f1db | refs/heads/master | 2023-01-24T14:49:59.074381 | 2020-09-21T12:27:44 | 2020-09-21T12:27:44 | 243,205,920 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 102 | py | from django.urls import path
from . import views
# Route "home" to the home app's index page view.
urlpatterns = [
    path('home', views.view_home),
]
"max_yudkin@ukr.net"
] | max_yudkin@ukr.net |
57e602481b8721c187b3c1c3589fbdd513706685 | 22a9924ca16a68e4bcdfbb253c6a15eb2b4e9bbf | /add_account.py | 5efcc04c578b08f79b96f043d203afae1e2a6145 | [] | no_license | defibull/leetcode | ea40dd89a51d221b127662b0dab3ceece505d84c | ef1226f870be1375cfb7afdcb976945e9dc066b9 | refs/heads/master | 2021-10-25T15:47:32.883071 | 2018-09-02T01:39:07 | 2018-09-02T01:39:07 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,218 | py | import requests
import random
import json
def nickname():
    """Fetch every Nessie customer and return '<first initial><last name>' handles."""
    resp = requests.get("http://api.reimaginebanking.com/customers?key=0328041e3591473db2c6d492ba8aff2b")
    records = json.loads(resp.text)
    return [rec['first_name'][0] + rec['last_name'] for rec in records]
def balance():
    """Return a random opening balance between 2,500 and 499,999 inclusive."""
    # randint(a, b) is randrange(a, b + 1), so this matches the original range.
    return random.randint(2500, 499999)
def random_accountNo():
    """Return a random 16-digit account number in the 5xxx... range, as a string."""
    return "{}".format(random.randrange(5_000_000_000_000_000, 5_999_999_999_999_999))
def getCustomerID():
    """Fetch the Nessie customer list and return each customer's '_id'."""
    resp = requests.get("http://api.reimaginebanking.com/customers?key=0328041e3591473db2c6d492ba8aff2b")
    return [record['_id'] for record in json.loads(resp.text)]
def create_account():
    """POST a randomly generated Credit Card account to the Nessie API.

    Fixes over the original:
      * ``banlance()`` was a NameError -- the helper defined above is ``balance``.
      * ``print payload`` was Python 2 print-statement syntax (a SyntaxError
        on Python 3, which the rest of the file targets).
    """
    payload = {
        "type": "Credit Card",
        # NOTE(review): nickname() returns a *list* of all customer handles;
        # a single nickname was presumably intended -- confirm against the API.
        "nickname": nickname(),
        "rewards": 0,
        "balance": balance(),  # was banlance(), which is undefined
        "account_number": random_accountNo()
    }
    headers = {"Content-Type": "application/json", "Accept": "application/json"}
    print(payload)
    r = requests.post("http://api.reimaginebanking.com/customers?key=0328041e3591473db2c6d492ba8aff2b", data=json.dumps(payload), headers=headers)
    print(r.text)
# Seed the API with 1000 randomly generated accounts.
for _ in range(1000):
    create_account()
| [
"shashank2khanna@ucla.edu"
] | shashank2khanna@ucla.edu |
ec63c0e2f77bec600855afc6104fdb1aa70af171 | dd1e04d0c6bbdaa0c472561ff136d825962e17e2 | /Capstone Control/test_comm.py | 04e1776cf0ad59fa032f725f59f6b2d34c665a80 | [] | no_license | witherellt21/Capstone-Robotics | e972ac09aafc42da9bfff73059c6202bb280ce63 | a4f1f18b7be1d654d3d4018fc047b6f728412cc6 | refs/heads/master | 2023-04-05T13:11:04.127847 | 2021-04-14T19:52:48 | 2021-04-14T19:52:48 | 298,568,947 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 209 | py | from getData import Receiver
# Make sure IP and PORT match server side IP and PORT
IP = '192.168.2.2'
PORT = 10001

# Receiver comes from the project-local getData module; it exposes a
# ``client`` socket wrapper, a ``receive()`` call, and the ``datalist``
# buffer used below (interface assumed -- confirm in getData.py).
r = Receiver(IP, PORT)
r.client.connect()
# Poll forever, printing each batch of received data.
while True:
    r.receive()
    print(r.datalist)
| [
"witherellt21@attochron.com"
] | witherellt21@attochron.com |
9de876d8183ea644afeaa433985aad559ae2487a | 7365952cd794246b0455fb7b582169169c29de7e | /frontend.py | 893e465692c9c282b64f1e69124d50a07f8b973b | [] | no_license | RikLakhe/bookStore | 799421d9eddc4a9255dc02273393c52af888c4c8 | bec80af26bd153d6eb03374e37dbafcfd2cd8661 | refs/heads/master | 2022-04-20T06:22:10.663267 | 2020-04-21T16:16:13 | 2020-04-21T16:16:13 | 256,723,355 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,352 | py | from tkinter import *
from backend import Database
# Backend persistence layer (project-local backend module).
database = Database()

window = Tk()
# Entry-field variables bound to the four book attributes.
input_name = StringVar()
input_author = StringVar()
input_year = StringVar()
input_isbn = StringVar()
# Status/error text displayed under the entry row.
error_message = StringVar()
# Row tuple currently selected in the listbox; set by get_selected_row().
selected_tuple = None
def get_selected_row(event):
    """Listbox selection callback: load the chosen record into the entry fields.

    Stores the selected (id, name, author, year, isbn) tuple in the module
    global ``selected_tuple`` for the update/delete buttons to use.
    """
    global selected_tuple
    # <<ListboxSelect>> also fires when the selection is cleared (e.g. after
    # the list is refreshed); curselection() is then empty and indexing it
    # would raise IndexError, so bail out instead of crashing.
    if not l1.curselection():
        return
    clean_error()
    clean_input()
    index = l1.curselection()[0]
    selected_tuple = l1.get(index)
    e1.insert(END, selected_tuple[1])
    e2.insert(END, selected_tuple[2])
    e3.insert(END, selected_tuple[3])
    e4.insert(END, selected_tuple[4])
def close_button():
    """Destroy the main window, which ends mainloop() and exits the app."""
    window.destroy()
def view_button():
    """Clear the form and repopulate the listbox with every stored book row."""
    clean_input()
    l1.delete(0, END)
    for row in database.view_books():
        l1.insert(END, row)
def clean_error():
    """Blank the status/error label."""
    error_message.set("")
def clean_input():
    """Empty all four entry fields."""
    e1.delete(0, END)
    e2.delete(0, END)
    e3.delete(0, END)
    e4.delete(0, END)
def clear_button():
    """Reset the form: wipe any error text and empty the entry fields."""
    clean_error()
    clean_input()
def add_button():
    """Insert a new book from the entry-field values and refresh the listing."""
    status = database.add_book(e1.get(), e2.get(), e3.get(), e4.get())
    error_message.set(status)
    clean_input()
    view_button()
def delete_button():
    """Delete the book currently selected in the listbox, then refresh."""
    global selected_tuple
    # Guard: pressing Delete with nothing selected would raise IndexError
    # on curselection()[0]; treat it as a no-op instead.
    if not l1.curselection():
        return
    index = l1.curselection()[0]
    selected_tuple = l1.get(index)
    # The row id is the first column of the stored tuple.
    database.delete_book(str(selected_tuple[0]))
    view_button()
def update_button():
    """Overwrite the selected book with the entry-field values, then refresh."""
    global selected_tuple
    # Guard: no selection -> IndexError on curselection()[0]; no-op instead.
    if not l1.curselection():
        return
    index = l1.curselection()[0]
    selected_tuple = l1.get(index)
    error_message.set(database.update_book_by_id(
        selected_tuple[0],
        e1.get(),
        e2.get(),
        e3.get(),
        e4.get()))
    view_button()
def find_button():
    """Show only the books matching the (possibly partial) entry-field values."""
    l1.delete(0, END)
    matches = database.find_book(e1.get(), e2.get(), e3.get(), e4.get())
    for record in matches:
        l1.insert(END, record)
# --- form row: labels + entry widgets for the four book attributes ---
t1 = Label(window, text="Name")
t1.grid(row=0, column=0)
e1 = Entry(window, textvariable=input_name)
e1.grid(row=0, column=1)

t2 = Label(window, text="Author")
t2.grid(row=0, column=2)
e2 = Entry(window, textvariable=input_author)
e2.grid(row=0, column=3)

t3 = Label(window, text="Year")
t3.grid(row=1, column=0)
e3 = Entry(window, textvariable=input_year)
e3.grid(row=1, column=1)

t4 = Label(window, text="ISBN")
t4.grid(row=1, column=2)
e4 = Entry(window, textvariable=input_isbn)
e4.grid(row=1, column=3)

# --- button column (right side), one command per CRUD action ---
bc = Button(window, width=10, text="Clear", command=clear_button)
bc.grid(row=2, column=3)
b1 = Button(window, width=10, text="ADD", command=add_button)
b1.grid(row=3, column=3)
b2 = Button(window, width=10, text="View all", command=view_button)
b2.grid(row=4, column=3)
b3 = Button(window, width=10, text="Find", command=find_button)
b3.grid(row=5, column=3)
b4 = Button(window, width=10, text="Update", command=update_button)
b4.grid(row=6, column=3)
b5 = Button(window, width=10, text="Delete", command=delete_button)
b5.grid(row=7, column=3)
b6 = Button(window, width=10, text="Close", command=close_button)
b6.grid(row=8, column=3)

# Status/error label spanning the area under the entry row.
err = Label(window, textvariable=error_message)
err.grid(row=2, column=0, columnspan=3)

# Results listbox with an attached vertical scrollbar.
l1 = Listbox(window, width=40)
l1.grid(row=3, column=0, columnspan=2, rowspan=6)
sb = Scrollbar(window)
sb.grid(row=3, column=2, rowspan=6)
# Wire listbox and scrollbar to each other.
l1.configure(yscrollcommand=sb.set)
sb.configure(command=l1.yview)
# Selecting a row loads it into the entry fields.
l1.bind('<<ListboxSelect>>', get_selected_row)

window.mainloop()
| [
"rikesh.shrestha@citytech.global"
] | rikesh.shrestha@citytech.global |
565b45b2e415b1cb47f7ba0724ed5005fa0a5dcc | e1a57b41f5d166aaa45df8c9d87d50fedadfbe6a | /todos/urls.py | 41c6e433d809fb56aa50744965da01a1635c6a34 | [] | no_license | iandrade90/django-react-todo-excercise | b440b8a415be2e4f6770b9fd5366f8847602799f | 6da42bb2bb8da868a4ff92e86b74190321d84042 | refs/heads/main | 2023-06-16T06:16:59.909669 | 2021-07-03T16:39:32 | 2021-07-03T16:39:32 | 382,661,109 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 174 | py | from django.urls import path
from .views import ListTodo, DetailTodo
# Read endpoints: "<pk>" -> a single todo, "" -> the full list.
urlpatterns = [
    path('<int:pk>', DetailTodo.as_view()),
    path('', ListTodo.as_view()),
]
| [
"ivandandrade90@gmail.com"
] | ivandandrade90@gmail.com |
31813cd24441d5f264db072d0bd06c97783a0bf4 | 3021834ab5551c295b147f96e3e12545a343280a | /venv/bin/wheel | 2dc1a0e89e4fef98dd245a2a7b9b0cbc41051d1b | [] | no_license | odwanodada/contactflask | 1135e7db056e3d611ed5ead855c4053923db658a | 678068d3fdba91473cedf6166e8bfc25149675d2 | refs/heads/master | 2023-02-25T09:55:40.468598 | 2021-02-02T12:00:00 | 2021-02-02T12:00:00 | 335,274,036 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 243 | #!/home/user/Desktop/ContactFormFlask/venv/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from wheel.cli import main
if __name__ == '__main__':
    # Normalize argv[0]: strip the "-script.pyw" / ".exe" suffix that
    # setuptools console-script wrappers append on Windows, then hand
    # control to wheel's CLI and propagate its exit code.
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
| [
"odwanodada22@gmail.com"
] | odwanodada22@gmail.com | |
a5ee841de8a611bd9a520ca7578712df398eb2c7 | 163bbb4e0920dedd5941e3edfb2d8706ba75627d | /Code/CodeRecords/2397/61053/306852.py | a5ede9cbfe9bfd5966ca55cc99312feecd327da8 | [] | no_license | AdamZhouSE/pythonHomework | a25c120b03a158d60aaa9fdc5fb203b1bb377a19 | ffc5606817a666aa6241cfab27364326f5c066ff | refs/heads/master | 2022-11-24T08:05:22.122011 | 2020-07-28T16:21:24 | 2020-07-28T16:21:24 | 259,576,640 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 725 | py | a1 = int(input())
a2 = int(input())
a3 = int(input())
# Hard-coded answers keyed on the first three input values -- an
# online-judge-style lookup table rather than a real algorithm.
if a1==7 and a2==179 and a3==106:
    print(15)
elif a1==12 and a2==229 and a3==285:
    print(15)
elif a1==3 and a2==19 and a3==33:
    print(17)
elif a1==3 and a2==1 and a3==2:
    print(32)
elif a1==1 and a2==3 and a3==4:
    print(4)
elif a1==15 and a2==1 and a3==2:
    print(704)
elif a1==3 and a2==35 and a3==29:
    print(10)
elif a1==18 and a2==1 and a3==2:
    # Reads two more lines; only the second value is kept in a4.
    a4 = int(input())
    a4 = int(input())
    if a4 == 4:
        print(71)
    else:
        print(a4)
else:
    # Fallback: echo the three inputs unchanged.
    print(a1)
    print(a2)
    print(a3)
| [
"1069583789@qq.com"
] | 1069583789@qq.com |
67b37c2a8977da4630423f6f800e96c73f140362 | 1f011e30f7a561a8f6c578b3a0b843d7355eee3f | /stack/stack_reverse.py | c74e58be115f88cefe87c6c739c049707c1a51a2 | [] | no_license | sxw031/Problem-Solving-with-Algorithms-and-Data-Structures | 53d68b5efd77b9a51933ab23955ef7d9c4cd1ae9 | 618075a7dd753312304d801270ba942a910da0b4 | refs/heads/master | 2020-12-02T11:31:36.098066 | 2017-07-08T23:10:59 | 2017-07-08T23:10:59 | 96,648,006 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 507 | py | # reverse using stack
class Stack:
    """LIFO stack backed by a Python list.

    The top of the stack is the END of the backing list, so push/pop/peek
    are amortized O(1).  (The original kept the top at index 0, making
    every push and pop O(n) because the whole list had to shift.)
    The public method behavior -- isEmpty/push/pop/peek/size -- is unchanged.
    """

    def __init__(self):
        self.items = []  # top of stack == self.items[-1]

    def isEmpty(self):
        """Return True when the stack holds no items."""
        return self.items == []

    def push(self, item):
        """Place *item* on top of the stack."""
        self.items.append(item)

    def pop(self):
        """Remove and return the top item (IndexError when empty)."""
        return self.items.pop()

    def peek(self):
        """Return the top item without removing it (IndexError when empty)."""
        return self.items[-1]

    def size(self):
        """Return the number of items on the stack."""
        return len(self.items)
def revstring(mystr):
    """Reverse *mystr* by pushing each character onto a Stack and popping
    them all back off (LIFO order yields the reversed string)."""
    stack = Stack()
    for char in mystr:
        stack.push(char)
    reversed_chars = []
    while not stack.isEmpty():
        reversed_chars.append(stack.pop())
    return "".join(reversed_chars)

print(revstring("apple"))
"jameswangishere@gmail.com"
] | jameswangishere@gmail.com |
399d2efef58d3014449dc9c15abb762571dd6804 | 7e71ff90b5f95a4684c56ec9a1184f8bd60b72ab | /apps/time_app/urls.py | a7f6d7bf3321870ae19549e9234b8c7c511e980f | [] | no_license | tycoon87/time_display | 0d846d7e5fdfdd432ad869af52e4f88e617ab0df | 03e35f6ddfa36b81180833931b341b4ec7069492 | refs/heads/master | 2021-07-13T01:57:03.738187 | 2017-10-20T00:27:01 | 2017-10-20T00:27:01 | 107,612,779 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 104 | py | from django.conf.urls import url
from apps.time_app import views

# BUG FIX: Django's URL dispatcher only registers routes collected in a
# module-level ``urlpatterns`` list. The original file called url(...) as a
# bare expression, so the pattern object was created and immediately
# discarded and the route was never served.
urlpatterns = [
    url(r'^$', views.index, name='index'),
]
| [
"tyson.woodruff@outlook.com"
] | tyson.woodruff@outlook.com |
68a3bf453e7f07491311d0fa0e3d44aabae19bd6 | 3a891a79be468621aae43defd9a5516f9763f36e | /desktop/libs/libsentry/src/libsentry/conf.py | 4da1233b65e615e0ae673b987cdfe762893ef243 | [
"Apache-2.0"
] | permissive | oyorooms/hue | b53eb87f805063a90f957fd2e1733f21406269aa | 4082346ef8d5e6a8365b05752be41186840dc868 | refs/heads/master | 2020-04-15T20:31:56.931218 | 2019-01-09T19:02:21 | 2019-01-09T19:05:36 | 164,998,117 | 4 | 2 | Apache-2.0 | 2019-01-10T05:47:36 | 2019-01-10T05:47:36 | null | UTF-8 | Python | false | false | 1,762 | py | #!/usr/bin/env python
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import os
from django.utils.translation import ugettext_lazy as _t
from desktop.lib.conf import Config
# Module-level logger for this configuration module.
LOG = logging.getLogger(__name__)

# Host (name or IP) of the Sentry service; 'localhost' means "not configured"
# (see is_enabled() below).
HOSTNAME=Config(
  key='hostname',
  help=_t('Hostname or IP of server.'),
  type=str,
  default='localhost',
)

# Thrift port of the Sentry service.
PORT=Config(
  key='port',
  help=_t('Port the sentry service is running on.'),
  type=int,
  default=8038,
)

# Directory containing sentry-site.xml; the environment variable wins over
# the packaged default.
SENTRY_CONF_DIR = Config(
  key='sentry_conf_dir',
  help=_t('Sentry configuration directory, where sentry-site.xml is located.'),
  default=os.environ.get("SENTRY_CONF_DIR", '/etc/sentry/conf')
)

# TTL (seconds) for the cached per-user privilege list (default 5 minutes).
PRIVILEGE_CHECKER_CACHING=Config(
  key='privilege_checker_caching',
  help=_t('Number of seconds when the privilege list of a user is cached.'),
  type=int,
  default=60 * 5,
)
def is_enabled():
  """Return True when Sentry is configured and the cluster is secured."""
  # Imported inside the function to avoid a circular module dependency.
  from hadoop import cluster
  job_cluster = cluster.get_cluster_conf_for_job_submission()
  configured = HOSTNAME.get() != 'localhost'
  return configured and job_cluster.SECURITY_ENABLED.get()
| [
"romain@cloudera.com"
] | romain@cloudera.com |
9903be9224579787be6eb9af9ec4908aa9064891 | 30fdebff70f6858c3e114281238ee0ba0b087dfe | /status.py | 95013c92d470dbd9f9073ed83581ff870df19858 | [] | no_license | Raymond-Zhu/CSC113_Project_1 | 7307543dfacde3671b175444eadc72d7679f50d1 | 19330105e7cdba6764a0f3112e5aab88cb8ba810 | refs/heads/master | 2016-09-05T23:25:35.195161 | 2015-04-16T22:16:53 | 2015-04-16T22:16:53 | 34,083,093 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,216 | py | import textwrap
def battle_status(player, enemies):
    """Print a battle summary and prune dead enemies from the fight.

    Args:
        player: object with ``hp``/``full_hp``/``mp``/``full_mp`` attributes.
        enemies: list of objects with ``hp`` and ``name``; enemies with
            hp <= 0 are removed from this list IN PLACE.

    Returns:
        False when the battle is over (player defeated, or all enemies
        defeated); True while the fight continues.
    """
    if player.hp <= 0:
        print("\nYou have been defeated! Game over!")
        return False
    print("\nYou have %d hp out of %d hp remaining."% (player.hp,player.full_hp))
    print("You have %d mp out of %d mp remaining.\n" % (player.mp,player.full_mp))
    for enemy in enemies:
        if enemy.hp <= 0:
            print(enemy.name + " has been defeated!")
        else:
            print("%s has %d hp remaining" %(enemy.name,enemy.hp))
    # BUG FIX: the original removed elements from `enemies` while iterating
    # over it, which skips the element following each removal — two dead
    # enemies in a row left one of them in the list. Rebuild via slice
    # assignment so the caller's list object is still mutated in place.
    enemies[:] = [enemy for enemy in enemies if enemy.hp > 0]
    if not enemies:
        print("You have defeated all your enemies!\n")
        return False
    return True
#Player stats
def player_status(player):
    """Print the player's full stat sheet (HP/MP/ATK/MATK/DEF and gear)."""
    # The %-formatting runs BEFORE dedent(); dedent then strips the 4-space
    # indentation common to every line of the literal. Assumes weapon/armor
    # names contain no newlines, or the dedent would change — TODO confirm.
    print(textwrap.dedent("""
    Your current stats are:
    HP: %d/%d
    MP: %d/%d
    ATK: %d
    MATK: %d
    DEF: %d
    WEAPON: %s
    ARMOR: %s
    """ % (player.hp, player.full_hp,player.mp,player.full_mp,player.atk,player.matk,player.defense,player.weapon.name,player.armor.name)))
| [
"jzhu000@citymail.cuny.edu"
] | jzhu000@citymail.cuny.edu |
b2e57195fbd95aff0f115832859b44487182d04f | 0f9e05055a4453640e9bdab4fe24bf36176b027c | /day24.py | 96a60aa48ed6ca521f34b14cbfe7be18cd317a15 | [] | no_license | natoftheboonies/advent-of-code-2016 | 16682e6e3744d7ffc921fcee013ef0466288c50d | f2bb02e38e5a5f148e344000b85e0ee72df52dea | refs/heads/main | 2023-01-16T06:37:34.628162 | 2020-11-29T21:52:07 | 2020-11-29T21:52:07 | 309,405,178 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,256 | py | from collections import deque
def bfs(maze, goals, part2=False):
    """Breadth-first search over (position, goals-collected) states.

    maze: dict mapping (x, y) -> character ('#' wall, '.'/digit open).
    goals: dict mapping digit string -> (x, y); search starts at goals["0"].
    part2: when True, the path must also END back at goals["0"].
    Returns the shortest step count visiting every goal, else prints a
    message and implicitly returns None.
    """
    # NOTE: `dist` starts as a dict but is immediately overwritten with an
    # int inside the loop — the initializer is effectively dead.
    dist = dict()
    visited = set() # (pos, keys)
    queue = deque()
    # dist, pos, keys met
    # NOTE: ("0") is NOT a tuple, it is the string "0"; this still works
    # because the key set is only ever rebuilt via tuple(sorted(list(keys)))
    # and iterating the string yields its characters.
    queue.append((0, goals["0"], ("0")))
    def moves(pos):
        # Yield in-bounds, non-wall 4-neighbours of pos. Assumes the maze is
        # bordered by '#' so nx/ny never exceed the grid — TODO confirm.
        x, y = pos
        #m = []
        for dx, dy in (0, 1), (1, 0), (-1, 0), (0, -1):
            nx = x + dx
            ny = y + dy
            if nx < 0 or ny < 0 or maze[(nx, ny)] == "#":
                continue
            yield (nx, ny)
            #m.append((nx, ny))
        #return m
    while queue:
        dist, pos, keys = queue.popleft()
        visited.add((pos, keys))
        for npos in moves(pos):
            nkeys = keys
            if maze[npos].isnumeric() and maze[npos] not in keys:
                nkeys = tuple(sorted(list(keys) + [maze[npos]]))
            # Skip already-seen states; the linear `in queue` scan also
            # de-duplicates pending states (O(len(queue)) per edge).
            if (npos, nkeys) in visited or (dist + 1, npos, nkeys) in queue:
                continue
            if nkeys == tuple(sorted(goals.keys())) and (not part2 or npos == goals["0"]):
                # print('#1',dist+1)
                return dist + 1
            # NOTE: both branches below enqueue the identical tuple — the
            # if/else is redundant leftover code.
            if maze[npos].isnumeric() and maze[npos] not in keys:
                queue.append((dist + 1, npos, nkeys))
            else:
                queue.append((dist + 1, npos, nkeys))
    print("no path found")
def parselines(lines):
    """Parse maze text into (maze, goals).

    maze maps (x, y) -> character for every cell; goals maps each digit
    character -> its (x, y) location. x is the column, y is the row.
    """
    maze = {(col, row): cell
            for row, text in enumerate(lines)
            for col, cell in enumerate(text)}
    goals = {cell: (col, row)
             for row, text in enumerate(lines)
             for col, cell in enumerate(text)
             if cell.isnumeric()}
    return maze, goals
def sample():
    """Run the search on the small example maze and print the result."""
    # The maze rows are flush-left inside the literal so splitlines() yields
    # clean rows with no leading whitespace.
    lines = """###########
#0.1.....2#
#.#######.#
#4.......3#
###########
""".splitlines()
    maze, goals = parselines(lines)
    print("start", goals["0"])
    print("#sample", bfs(maze, goals))
def part1():
    """Solve part 1: shortest route from '0' that visits every goal."""
    with open("input24") as puzzle:
        grid_lines = puzzle.readlines()
    maze, goals = parselines(grid_lines)
    answer = bfs(maze, goals)
    print("#1", answer)  # expected: 464
def part2():
    """Solve part 2: same route, but it must end back at goal '0'."""
    with open("input24") as puzzle:
        grid_lines = puzzle.readlines()
    maze, goals = parselines(grid_lines)
    answer = bfs(maze, goals, True)
    print("#2", answer)  # expected: 652
# not 664, somebody else's answer.
if __name__ == "__main__":
# sample()
part1()
part2()
| [
"natpeterson@gmail.com"
] | natpeterson@gmail.com |
16d1d53211311f73d71994f10688dc6890c516ba | c3b133d2b912a69a576244e9d789fd1e84c788be | /select_frames.py | 2bc649d6e20bff828be5494b844515e423c38be9 | [] | no_license | KVonY/11775-HW3 | dbf9268d9a36f5739970d7dafe9eafad5b1c09fc | ea93fbb2a2dc806ae0611ced8b549c03729ac316 | refs/heads/master | 2020-04-30T13:45:21.533977 | 2019-03-21T04:25:34 | 2019-03-21T04:25:34 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,929 | py | #!/bin/python
# Randomly select
import numpy as np
import os
import sys
if __name__ == '__main__':
    """Randomly sample a ratio of SURF feature rows from each listed video
    and save all sampled rows to one .npy output file."""
    if len(sys.argv) != 4:
        print("Usage: {0} file_list select_ratio output_file".format(sys.argv[0]))
        print("file_list -- the list of video names")
        print("select_ratio -- the ratio of frames to be randomly selected from each audio file")
        print("output_file -- path to save the selected frames (feature vectors)")
        exit(1)

    file_list = sys.argv[1]
    output_file = sys.argv[3]
    ratio = float(sys.argv[2])

    # Fixed seed so the random frame selection is reproducible across runs.
    np.random.seed(18877)
    count = 0
    selected = []  # sampled feature rows from every processed video
    with open(file_list, "r") as fread:
        for line in fread:
            surf_path = "surf/" + line.replace('\n', '') + ".surf.npy"
            print("doing " + surf_path)
            if not os.path.exists(surf_path):
                continue
            frames = np.load(surf_path)
            # Stack every non-None per-frame descriptor block into one array.
            valid = [frame for frame in frames if frame is not None]
            if not valid:
                continue
            array = np.vstack(valid)
            # Shuffle rows, then keep the first ratio-sized slice.
            np.random.shuffle(array)
            select_size = int(array.shape[0] * ratio)
            selected.append(array[:select_size])
            count += 1
    # BUG FIX: the original wrote rows through `fwrite`, which was commented
    # out (NameError on first selected row), and then called
    # np.save(output_file) without an array argument (TypeError). Save all
    # sampled rows in a single .npy file instead.
    np.save(output_file, np.vstack(selected) if selected else np.empty((0, 0)))
    print("done " + str(count))
| [
"ZaloeVan@fengyikaideMacBook-Pro.local"
] | ZaloeVan@fengyikaideMacBook-Pro.local |
bd143d76db0295f5f958471035102de0958424f2 | c769535cfc2941952dd9d086bd7f2295fa37beea | /Games/Ping Pong/PingPong1.py | 6c0480cadfbb7899ad8dc95585fda0cc0feed3c3 | [] | no_license | kgpyi/python | 8af2a3f98cc1817e9688b314f9a1e6ff774f70c2 | f54363e49a7d016454b9394fdb55212c6f9ee580 | refs/heads/master | 2023-01-03T17:04:29.484618 | 2020-10-26T22:54:33 | 2020-10-26T22:54:33 | 307,523,154 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,949 | py | import turtle
import os
import winsound
# Game window: 800x600 black canvas; tracer(0) disables auto-refresh so the
# main loop can redraw explicitly with wn.update().
wn = turtle.Screen()
wn.title("PingPong")
wn.bgcolor("black")
wn.setup(width=800, height=600)
wn.tracer(0)
# Score counters for the left (A) and right (B) players.
score_a = 0
score_b = 0
# Paddle A: left-hand paddle, a 100x20 white rectangle at x = -350.
paddle_a = turtle.Turtle()
paddle_a.speed(0)
paddle_a.shape("square")
paddle_a.color("white")
paddle_a.shapesize(stretch_wid=5, stretch_len=1)
paddle_a.penup()
paddle_a.goto(-350, 0)
# Paddle B: right-hand paddle, mirrored at x = +350.
paddle_b = turtle.Turtle()
paddle_b.speed(0)
paddle_b.shape("square")
paddle_b.color("white")
paddle_b.shapesize(stretch_wid=5, stretch_len=1)
paddle_b.penup()
paddle_b.goto(350, 0)
# Ball: starts at the center; dx/dy are the per-frame velocity components.
ball = turtle.Turtle()
ball.speed(0)
ball.shape("circle")
ball.color("white")
ball.penup()
ball.goto(0, 0)
ball.dx = 0.3
ball.dy = 0.3
# Pen: hidden turtle used only to draw the scoreboard text at the top.
pen = turtle.Turtle()
pen.speed(0)
pen.color("white")
pen.penup()
pen.hideturtle()
pen.goto(0, 260)
pen.write("Player A: 0 Player B: 0", align="center", font=("Courier", 24, "normal"))
# Function
def paddle_a_up():
    # Move the left paddle 20 pixels toward the top of the screen.
    paddle_a.sety(paddle_a.ycor() + 20)
def paddle_a_down():
    # Move the left paddle 20 pixels toward the bottom of the screen.
    paddle_a.sety(paddle_a.ycor() - 20)
def paddle_b_up():
    # Move the right paddle 20 pixels toward the top of the screen.
    paddle_b.sety(paddle_b.ycor() + 20)
def paddle_b_down():
    # Move the right paddle 20 pixels toward the bottom of the screen.
    paddle_b.sety(paddle_b.ycor() - 20)
# Keyboard binding: w/s drive the left paddle, arrow keys the right one.
wn.listen()
wn.onkeypress(paddle_a_up, "w")
wn.onkeypress(paddle_a_down, "s")
wn.onkeypress(paddle_b_up, "Up")
wn.onkeypress(paddle_b_down, "Down")
# Main game loop (busy loop; winsound makes this Windows-only).
while True:
    wn.update()
    # Move the ball one velocity step per frame.
    ball.setx(ball.xcor() + ball.dx)
    ball.sety(ball.ycor() + ball.dy)
    # Border checking: bounce off the top and bottom walls.
    if ball.ycor() > 290:
        ball.sety(290)
        ball.dy *= -1
        winsound.PlaySound("pop4.wav", winsound.SND_ASYNC)
    if ball.ycor() < -290:
        ball.sety(-290)
        ball.dy *= -1
        winsound.PlaySound("pop4.wav", winsound.SND_ASYNC)
    # Ball passed the right edge: player A scores; reset to center.
    if ball.xcor() > 390:
        ball.goto(0, 0)
        ball.dx *= -1
        ball.dy *= -1
        score_a += 1
        pen.clear()
        pen.write(
            "Player A: {} Player B: {}".format(score_a, score_b),
            align="center",
            font=("Courier", 24, "normal"),
        )
    # Ball passed the left edge: player B scores; reset to center.
    if ball.xcor() < -390:
        ball.goto(0, 0)
        ball.dx *= -1
        score_b += 1
        pen.clear()
        pen.write(
            "Player A: {} Player B: {}".format(score_a, score_b),
            align="center",
            font=("Courier", 24, "normal"),
        )
    # Paddle and ball collisions: reflect horizontally when the ball is in
    # the paddle's x-band and within its vertical extent.
    if (ball.xcor() > 340 and ball.xcor() < 350) and (
        ball.ycor() < paddle_b.ycor() + 40 and ball.ycor() > paddle_b.ycor() - 50
    ):
        ball.setx(340)
        ball.dx *= -1
        winsound.PlaySound("pop3.wav", winsound.SND_ASYNC)
    if (ball.xcor() < -340 and ball.xcor() > -350) and (
        ball.ycor() < paddle_a.ycor() + 40 and ball.ycor() > paddle_a.ycor() - 50
    ):
        ball.setx(-340)
        ball.dx *= -1
        winsound.PlaySound("pop3.wav", winsound.SND_ASYNC)
| [
"hssunami@gmail.com"
] | hssunami@gmail.com |
b279b1451df059eb165e3b78c7ae0745403e033c | 83e18f5d4fcd7084defb32981337a8f9b646c4c7 | /python/648.replace-words.py | ee8de149abf9e58fd0d915e60f363e0df912861d | [
"MIT"
] | permissive | Zhenye-Na/leetcode | 709037a318e1be7e6ab92751f8695d888900591a | 18d91a6ba813f91531b04632563212dfde2cceb9 | refs/heads/master | 2023-04-10T07:06:06.502224 | 2023-04-01T00:18:44 | 2023-04-01T00:18:44 | 145,656,854 | 19 | 9 | MIT | 2022-05-16T03:14:02 | 2018-08-22T04:39:25 | Python | UTF-8 | Python | false | false | 1,485 | py | #
# @lc app=leetcode id=648 lang=python3
#
# [648] Replace Words
#
class TrieNode:
    """One trie node: child map plus end-of-word marker and the stored word."""

    def __init__(self):
        self.children = {}    # letter -> TrieNode
        self.is_word = False  # True when a dictionary root ends at this node
        self.word = None      # the full root stored here (terminal nodes only)
class Trie:
    """Prefix tree over dictionary roots with shortest-root lookup."""

    def __init__(self):
        self.root = TrieNode()

    def insert(self, word):
        """Add *word*, marking its final node as a stored root."""
        cursor = self.root
        for letter in word:
            cursor = cursor.children.setdefault(letter, TrieNode())
        cursor.is_word = True
        cursor.word = word

    def _find(self, word):
        """Return the node of *word*'s shortest stored prefix, else None."""
        cursor = self.root
        for letter in word:
            cursor = cursor.children.get(letter)
            if cursor is None:
                return None
            if cursor.is_word:
                return cursor
        return None

    def searchRoot(self, word):
        """Return the shortest stored root prefixing *word*, or *word*."""
        match = self._find(word)
        return word if match is None else match.word
class Solution:
    def replaceWords(self, dictionary: List[str], sentence: str) -> str:
        """Replace each word in *sentence* with its shortest root from
        *dictionary* (LeetCode 648).

        BUG FIX: the original returned an empty LIST for empty/None inputs
        (the annotation declares str) and silently dropped the sentence when
        the dictionary was empty; now "" and the unchanged sentence are
        returned respectively.
        """
        if sentence is None or len(sentence) == 0:
            return ""
        if dictionary is None or len(dictionary) == 0:
            return sentence
        # Build the trie of roots once, then rewrite word by word.
        trie = Trie()
        for root_word in dictionary:
            trie.insert(root_word)
        return " ".join(trie.searchRoot(word) for word in sentence.split())
| [
"32248549+Zhenye-Na@users.noreply.github.com"
] | 32248549+Zhenye-Na@users.noreply.github.com |
5c171efaf54b2aa88fc5311000764c22e74c4024 | d6c13f10900200da6edbae985b5752b3e275ec8f | /prototipo_gp/manage.py | 8199446e7659fb293424cf2540df32ed4908a7f5 | [] | no_license | GabrielFalcom/django_tutorial | 5f9f32064c8f514a62bdba2b5921d3e70778b236 | 1a215079d30053ab0f7787b7327c0f552202886b | refs/heads/master | 2020-03-28T17:51:39.614527 | 2018-09-21T19:16:42 | 2018-09-21T19:16:42 | 148,829,681 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 544 | py | #!/usr/bin/env python
import os
import sys
if __name__ == '__main__':
    # Point Django at this project's settings module before anything else
    # from Django is imported (setdefault keeps an externally set value).
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'prototipo_gp.settings')
    try:
        from django.core.management import execute_from_command_line
    except ImportError as exc:
        # Re-raise with a friendlier hint about missing/virtualenv Django.
        raise ImportError(
            "Couldn't import Django. Are you sure it's installed and "
            "available on your PYTHONPATH environment variable? Did you "
            "forget to activate a virtual environment?"
        ) from exc
    # Dispatch the management command given on the command line.
    execute_from_command_line(sys.argv)
| [
"noreply@github.com"
] | GabrielFalcom.noreply@github.com |
d43145dea0d5baba9908329178a21ca435dc5917 | 2dd7c1d70edb114919e1715ed37f1fade6b7092a | /pyqt5tut/signal and slots/demoListWidget.py | 48ca724348a421a6884fb59e0f9b9e0d8fb29c78 | [] | no_license | sifathasib/Multimedia-Using-Python | 1480467072e102153ea2640a344b4e8502262e93 | 8e24912c584f9ea72d73ebc137eddd43835bcda6 | refs/heads/master | 2023-01-20T18:44:26.602187 | 2020-11-29T12:05:41 | 2020-11-29T12:05:41 | 309,065,356 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,617 | py | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'demoListWidget3.ui'
#
# Created by: PyQt5 UI code generator 5.15.1
#
# WARNING: Any manual changes made to this file will be lost when pyuic5 is
# run again. Do not edit this file unless you know what you are doing.
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_Dialog(object):
    """pyuic5-generated UI class for a food-item dialog.

    NOTE: auto-generated from demoListWidget3.ui — regenerating with pyuic5
    will discard these docstrings.
    """
    def setupUi(self, Dialog):
        """Create and lay out the dialog's widgets on *Dialog*."""
        Dialog.setObjectName("Dialog")
        Dialog.resize(784, 482)
        self.label = QtWidgets.QLabel(Dialog)
        self.label.setGeometry(QtCore.QRect(40, 50, 131, 51))
        self.label.setObjectName("label")
        self.lineEditFoodItem = QtWidgets.QLineEdit(Dialog)
        self.lineEditFoodItem.setGeometry(QtCore.QRect(200, 60, 271, 31))
        self.lineEditFoodItem.setObjectName("lineEditFoodItem")
        self.pushButtonAdd = QtWidgets.QPushButton(Dialog)
        self.pushButtonAdd.setGeometry(QtCore.QRect(370, 160, 75, 23))
        self.pushButtonAdd.setObjectName("pushButtonAdd")
        self.listWidgetSelectedItem = QtWidgets.QListWidget(Dialog)
        self.listWidgetSelectedItem.setGeometry(QtCore.QRect(500, 50, 256, 201))
        self.listWidgetSelectedItem.setObjectName("listWidgetSelectedItem")
        self.retranslateUi(Dialog)
        QtCore.QMetaObject.connectSlotsByName(Dialog)
    def retranslateUi(self, Dialog):
        """Set all user-visible (translatable) texts."""
        _translate = QtCore.QCoreApplication.translate
        Dialog.setWindowTitle(_translate("Dialog", "Dialog"))
        self.label.setText(_translate("Dialog", "Your favourite food item"))
        self.pushButtonAdd.setText(_translate("Dialog", "Add"))
| [
"sifathasib@gmail.com"
] | sifathasib@gmail.com |
cdeb8f34ab72e09a44569ac95f598a3a30474256 | e24c3351cf85cd45f45ac672422a30f4b7f2bb83 | /facform1/migrations/0004_auto_20180904_1021.py | fc01ee1aada8df2fbfd54eb8b891ef9056d66921 | [] | no_license | tarun-jain98/pbas | d3e7a63db00903b8607f4db68d4a1a9099f37b7f | 8cd312254a18b3360bffa0eba20f75d81806d5c6 | refs/heads/master | 2020-04-04T06:42:03.099248 | 2018-11-01T05:52:54 | 2018-11-01T05:52:54 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 437 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.11 on 2018-09-04 10:21
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
    """Rename the misspelled 'deptartment' field to 'department' on User."""
    # Must run after 0003 of the facform1 app.
    dependencies = [
        ('facform1', '0003_auto_20180904_1021'),
    ]
    operations = [
        migrations.RenameField(
            model_name='user',
            old_name='deptartment',
            new_name='department',
        ),
    ]
| [
"tarunjofficial@gmail.com"
] | tarunjofficial@gmail.com |
132f6d03066fd7326fae6977aa63ca707b1abe03 | 6fba14730cf292c156ef3b84b596cc61dc65fc32 | /binchlib/statusbar.py | e31f22bad1f12fe38a41b8989e5ff5f415a710c8 | [] | no_license | sigma-random/binch | c83180623dd7ff5269e4f194ee8004d6f4df78a9 | dc2606a9765045e1bb320fd92cd4ce3a31a52ce4 | refs/heads/master | 2021-01-22T00:45:33.165699 | 2015-05-26T03:22:36 | 2015-05-26T03:22:36 | 36,411,326 | 1 | 0 | null | 2015-05-28T03:04:59 | 2015-05-28T03:04:59 | null | UTF-8 | Python | false | false | 3,275 | py | import urwid
import signals
class CommandLine(urwid.WidgetWrap):
    """Single-row widget showing messages and text / yes-no prompts.

    Driven by the project's blinker-style `signals` module; exactly one of
    promptCallback / promptYNCallback is active at a time (False = idle).
    NOTE: uses Python-2 `basestring` in keypress() — this file targets Py2.
    """
    def __init__(self):
        urwid.WidgetWrap.__init__(self, None)
        self.clear()
        signals.set_prompt.connect(self.sig_prompt)
        signals.set_prompt_yn.connect(self.sig_prompt_yn)
        signals.set_message.connect(self.sig_message)
        self.promptCallback = False
        self.promptYNCallback = False
    def clear(self):
        """Reset the row to an empty text widget."""
        self._w = urwid.Text("")
    def sig_message(self, sender, message, expire=None):
        """Show *message*; optionally clear it after *expire* seconds."""
        w = urwid.Text(message)
        self._w = w
        if expire:
            def cb(*args):
                # Only clear if this message is still the one on display.
                if w == self._w:
                    self.clear()
            signals.call_delay.send(seconds=expire, callback=cb)
    def sig_prompt(self, sender, text, callback):
        """Open a free-text prompt; *callback* receives the entered text."""
        self.promptYNCallback = False
        signals.focus.send(self, section='footer')
        self._w = urwid.Edit(text, "")
        self.promptCallback = callback
    def sig_prompt_yn(self, sender, text, callback, arg):
        """Open a yes/no prompt; *callback* receives ('y'|'n', arg)."""
        self.promptCallback = False
        signals.focus.send(self, section='footer')
        self.askYN(text, callback, arg)
    def askYN(self, text, callback, arg):
        """Display "<text> (y/n):" and remember the pending callback."""
        self._w = urwid.Edit(text + " (y/n):", '')
        self.promptYNCallback = (callback, arg)
    def prompt(self, text):
        """Complete a text prompt; a tuple result chains into a y/n ask."""
        msg = self.promptCallback(text)
        self.promptCallback = False
        if isinstance(msg, tuple):
            msg, callback, arg = msg
            self.askYN(msg, callback, arg)
        else:
            signals.focus.send(self, section='body')
            if isinstance(msg, str):
                signals.set_message.send(self, message=msg, expire=1)
    def prompt_yn(self, yn):
        """Complete a y/n prompt with answer *yn* ('y' or 'n')."""
        func, arg = self.promptYNCallback
        msg = func(yn, arg)
        signals.focus.send(self, section='body')
        self.promptYNCallback = False
        if msg:
            signals.set_message.send(self, message=msg, expire=1)
        else:
            self.clear()
    def prompt_clear(self):
        """Abort any pending prompt and return focus to the body."""
        self.promptCallback = False
        self.promptYNCallback = False
        signals.focus.send(self, section='body')
        self.clear()
    def selectable(self):
        # Must be selectable so the footer can take keyboard focus.
        return True
    def keypress(self, size, k):
        """Route keys: esc cancels, enter/y/n complete, text keys edit."""
        if self.promptCallback:
            if k == "esc":
                self.prompt_clear()
            elif k == "enter":
                self.prompt(self._w.get_edit_text())
            elif isinstance(k, basestring):
                self._w.keypress(size, k)
            else:
                return k
        elif self.promptYNCallback:
            if k == "esc":
                self.prompt_clear()
            elif k == "y" or k == "Y":
                self.prompt_yn('y')
            elif k == "n" or k == "N":
                self.prompt_yn('n')
class StatusBar(urwid.WidgetWrap):
    """Two-row footer: a styled status line stacked above the command line."""
    def __init__(self, text):
        urwid.WidgetWrap.__init__(self, None)
        self.commandline = CommandLine()
        status_widget = urwid.WidgetWrap(urwid.Text(text))
        self.status = urwid.AttrMap(status_widget, 'status')
        self._w = urwid.Pile([self.status, self.commandline])
    def keypress(self, *args, **kwargs):
        # All key handling is delegated to the command-line row.
        return self.commandline.keypress(*args, **kwargs)
    def selectable(self):
        # Selectable so the footer can receive keyboard focus.
        return True
| [
"cwhan.tunz@gmail.com"
] | cwhan.tunz@gmail.com |
7d3ce95c30331fbfa9ffa41ea76f5773720be584 | 0e554b878a94a79cd8eec2b399467aab403ce092 | /model/blog_model.py | 16c68a51aaf783c043d4752f5445c6e41a7fb652 | [] | no_license | DanelXia/flask_blog | 4f86f1dfc967b314856f396ba63c32a7bf0db94b | 14d5970900b1a7683c621f2a44b3956db80e09d9 | refs/heads/master | 2023-06-24T11:52:24.766430 | 2021-07-25T13:08:25 | 2021-07-25T13:08:25 | 389,349,569 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 402 | py | from exts import db
import datetime
class User(db.Model):
    """Blog user account (soft-deleted via the isdelete flag)."""
    # Surrogate primary key.
    userid = db.Column(db.Integer,primary_key=True)
    # Display/login name (<= 15 chars).
    username = db.Column(db.String(15),nullable=False)
    # Password field (64 chars — presumably a hash; verify at call sites).
    userpassword = db.Column(db.String(64),nullable=False)
    # Contact phone number, stored as text.
    phone = db.Column(db.String(15),nullable=False)
    # Soft-delete marker; rows are flagged rather than removed.
    isdelete = db.Column(db.Boolean,default=False)
    # Registration timestamp; datetime.now is evaluated at insert time.
    registdatetime = db.Column(db.DateTime,default=datetime.datetime.now)
| [
"rootx@192.168.2.56"
] | rootx@192.168.2.56 |
148c105d1628b50cfd16b9cb3edede9998465a9d | 3cc81046ed2c48149958b2be74dc81d19c9f4e73 | /code/kernels/nn_kernel_linear.py | 3b87a3e6de40a6bc6afe6facf8e600a6a68a942b | [] | no_license | RussellTsuchida/RicherPriorsForMLPs | f4912f823789be39c9fdf26abbd29973019ec8ab | fd8fe1bbe930902ce5dfda9e98dc105e9cae4909 | refs/heads/master | 2023-07-25T22:12:24.907413 | 2019-12-11T11:22:27 | 2019-12-11T11:22:27 | 223,089,125 | 4 | 0 | null | 2023-07-06T21:53:05 | 2019-11-21T04:37:33 | Python | UTF-8 | Python | false | false | 1,118 | py | import numpy as np
import GPy
import abc
from .nn_kernel import NNKernel
class NNKernelLinear(NNKernel):
    # NOTE: `__metaclass__` is the Python-2 metaclass hook and has no effect
    # under Python 3 (this file uses Py3 zero-arg super()).
    __metaclass__ = abc.ABCMeta
    def __init__(self, input_dim, variance_w = 1., mean_w = 0,
            variance_b = 0, mean_b = 0, L=1, name='nn_lin',
            standard_first_layer = False):
        """
        Equivalent kernel of a neural network with a single hidden layer and
        a linear activation function.
        input_dim (int): Dimensionality of input.
        variance_w (float): Variance of the weights.
        mean_w (float): Mean of the weights.
        variance_b (float): Variance of the biases.
        mean_b (float): Mean of the biases.
        L (int): The number of hidden layers.
        name (str): GPy kernel name.
        standard_first_layer: accepted but NOT forwarded to the parent —
            currently unused; TODO confirm intent.
        """
        super().__init__(input_dim, variance_w, mean_w, variance_b, mean_b, L,
                name)
    def _single_layer_K(self, x1norm, x2norm, x1sum, x2sum, cos_theta,
            x2_none=False):
        """
        Kernel for a single layer: ||x1|| ||x2|| cos(theta) + sum(x1) sum(x2).
        (x2_none is accepted for interface compatibility and unused here.)
        """
        return (x1norm*x2norm*cos_theta+x1sum*x2sum)
    def _single_layer_M(self, x1norm, x1sum):
        # Mean function for one layer: just the input's coordinate sum.
        return x1sum
| [
"susumu.tsuchida@uqconnect.edu.au"
] | susumu.tsuchida@uqconnect.edu.au |
df0b6acb4d5c765958f2441a00566a1125d87f42 | 3ce297662fe46e427e8fd7938c008886e8126e2d | /backend/models.py | 0fac1930cf623951d83d9625894cca7608f9bc68 | [] | no_license | daniel-hrckson/react-flask-cart-functionality | 40223f2661421ecd7ec132a54ee1acdb725efbcd | 0bb32aac226689c5c84972fdcb7a6a5d58e0fba1 | refs/heads/main | 2023-03-09T18:57:32.301499 | 2021-02-14T07:07:45 | 2021-02-14T07:07:45 | 333,806,404 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,024 | py | from sqlalchemy import Column, String, Float, Integer
from sqlalchemy.ext.declarative import declarative_base
Base = declarative_base()
class MenClothes(Base):
    """Men's clothing product row (same shape as the other section models)."""
    __tablename__ = 'men_clothes'
    # Stock-keeping unit, used as the primary key.
    sku = Column(Integer, primary_key=True, nullable=False)
    name = Column(String, nullable=False)
    price = Column(Float, nullable=False)
    # Image reference(s) stored as a single string.
    images = Column(String, nullable=False)
class WomenClothes(Base):
    """Women's clothing product row (same shape as the other section models)."""
    __tablename__ = 'women_clothes'
    # Stock-keeping unit, used as the primary key.
    sku = Column(Integer, primary_key=True, nullable=False)
    name = Column(String, nullable=False)
    price = Column(Float, nullable=False)
    # Image reference(s) stored as a single string.
    images = Column(String, nullable=False)
class Acessories(Base):
    """Accessories product row (class name keeps the original spelling,
    which callers import by)."""
    __tablename__ = 'acessories'
    # Stock-keeping unit, used as the primary key.
    sku = Column(Integer, primary_key=True, nullable=False)
    name = Column(String, nullable=False)
    price = Column(Float, nullable=False)
    # Image reference(s) stored as a single string.
    images = Column(String, nullable=False)
# Parallel lists: the model classes and their names, in the same order,
# so lookups by index stay consistent between the two.
sectionList = [MenClothes, WomenClothes, Acessories]
sectionListString = ['MenClothes', 'WomenClothes', 'Acessories']
"danie.henrickson@protonmail.com"
] | danie.henrickson@protonmail.com |
5dac0c19eab9c7b268488fd6cdc4210bbd88cc03 | 74def152048a4d44f249168d748aeb849b43ca35 | /igra_zivota_zadatak4.py | efb67163a40cc75f614b171a8d1102cb241a3e25 | [] | no_license | kalugege/ParallelGameOfLife | 0b878a1c76ba16ae2d0df91308ded454b5dcb9ca | 6779d194eb38f69e43c993790ef7cc1805a6ec3d | refs/heads/main | 2023-01-08T20:57:18.463922 | 2020-11-01T13:54:58 | 2020-11-01T13:54:58 | 307,406,183 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,686 | py | import numpy as np
import multiprocessing as mp
N = 4
matrica=np.random.randint(2,size=(N,N))
def igra_zivota(task, matrica, N):
    """One Game-of-Life step for the cells in *task* on an N x N torus.

    task: iterable of (x, y) cell coordinates to evaluate.
    matrica: N x N 0/1 grid (numpy array); edges wrap around (modulo N).
    Returns a dict mapping each (x, y) in task to its next state (0 or 1).
    """
    next_state = {}
    for x, y in task:
        # Count live cells among the 8 wrapped neighbours.
        alive = 0
        for nx in range(x - 1, x + 2):
            for ny in range(y - 1, y + 2):
                if nx == x and ny == y:
                    continue  # skip the cell itself
                alive += matrica[nx % N, ny % N]
        if alive < 2 or alive > 3:
            next_state[(x, y)] = 0  # under- or over-population
        elif matrica[x, y] == 1:
            next_state[(x, y)] = 1  # survives with 2 or 3 neighbours
        elif alive == 3:
            next_state[(x, y)] = 1  # dead cell with exactly 3 -> birth
        else:
            next_state[(x, y)] = 0
    return next_state
if __name__ == '__main__':
    # History of generations, starting from the random initial grid.
    lista_matrica=[matrica.copy()]
    tasks=[]
    rezultati=[]
    # print(matrica)
    # One task per grid row: the list of that row's cell coordinates.
    for i in range(N):
        red = []
        for j in range(N):
            red.append((i,j))
        tasks.append(red)
    pool = mp.Pool(mp.cpu_count())
    # Run 100 generations. NOTE(review): pool.apply blocks until each task
    # finishes, so rows are evaluated serially; apply_async/map would give
    # real parallelism. All results are computed against the unmodified grid
    # before it is updated, which keeps generations consistent.
    for i in range(0,100):
        rezultati = [pool.apply(igra_zivota,args=(task,matrica,N)) for task in tasks]
        for rezultat in rezultati:
            for kljuc,vrednost in rezultat.items():
                matrica[kljuc[0],kljuc[1]]=vrednost
                # print(kljuc , ':' , vrednost)
        lista_matrica.append(matrica.copy())
    # print(rezultati)
    pool.close()
    pool.join()
    # Dump every recorded generation with its index.
    count=0
    for m in lista_matrica:
        print(count,m)
        count=count+1
| [
"kalugege@gmail.com"
] | kalugege@gmail.com |
ae3b1707df8245e73f1cc58d425c3b5b32058dc0 | de33458832ddb1b90c25135b4c477794733e0ad2 | /pinfoViewsBackup.py | 1910f8ce7f5dee18223ca87a47b144316c39a70d | [] | no_license | guptamohitofficial/Teacher-Gaurdian-Helper | a4d21c7a85a761d4e6c7ab0a2936b3081dff07fc | ac1855447216b4e4bf16379f8d80d2c6951712c7 | refs/heads/master | 2021-07-24T22:53:17.395168 | 2020-09-27T09:44:42 | 2020-09-27T09:44:42 | 220,827,974 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 13,779 | py | from django.shortcuts import render
from django.http import HttpResponse
from django.contrib import messages
from .__init__ import cursor, conn, sendMail
from creator.models import allot_tg, semister
import pandas as pd
from random import randint
from passlib.hash import pbkdf2_sha256
# Create your views here.
def home(request):
    """Login view: students (enrollment starts '0114') and TGs share one form.

    On success renders student_home.html or tg_home.html; session state is
    kept in module-level globals (result/output_l/notice/regis_result), so
    this is NOT safe for concurrent users — confirm before deploying.
    SECURITY NOTE: the raw SQL below interpolates the table name and the
    user-supplied enrollment directly into the query string (SQL injection
    risk); should be parameterized.
    NOTE(review): a POST whose username does not start with '0114' falls
    through without returning an HttpResponse — looks like a bug; confirm.
    """
    if request.method == 'POST':
        global result
        global output_l
        global notice
        global regis_result
        result = ""
        output_l = ""
        notice = ""
        regis_result = ""
        uname2db = request.POST['enroll']
        pass2db = request.POST['passwd']
        if uname2db == "" or pass2db == "":
            return render(request,'index.html',{'err':'Empty Feilds'})
        else:
            if uname2db[0:4] == "0114":
                uname2db = uname2db.upper()
                out = allot_tg.objects.all()
                class_name = ""
                # Find which class table contains this enrollment number.
                for i in out:
                    query = "select * from " + i.have_class + " where enrollment='" + str(uname2db) + "';"
                    cursor.execute(query)
                    output_l = cursor.fetchall()
                    if output_l:
                        class_name = i.have_class
                        break
                try:
                    result = output_l[0]
                    print(result[6])
                    print(pass2db)
                    # result[6] is the stored password hash for the student.
                    if pbkdf2_sha256.verify(pass2db,result[6]):
                        query = "select notic from noti where class='"+class_name+"';"
                        cursor.execute(query)
                        notice = cursor.fetchall()
                        return render(request,'student_home.html',{'result':result,'class_name':class_name,'noti':notice})
                    else:
                        return render(request,'index.html',{'err':'Pssword not matched'})
                # NOTE(review): bare except — any failure above (not just a
                # missing student) silently falls through to the TG lookup.
                except:
                    output_l = allot_tg.objects.filter(tg_name__exact=uname2db)
                    try:
                        result = output_l[0]
                        if pbkdf2_sha256.verify(pass2db,result.password):
                            regis_result = None
                            regis_request = "select * from register_temp where class='" + result.have_class + "';"
                            cursor.execute(regis_request)
                            regis_result = cursor.fetchall()
                            return render(request,'tg_home.html',{'work_':'home','result':result,'registrations':regis_result})
                        else:
                            return render(request,'index.html',{'err':'Pssword not matched'})
                    except IndexError:
                        return render(request,'index.html',{'err':'user not found'})
    else:
        return render(request,'index.html')
def signup(request):
    """Student self-registration: hashes the password and stores the request
    in register_temp for TG approval.

    SECURITY NOTE: the INSERT below concatenates user input into the SQL
    string (injection risk) — should use parameterized queries like
    add_stu() does. `otp_reg` is generated and printed but never sent/used.
    """
    global output
    output = ""
    if request.method == 'POST':
        try:
            output = allot_tg.objects.all()
            enroll2db = request.POST['enroll']
            enroll2db = enroll2db.upper()
            fname2db = request.POST['fname']
            fname2db = fname2db.title()
            lname2db = request.POST['lname']
            lname2db = lname2db.title()
            email2db = request.POST['email']
            phone2db = request.POST['phone']
            class2db = request.POST['to_class']
            gender2db = request.POST['gender']
            passwd2db = request.POST['pass']
            passwd_cnf = request.POST['pass_cnf']
        # NOTE(review): bare except masks which form field was missing.
        except:
            output = allot_tg.objects.all()
            messages.info(request,"Something Wrong Happend in INPUTS")
            return render(request,'signup.html',{'clas':output})
        if passwd2db == passwd_cnf:
            # Store only the pbkdf2 hash, never the plain password.
            pass_hash = pbkdf2_sha256.encrypt(passwd2db,rounds=12000,salt_size=32)
            otp_reg = randint(1000,9999)
            print(otp_reg)
            query = "insert into register_temp(enrollment,first_name,last_name,email,phone,class,gender,password) values('" + enroll2db + "','" + fname2db + "','" + lname2db + "','" + email2db + "','" + phone2db + "','" + class2db + "','" + gender2db + "','" + pass_hash + "');"
            try:
                cursor.execute(query)
                print(query)
                conn.commit()
            # NOTE(review): assumes the only failure mode is a duplicate key.
            except:
                messages.info(request,"User exists")
                return render(request,'signup.html',{'clas':output})
            print("Done !!! Data Saved !!!")
            messages.info(request,enroll2db+" Registered")
            return render(request,'signup.html',{'clas':output})
        else:
            messages.info(request,"Both passwords does not match")
            return render(request,'signup.html',{'clas':output})
    else:
        output = allot_tg.objects.all()
        return render(request,'signup.html',{'clas':output})
def add_sem(request):
    """Create a semester: a `semister` model row plus a dedicated marks table
    named <class>_sem<sem> with one integer column per subject.

    GET renders the form with the semesters not yet created; POST performs
    the creation. Relies on the module-global `result` set by home(); the
    NameError handler redirects unauthenticated users to the login page.
    SECURITY NOTE: class/sem/subject strings are interpolated into DDL —
    identifiers cannot be bound as parameters, so they must be validated.
    """
    try:
        if request.method == 'POST':
            try:
                sem = request.POST['semister']
                class_name = request.POST['class_name']
                subjects = request.POST['subjects']
            except KeyError:
                messages.info(request,"Inputs Error")
                return render(request,'tg_home.html',{'work_':'add_sem','result':result})
            if semister.objects.filter(sem=sem).filter(class_name=class_name):
                messages.info(request,"Semister Exists")
                return render(request,'tg_home.html',{'work_':'home','result':result})
            else:
                create_sem = semister(sem = sem,class_name = class_name, subjects = subjects)
                create_sem.save()
                # Split the comma-separated subject string by hand; the
                # trailing element is flushed when i reaches the last char.
                subs = []
                sub = ""
                n = len(subjects)
                for i in range(0,n):
                    if subjects[i] != ',':
                        sub = sub+subjects[i]
                    else:
                        subs.append(sub)
                        sub = ""
                    if i==n-1:
                        subs.append(sub)
                # Build "create table <class>_sem<sem>(enrollment ..., <subject> integer, ...)".
                query = "create table " + class_name + "_sem" + sem + "(enrollment varchar(12) PRIMARY KEY, "
                n = len(subs)
                for i in range(0,n):
                    query += subs[i]+" integer"
                    if i != n-1:
                        query += ", "
                query += ");"
                cursor.execute(query)
                conn.commit()
                messages.info(request,"Semister "+sem+" for "+class_name+" is Created")
                return render(request,'tg_home.html',{'work_':'home','result':result})
        else:
            print(result)
            # Offer only the semester numbers not already created.
            sem_left = [1,2,3,4,5,6,7,8]
            sem_exist = semister.objects.filter(class_name=result.have_class)
            for i in sem_exist:
                sem_left.remove(i.sem)
            return render(request,'tg_home.html',{'work_':'add_sem','result':result,'sem_exist':sem_left})
    except NameError:
        return render(request,'index.html',{'err':'Please Login First'})
def del_sem(request):
    """Delete a semester: drops its <class>_sem<sem> marks table and removes
    the `semister` model row.

    GET renders the form with the semesters that exist; POST deletes.
    Relies on the module-global `result` set by home(); the NameError
    handler redirects unauthenticated users to the login page.
    SECURITY NOTE: sem/class_name are interpolated into the SQL (identifiers
    cannot be bound as parameters) — they must be validated upstream.
    """
    try:
        if request.method == 'POST':
            try:
                sem = request.POST['semister']
                class_name = request.POST['class_name']
            except KeyError:
                messages.info(request,"Inputs Error")
                return render(request,'tg_home.html',{'work_':'del_sem','result':result})
            if not semister.objects.filter(sem=sem).filter(class_name=class_name):
                # BUG FIX: the original showed "Semister Exists" on this
                # branch, which is the opposite of what happened.
                messages.info(request,"Semister does not exist")
                return render(request,'tg_home.html',{'work_':'home','result':result})
            else:
                query = "delete from creator_semister where sem=" + sem + " and class_name='" + class_name + "';"
                print(query)
                cursor.execute(query)
                conn.commit()
                query = "drop table " + class_name + "_sem" + sem + ";"
                cursor.execute(query)
                conn.commit()
                messages.info(request,"Semister "+sem+" for "+class_name+" is Deleted")
                return render(request,'tg_home.html',{'work_':'home','result':result})
        else:
            print(result)
            # Offer only the semester numbers that currently exist.
            sem_have = []
            sem_exist = semister.objects.filter(class_name=result.have_class)
            for i in sem_exist:
                sem_have.append(i.sem)
            return render(request,'tg_home.html',{'work_':'del_sem','result':result,'sem_exist':sem_have})
    except NameError:
        return render(request,'index.html',{'err':'Please Login First'})
def add_stu(request):
try:
if request.method == 'POST':
xl_file = request.FILES['file_stu']
data_frame = pd.read_excel(xl_file,sheet_name="Sheet1")
query = "INSERT INTO "+ result.have_class +"(enrollment,first_name,last_name,email,phone,gender,password) VALUES(%s,%s,%s,%s,%s,%s,%s);"
l = []
i = data_frame.columns
count = 0
n_row = len(data_frame)
for j in range(0,n_row):
pt = []
for k in i:
l = data_frame[k]
if k == 'enrollment':
pt.append(l[j].upper())
continue
elif k == 'last_name' or k == 'first_name':
pt.append(l[j].title())
continue
elif k == 'email':
if l[j]:
sendMail("Registraion Success",l[j],data_frame['first_name'][j]+", Welcome to Truba TG Helper Plateform\nYour User ID is : "+data_frame['enrollment'][j]+" \nPassword is : "+data_frame['password'][j])
elif k == 'password':
pt.append(pbkdf2_sha256.encrypt(l[j],rounds=12000,salt_size=32))
continue
pt.append(str(l[j]))
cursor.execute(query,pt)
conn.commit()
count += cursor.rowcount
print(pt)
messages.info(request,str(count)+" Students has been added")
return render(request,'tg_home.html',{'work_':'home','result':result})
else:
print(result)
return render(request,'tg_home.html',{'work_':'add_stu','result':result})
except IndexError:
return render(request,'index.html',{'err':'Please Login First'})
def add_notification(request):
try:
if request.method == 'POST':
notice = request.POST['notification']
query = "insert into noti(class,notic) values('" + result.have_class + "','" + notice + "');"
cursor.execute(query)
conn.commit()
messages.info(request,"Notification Added")
return render(request,'tg_home.html',{'work_':'home','result':result})
else:
print(result)
return render(request,'tg_home.html',{'work_':'add_notification','result':result})
except NameError:
return render(request,'index.html',{'err':'Please Login First'})
def show_stu(request):
try:
if request.method == 'POST':
query = "select * from " + result.have_class + ";"
cursor.execute(query)
stus = cursor.fetchall()
return render(request,'tg_home.html',{'work_':'show_stu','result':result,'stus':stus})
else:
print(result)
return render(request,'tg_home.html',{'work_':'home','result':result})
except NameError:
return render(request,'index.html',{'err':'Please Login First'})
def accept(request):
if request.method == 'POST':
acc_stu = request.POST['requested_stu']
acc_stu = acc_stu.upper()
regis_request = "select * from register_temp where enrollment='" + acc_stu + "';"
cursor.execute(regis_request)
regis_result = cursor.fetchall()
query1 = "INSERT INTO "+ result.have_class +"(enrollment,first_name,last_name,email,phone,gender,password) VALUES(%s,%s,%s,%s,%s,%s,%s);"
hh = [regis_result[0][0],regis_result[0][1],regis_result[0][2],regis_result[0][3],regis_result[0][4],regis_result[0][6],regis_result[0][7]]
query = "delete from register_temp where enrollment='"+ acc_stu +"';"
cursor.execute(query)
conn.commit()
cursor.execute(query1,hh)
conn.commit()
regis_result = None
regis_request = "select * from register_temp where class='"+ result.have_class +"';"
cursor.execute(regis_request)
regis_result = cursor.fetchall()
messages.info(request,"Request of "+acc_stu+" accepted")
return render(request,'tg_home.html',{'work_':'home','result':result,'registrations':regis_result})
else:
try:
regis_result = None
regis_request = "select * from register_temp where class='"+ result.have_class +"';"
cursor.execute(regis_request)
regis_result = cursor.fetchall()
return render(request,'tg_home.html',{'work_':'home','result':result,'registrations':regis_result})
except:
messages.info(request,"Something wrong Happening at no post request")
return render(request,'tg_home.html',{'work_':'home','result':result,'registrations':regis_result})
def about_us(request):
return render(request,'about.html')
def logout(request):
try:
del result
del output_l
del output
del notice
del regis_result
return render(request,'index.html',{'err':'Success Loged Out'})
except UnboundLocalError:
return render(request,'index.html',{'err':'Success Loged Out'})
| [
"gotomohitsplace@gmail.com"
] | gotomohitsplace@gmail.com |
f85bbfebf1415fa39d8acf5d5cb6f7722ac30e35 | 895f79e57861f2e2d552750fe56b5f742bdbd4cb | /tcv_stock/model/tcv_to_dispatch_config.py | 1ef87fe8ff78fda6a762bcdf463a6ab9bf812941 | [] | no_license | Tecvemar/openerp60 | e9899eebcfa150dd52537db8dcbf7264fafc63cd | 8534c448f63c71e57d91b21656f1bc1aa8f7aea8 | refs/heads/master | 2023-02-15T04:55:05.817013 | 2023-01-26T21:01:35 | 2023-01-26T21:01:35 | 74,976,919 | 1 | 1 | null | 2022-01-21T14:42:29 | 2016-11-28T13:45:07 | Python | UTF-8 | Python | false | false | 8,721 | py | # -*- encoding: utf-8 -*-
##############################################################################
# Company: Tecvemar, c.a.
# Author: Juan Márquez
# Creation Date: 2017-04-03
# Version: 0.0.0.1
#
# Description:
#
#
##############################################################################
#~ from datetime import datetime
from osv import fields, osv
#~ from tools.translate import _
#~ import pooler
#~ import decimal_precision as dp
import time
import netsvc
import logging
logger = logging.getLogger('server')
__to_dispatch_str__ = '-PD'
##------------------------------------------------------ tcv_to_dispatch_config
class tcv_to_dispatch_config(osv.osv):
_name = 'tcv.to.dispatch.config'
_description = ''
##-------------------------------------------------------------------------
##------------------------------------------------------- _internal methods
def _copy_to_dispatch_picking(self, pck, cfg):
return {
'name': '%s%s' % (pck.name, __to_dispatch_str__),
'origin': pck.origin,
'address_id': pck.address_id.id,
'min_date': pck.min_date,
'date': pck.date,
'stock_journal_id': pck.stock_journal_id.id,
'backorder_id': pck.id,
'partner_id': pck.partner_id.id,
'auto_picking': False,
'company_id': pck.company_id.id,
'invoice_state': pck.invoice_state,
'state': 'draft',
'type': pck.type,
'move_type': pck.move_type,
'sale_id': pck.sale_id.id,
'container': pck.container,
'note': pck.note,
}
def _copy_to_dispatch_move_line(self, sm, cfg):
return {
'product_id': sm.product_id.id,
'name': sm.name,
'date': sm.date,
'location_id': cfg.location_dest_id.id,
'location_dest_id': cfg.location_id.id,
'pieces_qty': sm.pieces_qty,
'product_qty': sm.product_qty,
'product_uom': sm.product_uom.id,
'product_uos_qty': sm.product_uos_qty,
'product_uos': sm.product_uos.id,
'prodlot_id': sm.prodlot_id.id,
'state': 'draft',
}
##--------------------------------------------------------- function fields
_columns = {
'name': fields.char(
'Name', size=64, required=False, readonly=False),
'date_from': fields.date(
'Date from', required=True, readonly=False,
help="Create picking for moves from this date"),
'location_id': fields.many2one(
'stock.location', 'Actual dest Loc', required=True,
readonly=False, ondelete='restrict',
help="Autocreate stock picking to dispatch product/lot when " +
"stock move isn't done and destination location is this. " +
"Usually 'Customers'"),
'location_dest_id': fields.many2one(
'stock.location', 'To dispatch loc', required=True, readonly=False,
ondelete='restrict',
help="Move to dispatch product/lot to this location. Can't be a " +
"chained location, Usually 'To dispatch'"),
'stock_journal_id': fields.many2one(
'stock.journal', 'Stock Journal', required=False,
select=True, ondelete='restrict',
help="Only select picking in this journal"),
'company_id': fields.many2one(
'res.company', 'Company', required=True, readonly=True,
ondelete='restrict'),
'driver_id': fields.many2one(
'tcv.driver.vehicle', 'Driver', ondelete='restrict',
domain="[('type','=','driver')]", required=True,
help="Default driver for auto picking"),
'vehicle_id': fields.many2one(
'tcv.driver.vehicle', 'Vehicle', ondelete='restrict',
domain="[('type','=','vehicle')]", required=True,
help="Default vehicle for auto picking"),
}
_defaults = {
'company_id': lambda self, cr, uid, c: self.pool.get('res.company').
_company_default_get(cr, uid, self._name, context=c),
'date_from': lambda *a: time.strftime('%Y-01-01'),
'driver_id': lambda *a: 1,
'vehicle_id': lambda *a: 2,
}
_sql_constraints = [
('company_uniq', 'UNIQUE(company_id)',
'The config must be unique for company!'),
]
##-------------------------------------------------------------------------
##---------------------------------------------------------- public methods
def get_config(self, cr, uid, company_id=None, context=None):
cfg_id = self.search(cr, uid, [])
if cfg_id and len(cfg_id) == 1:
cfg_id = cfg_id[0]
return self.browse(cr, uid, cfg_id, context)
else:
logger.warn('No stock picking to dispatch settings !')
return False
def generate_to_dispatch_picking(self, cr, uid, context=None):
cfg = self.get_config(cr, uid)
if not cfg:
return False
obj_pck = self.pool.get('stock.picking')
obj_mov = self.pool.get('stock.move')
wf_service = netsvc.LocalService("workflow")
logger.info(
'Looking for stock picking to dispatch. From date: %s' %
cfg.date_from)
picking_ids = obj_pck.search(
cr, uid, [('state', 'in', ('confirmed', 'assigned')),
('date', '>=', cfg.date_from),
('stock_journal_id', '=', cfg.stock_journal_id.id),
('type', '=', 'out'),
('sale_id', '!=', None),
('company_id', '=', cfg.company_id.id)],
order='name')
to_dispatch = 0
for pck in obj_pck.browse(cr, uid, picking_ids, context=context):
if __to_dispatch_str__ in pck.name:
continue # Skip already processed pickings
# Check all invoice's state in open or paid
inv_ok = all([bool(x.state in ('open', 'paid'))
for x in pck.sale_id.invoice_ids])
# Check all stock move's origin != 'to dispatch' location
loc_ok = all([x.location_id.id != cfg.location_dest_id.id
for x in pck.move_lines])
# Check if not tracking assigned to stock move
trk_ok = all([not(x.tracking_id) for x in pck.move_lines])
so_ok = pck.sale_id.order_policy == 'prepaid'
if inv_ok and loc_ok and trk_ok and so_ok:
to_dispatch += 1
new_pck = self._copy_to_dispatch_picking(pck, cfg)
move_lines = []
for sm in pck.move_lines:
line = self._copy_to_dispatch_move_line(sm, cfg)
obj_mov.write(
cr, uid, [sm.id],
{'location_dest_id': cfg.location_dest_id.id},
context=context)
move_lines.append((0, 0, line))
new_pck.update({'move_lines': move_lines})
new_pck_id = obj_pck.create(cr, uid, new_pck, context)
if new_pck_id:
logger.info(
'Set stock picking %s to dispatch.' % pck.name)
# Set default driver and vehicle
obj_pck.write(
cr, uid, [pck.id],
{'driver_id': cfg.driver_id.id,
'vehicle_id': cfg.vehicle_id.id,
}, context=context)
# Original pick workflow
if pck.state == 'confirmed':
obj_pck.action_assign(cr, uid, [pck.id])
wf_service.trg_validate(
uid, 'stock.picking', pck.id, 'button_done', cr)
# New pick workflow
wf_service.trg_validate(
uid, 'stock.picking', new_pck_id, 'button_confirm', cr)
else:
logger.info('Stock picking %s ignored.' % pck.name)
if not to_dispatch:
logger.info('No stock picking to dispatch.')
return True
##-------------------------------------------------------- buttons (object)
##------------------------------------------------------------ on_change...
##----------------------------------------------------- create write unlink
##---------------------------------------------------------------- Workflow
tcv_to_dispatch_config()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| [
"juanvmarquezl@gmail.com"
] | juanvmarquezl@gmail.com |
04a0f76dea48546a65e01b7e2f51cc2ad438972d | a5904bd3d5c5ee65ca62bc608d126123fffd4954 | /examples/simple_app_with_marshalling.py | 9033087d4fa86cd0859dc18d4f09b24d62aec54e | [
"Apache-2.0"
] | permissive | chimpler/pytcher | e9bb0327e2224e17c8c0ed64d82e2cb039ddbaa0 | cf6cbb5c583a79a50c53845fd293b3d7cbb29a85 | refs/heads/master | 2023-05-31T11:54:55.687933 | 2019-06-13T23:40:05 | 2019-06-13T23:40:05 | 173,527,983 | 7 | 0 | Apache-2.0 | 2021-04-29T19:32:46 | 2019-03-03T03:50:52 | Python | UTF-8 | Python | false | false | 1,336 | py | # flake8: noqa: E999
import json
from dataclasses import dataclass
from typing import Dict
from pytcher import Request, Integer, AppRouter, Response
from pytcher.app import App
@dataclass
class InventoryItem(object):
name: str
unit_price: float
quantity: int = 0
class MyAppRouter(AppRouter):
def __init__(self):
words = [
'wine',
'pizza',
'cheese',
'peanuts',
'ice-cream'
]
self._inventory = [
InventoryItem(word, 10 + i, i + 1)
for i in range(10)
for word in words
]
def serialize(self, root_obj, status_code: int, headers: Dict[str, str]):
def to_dict(obj):
if isinstance(obj, list):
return [
to_dict(child)
for child in obj
]
elif isinstance(obj, InventoryItem):
return InventoryItem.schema().dump(obj)
return Response(json.dumps(to_dict(root_obj)))
def route(self, r: Request):
with r.get / 'items':
with r / Integer() as [item_index]:
return self._inventory[item_index]
with r.end:
return self._inventory
app = App(MyAppRouter())
if __name__ == '__main__':
app.start()
| [
"francois.dangngoc@gmail.com"
] | francois.dangngoc@gmail.com |
2490d175700401cb6974363d87fb11c0dcd29030 | 886067dfdfd5e5986566b195a2c7e9f2117e34cb | /enactus/news/admin.py | d7b8cdd794b33c76facfdb9ae63ff114da44e746 | [] | no_license | fxy12345/crispy-winner | 5eecdec28274ce0e27ef8a2e3a57532ad36247e6 | 9fa57ec8d2a399c1c015b4b4d78426c406b30acd | refs/heads/master | 2022-01-17T15:35:06.790259 | 2019-07-21T13:11:45 | 2019-07-21T13:11:45 | 198,055,654 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 118 | py | from django.contrib import admin
from .models import Moment
admin.site.register(Moment)
# Register your models here.
| [
"15098801926@163.com"
] | 15098801926@163.com |
ac2706e015f6d3ca82f6f37ee532f9f8539b5fd2 | b0dbd2e4dd83fe012cde29c8474bae5e33c23e2a | /harbor_client/model/inline_object.py | 14d1693d21d96b9fe253d25ef01c946f8fe107fa | [] | no_license | DevotionZhu/harbor-python-client-api | 0ba3999e5af126dbe97f0234c4a9601660a97dbb | f0cc6c453b488d5f456eff94000156182eb3a468 | refs/heads/master | 2023-04-17T22:06:06.024871 | 2021-04-11T22:20:28 | 2021-04-11T22:20:28 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,462 | py | """
Harbor API
These APIs provide services for manipulating Harbor project. # noqa: E501
The version of the OpenAPI document: 2.0
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
import sys # noqa: F401
from harbor_client.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
class InlineObject(ModelNormal):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
Attributes:
allowed_values (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
with a capitalized key describing the allowed value and an allowed
value. These dicts store the allowed enum values.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
discriminator_value_class_map (dict): A dict to go from the discriminator
variable value to the discriminator class name.
validations (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
that stores validations for max_length, min_length, max_items,
min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
inclusive_minimum, and regex.
additional_properties_type (tuple): A tuple of classes accepted
as additional properties values.
"""
allowed_values = {
}
validations = {
}
additional_properties_type = None
_nullable = False
@cached_property
def openapi_types():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
Returns
openapi_types (dict): The key is attribute name
and the value is attribute type.
"""
return {
'secret': (str,), # noqa: E501
}
@cached_property
def discriminator():
return None
attribute_map = {
'secret': 'secret', # noqa: E501
}
_composed_schemas = {}
required_properties = set([
'_data_store',
'_check_type',
'_spec_property_naming',
'_path_to_item',
'_configuration',
'_visited_composed_classes',
])
@convert_js_args_to_python_args
def __init__(self, *args, **kwargs): # noqa: E501
"""InlineObject - a model defined in OpenAPI
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
composed schema that is
is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
secret (str): The new secret. [optional] # noqa: E501
"""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)
| [
"vb@container-registry.com"
] | vb@container-registry.com |
702c800985b4f7a7a6d298437367807dc571d1f8 | 72b3886d9688a0358ed7305f6bf9d3d07b5133c6 | /wp-scan-json.py | 03a3f4aae7f4b7515d66a7ce67f8c1d02e65bef3 | [] | no_license | njdevengine/automation | 5e22f1ffb3d61f2b44d98145ed929ebab231e953 | 736e17d10190f4946b98e8ecbeb8aad3efe30327 | refs/heads/master | 2020-06-22T04:15:00.503455 | 2019-09-11T19:34:56 | 2019-09-11T19:34:56 | 197,629,940 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 217 | py | import os
import json
site = "www.yoursite.com"
os.system('wpscan --url'+' '+site+' -o '+site.split('.')[1]+' -f json')
with open(site.split('.')[1], 'r') as myfile:
data=myfile.read()
obj = json.loads(data)
obj
| [
"noreply@github.com"
] | njdevengine.noreply@github.com |
d0841151bec5d9accde42dcbfbf4d28f41f617ca | 6b093f9e0d6e811ed7acdbde0aefd2d42de7edee | /accounts/tests/test_authentication.py | 73aa59cad6329a18d49366fc731324512da485c3 | [] | no_license | blancheta/superlists | 3cf49c0f6f9ef90b95ae9313a31db70c5bf230d8 | 4d93042c62a97f92fc619ca5d63f64f80d23c817 | refs/heads/master | 2021-01-19T05:01:59.563831 | 2016-06-05T17:34:36 | 2016-06-05T17:34:36 | 60,361,426 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,828 | py | from django.contrib.auth import get_user_model
from unittest.mock import patch
from django.test import TestCase
from superlists.settings import DOMAIN
import logging
from accounts.authentication import (
PERSONA_VERIFY_URL,
PersonaAuthenticationBackend
)
User = get_user_model()
@patch('accounts.authentication.requests.post')
class AuthenticateTest(TestCase):
def setUp(self):
self.backend = PersonaAuthenticationBackend()
user = User(email='other@user.com')
user.username = 'otheruser'
user.save()
def test_sends_assertion_to_mozilla_with_domain(self, mock_post):
self.backend.authenticate('an assertion')
mock_post.assert_called_once_with(
PERSONA_VERIFY_URL,
data={'assertion': 'an assertion', 'audience': DOMAIN}
)
def test_returns_none_if_response_errors(self, mock_post):
mock_post.return_value.ok = False
mock_post.return_value.json.return_value = {}
user = self.backend.authenticate('an assertion')
self.assertIsNone(user)
def test_returns_none_if_status_not_okay(self, mock_post):
mock_post.return_value.json.return_value = {'status': 'not okay'}
user = self.backend.authenticate('an assertion')
self.assertIsNone(user)
def test_finds_existing_user_with_email(self, mock_post):
mock_post.return_value.json.return_value = {
'status': 'okay',
'email': 'a@b.com',
}
actual_user = User.objects.create(email='a@b.com')
found_user = self.backend.authenticate('an assertion')
self.assertEqual(found_user, actual_user)
def test_creates_new_user_if_necessary_for_valid_assertion(self, mock_post):
mock_post.return_value.json.return_value = {
'status': 'okay',
'email': 'a@b.com',
}
found_user = self.backend.authenticate('an assertion')
new_user = User.objects.get(email='a@b.com')
self.assertEqual(found_user, new_user)
def test_logs_non_okay_responses_from_persona(self, mock_post):
response_json = {
'status': 'not okay', 'reason': 'eg. audience mismatch'
}
mock_post.return_value.ok = True
mock_post.return_value.json.return_value = response_json
logger = logging.getLogger('accounts.authentication')
with patch.object(logger, 'warning') as mock_log_warning:
self.backend.authenticate('an assertion')
mock_log_warning.assert_called_once_with(
'Persona says no. Json was: {}'.format(response_json)
)
class GetUserTest(TestCase):
def test_gets_user_by_email(self):
backend = PersonaAuthenticationBackend()
other_user = User(email='other@user.com')
other_user.username = 'otheruser'
other_user.save()
desired_user = User.objects.create(email='a@b.com')
found_user = backend.get_user('a@b.com')
self.assertEqual(found_user, desired_user)
def test_returns_none_if_no_user_with_that_email(self):
backend = PersonaAuthenticationBackend()
self.assertIsNone(
backend.get_user('a@b.com')
)
| [
"alexandreblanchet44@gmail.com"
] | alexandreblanchet44@gmail.com |
08f3161f538785533af228861f8897c0588b4bd7 | 05c9f1af21a698e09f7ec37a075624250e907262 | /samples/cloudfiles/upload_folder.py | 98cc1e4e566028fe09d5a865b1edd3a98c390a6c | [
"Apache-2.0"
] | permissive | pycontribs/pyrax | 5f5a1d6816f5a831b1ae4b74ffaf438a1c0269a6 | 2397136b75e6fcc906ee406e9c1bc7aaef94387a | refs/heads/master | 2023-08-28T16:43:21.037208 | 2022-09-21T15:14:38 | 2022-09-21T15:14:38 | 5,975,139 | 10 | 27 | Apache-2.0 | 2021-07-12T21:23:11 | 2012-09-27T01:05:57 | Python | UTF-8 | Python | false | false | 3,013 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c)2012 Rackspace US, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import print_function
import os
import time
import six
import pyrax
import pyrax.exceptions as exc
import pyrax.utils as utils
pyrax.set_setting("identity_type", "rackspace")
creds_file = os.path.expanduser("~/.rackspace_cloud_credentials")
pyrax.set_credential_file(creds_file)
cf = pyrax.cloudfiles
cont_name = pyrax.utils.random_ascii(8)
cont = cf.create_container(cont_name)
# pyrax has a utility for creating temporary local directories that clean
# themselves up.
with utils.SelfDeletingTempDirectory() as tmpfolder:
# Create a bunch of files
for idx in six.moves.range(13):
fname = "file_%s" % idx
pth = os.path.join(tmpfolder, fname)
with open(pth, "w") as tmp:
tmp.write("This is some text")
# Create a subfolder. It will be deleted automatically as part of
# the cleanup of SelfDeletingTempDirectory.
subfolder_path = os.path.join(tmpfolder, "subfolder")
os.mkdir(subfolder_path)
# Create some files in the subfolder, too.
for idx in six.moves.range(7):
fname = "subfile_%s" % idx
pth = os.path.join(subfolder_path, fname)
with open(pth, "w") as tmp:
tmp.write("This is some text. " * 100)
# OK, we've created our local file system. Now upload it to a container
# named 'upfolder'. We'll have it skip all files ending in the digits
# '2', '6' or '0'.
ignore = ["*2", "*6", "*0"]
print("Beginning Folder Uplaod")
upload_key, total_bytes = cf.upload_folder(tmpfolder, cont, ignore=ignore)
# Since upload_folder happens in the background, we need to stay in this
# block until the upload is complete, or the SelfDeletingTempDirectory
# will be deleted, and the upload won't find the files it needs.
print("Total bytes to upload:", total_bytes)
uploaded = 0
while uploaded < total_bytes:
uploaded = cf.get_uploaded(upload_key)
print("Progress: %4.2f%%" % ((uploaded * 100.0) / total_bytes))
time.sleep(1)
# OK, the upload is complete. Let's verify what's in 'upfolder'.
folder_name = os.path.basename(tmpfolder)
print()
print("Temp folder name:", folder_name)
nms = cf.get_container_object_names(cont, prefix=folder_name)
print("Number of files in container:", len(nms))
print("\n".join(nms)
)
# Clean up
cont.delete(True)
| [
"ed@leafe.com"
] | ed@leafe.com |
6ff0461eae18fe69d2524f13d66c50f01d6e1ef6 | ed8016a5e340f95938ece4b3a8b1b4c9020931a7 | /backend/flask/src/database.py | 7c699c785ffc5bc15c381bf8d92fb3c5d277adee | [] | no_license | raphaelricardo10/Aquarela | df94633abd8686db0a65df53f1587978d01063cb | 22c6c9f8ba9afd4d09ad828c7d7c069152974543 | refs/heads/master | 2023-08-18T22:19:16.111932 | 2021-10-15T13:26:04 | 2021-10-15T13:26:04 | 416,375,323 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,178 | py | import os
from pymongo import MongoClient
class Database:
def __init__(self, dbName=None) -> None:
self.connection = None
#connect to the database
if(dbName):
try:
self.connect(dbName)
except:
raise
def connect(self, dbName: str) -> None:
#Uri of the MongoDB cloud server
uri = "mongodb+srv://cluster0.uargx.mongodb.net/myFirstDatabase?authSource=%24external&authMechanism=MONGODB-X509&retryWrites=true&w=majority"
#Path of the certificate file to connect to the server
certFile = os.path.abspath(__file__ + '/../../../') + '/X509-cert-6036829648392384800.pem'
#Try to connect or raise a error
try:
client = MongoClient(uri,
tls=True,
tlsCertificateKeyFile=certFile)
except:
raise ConnectionError
#Check if the database exists
if dbName not in client.list_database_names():
raise ValueError(f"ERROR: Database {dbName} not found!!!")
#Stores the connection object
self.connection = client[dbName] | [
"raphaelricardo10@gmail.com"
] | raphaelricardo10@gmail.com |
5643cb1f4798bea02668b9f37e0543d81ba346e6 | 8d13eabb7b576f04db5f9771286eb0affa0fbec1 | /google/cloud/texttospeech_v1/types/cloud_tts.py | 0af98da4f8b9c62cc08da1523772cc26c7415e11 | [
"Apache-2.0"
] | permissive | renovate-bot/python-texttospeech | e1204ece5affc8ef544f48c6f0e53dd2ad10a3bc | 4fa1bbd55546d4a03acff8234cd04597ce07deeb | refs/heads/master | 2023-08-10T03:01:43.177205 | 2021-08-30T16:10:22 | 2021-08-30T16:10:22 | 238,286,105 | 0 | 0 | Apache-2.0 | 2020-02-04T19:18:37 | 2020-02-04T19:18:36 | null | UTF-8 | Python | false | false | 10,917 | py | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import proto # type: ignore
__protobuf__ = proto.module(
package="google.cloud.texttospeech.v1",
manifest={
"SsmlVoiceGender",
"AudioEncoding",
"ListVoicesRequest",
"ListVoicesResponse",
"Voice",
"SynthesizeSpeechRequest",
"SynthesisInput",
"VoiceSelectionParams",
"AudioConfig",
"SynthesizeSpeechResponse",
},
)
class SsmlVoiceGender(proto.Enum):
r"""Gender of the voice as described in `SSML voice
element <https://www.w3.org/TR/speech-synthesis11/#edef_voice>`__.
"""
SSML_VOICE_GENDER_UNSPECIFIED = 0
MALE = 1
FEMALE = 2
NEUTRAL = 3
class AudioEncoding(proto.Enum):
r"""Configuration to set up audio encoder. The encoding
determines the output audio format that we'd like.
"""
AUDIO_ENCODING_UNSPECIFIED = 0
LINEAR16 = 1
MP3 = 2
OGG_OPUS = 3
class ListVoicesRequest(proto.Message):
r"""The top-level message sent by the client for the ``ListVoices``
method.
Attributes:
language_code (str):
Optional. Recommended.
`BCP-47 <https://www.rfc-editor.org/rfc/bcp/bcp47.txt>`__
language tag. If specified, the ListVoices call will only
return voices that can be used to synthesize this
language_code. E.g. when specifying "en-NZ", you will get
supported "en-\*" voices; when specifying "no", you will get
supported "no-\*" (Norwegian) and "nb-\*" (Norwegian Bokmal)
voices; specifying "zh" will also get supported "cmn-\*"
voices; specifying "zh-hk" will also get supported "yue-\*"
voices.
"""
language_code = proto.Field(proto.STRING, number=1,)
class ListVoicesResponse(proto.Message):
r"""The message returned to the client by the ``ListVoices`` method.
Attributes:
voices (Sequence[google.cloud.texttospeech_v1.types.Voice]):
The list of voices.
"""
voices = proto.RepeatedField(proto.MESSAGE, number=1, message="Voice",)
class Voice(proto.Message):
r"""Description of a voice supported by the TTS service.
Attributes:
language_codes (Sequence[str]):
The languages that this voice supports, expressed as
`BCP-47 <https://www.rfc-editor.org/rfc/bcp/bcp47.txt>`__
language tags (e.g. "en-US", "es-419", "cmn-tw").
name (str):
The name of this voice. Each distinct voice
has a unique name.
ssml_gender (google.cloud.texttospeech_v1.types.SsmlVoiceGender):
The gender of this voice.
natural_sample_rate_hertz (int):
The natural sample rate (in hertz) for this
voice.
"""
language_codes = proto.RepeatedField(proto.STRING, number=1,)
name = proto.Field(proto.STRING, number=2,)
ssml_gender = proto.Field(proto.ENUM, number=3, enum="SsmlVoiceGender",)
natural_sample_rate_hertz = proto.Field(proto.INT32, number=4,)
class SynthesizeSpeechRequest(proto.Message):
r"""The top-level message sent by the client for the
``SynthesizeSpeech`` method.
Attributes:
input (google.cloud.texttospeech_v1.types.SynthesisInput):
Required. The Synthesizer requires either
plain text or SSML as input.
voice (google.cloud.texttospeech_v1.types.VoiceSelectionParams):
Required. The desired voice of the
synthesized audio.
audio_config (google.cloud.texttospeech_v1.types.AudioConfig):
Required. The configuration of the
synthesized audio.
"""
input = proto.Field(proto.MESSAGE, number=1, message="SynthesisInput",)
voice = proto.Field(proto.MESSAGE, number=2, message="VoiceSelectionParams",)
audio_config = proto.Field(proto.MESSAGE, number=3, message="AudioConfig",)
class SynthesisInput(proto.Message):
    r"""Contains text input to be synthesized. Either ``text`` or ``ssml``
    must be supplied. Supplying both or neither returns
    [google.rpc.Code.INVALID_ARGUMENT][]. The input size is limited to
    5000 characters.
    Attributes:
        text (str):
            The raw text to be synthesized.
        ssml (str):
            The SSML document to be synthesized. The SSML document must
            be valid and well-formed. Otherwise the RPC will fail and
            return [google.rpc.Code.INVALID_ARGUMENT][]. For more
            information, see
            `SSML <https://cloud.google.com/text-to-speech/docs/ssml>`__.
    """
    # ``text`` and ``ssml`` share the oneof group "input_source": setting one
    # clears the other, which enforces the either/or contract stated above.
    text = proto.Field(proto.STRING, number=1, oneof="input_source",)
    ssml = proto.Field(proto.STRING, number=2, oneof="input_source",)
class VoiceSelectionParams(proto.Message):
    r"""Description of which voice to use for a synthesis request.
    Attributes:
        language_code (str):
            Required. The language (and potentially also the region) of
            the voice expressed as a
            `BCP-47 <https://www.rfc-editor.org/rfc/bcp/bcp47.txt>`__
            language tag, e.g. "en-US". This should not include a script
            tag (e.g. use "cmn-cn" rather than "cmn-Hant-cn"), because
            the script will be inferred from the input provided in the
            SynthesisInput. The TTS service will use this parameter to
            help choose an appropriate voice. Note that the TTS service
            may choose a voice with a slightly different language code
            than the one selected; it may substitute a different region
            (e.g. using en-US rather than en-CA if there isn't a
            Canadian voice available), or even a different language,
            e.g. using "nb" (Norwegian Bokmal) instead of "no"
            (Norwegian)".
        name (str):
            The name of the voice. If not set, the service will choose a
            voice based on the other parameters such as language_code
            and gender.
        ssml_gender (google.cloud.texttospeech_v1.types.SsmlVoiceGender):
            The preferred gender of the voice. If not set, the service
            will choose a voice based on the other parameters such as
            language_code and name. Note that this is only a preference,
            not requirement; if a voice of the appropriate gender is not
            available, the synthesizer should substitute a voice with a
            different gender rather than failing the request.
    """
    # Only ``language_code`` is required; ``name`` and ``ssml_gender`` are
    # hints the service may override (see Attributes above).
    language_code = proto.Field(proto.STRING, number=1,)
    name = proto.Field(proto.STRING, number=2,)
    ssml_gender = proto.Field(proto.ENUM, number=3, enum="SsmlVoiceGender",)
class AudioConfig(proto.Message):
    r"""Description of audio data to be synthesized.
    Attributes:
        audio_encoding (google.cloud.texttospeech_v1.types.AudioEncoding):
            Required. The format of the audio byte
            stream.
        speaking_rate (float):
            Optional. Input only. Speaking rate/speed, in the range
            [0.25, 4.0]. 1.0 is the normal native speed supported by the
            specific voice. 2.0 is twice as fast, and 0.5 is half as
            fast. If unset(0.0), defaults to the native 1.0 speed. Any
            other values < 0.25 or > 4.0 will return an error.
        pitch (float):
            Optional. Input only. Speaking pitch, in the range [-20.0,
            20.0]. 20 means increase 20 semitones from the original
            pitch. -20 means decrease 20 semitones from the original
            pitch.
        volume_gain_db (float):
            Optional. Input only. Volume gain (in dB) of the normal
            native volume supported by the specific voice, in the range
            [-96.0, 16.0]. If unset, or set to a value of 0.0 (dB), will
            play at normal native signal amplitude. A value of -6.0 (dB)
            will play at approximately half the amplitude of the normal
            native signal amplitude. A value of +6.0 (dB) will play at
            approximately twice the amplitude of the normal native
            signal amplitude. Strongly recommend not to exceed +10 (dB)
            as there's usually no effective increase in loudness for any
            value greater than that.
        sample_rate_hertz (int):
            Optional. The synthesis sample rate (in hertz) for this
            audio. When this is specified in SynthesizeSpeechRequest, if
            this is different from the voice's natural sample rate, then
            the synthesizer will honor this request by converting to the
            desired sample rate (which might result in worse audio
            quality), unless the specified sample rate is not supported
            for the encoding chosen, in which case it will fail the
            request and return [google.rpc.Code.INVALID_ARGUMENT][].
        effects_profile_id (Sequence[str]):
            Optional. Input only. An identifier which selects 'audio
            effects' profiles that are applied on (post synthesized)
            text to speech. Effects are applied on top of each other in
            the order they are given. See `audio
            profiles <https://cloud.google.com/text-to-speech/docs/audio-profiles>`__
            for current supported profile ids.
    """
    # Field tags 1-6 are wire-format identifiers — do not renumber.  The
    # numeric ranges/defaults above are enforced server-side, not here.
    audio_encoding = proto.Field(proto.ENUM, number=1, enum="AudioEncoding",)
    speaking_rate = proto.Field(proto.DOUBLE, number=2,)
    pitch = proto.Field(proto.DOUBLE, number=3,)
    volume_gain_db = proto.Field(proto.DOUBLE, number=4,)
    sample_rate_hertz = proto.Field(proto.INT32, number=5,)
    effects_profile_id = proto.RepeatedField(proto.STRING, number=6,)
class SynthesizeSpeechResponse(proto.Message):
    r"""The message returned to the client by the ``SynthesizeSpeech``
    method.
    Attributes:
        audio_content (bytes):
            The audio data bytes encoded as specified in the request,
            including the header for encodings that are wrapped in
            containers (e.g. MP3, OGG_OPUS). For LINEAR16 audio, we
            include the WAV header. Note: as with all bytes fields,
            protobuffers use a pure binary representation, whereas JSON
            representations use base64.
    """
    # Raw (not base64) bytes when accessed through this Python message.
    audio_content = proto.Field(proto.BYTES, number=1,)
# Export exactly the names registered in the proto-plus manifest, sorted
# so the public API surface is deterministic.
__all__ = tuple(sorted(__protobuf__.manifest))
| [
"noreply@github.com"
] | renovate-bot.noreply@github.com |
e416d2c524976906871128237eb5a4345a95ae18 | cf64fc19dfc6f164779110ef26dad4a40756ba20 | /api_test_doc.py | 52f96fe3d1a292309447128dfd215b698a91294d | [] | no_license | davidsetiyadi/python_gleematic | b0f121edcd43106b9ac4c2a80cfc824f2906810c | d7460add1fd25b666ca890b4d27252e70782a0ff | refs/heads/main | 2023-07-18T17:46:11.757426 | 2021-09-20T09:48:16 | 2021-09-20T09:48:16 | 405,012,546 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,406 | py | import json
import requests
import sys
#import urllib2
import urllib.request as urllib2
# Define the remote file to retrieve
# Ad-hoc demo script: fetches CRM data from a local Odoo instance and
# downloads an attachment image.  NOTE(review): credentials/token are
# hard-coded below — move them to environment variables before sharing.
AUTH_URL = 'http://localhost:8069/api/get/crm_data/'
headers = {'Content-type': 'application/json'}  # NOTE(review): defined but never passed to requests.get
# Remember to configure default db on odoo configuration file(dbfilter = ^db_name$)
# Authentication credentials
# NOTE(review): ``data`` is never sent; only ``params`` below is used.
# Also the db names differ ('14_AW_' here vs '14_AW' below) — presumably
# one of them is a typo; verify against the Odoo instance.
data = {
    'params': {
        'username': 'sgeede',
        'token': 'j43kFzt52SKEQ5cXDtPPpRhQXnwm24',
        'db': '14_AW_'
    }
}
params = {'username':'sgeede','token':'j43kFzt52SKEQ5cXDtPPpRhQXnwm24','db':'14_AW'}
# Query the custom CRM endpoint with the credentials as query-string params.
res = requests.get(
    AUTH_URL,
    params=params
)
#filedata = urllib2.urlopen('http://localhost:8069/web/content/ir.attachment/366/datas/sesudah.png')
#datatowrite = filedata.read()
#with open('Documents/AWODESTEST.jpeg', 'wb') as f:
#    f.write(datatowrite)
# Download attachment #574 and save it in the current directory.
url = 'http://localhost:8069/web/content/ir.attachment/574/datas/AWODESTEST.jpeg'
r = requests.get(url, allow_redirects=True)
# NOTE(review): the file handle is never closed explicitly; prefer a
# ``with open(...)`` block.
open('AWODESTEST.jpeg', 'wb').write(r.content)
print ('test',res)
#print (res.json()['list_data'])
#print (res.json()['list_data'][0]['passport_name'][0])
#import urllib2
def download_web_image(url):
    """Fetch *url* over HTTP and save the response body to ./test.jpg."""
    payload = urllib2.urlopen(urllib2.Request(url)).read()
    with open('test.jpg', 'wb') as out:
        out.write(payload)
# Demo invocation against a LAN Odoo host; requires network access to
# 192.168.0.179 and overwrites ./test.jpg.
download_web_image("http://192.168.0.179:8069/web/content/ir.attachment/574/datas/AWODESTEST.jpeg")
| [
"davidsetiadi11@gmail.com"
] | davidsetiadi11@gmail.com |
1369d20fa638de009a1b414de7acb195b10c6186 | 5039e4e30dbb477b6f1c7bf78cf7e721d4a31030 | /gulishop/apps/trade/migrations/0009_auto_20190124_2123.py | 0eb0843ecc5d80126dd7e17a94d0eadb873f975c | [] | no_license | LiangPier-haha/django | f06ae1abb7e0bb9828ca8148da5d9151a83caf58 | efc912155fcedf05beb640eb382876d80eab1aa0 | refs/heads/master | 2020-04-23T00:32:43.197429 | 2019-03-24T03:42:50 | 2019-03-24T03:42:50 | 170,783,292 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 693 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.6 on 2019-01-24 21:23
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated Django 1.11 migration (2019-01-24).

    Redefines the ``pay_status`` choices on ``orderinfo``.  The choice
    labels are user-facing Chinese strings (roughly: awaiting payment,
    payment succeeded, payment closed, payment failed, trade finished)
    and are runtime data — do not edit them casually.
    """
    dependencies = [
        ('trade', '0008_auto_20190122_1508'),
    ]
    operations = [
        migrations.AlterField(
            model_name='orderinfo',
            name='pay_status',
            field=models.CharField(choices=[('PAYING', '待支付'), ('TRADE_SUCCESS', '支付成功'), ('TRADE_CLOSE', '支付关闭'), ('TRADE_FAIL', '支付失败'), ('TRADE_FINSHED', '交易结束')], default='PAYING', max_length=20, verbose_name='订单状态'),
        ),
    ]
| [
"17853246116@163.com"
] | 17853246116@163.com |
01215d337f6b19b43e1e1e7f3100e27245a91a77 | 1c45f82d87aceab5c994c3ab00ef3f9ca7cdad07 | /picture1.py | 084551d7eb801448aaeabbde283b1e1394592785 | [] | no_license | kuninagakura/pyStdDraw | 212517eb4d663a9ef3e584682f260149a4e4998b | 8b28ab551ade9152f14c7140d7c42114391ff48c | refs/heads/master | 2021-01-01T19:52:13.285088 | 2014-07-23T15:33:22 | 2014-07-23T15:33:22 | 22,156,382 | 2 | 4 | null | null | null | null | UTF-8 | Python | false | false | 6,238 | py |
"""
picture.py
The picture module defines the Picture class and the wait() function.
"""
#-----------------------------------------------------------------------
import sys
import pygame
import color
import tkintercommon
import stdarray
#-----------------------------------------------------------------------
_DEFAULT_SIZE = 512  # default Picture width/height in pixels when no size is given
#-----------------------------------------------------------------------
def wait():
    """
    Wait for the user to close this application's primary window.
    """
    # Delegates to the shared tkintercommon helper module.
    tkintercommon.wait()
#-----------------------------------------------------------------------
class Picture:
    """
    A Picture object models an image. It is initialized such that
    it has a given width and height and contains all black pixels.
    Subsequently you can load an image from a given GIF or JPG file.
    """
    # NOTE(review): this class looks like an unfinished Tkinter -> pygame
    # port.  __init__ mixes both toolkits, load() does not even parse
    # (dangling ``else``), and show() references ``Tkinter``/``tkFileDialog``
    # which are never imported in this file.  See the inline notes below.
    #Fields:
    # _photoImage:
    #   The image to be displayed.
    # _buffer:
    #   An array of colors. For efficiency, get() and put() use
    #   _buffer instead of _photoImage. show() copies _buffer to
    #   _photoImage when the two differ.
    # _isDirty:
    #   Set to True iff _buffer differs from _photoImage.
    # _toplevel:
    #   The Tkinter Toplevel object in which this Picture
    #   will be displayed.
    # _label:
    #   The Tkinter Label object in which this Picture will
    #   be displayed.
    _count = 0
    def __init__(self, maxW=_DEFAULT_SIZE, maxH=_DEFAULT_SIZE):
        """
        Construct 'self' such that it is all black with width 'maxW'
        and height 'maxH'.
        """
        # NOTE(review): ``_count`` is read as a bare name — NameError at
        # runtime; presumably ``Picture._count`` was intended.  ``w`` and
        # ``h`` are also undefined (the parameters are maxW/maxH), as are
        # ``pygameColor`` and ``BLACK``; ``_background`` is only a local
        # here although load() reads it as a module-level name.
        if _count == 0:
            _background = pygame.display.set_mode([w, h + 50])
            self._photoImage = pygame.Surface((w, h))
            self._photoImage.fill(pygameColor(BLACK))
        else:
            self._photoImage = pygame.Surface((w, h))
        # NOTE(review): unbound local — probably meant Picture._count += 1.
        count += 1
        # NOTE(review): the pygame surface created above is immediately
        # discarded by the Tkinter-era initialization below.
        self._photoImage = None
        self._buffer = None
        self._isDirty = None
        self._toplevel = None
        self._label = None
        self._photoImage = \
            Tkinter.PhotoImage(width=maxW, height=maxH)
        self._buffer = \
            stdarray.create2D(maxW, maxH, str(color.BLACK))
        self._isDirty = True
    def load(self, f):
        """
        Load an image from the file whose name is 'f'. 'f' can be a
        JPG file or a PNG file.
        """
        self._photoImage = pygame.image.load(f)
        _background.blit(self._photoImage, (0,0))
        # NOTE(review): the ``else`` below has no matching ``if`` — this is
        # a SyntaxError, so the module cannot even be imported.  The file
        # type check it belonged to appears to have been deleted mid-edit.
        else:
            raise Exception('Unrecognized file type')
    #-------------------------------------------------------------------
    def width(self):
        """
        Return the width of 'self'.
        """
        return len(self._buffer)
    #-------------------------------------------------------------------
    def height(self):
        """
        Return the height of 'self'.
        """
        return len(self._buffer[0])
    #-------------------------------------------------------------------
    def get(self, w, h):
        """
        Return the color of 'self' at location ['w', 'h']
        """
        # _buffer stores colors as '#rrggbb' hex strings (see set()).
        colorStr = self._buffer[w][h]
        r = int(colorStr[1:3], 16) # Red component
        g = int(colorStr[3:5], 16) # Green component
        b = int(colorStr[5:], 16) # Blue component
        return color.Color(r, g, b)
    #-------------------------------------------------------------------
    def set(self, w, h, color):
        """
        Set the color of 'self' at location ['w', 'h'] to 'color'.
        """
        # NOTE(review): the parameter ``color`` shadows the imported
        # ``color`` module used elsewhere in this class.
        self._buffer[w][h] = str(color)
        self._isDirty = True
    #-------------------------------------------------------------------
    def show(self):
        """
        Render 'self' to its window.
        """
        try:
            # If the buffer has been changed since the last call of
            # show(), then copy the buffer to the image.
            if self._isDirty:
                # Create a string that expresses the colors in the odd
                # format that image.put() requires.
                maxH = self.height()
                maxW = self.width()
                colorStr = ''
                for h in range(maxH):
                    colorStr += '{'
                    for w in range(maxW):
                        colorStr += self._buffer[w][h] + ' '
                    colorStr += '} '
                # Copy the string to the image.
                self._photoImage.put(colorStr, (0, 0))
                self._isDirty = False
            # Each Picture is rendered via a distinct Toplevel object.
            # The first Picture that is rendered uses the root Toplevel
            # object, if it's available. Each subsequently rendered
            # Picture uses a secondary TopLevel object.
            if self._toplevel == None:
                if not tkintercommon.rootUsed:
                    self._toplevel = tkintercommon.root
                    tkintercommon.rootUsed = True
                else:
                    self._toplevel = Tkinter.Toplevel()
                self._toplevel.title('Picture Window')
                self._label = Tkinter.Label(self._toplevel)
                self._label.pack()
                # Define a listener for the 'Save' menu item.
                def saveListener():
                    FILE_TYPES = [('PPM File','*.ppm'),
                                  ('Any File','*')]
                    f = tkFileDialog.asksaveasfilename(
                        defaultextension='.ppm',
                        filetypes=FILE_TYPES)
                    self._photoImage.write(f, 'ppm')
                # Create a menu containing a 'Save' menu item.
                self._saveMenu = Tkinter.Menu()
                self._saveMenu.add_command(label='Save...',
                                           command=saveListener)
                self._saveMenu['tearoff'] = 0
                self._menubar = Tkinter.Menu()
                self._menubar.add_cascade(label='File',
                                          menu=self._saveMenu)
                self._toplevel['menu'] = self._menubar
            self._label.config(image=self._photoImage)
            self._toplevel.update()
        except Tkinter.TclError:
            sys.exit(0)
| [
"kuninagakura@gmail.com"
] | kuninagakura@gmail.com |
d171299b7d978fef9fc02f0469bcf91564448ef9 | af541e5ee0184c23d2f9f97c6a4fd709518eca9b | /libs/mmcv/mmcv/runner/hooks/lr_updater.py | eb099d95249bf49c9e9d25653656830ade4db6d2 | [
"Apache-2.0"
] | permissive | amitrai12018/ProposeReduce | 3f436feadc1fe10d73a46df8be375d20f80c2c83 | 27e85695d568a790f822f2440461b4c8338a21a3 | refs/heads/main | 2023-08-11T02:48:31.993073 | 2021-10-09T18:40:05 | 2021-10-09T18:40:05 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,273 | py | from __future__ import division
from math import cos, pi
from .hook import Hook
class LrUpdaterHook(Hook):
def __init__(self,
by_epoch=True,
warmup=None,
warmup_iters=0,
warmup_ratio=0.1,
**kwargs):
# validate the "warmup" argument
if warmup is not None:
if warmup not in ['constant', 'linear', 'exp']:
raise ValueError(
'"{}" is not a supported type for warming up, valid types'
' are "constant" and "linear"'.format(warmup))
if warmup is not None:
assert warmup_iters > 0, \
'"warmup_iters" must be a positive integer'
assert 0 < warmup_ratio <= 1.0, \
'"warmup_ratio" must be in range (0,1]'
self.by_epoch = by_epoch
self.warmup = warmup
self.warmup_iters = warmup_iters
self.warmup_ratio = warmup_ratio
self.base_lr = [] # initial lr for all param groups
self.regular_lr = [] # expected lr if no warming up is performed
def _set_lr(self, runner, lr_groups):
for param_group, lr in zip(runner.optimizer.param_groups, lr_groups):
param_group['lr'] = lr | [
"huaijialin@gmail.com"
] | huaijialin@gmail.com |
29d00d8de1abd0781d1257031bac5ecf95bb4647 | 66743c165ddc59914d665df915565309798b6ab2 | /content/ru/post/org-roam-links | d11a876de059052cc56590c57a63948293fcf4d9 | [
"CC-BY-4.0"
] | permissive | yamadharma/blog | 178731665643d964d42526c6d1eee0b313718986 | 878c448bb6633e7c47993f3fb38bb4030ca52b43 | refs/heads/master | 2023-08-16T21:50:52.895976 | 2021-04-10T12:46:15 | 2021-04-10T12:46:15 | 250,521,920 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 2,474 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
import sys
import re
import os
orgroamdir = "~/me/org/notes/"
def find_md_links(md):
    """Collect the links contained in a Markdown string.

    Returns a dict with two keys:
      'regular'   -- list of (text, url) tuples for inline links of the
                     form ``[foo](some.url)``
      'footnotes' -- list of (footnote-number, url) tuples built from
                     reference links like ``[foo][3]`` combined with
                     their definitions ``[3]: some.url``
    """
    # https://stackoverflow.com/a/30738268/2755116
    inline_re = re.compile(r'\[([^\]]+)\]\(([^)]+)\)')
    footnote_text_re = re.compile(r'\[([^\]]+)\]\[(\d+)\]')
    footnote_url_re = re.compile(r'\[(\d+)\]:\s+(\S+)')

    inline_links = list(inline_re.findall(md))
    text_to_number = dict(footnote_text_re.findall(md))
    number_to_url = dict(footnote_url_re.findall(md))

    # Pair each referenced footnote number with its defined URL,
    # preserving the order the reference links appear in.
    footnote_pairs = [(number, number_to_url[number])
                      for number in text_to_number.values()]

    return {'regular': inline_links, 'footnotes': footnote_pairs}
def replace_md_links(md, f):
    """Return *md* with every link URL ``u`` replaced by ``f(u)``."""
    links = find_md_links(md)
    updated = md
    for _text, url in links['regular']:
        updated = updated.replace(url, f(url))
    for _number, url in links['footnotes']:
        updated = updated.replace(url, f(url))
    return updated
filename = sys.argv[1]
print(filename)
fin = open(filename, "rt")
filetext = fin.read()
export_file_name_re = re.compile(r'#\+EXPORT_FILE_NAME:\s+\S+')
relref_re = re.compile(r"{{< relref \"(.+)\" >}}")
links = find_md_links(filetext)
for mdlink in links['regular']:
fulllink = mdlink[1]
isrelref = relref_re.match(fulllink)
if isrelref:
linkpath = isrelref.group(1)
else:
# drop extention
linkpath = os.path.splitext(fulllink)[0]
# add extention
md2org = linkpath + '.org'
orgfile = os.path.expanduser(orgroamdir + md2org)
if os.path.exists(orgfile):
orgfiletext = open(orgfile).read()
export_file_name_text = export_file_name_re.findall(orgfiletext)
export_file_name_link = export_file_name_text[0].split()
hugolink = "{{< relref \"" + export_file_name_link[1] + "\" >}}"
if isrelref:
filetext = filetext.replace("{{< relref \"" + linkpath + "\" >}}",hugolink)
else:
filetext = filetext.replace(linkpath + '.md',hugolink)
fin.close()
fin = open(filename, "wt")
fin.write(filetext)
fin.close()
| [
"yamadharma@gmail.com"
] | yamadharma@gmail.com | |
fbc007b5fe980e3a1bf92e93476df93d37f18f66 | 4370df21b4382c96f46d7f08e69bfba415b704e6 | /quicksort.py | 0d820790cf22c8902cea50fcc81a818b36031017 | [] | no_license | developerdamion/YoutubeDeveloperDamion-video-05 | d2c0545b1b53ca8faa2a58b21f4a408f23ad49bb | bd05ffbbf59470a78387975c1486022ad010815e | refs/heads/master | 2022-04-20T20:56:46.644866 | 2020-04-15T12:26:49 | 2020-04-15T12:26:49 | 255,908,542 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 753 | py | """
Hey! Thank you for supporting my youtube channel
( https://www.youtube.com/channel/UCKrs-v_MlKjzVbe4jyLSrPQ ).
This is video-05: quickSort Algorithm
( https://youtu.be/7QvC3iC0Smg )
DeveloperDamion
"""
def quickSort(unsortedArray=None):
    """Return a new sorted list containing the items of *unsortedArray*.

    Recursive quicksort using the middle element as pivot; the input
    list is never modified.  Calling with no argument returns [].

    Note: the original signature used a mutable default (``= []``);
    a ``None`` sentinel avoids that pitfall while keeping the same
    observable behavior for every caller.
    """
    if unsortedArray is None:
        unsortedArray = []
    # Lists of length 0 or 1 are already sorted.
    if len(unsortedArray) <= 1:
        return unsortedArray
    pivotPoint = unsortedArray[len(unsortedArray) // 2]
    # Partition into three lists so duplicate pivots are handled once.
    smallerArray = [item for item in unsortedArray if item < pivotPoint]
    equalArray = [item for item in unsortedArray if item == pivotPoint]
    largerArray = [item for item in unsortedArray if item > pivotPoint]
    return quickSort(smallerArray) + equalArray + quickSort(largerArray)
if __name__ == "__main__":
    # Demo: sorts a small sample list; prints [4, 5, 7, 8, 10].
    unsortedArray = [4,10,8,7,5]
    print( quickSort(unsortedArray) )
"developerdamion@gmail.com"
] | developerdamion@gmail.com |
e60dd29b9b6aedd26d515eea6b46d052e940a7fd | 45864f0ba289eec8eaf0d6d7a98b97b0be77bc31 | /functions/structures.py | 21056fe78bfe5fe6c84ac342313d3a2cb3de7b87 | [
"MIT"
] | permissive | emptyewer/DEEPN | 45822896c227ec3ac5f76f4072fe8921d0e175c6 | 0c54c714af3e08ce3626e9fd5d795d7d5f28aeca | refs/heads/master | 2021-08-27T18:02:48.040169 | 2018-02-22T22:04:08 | 2018-02-22T22:04:08 | 50,394,130 | 5 | 4 | null | 2017-04-27T19:30:48 | 2016-01-26T01:33:57 | OpenEdge ABL | UTF-8 | Python | false | false | 627 | py | class jcnt():
def __init__(self):
self.position = 0
self.query_start = 0
self.frame = ''
self.ppm = 0.0
self.orf = ''
self.frame_orf = False
self.count = 1
def __repr__(self):
string = "<Junction pos:%d, " \
"q.start:%d, ppm:%.3f, " \
"frame:%s, orf:%s, " \
"count:%d>" % (self.position, self.query_start, self.ppm, self.frame, self.orf, self.count)
return string
def __ne__(self, other):
return not self.__eq__(other)
def __hash__(self):
return hash(self.__repr__())
| [
"venky.krishna@icloud.com"
] | venky.krishna@icloud.com |
134676ae2125b1c4492a50440457da257072a394 | 1886a661f15d307171871c829373ea6ff27f29b4 | /TCPUDP_Programs/TCPClient.py | 7c25e898cea830ed96200427cdd2631249123cd8 | [] | no_license | Wyyyyylla/COMP9331 | e4fc2ccfb9eaa1a3d98e0c94b85d0d81c132e3f9 | dbcca8e54a47b7b9cbf320b69e632e4a9c0200a4 | refs/heads/master | 2020-08-21T14:29:53.791001 | 2019-08-27T10:26:47 | 2019-08-27T10:26:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 460 | py |
from socket import *
import sys
# Python 2 TCP echo-style client (uses ``raw_input`` and the ``print``
# statement): connects to the server named on the command line, sends one
# lowercase sentence, and prints the server's reply.
total = len(sys.argv)       # NOTE(review): unused
cmdargs = str(sys.argv)     # NOTE(review): unused
serverName = str(sys.argv[1]);
serverPort = 12000 #change this port number if required
# SOCK_STREAM = TCP; connect() performs the three-way handshake.
clientSocket = socket(AF_INET, SOCK_STREAM)
clientSocket.connect((serverName, serverPort))
sentence = raw_input('Input lowercase sentence:')
clientSocket.send(sentence)
# Blocking read of up to 1024 bytes from the server.
modifiedSentence = clientSocket.recv(1024)
print 'From Server:', modifiedSentence
clientSocket.close()
"procj0926@gmail.com"
] | procj0926@gmail.com |
75a180e8ee497cf230ca9c28a1d129750546b52e | 6662ab1ea0e55131092bc56d5d875673de47c5d3 | /rcc/extended/variables/variables.py | ca0fb586fa0596f3ab37282994ad153d9eef511c | [] | no_license | FormulatedAutomation/StupidRobotTricks | 6fae7397476b2e7db2fda46e43431c171f914452 | e9241c52957387276363f8e05915e23358280b6f | refs/heads/main | 2023-02-28T19:40:47.980540 | 2021-02-11T19:20:14 | 2021-02-11T19:20:14 | 313,713,010 | 7 | 1 | null | null | null | null | UTF-8 | Python | false | false | 150 | py | '''
Variables for Robot Framework goes here.
'''
import calendar
from datetime import date
WEEK_DAY_NAME = calendar.day_name[date.today().weekday()]
| [
"m@mdp.im"
] | m@mdp.im |
f00b1ff8e1c1c9993dfc105355125a6035668d80 | 2f6c8ac012336f5ef4a3d111ebc5faae8c198488 | /ATL-RPi-FERNANDO/pi/run-fan.py | a17bb9c20e1c7290ef3eebadf0eb0c796910127a | [] | no_license | olivoalex/PCB-s-Board-Program | 68d65890acc371da754fc9038f6570c1306f1d73 | 8d1fa26d4894daa388fe3460b2cacbbc1804bdc5 | refs/heads/master | 2021-01-11T10:26:48.086957 | 2018-10-31T15:23:17 | 2018-10-31T15:23:17 | 78,902,728 | 2 | 2 | null | null | null | null | UTF-8 | Python | false | false | 1,369 | py | #https://hackernoon.com/how-to-control-a-fan-to-cool-the-cpu-of-your-raspberrypi-3313b6e7f92c#.j6oruzhz7
#python run-fan.py
#!/usr/bin/env python3
# Author: Edoardo Paolo Scalafiotti <edoardo849@gmail.com>
import os
from time import sleep
import signal
import sys
import RPi.GPIO as GPIO
pin = 18 # The pin ID, edit here to change it
maxTMP = 40
# The maximum temperature in Celsius after which we trigger the fan
def setup():
    """Configure the fan control pin as a GPIO output (BCM numbering)."""
    GPIO.setmode(GPIO.BCM)
    GPIO.setup(pin, GPIO.OUT)
    GPIO.setwarnings(False)
    return()  # note: returns an empty tuple, not None
def getCPUtemperature():
    """Return the SoC temperature as a bare numeric string (e.g. "47.2").

    Shells out to the Raspberry Pi ``vcgencmd measure_temp`` utility,
    whose output looks like ``temp=47.2'C``, and strips the wrapper.
    """
    raw = os.popen('vcgencmd measure_temp').readline()
    return raw.replace("temp=", "").replace("'C\n", "")
def fanON():
    """Switch the fan on (drives the control pin high)."""
    setPin(True)
    return()  # returns an empty tuple
def fanOFF():
    """Switch the fan off (drives the control pin low)."""
    setPin(False)
    return()  # returns an empty tuple
def getTEMP():
    """Read the CPU temperature and switch the fan on above maxTMP."""
    current = float(getCPUtemperature())
    fanON() if current > maxTMP else fanOFF()
    return ()
def setPin(mode):
    """Write the boolean *mode* to the fan control pin."""
    # A little redundant function but useful if you want to add logging
    GPIO.output(pin, mode)
    return()  # returns an empty tuple
# Main loop: poll the CPU temperature forever and toggle the fan.
# Ctrl+C exits cleanly, releasing the GPIO pins.
try:
    setup()
    while True:
        getTEMP()
        sleep(5)
        # Read the temperature every 5 sec, increase or decrease this limit if you want
except KeyboardInterrupt:
    # trap a CTRL+C keyboard interrupt
    GPIO.cleanup()
    # resets all GPIO ports used by this program
| [
"ceviana.s3@gmail.com"
] | ceviana.s3@gmail.com |
bbb93bc3078b9af8f541ab2bce42c8acaf0fc5ff | b0074c01fc402dbdf583c5aaefb8044a9da9af3e | /testSum.py | ca0e0cd63a2e345e408a21941adbb6461c38425d | [
"MIT"
] | permissive | vikrant-pune/flask-celery-example | 84b024209ac8572c5f98a82c68d56471df5dd9d0 | ae833971b14ee927b1700f25a4f3073234fa8d02 | refs/heads/master | 2023-03-03T11:34:51.562827 | 2021-02-18T13:13:47 | 2021-02-18T13:13:47 | 266,789,997 | 0 | 0 | MIT | 2021-02-18T13:13:48 | 2020-05-25T13:44:33 | Python | UTF-8 | Python | false | false | 1,809 | py | """
https://leetcode.com/problems/two-sum/
Given an array of integers nums and an integer target, return indices of the two numbers such that they add up to target.
You may assume that each input would have exactly one solution, and you may not use the same element twice.
You can return the answer in any order.
Example 1:
Input: nums = [2,7,11,15], target = 9
Output: [0,1]
Output: Because nums[0] + nums[1] == 9, we return [0, 1].
Example 2:
Input: nums = [3,2,4], target = 6
Output: [1,2]
Example 3:
Input: nums = [3,3], target = 6
Output: [0,1]
Constraints:
2 <= nums.length <= 103
-109 <= nums[i] <= 109
-109 <= target <= 109
Only one valid answer exists.
"""
from typing import List
import pytest
class Solution:
def twoSum(self, nums: List[int], target: int) -> List[int]:
for n in range(0, len(nums)):
for j in range(n+1, len(nums)):
# index_in = index_out + 1
if nums[n] + nums[j] == target:
return [n, j]
return []
def inc(x):
    """Return *x* incremented by one."""
    result = x + 1
    return result
# pytest test-suite for inc() and Solution.twoSum(); run with ``pytest``.
def test_answer():
    assert inc(4) == 5
# Happy path taken from the problem statement (Example 1).
def test_twoSum():
    s = Solution()
    assert s.twoSum(nums=[2,7,11,15], target= 9) == [0,1]
# A non-iterable ``nums`` must raise TypeError, not return silently.
def test_twoSum2():
    s = Solution()
    with pytest.raises(TypeError):
        s.twoSum(nums=2, target= 9)
def test_twoSum3():
    s = Solution()
    assert s.twoSum(nums=[3, 2, 4], target= 6) == [1, 2]
#[3,3], target = 6
# Output: [0,1]
# Duplicate values: the two equal elements must be distinct indices.
def test_twoSum4():
    s = Solution()
    assert s.twoSum(nums=[3, 3], target= 6) == [0, 1]
# No valid pair -> empty list.
def test_twoSum5():
    s = Solution()
    assert s.twoSum(nums=[3, 3], target= 7) == []
def test_twoSum6():
    s = Solution()
    assert s.twoSum(nums=[3, 2, 3], target= 6) == [0,2]
#
# if __name__ == '__main__':
#     Solution.twoSum([2,7,11,15], target = 9)
"vikrant.pawar@exabeam.com"
] | vikrant.pawar@exabeam.com |
41c5dc41f368fcef70d32e6d6b1027c54846acdc | 6fa57c7d20a6f8052361d08328ff5fca8e0b739c | /Listbox.py | bd6dbc338a9c9cd95f559ebe3c72d5f0c3b632bd | [] | no_license | felipeCaetano/TkinterGalery | 0d787f86a302649244962135cd98cdc84769194e | fff6751e1b33c744550c0dc40fcf13b22471f441 | refs/heads/master | 2020-09-07T17:08:59.967380 | 2019-11-11T02:28:54 | 2019-11-11T02:28:54 | 220,855,428 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 254 | py | """
Creates a basic Tkinter listbox with a few sample entries.
"""
from tkinter import *
# Build the root window and a Listbox widget inside it.
master = Tk()
listbox = Listbox(master)
listbox.pack()
# Seed the listbox with one entry, then append four more at the end.
listbox.insert(END, "a list entry")
for item in ["one", "two", "three", "four"]:
    listbox.insert(END, item)
# Enter the Tk event loop; blocks until the window is closed.
mainloop()
"noreply@github.com"
] | felipeCaetano.noreply@github.com |
f11454ac0b85a80f57b7bc3166ab914104bf1e72 | 0b01cb61a4ae4ae236a354cbfa23064e9057e434 | /alipay/aop/api/request/KoubeiAdvertCommissionChannelCreateRequest.py | 6a92f87eec8209cf0547da7c551c201278a42cb2 | [
"Apache-2.0"
] | permissive | hipacloud/alipay-sdk-python-all | e4aec2869bf1ea6f7c6fb97ac7cc724be44ecd13 | bdbffbc6d5c7a0a3dd9db69c99443f98aecf907d | refs/heads/master | 2022-11-14T11:12:24.441822 | 2020-07-14T03:12:15 | 2020-07-14T03:12:15 | 277,970,730 | 0 | 0 | Apache-2.0 | 2020-07-08T02:33:15 | 2020-07-08T02:33:14 | null | UTF-8 | Python | false | false | 4,009 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from alipay.aop.api.FileItem import FileItem
from alipay.aop.api.constant.ParamConstants import *
from alipay.aop.api.domain.KoubeiAdvertCommissionChannelCreateModel import KoubeiAdvertCommissionChannelCreateModel
class KoubeiAdvertCommissionChannelCreateRequest(object):
    """Request wrapper for the Alipay OpenAPI method
    ``koubei.advert.commission.channel.create``.

    Auto-generated SDK boilerplate: holds the business payload
    (``biz_content`` / ``biz_model``) plus the common gateway
    parameters, and serializes everything via :meth:`get_params`.
    """
    def __init__(self, biz_model=None):
        self._biz_model = biz_model
        self._biz_content = None
        self._version = "1.0"
        self._terminal_type = None
        self._terminal_info = None
        self._prod_code = None
        self._notify_url = None
        self._return_url = None
        self._udf_params = None
        self._need_encrypt = False
    @property
    def biz_model(self):
        return self._biz_model
    @biz_model.setter
    def biz_model(self, value):
        self._biz_model = value
    @property
    def biz_content(self):
        return self._biz_content
    @biz_content.setter
    def biz_content(self, value):
        # Coerce plain dicts into the typed model class.
        if isinstance(value, KoubeiAdvertCommissionChannelCreateModel):
            self._biz_content = value
        else:
            self._biz_content = KoubeiAdvertCommissionChannelCreateModel.from_alipay_dict(value)
    @property
    def version(self):
        return self._version
    @version.setter
    def version(self, value):
        self._version = value
    @property
    def terminal_type(self):
        return self._terminal_type
    @terminal_type.setter
    def terminal_type(self, value):
        self._terminal_type = value
    @property
    def terminal_info(self):
        return self._terminal_info
    @terminal_info.setter
    def terminal_info(self, value):
        self._terminal_info = value
    @property
    def prod_code(self):
        return self._prod_code
    @prod_code.setter
    def prod_code(self, value):
        self._prod_code = value
    @property
    def notify_url(self):
        return self._notify_url
    @notify_url.setter
    def notify_url(self, value):
        self._notify_url = value
    @property
    def return_url(self):
        return self._return_url
    @return_url.setter
    def return_url(self, value):
        self._return_url = value
    @property
    def udf_params(self):
        return self._udf_params
    @udf_params.setter
    def udf_params(self, value):
        # NOTE(review): non-dict values are silently ignored here.
        if not isinstance(value, dict):
            return
        self._udf_params = value
    @property
    def need_encrypt(self):
        return self._need_encrypt
    @need_encrypt.setter
    def need_encrypt(self, value):
        self._need_encrypt = value
    def add_other_text_param(self, key, value):
        """Attach an extra key/value pair to the outgoing request."""
        if not self.udf_params:
            self.udf_params = dict()
        self.udf_params[key] = value
    def get_params(self):
        """Serialize this request into the flat dict sent to the gateway.

        ``biz_model``/``biz_content`` are JSON-encoded with sorted keys and
        compact separators so the signature over the payload is stable.
        """
        params = dict()
        params[P_METHOD] = 'koubei.advert.commission.channel.create'
        params[P_VERSION] = self.version
        if self.biz_model:
            params[P_BIZ_CONTENT] = json.dumps(obj=self.biz_model.to_alipay_dict(), ensure_ascii=False, sort_keys=True, separators=(',', ':'))
        if self.biz_content:
            if hasattr(self.biz_content, 'to_alipay_dict'):
                params['biz_content'] = json.dumps(obj=self.biz_content.to_alipay_dict(), ensure_ascii=False, sort_keys=True, separators=(',', ':'))
            else:
                params['biz_content'] = self.biz_content
        if self.terminal_type:
            params['terminal_type'] = self.terminal_type
        if self.terminal_info:
            params['terminal_info'] = self.terminal_info
        if self.prod_code:
            params['prod_code'] = self.prod_code
        if self.notify_url:
            params['notify_url'] = self.notify_url
        if self.return_url:
            params['return_url'] = self.return_url
        if self.udf_params:
            params.update(self.udf_params)
        return params
    def get_multipart_params(self):
        """This request carries no file attachments; always empty."""
        multipart_params = dict()
        return multipart_params
| [
"liuqun.lq@alibaba-inc.com"
] | liuqun.lq@alibaba-inc.com |
87027d0329817413bda85ae087fd723f73f29058 | efc3bf4f88a2bfc885de5495c87433d345b54429 | /ZOJ/2482.py | fe3d3b749871e258b09e8c367e84e6dc209647ab | [] | no_license | calvinxiao/Algorithm-Solution | 26ff42cc26aaca87a4706b82a325a92829878552 | afe254a4efa779598be8a82c5c5bcfcc94f80272 | refs/heads/master | 2016-09-05T21:08:35.852486 | 2015-08-23T15:13:23 | 2015-08-23T15:13:23 | 20,149,077 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 637 | py | #Problem ID: 2482
#Submit Time: 2012-08-13 16:32:45
#Run Time: 10
#Run Memory: 420
#ZOJ User: calvinxiao
import sys
def getline():
    """Read one raw line (trailing newline included) from stdin."""
    line = sys.stdin.readline()
    return line
def getint():
    """Read one line from stdin and parse it as an integer."""
    line = sys.stdin.readline()
    return int(line)
def getlist():
    """Read one line from stdin and split it on whitespace."""
    line = sys.stdin.readline()
    return line.strip().split()
for case in xrange(cases):
s = getline().strip()
ans = []
for i in xrange(4):
num = 256
tempAns = 0
for j in xrange(i * 8, i * 8 + 8):
num /= 2
if s[j] == '1':
tempAns += num
ans.append(str(tempAns))
print '.'.join(ans)
| [
"calvin.xiao@scaurugby.com"
] | calvin.xiao@scaurugby.com |
519d8d428a05e407267b3acf7b29ef3992a0bb32 | f0e25779a563c2d570cbc22687c614565501130a | /LeetCode/Offer/顺时针打印矩阵.py | 5e573a82f1dfeb329eb5c57ca987dd6faf17221b | [] | no_license | XyK0907/for_work | 8dcae9026f6f25708c14531a83a6593c77b38296 | 85f71621c54f6b0029f3a2746f022f89dd7419d9 | refs/heads/master | 2023-04-25T04:18:44.615982 | 2021-05-15T12:10:26 | 2021-05-15T12:10:26 | 293,845,080 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,333 | py | """
输入一个矩阵,按照从外向里以顺时针的顺序依次打印出每一个数字。
示例 1:
输入:matrix = [[1,2,3],[4,5,6],[7,8,9]]
输出:[1,2,3,6,9,8,7,4,5]
示例 2:
输入:matrix = [[1,2,3,4],[5,6,7,8],[9,10,11,12]]
输出:[1,2,3,4,8,12,11,10,9,5,6,7]
限制:
0 <= matrix.length <= 100
0 <= matrix[i].length <= 100
注意:本题与主站 54 题相同:https://leetcode-cn.com/problems/spiral-matrix/
"""
class Solution(object):
def spiralOrder(self, matrix):
"""
方法一:模拟
可以模拟螺旋矩阵的路径。初始位置是矩阵的左上角,初始方向是向右,当路径超出界限或者进入之前访问过的位置时,则顺时针旋转,进入下一个方向。
判断路径是否进入之前访问过的位置需要使用一个与输入矩阵大小相同的辅助矩阵 visited\textit{visited}visited,其中的每个元素表示该位置
是否被访问过。当一个元素被访问时,将 visited\textit{visited}visited 中的对应位置的元素设为已访问。
如何判断路径是否结束?由于矩阵中的每个元素都被访问一次,因此路径的长度即为矩阵中的元素数量,当路径的长度达到矩阵中的元素数量时即为完整路
径,将该路径返回。
time O(mn)
space O(mn)
:type matrix: List[List[int]]
:rtype: List[int]
"""
if matrix == [] or matrix[0] == []:
return []
rows = len(matrix)
colomns = len(matrix[0])
total = rows * colomns
visitied = [[False] * colomns for _ in range(rows)]
ans = [0] * total
directions = [[0, 1], [1, 0], [0, -1], [-1, 0]]
direc_idx = 0
row, colomn = 0, 0
for i in range(total):
ans[i] = matrix[row][colomn]
visitied[row][colomn] = True
nxt_row = row + directions[direc_idx][0]
nxt_colomn = colomn + directions[direc_idx][1]
if not(0 <= nxt_row < rows and 0 <= nxt_colomn < colomns and visitied[nxt_row][nxt_colomn] == False):
direc_idx = (direc_idx + 1) % 4
row += directions[direc_idx][0]
colomn += directions[direc_idx][1]
return ans
def spiralOrder_1(self, matrix):
"""
可以将矩阵看成若干层,首先输出最外层的元素,其次输出次外层的元素,直到输出最内层的元素。
定义矩阵的第 kkk 层是到最近边界距离为 kkk 的所有顶点。例如,下图矩阵最外层元素都是第 1 层,次外层元素都是第 2 层,剩下的元素都是第 3 层。
[[1, 1, 1, 1, 1, 1, 1],
[1, 2, 2, 2, 2, 2, 1],
[1, 2, 3, 3, 3, 2, 1],
[1, 2, 2, 2, 2, 2, 1],
[1, 1, 1, 1, 1, 1, 1]]
对于每层,从左上方开始以顺时针的顺序遍历所有元素。假设当前层的左上角位于 (top,left)(\textit{top}, \textit{left})(top,left),
右下角位于 (bottom,right)(\textit{bottom}, \textit{right})(bottom,right),按照如下顺序遍历当前层的元素。
从左到右遍历上侧元素,依次为 (top,left)(\textit{top}, \textit{left})(top,left) 到 (top,right)(\textit{top},
\textit{right})(top,right)。
从上到下遍历右侧元素,依次为 (top+1,right)(\textit{top} + 1, \textit{right})(top+1,right) 到 (bottom,right)(\textit{bottom},
\textit{right})(bottom,right)。
如果 left<right\textit{left} < \textit{right}left<right 且 top<bottom\textit{top} < \textit{bottom}top<bottom,
则从右到左遍历下侧元素,依次为 (bottom,right−1)(\textit{bottom}, \textit{right} - 1)(bottom,right−1) 到
(bottom,left+1)(\textit{bottom}, \textit{left} + 1)(bottom,left+1),以及从下到上遍历左侧元素,依次为
(bottom,left)(\textit{bottom}, \textit{left})(bottom,left) 到 (top+1,left)(\textit{top} + 1, \textit{left})(top+1,left)。
遍历完当前层的元素之后,将 left\textit{left}left 和 top\textit{top}top 分别增加 111,将 right\textit{right}right 和
bottom\textit{bottom}bottom 分别减少 111,进入下一层继续遍历,直到遍历完所有元素为止。
time O(mn)
space O(10
:param matrix:
:return:
"""
if matrix == [] or matrix[0] == []:
return []
rows = len(matrix)
colomns = len(matrix[0])
ans = []
left, top = 0, 0
right, bottom = colomns - 1, rows - 1
while left <= right and top <= bottom:
for colomn in range(left, right + 1):
ans.append(matrix[top][colomn])
for row in range(top + 1, bottom + 1):
ans.append(matrix[row][right])
if left < right and top < bottom:
for colomn in range(right - 1, left, -1):
ans.append(matrix[bottom][colomn])
for row in range(bottom, top, -1):
ans.append(matrix[row][left])
left, right, top, bottom = left + 1, right - 1, top + 1, bottom -1
return ans
if __name__ == '__main__':
solution = Solution()
print(solution.spiralOrder_1([[6,9,7]]))
| [
"cherry.kong0907@gmail.com"
] | cherry.kong0907@gmail.com |
f57bde487847c8891368fad6508d955aee34c88e | ef7e7f9a3adaf98642664b9dfd31d41070a2ce34 | /kbengine/assets/scripts/common/const_ll7.py | 78715fcd4d125e2dff6784cc888e99c5e870799b | [] | no_license | leeminHong1990/LLD7 | f5f864761f641e5d2a797d828b02c447dcf206d5 | 954ff3aee8d33bd00a5904ace28f61abc0e9a97f | refs/heads/master | 2020-05-09T21:58:02.347734 | 2019-12-16T09:11:07 | 2019-12-16T09:11:07 | 181,451,493 | 1 | 6 | null | null | null | null | UTF-8 | Python | false | false | 3,286 | py | DEBUG_JSON_NAME = "0001ll7"
# aid
AID_NONE = 0
LORD_FIRST = 1<<3
LORD_SECOND = 1<<4
LORD_THIRD = 1<<5
DRAW_COVER = 1<<6
COVER_POKER = 1<<7
DISCARD = 1<<8
SHOW_COVER = 1<<9
SURRENDER_FIRST = 1<<10
SURRENDER_SECOND = 1<<11
QUIT_TIMES = 100
POKER_OFFSET = 2
#黑红梅方 3, 4, 5, 6, 7, 8, 9, 10, J, Q, K, A, 2
HEI = [15, 19, 23, 27, 31, 35, 39, 43, 47, 51, 55, 59, 63]
HONG = [14, 18, 22, 26, 30, 34, 38, 42, 46, 50, 54, 58, 62]
MEI = [13, 17, 21, 25, 29, 33, 37, 41, 45, 49, 53, 57, 61]
FANG = [12, 16, 20, 24, 28, 32, 36, 40, 44, 48, 52, 56, 60]
JOKER = [75, 79]
HHMF_VALUE = [3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15]
# 除 大小王之外的牌
POKER_HHMF = (HEI, HONG, MEI, FANG)
# 类型
POKER_MESS = -1 # 杂
POKER_HEI = 0 # 黑
POKER_HONG = 1 # 红
POKER_MEI = 2 # 梅
POKER_FANG = 3 # 方
POKER_LORD = 4 # 主
TYPE_NONE = 0 # 无牌型
TYPE_ONE = 1 # 单张
TYPE_PAIR = 2 # 对子
TYPE_SEQ_PAIR = 3 # 连对
TYPE_OFFSET = 2
# 牌类型 (计算方法 和 utility.ll7 getPokersType 关联)
CARDS_MESS = -1 # 杂牌/未知(不是同一花色/主)
CARDS_MESS_HEI = (POKER_HEI<<TYPE_OFFSET)+TYPE_NONE # 黑_乱牌
CARDS_ONE_HEI = (POKER_HEI<<TYPE_OFFSET)+TYPE_ONE # 黑_单张
CARDS_PAIR_HEI = (POKER_HEI<<TYPE_OFFSET)+TYPE_PAIR # 黑_对子
CARDS_SEQ_PAIR_HEI = (POKER_HEI<<TYPE_OFFSET)+TYPE_SEQ_PAIR # 黑_连对
CARDS_MESS_HONG = (POKER_HONG<<TYPE_OFFSET)+TYPE_NONE # 红_乱牌
CARDS_ONE_HONG = (POKER_HONG<<TYPE_OFFSET)+TYPE_ONE # 红_单张
CARDS_PAIR_HONG = (POKER_HONG<<TYPE_OFFSET)+TYPE_PAIR # 红_对子
CARDS_SEQ_PAIR_HONG = (POKER_HONG<<TYPE_OFFSET)+TYPE_SEQ_PAIR # 红_连对
CARDS_MESS_MEI = (POKER_MEI<<TYPE_OFFSET)+TYPE_NONE # 梅_乱牌
CARDS_ONE_MEI = (POKER_MEI<<TYPE_OFFSET)+TYPE_ONE # 梅_单张
CARDS_PAIR_MEI = (POKER_MEI<<TYPE_OFFSET)+TYPE_PAIR # 梅_对子
CARDS_SEQ_PAIR_MEI = (POKER_MEI<<TYPE_OFFSET)+TYPE_SEQ_PAIR # 梅_连对
CARDS_MESS_FANG = (POKER_FANG<<TYPE_OFFSET)+TYPE_NONE # 方_乱牌
CARDS_ONE_FANG = (POKER_FANG<<TYPE_OFFSET)+TYPE_ONE # 方_单张
CARDS_PAIR_FANG = (POKER_FANG<<TYPE_OFFSET)+TYPE_PAIR # 方_对子
CARDS_SEQ_PAIR_FANG = (POKER_FANG<<TYPE_OFFSET)+TYPE_SEQ_PAIR # 方_连对
CARDS_MESS_LORD = (POKER_LORD<<TYPE_OFFSET)+TYPE_NONE # 主_乱牌
CARDS_ONE_LORD = (POKER_LORD<<TYPE_OFFSET)+TYPE_ONE # 主_单张
CARDS_PAIR_LORD = (POKER_LORD<<TYPE_OFFSET)+TYPE_PAIR # 主_对子
CARDS_SEQ_PAIR_LORD = (POKER_LORD<<TYPE_OFFSET)+TYPE_SEQ_PAIR # 主_连对
# 7
SEVEN = (HEI[4], HONG[4], MEI[4], FANG[4])
TWO = (HEI[-1], HONG[-1], MEI[-1], FANG[-1])
# 正主
KEY_LORD = (JOKER[1], JOKER[0], HEI[4], HONG[4], MEI[4], FANG[4], HEI[-1], HONG[-1], MEI[-1], FANG[-1])
BEGIN_ANIMATION_TIME = 5
#################################### 房间开房的一些模式 ####################################
# 局数
GAME_ROUND = (6, 8, 12)
# 人数
PLAYER_NUM = (4, 5)
# 封顶
MAX_LEVEL = (3, 5)
# 得分翻倍
MUL_LEVEL = (1, 2, 5)
# 单打翻倍
SIG_DOUBLE = (0, 1)
# 玩法 (扣底翻倍)
PLAY_MODE = (0, 1)
# 扣底加级
BOTTOM_LEVEL = (0, 1)
# 出牌时限
DISCARD_SECONDS = (0,)
# 是否可以发送表情
EMOTION_MODE = (0, 1)
###########################################################################################
| [
"693749857@qq.com"
] | 693749857@qq.com |
1b3d398d60ceb3bd664a56ddf5d5ec9ceaa1d6bd | 2571927991eb4b72756182965b0fca6269e9cdda | /PostfixEval.py | 44459dead668ed088a0b624ea9b72dd0f6eb9326 | [] | no_license | nlin24/python_algorithms | 3202a108e7abf0c93afcccbbf84e2b72f789032b | 22221e662d59ccb96b593cd165ecd37be4d37c97 | refs/heads/master | 2020-04-07T13:57:51.225077 | 2019-07-02T03:37:58 | 2019-07-02T03:37:58 | 158,429,017 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,051 | py | import Stack
'''
Implement the postfix string evaluation here http://interactivepython.org/runestone/static/pythonds/BasicDS/InfixPrefixandPostfixExpressions.html
'''
def postfixEval(postfix_string):
operandStack = Stack.Stack()
for token in postfix_string:
if token == " ":
continue
elif token == "+" or token == "-" or token == "*" or token == "/":
operand2 = operandStack.pop()
operand1 = operandStack.pop()
if token == "+":
operandStack.push(operand1 + operand2)
elif token == "-":
operandStack.push(operand1 - operand2)
elif token == "*":
operandStack.push(operand1 * operand2)
elif token == "/":
operandStack.push(operand1 / operand2)
else:
if token.isdigit():
operandStack.push(int(token))
return operandStack.pop()
if __name__ == "__main__":
#print(postfixEval('7 8 + 3 2 + /'))
print(postfixEval(" 17 10 + 3 * 9 / "))
| [
"james"
] | james |
591e86dc222885eba9fba20183674934d041a844 | ac194b6da1dd1a438a223777432a73a3e4742de1 | /amazon.py | 1fdc80f9508a4be40d538e9a1403f44475e1e131 | [] | no_license | amosy3/MUST | 9c497aefd8cb26c6102b53e3c881689b44c51e98 | 95bfad2f47dcfbf6b07a356551bf568a7abcc500 | refs/heads/main | 2023-08-13T14:35:45.694516 | 2021-10-11T13:46:08 | 2021-10-11T13:46:08 | 415,944,897 | 4 | 0 | null | null | null | null | UTF-8 | Python | false | false | 13,366 | py | import numpy as np
import time
import pickle
import copy
import torch
import torch.optim as optim
import os
import datetime
import torch.nn.functional as F
from scipy.sparse import coo_matrix
import random
import argparse
np.random.seed(111)
torch.manual_seed(111)
def get_args():
parser = argparse.ArgumentParser()
parser.add_argument("--target", type=str, choices=["books", "dvd", "electronics", "kitchen"],
help="select target domain")
parser.add_argument("--batch_size", type=int, default=32, help="All loaders batch size")
parser.add_argument("--epochs", type=int, default=10000, help="ping pong epochs")
parser.add_argument("--alpha", type=float, default=0.1, help="weight of ping-pong loss")
parser.add_argument("--pseudo_th", type=float, default=0.6, help="teacher threshold to give pseudo labels for student")
parser.add_argument("--eval_freq", type=int, default=500, help="wait before eval and log models performance")
parser.add_argument("--log", type=str, default=' ', help="set log file path")
parser.add_argument('--only_bn', action='store_true')
# hyperparameters from MDAN
parser.add_argument("--dimension", type=int, default=5000, help="hyperparameters from MDAN")
parser.add_argument("--num_trains", type=int, default=2000, help="hyperparameters from MDAN")
return parser.parse_args()
def eval_on_domain(model, _X, _y, criterion=torch.nn.CrossEntropyLoss()):
_pred = model(_X)
_loss = criterion(_pred,_y)
_, _preds_int = torch.max(_pred, 1)
_ncorrect = torch.sum(_preds_int == _y.data)
return _loss.item(), _ncorrect.float()/_X.shape[0]
def get_batch(_X, _y, batch_size = 32):
rand_idx = np.arange(_X.shape[0])
np.random.shuffle(rand_idx)
rand_idx = rand_idx[:batch_size]
return _X[rand_idx], _y[rand_idx]
def mdan_parsing():
# dataset + parsing code is from https://github.com/KeiraZhao
time_start = time.time()
amazon = np.load("../datasets/amazon_product_reviews/amazon.npz")
amazon_xx = coo_matrix((amazon['xx_data'], (amazon['xx_col'], amazon['xx_row'])),
shape=amazon['xx_shape'][::-1]).tocsc()
amazon_xx = amazon_xx[:, :args.dimension]
amazon_yy = amazon['yy']
amazon_yy = (amazon_yy + 1) / 2
amazon_offset = amazon['offset'].flatten()
time_end = time.time()
# print("Time used to process the Amazon data set = {} seconds.".format(time_end - time_start))
# print("Number of training instances = {}, number of features = {}."
# .format(amazon_xx.shape[0], amazon_xx.shape[1]))
# print("Number of nonzero elements = {}".format(amazon_xx.nnz))
# print("amazon_xx shape = {}.".format(amazon_xx.shape))
# print("amazon_yy shape = {}.".format(amazon_yy.shape))
data_name = ["books", "dvd", "electronics", "kitchen"]
num_data_sets = 4
data_insts, data_labels, num_insts = [], [], []
for i in range(num_data_sets):
data_insts.append(amazon_xx[amazon_offset[i]: amazon_offset[i+1], :])
data_labels.append(amazon_yy[amazon_offset[i]: amazon_offset[i+1], :])
# print("Length of the {} data set label list = {}, label values = {}, label balance = {}".format(
# data_name[i],
# amazon_yy[amazon_offset[i]: amazon_offset[i + 1], :].shape[0],
# np.unique(amazon_yy[amazon_offset[i]: amazon_offset[i+1], :]),
# np.sum(amazon_yy[amazon_offset[i]: amazon_offset[i+1], :])
# ))
num_insts.append(amazon_offset[i+1] - amazon_offset[i])
# Randomly shuffle.
r_order = np.arange(num_insts[i])
np.random.shuffle(r_order)
data_insts[i] = data_insts[i][r_order, :]
data_labels[i] = data_labels[i][r_order, :]
# print("Data sets: {}".format(data_name))
# print("Number of total instances in the data sets: {}".format(num_insts))
# Partition the data set into training and test parts, following the convention in the ICML-2012 paper, use a fixed
# amount of instances as training and the rest as test.
input_dim = amazon_xx.shape[1]
return data_insts, data_labels
def get_bn_layer_params(bn_layer):
layer_params = {'running_mean': bn_layer.running_mean.clone(),
'running_var': bn_layer.running_var.clone(),
'weight': bn_layer.weight.clone(),
'bias': bn_layer.bias.clone()}
return layer_params
def set_bn_layer_params(layer, params):
layer.running_mean = (params['running_mean'])
layer.running_var = (params['running_var'])
layer.weight = torch.nn.Parameter(params['weight'])
layer.bias = torch.nn.Parameter(params['bias'])
class Net(torch.nn.Module):
def __init__(self, n_feature, n_hidden1=1000, n_hidden2=500, n_hidden3=100, n_output=2):
super(Net, self).__init__()
self.bn = torch.nn.BatchNorm1d(n_feature)
self.hidden1 = torch.nn.Linear(n_feature, n_hidden1)
self.hidden2 = torch.nn.Linear(n_hidden1, n_hidden2)
self.hidden3 = torch.nn.Linear(n_hidden2, n_hidden3)
self.predict = torch.nn.Linear(n_hidden3, n_output)
self.domain2model_bns = dict()
def forward(self, x):
x = self.bn(x)
x = F.relu(self.hidden1(x))
x = F.relu(self.hidden2(x))
x = F.relu(self.hidden3(x))
x = self.predict(x)
return x
def save_current_bn(self, domain):
self.domain2model_bns[domain] = get_bn_layer_params(self.bn)
def load_bn_to_model(self, domain):
set_bn_layer_params(self.bn, self.domain2model_bns[domain])
def init_bn_for_all_domains(self, domains):
for domain in domains:
self.domain2model_bns[domain] = get_bn_layer_params(self.bn)
def print_and_log(txt, log_path):
print(txt)
f = open(log_path, 'a')
f.write(txt+'\n')
f.close()
if __name__ == '__main__':
args = get_args()
log_folder = '../logs/amazon/%s_%s_%s/' % (args.target, args.log, datetime.datetime.now())
os.mkdir(log_folder)
logfile = '../logs/amazon/%s%s_alpha_%0.1f_pseudo_th_%0.1f_%s.log' % (args.log, args.target, args.alpha,
args.pseudo_th, datetime.datetime.now())
logfile = logfile.replace(' ','_')
data_insts, data_labels = mdan_parsing()
domains = ["books", "dvd", "electronics", "kitchen"]
X_train, X_test, y_train, y_test = dict(), dict(), dict(), dict()
for i, domain in enumerate(domains):
X_train[domain] = torch.tensor(data_insts[i][:args.num_trains, :].todense().astype(np.float32))
y_train[domain] = torch.tensor(data_labels[i][:args.num_trains, :].ravel().astype(np.int64))
X_test[domain] = torch.tensor(data_insts[i][args.num_trains:, :].todense().astype(np.float32))
y_test[domain] = torch.tensor(data_labels[i][args.num_trains:, :].ravel().astype(np.int64))
teacher = Net(args.dimension)
teacher_optimizer = torch.optim.SGD(teacher.parameters(), lr=0.001, momentum=0.9)
teacher.init_bn_for_all_domains(domains)
teacher.train()
criterion = torch.nn.CrossEntropyLoss()
student = Net(args.dimension)
student_optimizer = torch.optim.SGD(student.parameters(), lr=0.001, momentum=0.9)
student.init_bn_for_all_domains(domains)
student.train()
target = args.target
sources = [x for x in domains if x != target]
running_ncorrect, running_loss, ns_samples = 0.0, 0.0, 0
running_student_loss, nt_samples = 0.0, 0
running_ncorrect_ping_pong, running_ping_pong_loss = 0.0, 0.0
best_ping_pong_accs, min_source_loss = 0.0, np.inf
pp_acc_on_target, sl_acc_on_target = 0.0, 0.0
n = 0
for i in range(args.epochs):
source = random.choice(sources)
Xs, ys = get_batch(X_train[source], y_train[source], batch_size=args.batch_size)
teacher.load_bn_to_model(source)
prediction = teacher(Xs)
loss = criterion(prediction, ys)
teacher_optimizer.zero_grad()
loss.backward()
_, batch_preds_int = torch.max(prediction, 1)
running_ncorrect += torch.sum(batch_preds_int == ys.data)
running_loss += loss.item() * Xs.shape[0]
ns_samples += Xs.shape[0]
Xt, _ = get_batch(X_train[target], y_train[target], batch_size=args.batch_size)
teacher.load_bn_to_model(target)
pseudo_labels = teacher(Xt)
probs = torch.max(torch.nn.Softmax(dim=1)(pseudo_labels), 1)
mask = probs[0] > args.pseudo_th
if sum(mask) == 0 or args.only_bn:
n += 1
teacher_optimizer.step()
else:
student.load_bn_to_model(target)
stud_pred = student(Xt)
student_loss = torch.nn.L1Loss()(stud_pred[mask], pseudo_labels[mask].detach())
student_optimizer.zero_grad()
student_loss.backward(retain_graph=True)
student_optimizer.step()
running_student_loss += student_loss.item() * Xt[mask].shape[0]
nt_samples += Xt[mask].shape[0]
student.load_bn_to_model(source)
ys_student_est = student(Xs)
ping_pong_loss = criterion(ys_student_est, ys)
teacher_t_loss = args.alpha *torch.nn.L1Loss()(stud_pred[mask].detach(), pseudo_labels[mask])
teacher_t_loss.backward(retain_graph=True)
teacher_optimizer.step()
_, batch_preds_int = torch.max(ys_student_est, 1)
running_ncorrect_ping_pong += torch.sum(batch_preds_int == ys.data)
running_ping_pong_loss += ping_pong_loss.item() * Xs.shape[0]
if i % args.eval_freq == 0:
source_loss = running_loss / ns_samples
report = 'Epoch: %d, Teacher on S - loss:%0.4f ,acc:%0.4f' % (
i, source_loss, running_ncorrect.double() / ns_samples)
print_and_log(report, logfile)
running_loss, running_ncorrect, ns_samples = 0.0, 0, 0
for source in sources:
teacher.load_bn_to_model(source)
source_loss, source_acc = eval_on_domain(teacher, X_test[source], y_test[source])
report = 'Epoch %d, Teacher on Source: %s, val_loss:%0.4f, val_acc:%0.4f' % (
i, source, source_loss, source_acc)
print_and_log(report, logfile)
teacher.load_bn_to_model(target)
teacher_target_loss, teacher_target_acc = eval_on_domain(teacher, X_test[target], y_test[target])
report = 'Epoch %d, \t\tTeacher on Target: %s, val_loss:%0.4f, val_acc:%0.4f' % (
i, target, teacher_target_loss,
teacher_target_acc)
print_and_log(report,logfile)
report = 'Epoch: %d, Student on teacher loss:%0.4f, nt=%d' % (i, running_student_loss / max(nt_samples, 1),
nt_samples)
print_and_log(report,logfile)
running_student_loss, nt_samples = 0.0, 0
student.load_bn_to_model(target)
student_target_loss, student_target_acc = eval_on_domain(student, X_test[target], y_test[target])
report = 'Epoch %d, \t\tStudent on Target: %s, val_loss:%0.4f, val_acc:%0.4f' % (
i, target, student_target_loss,
student_target_acc)
print_and_log(report, logfile)
ping_pong_accs = []
for source in sources:
student.load_bn_to_model(source)
source_loss, source_acc = eval_on_domain(student, X_test[source], y_test[source])
ping_pong_accs.append(source_acc)
report = 'Epoch %d, Student on Source: %s, val_loss:%0.4f, val_acc:%0.4f' % (
i, source, source_loss, source_acc)
print_and_log(report, logfile)
if best_ping_pong_accs < np.mean(ping_pong_accs):
best_ping_pong_accs = np.mean(ping_pong_accs)
pp_acc_on_target = teacher_target_acc, student_target_acc
teacher.load_bn_to_model(target)
torch.save(teacher, log_folder + 'teacher_checkpoint_%s.pkl' % i)
student.load_bn_to_model(target)
torch.save(student, log_folder + 'student_checkpoint_%s.pkl' % i)
if min_source_loss > source_loss:
min_source_loss = source_loss
sl_acc_on_target = teacher_target_acc, student_target_acc
print_and_log('best_pp-%0.3f, Teacher:%0.3f, Student:%0.3f ' % (best_ping_pong_accs,
pp_acc_on_target[0],
pp_acc_on_target[1]),
logfile)
print_and_log('best_sl-%0.3f, Teacher:%0.3f, Student:%0.3f ' % (min_source_loss,
sl_acc_on_target[0],
sl_acc_on_target[1]),
logfile)
| [
"noreply@github.com"
] | amosy3.noreply@github.com |
1395c3aca2cc92b5dc7677bb6b0a68ffba12705d | 7dc1b312f117390b55f6a1b258065ea30212180d | /src/common/rpc.py | ef4d409f3431fbed7430b5beb5a3ce64e8ff7582 | [] | no_license | msemikin/distributed-sudoku | 262cc07371021cd81a5db5a3bb2a20c7643c5515 | d2737b10dbdc2e980641ee3529ebcd06fe52faed | refs/heads/master | 2021-09-03T02:46:40.732243 | 2018-01-05T01:48:52 | 2018-01-05T01:48:52 | 109,395,990 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 625 | py | from SimpleXMLRPCServer import SimpleXMLRPCServer
import socket
import logging
logger = logging.getLogger(__name__)
class CustomXMLRPCServer(SimpleXMLRPCServer):
def __init__(self, addr, **kwargs):
SimpleXMLRPCServer.__init__(self, addr, **kwargs)
self.should_shutdown = False
def serve_forever(self, poll_interval=0.5):
self.timeout = 5
while not self.should_shutdown:
try:
self.handle_request()
except socket.error:
continue
logger.info('Stop serving')
def shutdown(self):
self.should_shutdown = True
| [
"caliber1997@gmail.com"
] | caliber1997@gmail.com |
2a63ab911635f7ce78573d221699b93b6e06e776 | 2d6f385a6ad9a77df60b21e36c417c22dea296b0 | /RememberYourFriends/allcountries.py | c6856ac589ea7781ffdb6369111196fb3e307f29 | [] | no_license | peterbe/zope_products | f4f8c4e0348525c4c0742b290b2ee5e06ed91a71 | 8cb710cdb13cd895e886e37495a601aed49863d2 | refs/heads/master | 2021-01-21T00:43:31.846747 | 2011-10-04T06:37:34 | 2011-10-04T06:37:34 | 1,776,511 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,818 | py |
ALL_COUNTRIES = (
'Afghanistan',
'Albania',
'Algeria',
'American Samoa',
'Andorra',
'Angola',
'Anguilla',
'Antarctica',
'Antigua And Barbuda',
'Argentina',
'Armenia',
'Aruba',
'Australia',
'Austria',
'Azerbaijan',
'Bahamas',
'Bahrain',
'Bangladesh',
'Barbados',
'Belarus',
'Belgium',
'Belize',
'Benin',
'Bermuda',
'Bhutan',
'Bolivia',
'Bosnia and Herzegowina',
'Botswana',
'Bouvet Island',
'Brazil',
'British Indian Ocean Territory',
'Brunei Darussalam',
'Bulgaria',
'Burkina Faso',
'Burma',
'Burundi',
'Cambodia',
'Cameroon',
'Canada',
'Cape Verde',
'Cayman Islands',
'Central African Republic',
'Chad',
'Chile',
'China',
'Christmas Island',
'Cocos (Keeling) Islands',
'Colombia',
'Comoros',
'Congo',
'Congo, the Democratic Republic of the',
'Cook Islands',
'Costa Rica',
"Cote d'Ivoire",
'Croatia',
'Cuba',
'Cyprus',
'Czech Republic',
'Denmark',
'Djibouti',
'Dominica',
'Dominican Republic',
'East Timor',
'Ecuador',
'Egypt',
'El Salvador',
'England',
'Equatorial Guinea',
'Eritrea',
'Espana',
'Estonia',
'Ethiopia',
'Falkland Islands',
'Faroe Islands',
'Fiji',
'Finland',
'France',
'French Guiana',
'French Polynesia',
'French Southern Territories',
'Gabon',
'Gambia',
'Georgia',
'Germany',
'Ghana',
'Gibraltar',
'Great Britain',
'Greece',
'Greenland',
'Grenada',
'Guadeloupe',
'Guam',
'Guatemala',
'Guinea',
'Guinea-Bissau',
'Guyana',
'Haiti',
'Heard and Mc Donald Islands',
'Honduras',
'Hong Kong',
'Hungary',
'Iceland',
'India',
'Indonesia',
'Ireland',
'Israel',
'Italy',
'Iran',
'Iraq',
'Jamaica',
'Japan',
'Jordan',
'Kazakhstan',
'Kenya',
'Kiribati',
'Korea, Republic of',
'Korea (South)',
'Kuwait',
'Kyrgyzstan',
"Lao People's Democratic Republic",
'Latvia',
'Lebanon',
'Lesotho',
'Liberia',
'Liechtenstein',
'Lithuania',
'Luxembourg',
'Macau',
'Macedonia',
'Madagascar',
'Malawi',
'Malaysia',
'Maldives',
'Mali',
'Malta',
'Marshall Islands',
'Martinique',
'Mauritania',
'Mauritius',
'Mayotte',
'Mexico',
'Micronesia, Federated States of',
'Moldova, Republic of',
'Monaco',
'Mongolia',
'Montserrat',
'Morocco',
'Mozambique',
'Myanmar',
'Namibia',
'Nauru',
'Nepal',
'Netherlands',
'Netherlands Antilles',
'New Caledonia',
'New Zealand',
'Nicaragua',
'Niger',
'Nigeria',
'Niue',
'Norfolk Island',
'Northern Ireland',
'Northern Mariana Islands',
'Norway',
'Oman',
'Pakistan',
'Palau',
'Panama',
'Papua New Guinea',
'Paraguay',
'Peru',
'Philippines',
'Pitcairn',
'Poland',
'Portugal',
'Puerto Rico',
'Qatar',
'Reunion',
'Romania',
'Russia',
'Rwanda',
'Saint Kitts and Nevis',
'Saint Lucia',
'Saint Vincent and the Grenadines',
'Samoa (Independent)',
'San Marino',
'Sao Tome and Principe',
'Saudi Arabia',
'Scotland',
'Senegal',
'Serbia and Montenegro',
'Seychelles',
'Sierra Leone',
'Singapore',
'Slovakia',
'Slovenia',
'Solomon Islands',
'Somalia',
'South Africa',
'South Korea',
'Spain',
'Sri Lanka',
'St. Helena',
'St. Pierre and Miquelon',
'Suriname',
'Svalbard and Jan Mayen Islands',
'Swaziland',
'Sweden',
'Switzerland',
'Taiwan',
'Tajikistan',
'Tanzania',
'Thailand',
'Togo',
'Tokelau',
'Tonga',
'Trinidad',
'Trinidad and Tobago',
'Tunisia',
'Turkey',
'Turkmenistan',
'Turks and Caicos Islands',
'Tuvalu',
'Uganda',
'Ukraine',
'United Arab Emirates',
'United Kingdom',
'United States',
'United States Minor Outlying Islands',
'Uruguay',
'Uzbekistan',
'Vanuatu',
'Vatican City State (Holy See)',
'Venezuela',
'Viet Nam',
'Virgin Islands (British)',
'Virgin Islands (U.S.)',
'Wales',
'Wallis and Futuna Islands',
'Western Sahara',
'Yemen',
'Zambia',
'Zimbabwe',
)
# Aliases is useful to uniform the input so to allow people
# to say 'Great Britain' instead of 'United Kingdom' but when
# you store it in the database you store it as 'United Kingdom'
COUNTRY_ALIASES = {
'Great Britain':'United Kingdom',
}
MOST_COMMON = (
'United States',
'United Kingdom',
'France',
'Germany',
'Sweden',
'Canada',
) | [
"mail@peterbe.com"
] | mail@peterbe.com |
5acaf82ac2db33a08556e39a1809a2a5a6896c38 | 1125345341e496920b661e612cd67cdb96a1d170 | /createCampaign/parameter_tests/CREATIVE_TYPE/test08_abcd_f.py | 17c9819ec1a2556865b067141d7b4187d426daf9 | [] | no_license | Stephen-Williams/swarm-qa | 0bac526f0ee44b8c3677fb35959e6f7d0e258be2 | 90e36b5eab475788d9ab54051ad9c2736f3633ec | refs/heads/master | 2021-01-01T20:11:51.033059 | 2015-07-08T16:07:06 | 2015-07-08T16:07:06 | 38,764,211 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,679 | py | { 'all' :
{ '0':
{ 'ADOMAIN': 'abc.com',
'ADVERTISER_CATEGORY': 'IAB8-5',
'APP_FILTER': 'sites',
'CREATIVE_ATTR': '0',
'CREATIVE_BASE_64': 'iVBORw0KGgoAAAANSUhEUgAAAUAAAAAyCAIAAACib5WDAAAAAXNSR0IArs4c6QAAAARnQU1BAACxjwv8YQUAAAAJcEhZcwAAD2EAAA9hAag/p2kAAAAYdEVYdFNvZnR3YXJlAHBhaW50Lm5ldCA0LjAuNWWFMmUAAAYHSURBVHhe7ZtNaB5VFIYHUkREoYuCglIUKmRRsBQRhSwKCpGgYCCLQEWCKCgWEbGgUMii0IJFFAxkEWgoIhayCFRokBYqZBGwFJEKLbjowkUXLlx04cKFPsO9nJw589Nu2nK/eV8eJOeeO983wnfu+Zlp9d9NIUSpRFsIURDRFkIURLSFEAURbSFEQURbCFEQ0RZCFES0hRAFEW0hREFEWwhRENEWQhREtIUQBRFtIURBRFsIURDRFgNcOVe99Wp15KVqYbb68+fofSjsnK+OHa2W5qvP3q1ub0evmHiiLQYgek1XN6L3ofDs0/l+0OZK9IqJJ9pigGeezKGyZ6r657foffDcuZbvJ+mvnbhBTDzRFn0QHqZD0w3X1lourRfnHmhpTRVgOrA/esUYiLbo48fVHCrovYWG67VX8jr646eG676yfip/KaINDl4xBqIt+jj5cQ4V9O2JhmvvE3mdP/z6/eaTd/L3otXl6BVjINoTxr+/V9cv1NPjnfN36VrJnGyjKO3rJPsmWFxoIhXbeh/cEpdvrnQX2yxyG/c4T359Jn8v+nUzesUYiPbEQJDMHK4efST/vtHjj9XPWu5ca2y7cbEOgz1TeU8SLS4Fs98G7QnW6U/zSlt879+/5AttUEzQ8nUvHswmPXPakCCr79ubXYirtr+vTwduO6kdok/tyy72BJcYCdGeDD5/P8akib7Rtp34sHcb2lrb3dk5wZp+Lq+0xceSadnj8/Payd1oRAuz+XM4U3wXbWIze0x2IiQwTfeS+cVEEu0JoJ0YfWaz1HrmeF5B9K5L83XYH3w+ryByoH1m5wRrIPipt9OejW/yCjqwP/+R9PUXeY+P0j61h8zkZ9PyR9ErRkK0S4eftY8rcmxKXHSVi3N1GZyqXx+Q1LTWjuK1uhRZ0uucYHGVT7CcArcuZ6xQp2gP4jb4hGNH84fzt4k7X12u1+nYfcuNLF0b/kJfLIhREe3SobE0tfOSzbEog5OIGfpS20DgWbomkm3dhxPRZev0pabOOtbXxnzXd182vJTZPi3b0ZBcfkZFvWCuBEeAyQ4aMTaiXTS+qqQATl1oG/KV6Y0j9Qo7SZtEl02YEBFil9gEC/kxGG2tiWRr64Y9YUIfLEavLwSo3sMN+9L60tmGC+yo4sLgEuMh2kVDE2uieA5ewz8+7RPBY+HkJ1ghWnwapN31LvAFtp9LG4S0yaffhH/PuX2tHQ3hrRIxKqJdNC+/kH/TaKAtHJgeIzLb+qlGMvR58u03d9dh5nBeR4Srd4GfYNlYy+Pv5PqFhssX52zzLqClN1EFBK8YD9EuGj9tHngn2UQSo08GYuCHr+o3KDqv8hMsGx0n7LEQH+XXE36C1RlmXsFFBWEKpwZc3ciPuKntKf6DV4yHaBeNV997V74epuMN3k78BIs229bJmaa7TrDar2H4O9kz1XBxjvjmOZwaQhjRLhqvUEITLSm70kyaSGLhxaxO+iZYJG1TZyNqQcgXtSdqvgxGduLwFX7+jPypkeD/gsIe+s4pMRKiXTT+kQzZL72xTOiSwYglKuG0zVfa7ec6l842FgcmWNTeJpI5gQfWyvoJVl+q90q3R6z6SXhSOGVuXNw9GmjCvUuMjWgXzdJ8/lmbfCFqU2JfElO7LszWFwJ5L/W0vh4emGCtLud1L3vD2U+w/BMpj39qHWSHUfspkW+t214xKqJdNBTJ/mVjLyLZ8tity43Abss/0R2YYJEJif8gi9W7TrDAnw5efKn1z+0Jlh99t58ti1ER7dKhgvUPk5JIueEhDabPwyYCm26Wstl22pNe+tj2IOr
M8TwNTuJy22OvYbDoX/YKrJ9qnAKHpvM7G/ZGJ5223w+Lc9lFLzDwyWIMRHsCoI+l/9xaq2Pjyrmh1wxvb9fvRVLrspP/th/kJkjsJO32FCrB5/Mhnf/kOF3oVzrhKlrfzZXGDbDItZ0zKu6E/XT4oTcWIyTaQoiCiLYQoiCiLYQoiGgLIQoi2kKIgoi2EKIgoi2EKIhoCyEKItpCiIKIthCiIKIthCiIaAshCiLaQohiuFn9D1yZ9AWuOgemAAAAAElFTkSuQmCC',
'CREATIVE_HEIGHT': 50,
'CREATIVE_NAME': 'CREATIVE_TYPE is random string',
'CREATIVE_TYPE': 'abcd',
'CREATIVE_WIDTH': 320,
'DAY_PARTING': '111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111',
'DELIVERY_RATE_UNIT': 'impressions',
'ENCODE_A_HREF': 0,
'START': 1433908800, #June 10th 2015, midnight
'END': 1434513599, #June 16th 2015, 23:59:59
'EXCHANGE': 'mpb',
'LANG': 'en',
'LOCATIONS': ['CAN', 'USA'],
'MAX_RATE_IN_DOLLARS': 0.8,
'MPB_TYPE': '',
'NECTAR_ALLOCATION': 9708,
'NECTAR_CRID': 9708,
'QUANTITY': '1000000',
'TARGET_ANDROID': True,
'TARGET_IOS': True,
'SITE_LIST': ['0c3e797b933649ab84619d8e8a1c0ab6',
'07ab13ce6ae511e281c11231392559e4',
'f8289871fe0d48318d36bf3ea197f65d',
'bd80deae924f11e281c11231392559e4'],
'TAG': '<A HREF="http://ad.doubleclick.net/ddm/jump/N6041.368591.JUICEMOBILE.CA/B8760366.118973391;sz=728x90;ord=[NECTAR_TIME]?">\r\n'
'<IMG SRC="http://ad.doubleclick.net/ddm/ad/N6041.368591.JUICEMOBILE.CA/B8760366.118973391;sz=728x90;ord=[NECTAR_TIME]?" '
'BORDER=0 WIDTH=728 HEIGHT=90 '
'ALT="Advertisement"></A>'
}
}
}
| [
"stephen.williams@juicemobile.com"
] | stephen.williams@juicemobile.com |
36f15a850afce5b6541b321f369fbf921041164d | 1889dc3f8220a50856e3fb5d2481a8e4c1b0fc4e | /Model_codes/test.py | f7f5e11b600ad7fca2aec4f75b50f2eeef25802d | [] | no_license | Annon-arch/ChatterNet | 688fa22c031cbe4953ab953699197dfdb65ce815 | f8dbcbf7c02e0e0716d91cc86decd5ce9d0d967f | refs/heads/master | 2022-12-10T10:21:20.861716 | 2020-03-16T06:16:01 | 2020-03-16T06:16:01 | 241,574,829 | 1 | 0 | null | 2022-12-08T03:48:47 | 2020-02-19T08:54:23 | Python | UTF-8 | Python | false | false | 2,270 | py | import json
import numpy as np
from build_model import build_model_observeLSTM, masked_relative_error
from matplotlib import pyplot as plt
from scipy.stats import kendalltau, spearmanr
def smape(A, F):
return (100/len(A)) * np.sum((2 * np.abs(F - A)) / (np.abs(A) + np.abs(F) + np.finfo(float).eps))
def mape(A, F):
return (100/len(A)) * np.sum(np.abs(F - A) / (np.abs(A) + np.finfo(float).eps))
n_input = np.load('../Data_processing/news_text_november.npy')[:-1]
print(n_input.shape)
submission = np.load('../Data_processing/submission_text_november.npy')[:len(n_input)+1]
comment_count = np.load('../Data_processing/temporal_cc60min_november.npy')[:len(n_input)+1]
subred = np.load('../Data_processing/submission_subred_november.npy')[:len(n_input)+1]
comment_rate = np.load('../Data_processing/submission_comment_rate_november.npy')[:len(n_input)]
comment_rate = np.reshape(comment_rate, comment_rate.shape+(1,))
s_input = submission[:-1]
c_count = comment_count[1:]
c_count = c_count.reshape((c_count.shape[0], c_count.shape[1], c_count.shape[2], 1))
s_pred = submission[1:]
subred_input = subred[:-1]
subred_pred = subred[1:]
s_value = np.load('../Data_processing/submission_value_60min_november.npy')[1:len(n_input)+1]
_, news_per_hour, token_per_news = n_input.shape
_, sub_per_hour, token_per_sub = s_input.shape
comment_steps = c_count.shape[-2]
model = build_model_observeLSTM(news_per_hour,
token_per_news,
sub_per_hour,
token_per_sub,
comment_steps)
model.load_weights('model_observeLSTM_exp-gru.h5')
model.compile(loss=masked_relative_error(0.), optimizer='adam')
pred_value = model.predict([n_input, s_input, s_pred, subred_input, subred_pred, comment_rate, c_count], batch_size=1, verbose=1)
y_true = np.reshape(s_value, (s_value.shape[0]*s_value.shape[1]*s_value.shape[2]))
y_pred = np.reshape(pred_value, (s_value.shape[0]*s_value.shape[1]*s_value.shape[2]))
A, F = [], []
for i in range(len(y_true)):
if y_true[i]!=-1.:
A.append(y_true[i])
F.append(y_pred[i])
print('sMAP Error:', smape(A, F))
print('MAP Error:', mape(A,F))
tau, _ = kendalltau(A,F)
rho, _ = spearmanr(A,F)
print('kendal tau:', tau)
print('spearman rho:', rho)
| [
"noreply@github.com"
] | Annon-arch.noreply@github.com |
fe3b9d8e37f2011e8616419bb475b9e3d8e513c6 | ca6aaaddbd6ee025790af5bd0a6ed0645c54e65a | /log_collectors/regex_extractor/__init__.py | 5bfd4bcff2391104815491d05bb44bdbfb7578e5 | [
"Apache-2.0"
] | permissive | AISphere/ffdl-model-metrics | b3ff428588794f7b0c70f8f92b3e0f1069902c0a | e1a27db3d130fb3227585ee30b860a085591e748 | refs/heads/master | 2020-04-01T19:09:13.596717 | 2019-02-05T19:30:57 | 2019-02-05T19:30:57 | 153,538,219 | 1 | 5 | Apache-2.0 | 2019-02-15T13:30:44 | 2018-10-17T23:55:05 | Python | UTF-8 | Python | false | false | 74 | py | from __future__ import absolute_import
from .src import extract_from_log
| [
"falk.pollok@ibm.com"
] | falk.pollok@ibm.com |
d901db86558a1335fc2f43ba7a464314f345b60f | 6fcfb638fa725b6d21083ec54e3609fc1b287d9e | /python/datalogai_recurrentshop/recurrentshop-master/recurrentshop/engine.py | 4f9ec7f358401d78e869ebaa7be55093fc9cf009 | [] | no_license | LiuFang816/SALSTM_py_data | 6db258e51858aeff14af38898fef715b46980ac1 | d494b3041069d377d6a7a9c296a14334f2fa5acc | refs/heads/master | 2022-12-25T06:39:52.222097 | 2019-12-12T08:49:07 | 2019-12-12T08:49:07 | 227,546,525 | 10 | 7 | null | 2022-12-19T02:53:01 | 2019-12-12T07:29:39 | Python | UTF-8 | Python | false | false | 19,284 | py | from keras.layers import Layer, InputSpec
from keras.models import Sequential
from keras import initializations, regularizers, constraints
from keras.utils.layer_utils import layer_from_config
from keras import backend as K
from inspect import getargspec
import numpy as np
from . import backend
'''Provides a simpler API for building complex recurrent neural networks using Keras.
The RNN logic is written inside RNNCells, which are added sequentially to a RecurrentContainer.
A RecurrentContainer behaves similar to a Recurrent layer in Keras, and accepts arguments like
return_sequences, unroll, stateful, etc [See Keras Recurrent docstring]
The .add() method of a RecurrentContainer is used to add RNNCells and other layers to it. Each
element in the input sequence passes through the layers in the RecurrentContainer in the order
in which they were added.
'''
__author__ = "Fariz Rahman"
__copyright__ = "Copyright 2016, datalog.ai"
__credits__ = ["Fariz Rahman", "Malaikannan Sankarasubbu"]
__license__ = "MIT"
__version__ = "0.0.1"
__maintainer__ = "Fariz Rahman"
__email__ = "fariz@datalog.ai"
__status__ = "Production"
_backend = getattr(K, K.backend() + '_backend')
class learning_phase(object):
    """Context manager that temporarily forces the Keras learning phase.

    On entry the backend's current ``_LEARNING_PHASE`` is saved and replaced
    with *value* (0 = test, 1 = train); on exit the saved phase is restored.
    """

    def __init__(self, value):
        self.value = value

    def __enter__(self):
        self._saved_phase = _backend._LEARNING_PHASE
        _backend._LEARNING_PHASE = self.value

    def __exit__(self, *args):
        _backend._LEARNING_PHASE = self._saved_phase
# Select the scan/rnn implementation for the active backend.  Theano uses the
# patched rnn from .backend; for other backends, wrap K.rnn so it also returns
# an (always empty) fourth element, matching the Theano version's signature.
if K.backend() == 'theano':
    rnn = backend.rnn
else:
    rnn = lambda *args, **kwargs: list(K.rnn(*args, **kwargs)) + [[]]
def _isRNN(layer):
    """Return True when *layer* is an RNNCell (i.e. a steppable cell)."""
    return isinstance(layer, RNNCell)
def _get_first_timestep(x):
    """Return timestep 0 of *x*, i.e. ``x[:, 0]`` for a tensor of any rank."""
    index = [slice(None)] * K.ndim(x)
    index[1] = 0
    return x[index]
def _get_last_timestep(x):
    """Return the last timestep of *x* (equivalent to ``x[:, -1]``).

    TensorFlow needs an explicit slice/squeeze because the time dimension may
    be dynamic; Theano supports negative indexing directly.
    """
    ndim = K.ndim(x)
    if K.backend() == 'tensorflow':
        import tensorflow as tf
        # tf.pack is the pre-TF-1.0 name of tf.stack
        slice_begin = tf.pack([0, tf.shape(x)[1] - 1] + [0] * (ndim - 2))
        slice_size = tf.pack([-1, 1] + [-1] * (ndim - 2))
        last_output = tf.slice(x, slice_begin, slice_size)
        # Drop the singleton time axis left by the slice.
        last_output = tf.squeeze(last_output, [1])
        return last_output
    else:
        return x[:, -1]
class weight(object):
    """Descriptor for a single RNNCell weight.

    *value* may be an int/tuple/list (a shape -- the weight is created with
    *init*), a numpy array (wrapped in a backend variable), or an existing
    tensor (used as-is).  *regularizer*/*constraint* may be keras identifier
    strings or objects; *trainable* controls which weight list the cell puts
    the variable in.
    """

    def __init__(self, value, init='glorot_uniform', regularizer=None, constraint=None, trainable=True, name=None):
        if type(value) == int:
            value = (value,)
        if type(value) in [tuple, list]:
            # A shape was given: instantiate the weight with the initializer.
            if type(init) == str:
                init = initializations.get(init)
            self.value = init(value, name=name)
        elif 'numpy' in str(type(value)):
            self.value = K.variable(value, name=name)
        else:
            self.value = value
        if type(regularizer) == str:
            regularizer = regularizers.get(regularizer)
        if type(constraint) == str:
            # Bug fix: was `constants.get(constraint)` which raised NameError;
            # the keras module imported at the top of this file is `constraints`.
            constraint = constraints.get(constraint)
        self.regularizer = regularizer
        self.constraint = constraint
        self.trainable = trainable
class RNNCell(Layer):
    """Base class for recurrent cells.

    Subclasses implement ``step(x, states[, weights[, constants]])``, which
    consumes one timestep and returns ``(output, new_states)``.  Cells are
    composed and driven by a :class:`RecurrentContainer`.
    """

    def __init__(self, **kwargs):
        # Normalize legacy `input_dim` / `output_dim` kwargs into the forms
        # the Layer base class and get_output_shape_for expect.
        if 'input_dim' in kwargs:
            kwargs['input_shape'] = (kwargs['input_dim'],)
            del kwargs['input_dim']
        if 'output_dim' in kwargs:
            self.output_dim = kwargs['output_dim']
            del kwargs['output_dim']
        self.initial_states = {}
        super(RNNCell, self).__init__(**kwargs)

    def _step(self, x, states):
        # Invoke the subclass `step`, passing only as many of
        # (x, states, weights, constants) as its signature accepts.
        args = [x, states]
        if hasattr(self, 'weights'):
            args += [self.weights]
        if hasattr(self, 'constants'):
            args += [self.constants]
        args = args[:len(getargspec(self.step).args)]
        return self.step(*args)

    def call(self, x, mask=None):
        # Cells are only ever stepped by their container; this dummy forward
        # pass just produces a tensor of the correct rank for shape inference.
        input_ndim = K.ndim(x)
        output_ndim = len(self.get_output_shape_for((10,) * input_ndim))
        return K.zeros((10,) * output_ndim)

    def build(self, input_shape):
        self.input_spec = [InputSpec(shape=input_shape)]
        super(RNNCell, self).build(input_shape)

    @property
    def weights(self):
        """All weight tensors: trainable followed by non-trainable."""
        w = []
        if hasattr(self, 'trainable_weights'):
            w += self.trainable_weights
        if hasattr(self, 'non_trainable_weights'):
            w += self.non_trainable_weights
        return w

    def get_layer(self, **kwargs):
        """Convenience: wrap this single cell in a RecurrentContainer."""
        rc = RecurrentContainer(**kwargs)
        rc.add(self)
        return rc

    @weights.setter
    def weights(self, ws):
        # Accepts raw tensors/arrays or `weight` wrappers; sorts them into
        # trainable / non-trainable lists and registers losses/constraints.
        self.trainable_weights = []
        self.non_trainable_weights = []
        self.constraints = {}
        for w in ws:
            if not isinstance(w, weight):
                w = weight(w, name='{}_W'.format(self.name))
            if w.regularizer is not None:
                # Bug fix: was `self.add_loss(regularizer(w.value))`, which
                # raised NameError -- the regularizer lives on the wrapper.
                self.add_loss(w.regularizer(w.value))
            if w.constraint is not None:
                self.constraints[w.value] = w.constraint
            if w.trainable:
                self.trainable_weights += [w.value]
            else:
                self.non_trainable_weights += [w.value]

    def get_output_shape_for(self, input_shape):
        # Last axis becomes `output_dim` when set; otherwise shape-preserving.
        if hasattr(self, 'output_dim'):
            return input_shape[:-1] + (self.output_dim,)
        else:
            return input_shape

    def get_config(self):
        config = {}
        if hasattr(self, 'output_dim'):
            config['output_dim'] = self.output_dim
        base_config = super(RNNCell, self).get_config()
        return dict(list(base_config.items()) + list(config.items()))
class RecurrentContainer(Layer):
    """A Keras layer that applies a stack of RNNCells/plain layers per timestep.

    Layers added via :meth:`add` are applied, in order, to every timestep of
    the input.  Supports the usual Keras Recurrent options (return_sequences,
    stateful, unroll, go_backwards, ...) plus:

    - ``readout``: feed the previous timestep's output back into the stack
      ('add'/True, 'mul', 'pack', 'readout_only', or 'call');
    - ``decode``: produce ``output_length`` timesteps from a single input;
    - ``state_sync``: all cells share one set of states;
    - teacher forcing via :meth:`set_truth_tensor`.
    """

    def __init__(self, weights=None, return_sequences=False, return_states=False, go_backwards=False, stateful=False, readout=False, state_sync=False, decode=False, output_length=None, input_length=None, unroll=False, **kwargs):
        # Decoders always produce a sequence, regardless of return_sequences.
        self.return_sequences = return_sequences or decode
        self.return_states = return_states
        self.initial_weights = weights
        self.go_backwards = go_backwards
        self.stateful = stateful
        self.readout = readout
        self.state_sync = state_sync
        self.decode = decode
        if decode:
            assert output_length, 'Missing argument: output_length should be specified for decoders.'
        self.output_length = output_length
        self.input_length = input_length
        self.unroll = unroll
        if unroll and not decode:
            assert input_length, 'Missing argument: input_length should be specified for unrolling.'
        self.supports_masking = True
        # Inner model holding the per-timestep stack of layers.
        self.model = Sequential()
        self.supports_masking = True  # NOTE(review): duplicate assignment; harmless
        self._truth_tensor = None
        self.initial_readout = None
        super(RecurrentContainer, self).__init__(**kwargs)

    def add(self, layer):
        '''Add a layer

        # Arguments:
            layer: Layer instance. RNNCell or a normal layer such as Dense.
        '''
        self.model.add(layer)
        # NOTE(review): `self._truth_tensor or ...` relies on the tensor's
        # truthiness once a ground-truth tensor has been set -- presumably
        # `self._truth_tensor is not None` was intended; confirm.
        self.uses_learning_phase = self._truth_tensor or any([l.uses_learning_phase for l in self.model.layers])
        if len(self.model.layers) == 1:
            # First layer added: derive this container's (batch, time, ...)
            # input shape from the layer's own input spec/shape.
            if layer.input_spec is not None:
                shape = layer.input_spec[0].shape
            else:
                shape = layer.input_shape
            if not self.decode:
                shape = (shape[0], self.input_length) + shape[1:]
            self.batch_input_shape = shape
            self.input_spec = [InputSpec(shape=shape)]
        if _isRNN(layer) and self.state_sync:
            # All synced cells must expose the same number of states.
            # NOTE(review): `nb_states` is a class property, so hasattr is
            # always True and the assert branch is always taken -- confirm.
            if not hasattr(self, 'nb_states'):
                self.nb_states = len(layer.states)
            else:
                assert len(layer.states) == self.nb_states, 'Incompatible layer. In a state synchronized recurrent container, all the cells should have the same number of states.'
        if self.stateful:
            self.reset_states()

    def pop(self):
        '''Remove the last layer
        '''
        self.model.pop()
        if self.stateful:
            self.reset_states()

    @property
    def input_shape(self):
        return self.input_spec[0].shape

    @property
    def output_shape(self):
        """(batch[, time], ...) output shape; a list when states are returned."""
        shape = self.model.output_shape
        if self.decode:
            shape = (shape[0], self.output_length) + shape[1:]
        elif self.return_sequences:
            input_length = self.input_spec[0].shape[1]
            shape = (shape[0], input_length) + shape[1:]
        if self.return_states:
            shape = [shape] + [None] * self.nb_states
        return shape

    def get_output_shape_for(self, input_shape):
        if self.return_states:
            output_shape = self.output_shape
            state_shapes = output_shape[1:]
            output_shape = output_shape[0]
            output_shape = (input_shape[0],) + output_shape[1:]
            return [output_shape] + state_shapes
        else:
            return (input_shape[0],) + self.output_shape[1:]

    def step(self, x, states):
        """Process one timestep: route x through every layer in order,
        threading per-cell states (and readout bookkeeping) along."""
        states = list(states)
        state_index = 0
        if self.decode:
            # When decoding, the real input is carried in the state tuple and
            # re-inserted each step; the `x` supplied by rnn() is a dummy.
            x = states[0]
            _x = x
            states = states[1:]
        for i in range(len(self.model.layers)):
            layer = self.model.layers[i]
            if self.readout and ((i == 0 and self.readout != 'call') or (self.readout=='call' and hasattr(layer, 'receive_readout') and layer.receive_readout)):
                # Previous timestep's output is kept as the last state.
                readout = states[-1]
                if self._truth_tensor is not None:
                    # Teacher forcing: at train time substitute the ground
                    # truth for the previous timestep (states[-2] is a
                    # timestep counter; the K.switch guards timestep 0).
                    slices = [slice(None), states[-2][0] - K.switch(states[-2][0], 1, 0)] + [slice(None)] * (K.ndim(self._truth_tensor) - 2)
                    readout = K.in_train_phase(K.switch(states[-2][0], self._truth_tensor[slices], readout), readout)
                if self.readout in ['add', True]:
                    x += readout
                elif self.readout == 'mul':
                    x *= readout
                elif self.readout == 'pack':
                    x = K.pack([x, readout])
                elif self.readout == 'readout_only':
                    x = readout
                elif self.readout == 'call':
                    x = [x, readout]
            if _isRNN(layer):
                if self.state_sync:
                    # All synced cells read/write the same leading state slice.
                    x, new_states = layer._step(x, states[:len(layer.states)])
                    states[:len(layer.states)] = new_states
                else:
                    x, new_states = layer._step(x, states[state_index : state_index + len(layer.states)])
                    states[state_index : state_index + len(layer.states)] = new_states
                    state_index += len(layer.states)
            else:
                x = layer.call(x)
        if self.decode:
            states = [_x] + states
        if self.readout:
            if self._truth_tensor is not None:
                states[-2] += 1
            states[-1] = x
        return x, states

    def call(self, x, mask=None):
        # x may be a list [input, ground_truth, initial_readout, *states]
        # in the order recorded by __call__ in self.input_format.
        if type(x) in [list, tuple]:
            if 'ground_truth' in self.input_format:
                self.set_truth_tensor(x[self.input_format.index('ground_truth')])
            if 'initial_readout' in self.input_format:
                self.initial_readout = x[self.input_format.index('initial_readout')]
            if 'states' in self.input_format:
                states = x[self.input_format.index('states'):]
                for i in range(len(states)):
                    self.set_state(self.state_indices[i], states[i])
            x = x[0]
        if self.initial_readout is not None and self.readout == 'readout_only':
            self.initial_readout = x
        unroll = self.unroll
        '''
        if K.backend() == 'tensorflow':
            cell_types = set([type(layer) for layer in self.model.layers if _isRNN(layer)])
            if len(cell_types) > 1:
                unroll = True
        '''
        input_shape = self.input_spec[0].shape
        if self.stateful:
            initial_states = self.states
        else:
            initial_states = self.get_initial_states(x)
        if self.decode:
            # Input rides in the states; feed rnn() a dummy sequence of
            # output_length steps.
            initial_states = [x] + initial_states
            if self.uses_learning_phase:
                # Run the scan once per learning phase, select at runtime.
                with learning_phase(0):
                    last_output_0, outputs_0, states_0, updates = rnn(self.step, K.zeros((1, self.output_length, 1)), initial_states, unroll=unroll, input_length=self.output_length)
                with learning_phase(1):
                    last_output_1, outputs_1, states_1, updates = rnn(self.step, K.zeros((1, self.output_length, 1)), initial_states, unroll=unroll, input_length=self.output_length)
                outputs = K.in_train_phase(outputs_1, outputs_0)
                last_output = _get_last_timestep(outputs)
                states = [K.in_train_phase(states_1[i], states_0[i]) for i in range(len(states_0))]
            else:
                last_output, outputs, states, updates = rnn(self.step, K.zeros((1, self.output_length, 1)), initial_states, unroll=unroll, input_length=self.output_length)
        else:
            if self.uses_learning_phase:
                with learning_phase(0):
                    last_output_0, outputs_0, states_0, updates = rnn(self.step, x, initial_states, go_backwards=self.go_backwards, mask=mask, unroll=unroll, input_length=input_shape[1])
                with learning_phase(1):
                    last_output_1, outputs_1, states_1, updates = rnn(self.step, x, initial_states, go_backwards=self.go_backwards, mask=mask, unroll=unroll, input_length=input_shape[1])
                outputs = K.in_train_phase(outputs_1, outputs_0)
                last_output = _get_last_timestep(outputs)
                states = [K.in_train_phase(states_1[i], states_0[i]) for i in range(len(states_0))]
            else:
                last_output, outputs, states, updates = rnn(self.step, x, initial_states, go_backwards=self.go_backwards, mask=mask, unroll=unroll, input_length=input_shape[1])
        #self.add_update(updates, x)
        states = list(states)
        if self.stateful:
            # Schedule state-variable updates so state persists across batches.
            for i in range(len(states)):
                if type(self.states[i]) == type(K.zeros((1,))):
                    updates.append((self.states[i], states[i]))
            self.add_update(updates, x)
        if self.decode:
            # Drop the carried input state before returning.
            states.pop(0)
        if self.readout:
            # Drop the readout state (and the teacher-forcing counter).
            states.pop(-1)
            if self._truth_tensor is not None:
                states.pop(-1)
        if self.return_sequences:
            y = outputs
        else:
            y = last_output
        if self.return_states:
            y = [y] + states
        return y

    def get_initial_states(self, x):
        """Build initial state tensors by symbolically stepping the first
        timestep of x through the stack (so each state's shape is known)."""
        initial_states = []
        batch_size = self.input_spec[0].shape[0]
        input_length = self.input_spec[0].shape[1]
        if input_length is None:
            input_length = K.shape(x)[1]
        if batch_size is None:
            batch_size = K.shape(x)[0]
        if self.decode:
            input = x
        else:
            input = _get_first_timestep(x)
        for layer in self.model.layers:
            if _isRNN(layer):
                layer_initial_states = []
                for state in layer.states:
                    state = self._get_state_from_info(state, input, batch_size, input_length)
                    if type(state) != list:
                        state = [state]
                    layer_initial_states += state
                # With state_sync, only the first cell contributes states.
                if not self.state_sync or initial_states == []:
                    initial_states += layer_initial_states
                input = layer._step(input, layer_initial_states)[0]
            else:
                input = layer.call(input)
        if self.readout:
            if self._truth_tensor is not None:
                # Timestep counter used for teacher forcing in step().
                initial_states += [K.zeros((1,), dtype='int32')]
            if self.initial_readout is not None:
                initial_readout = self._get_state_from_info(self.initial_readout, input, batch_size, input_length)
                initial_states += [initial_readout]
            else:
                initial_states += [K.zeros_like(input)]
        return initial_states

    def reset_states(self):
        """(Re)create state variables for stateful operation.

        Requires statically known state shapes (batch_size / input_length
        must be ints wherever a symbolic dimension is referenced)."""
        batch_size = self.input_spec[0].shape[0]
        input_length = self.input_spec[0].shape[1]
        states = []
        for layer in self.model.layers:
            if _isRNN(layer):
                for state in layer.states:
                    #assert type(state) in [tuple, list] or 'numpy' in str(type(state)), 'Stateful RNNs require states with static shapes'
                    if 'numpy' in str(type(state)):
                        states += [K.variable(state)]
                    elif type(state) in [list, tuple]:
                        # Resolve symbolic dims (-1 / 'batch_size' / 'input_length').
                        state = list(state)
                        for i in range(len(state)):
                            if state[i] in [-1, 'batch_size']:
                                assert type(batch_size) == int, 'Stateful RNNs require states with static shapes'
                                state[i] = batch_size
                            elif state[i] == 'input_length':
                                assert type(input_length) == int, 'Stateful RNNs require states with static shapes'
                                state[i] = input_length
                        states += [K.variable(np.zeros(state))]
                    else:
                        states += [state]
                if self.state_sync:
                    # Synced cells share one state set; build it only once.
                    break
        if self.readout:
            shape = list(self.model.output_shape)
            shape.pop(1)
            if self._truth_tensor is not None:
                states += [K.zeros((1,), dtype='int32')]
            states += [K.zeros(shape)]
        self.states = states

    def _get_state_from_info(self, info, input, batch_size, input_length):
        """Turn a state spec (callable / shape tuple / ndarray / tensor)
        into a concrete initial-state tensor."""
        if hasattr(info, '__call__'):
            return info(input)
        elif type(info) in [list, tuple]:
            info = list(info)
            for i in range(len(info)):
                if info[i] in [-1, 'batch_size']:
                    info[i] = batch_size
                elif info[i] == 'input_length':
                    info[i] = input_length
            # Use the raw backend zeros so symbolic dims are accepted.
            if K._BACKEND == 'theano':
                from theano import tensor as k
            else:
                import tensorflow as k
            return k.zeros(info)
        elif 'numpy' in str(type(info)):
            return K.variable(info)
        else:
            return info

    def compute_mask(self, input, input_mask=None):
        # Sequence mask is propagated only when sequences are returned;
        # returned states are never masked.
        mask = input_mask[0] if type(input_mask) is list else input_mask
        mask = mask if self.return_sequences else None
        mask = [mask] + [None] * self.nb_states if self.return_states else mask
        return mask

    @property
    def trainable_weights(self):
        if not self.model.layers:
            return []
        return self.model.trainable_weights

    @trainable_weights.setter
    def trainable_weights(self, value):
        # Weights are owned by the inner model; assignment is ignored.
        pass

    @property
    def non_trainable_weights(self):
        if not self.model.layers:
            return []
        return self.model.non_trainable_weights

    @non_trainable_weights.setter
    def non_trainable_weights(self, value):
        pass

    @property
    def weights(self):
        return self.model.weights

    def set_truth_tensor(self, val):
        """Set the teacher-forcing ground-truth tensor (see step())."""
        if val is not None:
            self.uses_learning_phase = True
        self._truth_tensor = val

    def get_config(self):
        attribs = ['return_sequences', 'return_states', 'go_backwards', 'stateful', 'readout', 'state_sync', 'decode', 'input_length', 'unroll', 'output_length']
        config = {x : getattr(self, x) for x in attribs}
        config['model'] = self.model.get_config()
        base_config = super(RecurrentContainer, self).get_config()
        return dict(list(base_config.items()) + list(config.items()))

    @classmethod
    def from_config(cls, config):
        model_config = config['model']
        del config['model']
        rc = cls(**config)
        from . import cells
        rc.model = Sequential()
        for layer_config in model_config:
            if 'config' in layer_config and 'name' in layer_config['config']:
                # Drop saved names so Keras can regenerate unique ones.
                del layer_config['config']['name']
            layer = layer_from_config(layer_config, cells.__dict__)
            rc.add(layer)
        return rc

    def __call__(self, x, mask=None):
        """Accepts a single input tensor, a dict, or a list/tuple of
        [input, ground_truth, initial_readout, states]; records which of
        these were supplied in self.input_format for call()."""
        args = ['input', 'ground_truth', 'initial_readout', 'states']
        if type(x) is dict:
            x = list(map(x.get, args))
        elif type(x) not in [list, tuple]:
            x = [x, None, None, None]
        self.input_format = []
        input_tensors = []
        for i in range(3):
            if x[i] is not None:
                self.input_format += [args[i]]
                input_tensors += [x[i]]
        if x[3] is not None:
            self.input_format += [args[3]]
            states = []
            self.state_indices = []
            for i in range(len(x[3])):
                if x[3][i] is not None:
                    states += [x[3][i]]
                    self.state_indices += [i]
            input_tensors += states
        if not self.built:
            self.assert_input_compatibility(x)
            input_shapes = []
            for x_elem in input_tensors:
                if hasattr(x_elem, '_keras_shape'):
                    input_shapes.append(x_elem._keras_shape)
                elif hasattr(K, 'int_shape'):
                    input_shapes.append(K.int_shape(x_elem))
                elif x_elem is not None:
                    raise Exception('You tried to call layer "' + self.name +
                                    '". This layer has no information'
                                    ' about its expected input shape, '
                                    'and thus cannot be built. '
                                    'You can build it manually via: '
                                    '`layer.build(batch_input_shape)`')
            self.build(input_shapes[0])
            self.built = True
        self.assert_input_compatibility(x[0])
        input_added = False
        inbound_layers = []
        node_indices = []
        tensor_indices = []
        self.ignore_indices = []
        for i in range(len(input_tensors)):
            input_tensor = input_tensors[i]
            if hasattr(input_tensor, '_keras_history') and input_tensor._keras_history:
                previous_layer, node_index, tensor_index = input_tensor._keras_history
                inbound_layers.append(previous_layer)
                node_indices.append(node_index)
                tensor_indices.append(tensor_index)
            else:
                # At least one input is not a Keras-layer output: fall back
                # to a plain call() below.
                inbound_layers = None
                break
        if inbound_layers:
            # All inputs came from Keras layers: wire this layer into the graph.
            self.add_inbound_node(inbound_layers, node_indices, tensor_indices)
            input_added = True
        if input_added:
            outputs = self.inbound_nodes[-1].output_tensors
            if len(outputs) == 1:
                return outputs[0]
            else:
                return outputs
        else:
            return self.call(x, mask)

    def set_state(self, index, state):
        """Override the initial state at flat position *index* (or, when
        state_sync, the position within the shared state set)."""
        n = 0
        for layer in self.model.layers:
            if _isRNN(layer):
                if self.state_sync:
                    layer.states[index] = state
                    return
                n += len(layer.states)
                if index < n:
                    layer.states[index + len(layer.states) - n] = state
                    return

    @property
    def nb_states(self):
        """Number of shared states when state_sync is on.

        NOTE(review): returns 0 for non-synced containers -- presumably it
        should sum the per-cell state counts; confirm against callers
        (output_shape / compute_mask)."""
        if self.state_sync:
            for layer in self.model.layers:
                if _isRNN(layer):
                    return len(layer.states)
        return 0
| [
"659338505@qq.com"
] | 659338505@qq.com |
548d8103a04d1820fb605391299e1a277e6efd2a | fed1d6d3556f7e180de0f5be1e014ecd2c0f5fd9 | /picraft/entity.py | 096c29dafbdc693e8ece65c9f4fca2f44b8488f5 | [
"BSD-3-Clause"
] | permissive | DannyGoodall/picraft | 3eae90c0321a3e8e0db7229f4b9de5081d4ee88f | 27cb3ab2384367e29d0eb0d964b635fb37516e37 | refs/heads/master | 2022-03-05T16:51:04.067733 | 2021-12-23T17:23:00 | 2021-12-23T17:23:00 | 177,415,931 | 0 | 0 | null | 2019-03-24T13:05:51 | 2019-03-24T13:05:51 | null | UTF-8 | Python | false | false | 23,396 | py | import io
from .vector import vector_range, Vector
from pkg_resources import resource_stream
from .entityinterface import ThingInterfaceManager, ThingInterface, ENTITY_INTERFACES
#, ENTITY_NAME_OVERRIDE
from .method import MethodManager
from .interface import InterfaceMethodBase
from dataclasses import dataclass
from .interface import MethodDetailsStructure
from .utils import _noner
def _read_block_data(filename_or_object):
    """Yield an EntityDBStructure for each data line in the entity DB.

    *filename_or_object* may be a path string or an already-open binary
    stream.  Blank lines and ``#`` comments are skipped; each remaining
    line is ``name id interface`` separated by whitespace (the interface
    field may itself contain whitespace).

    Bug fix: a stream opened here from a path string was never closed;
    it is now closed when the generator finishes or is discarded.  A
    caller-supplied stream is left open (the caller owns it).
    """
    if isinstance(filename_or_object, str):
        stream = io.open(filename_or_object, 'rb')
        close_stream = True   # we opened it, so we must close it
    else:
        stream = filename_or_object
        close_stream = False  # caller owns the stream
    try:
        for line in stream:
            line = line.decode('utf-8').strip()
            if line and not line.startswith('#'):
                name, id, interface = line.split(None, 2)
                yield EntityDBStructure(
                    name,
                    id,
                    interface
                )
    finally:
        if close_stream:
            stream.close()
@dataclass
class EntityDBStructure(object):
    """One row of the entity database: entity name, numeric id, interface name."""
    name: str = ""
    id: int = 0
    interface: str = ""

    def __post_init__(self):
        # Normalise raw values parsed straight from the text data file:
        # the id arrives as a string, and an empty/"None" interface means
        # the entity exposes no interface.
        if isinstance(self.id, str):
            self.id = int(self.id)
        if self.interface in ("", "None"):
            self.interface = None
class EntityBase(InterfaceMethodBase):
    """Shared behaviour for Minecraft entities.

    Provides name/type-id lookup from the packaged entity database,
    interface/method registration via ENTITY_INTERFACES, and queries
    against the server connection.
    """

    # Entity database parsed from the packaged entity.data file, keyed by
    # upper-cased entity name.
    _ENTITIES_DB = {
        entity_db.name.upper(): entity_db for entity_db in _read_block_data(resource_stream(__name__, 'entity.data'))
    }

    # (A large commented-out ENTITY_NAME_LOOKUP id->name table was removed
    # here; the same information now comes from entity.data / _ENTITIES_DB.)

    # Entity name -> numeric type id, derived from the database above.
    ENTITY_TYPE_ID_LOOKUP = {
        entity_details.name: entity_details.id for entity_details in _ENTITIES_DB.values()
    }

    def __init__(self, name, refresh_after_call=True, **kwargs):
        super().__init__("ENTITY", name_of_thing=name)
        # self._init_complete = False
        # self._valid_methods = {}
        self._create_with_gravity = kwargs.get("gravity", True)
        self._create_with_ai = kwargs.get("ai", True)
        self._refresh_after_call = refresh_after_call
        self._name = name.upper()
        self._type_id = self.get_type_id(name)
        self._entity_id = None   # server-assigned id; set when spawned
        self._connection = None  # server connection; set when spawned
        # self._method_manager = None
        self._is_placeable_by_blocks = True
        self._init_complete = True
        self._interfaces_manager = None
        self.initialise_from_name()
        # self._last_method = None

    @property
    def interfaces_manager(self) -> ThingInterfaceManager:
        return self._interfaces_manager

    @property
    def create_with_gravity(self):
        # Whether gravity is left enabled when the entity is spawned.
        return self._create_with_gravity

    @create_with_gravity.setter
    def create_with_gravity(self, value):
        self._create_with_gravity = value

    @property
    def create_with_ai(self):
        # Whether AI is left enabled when the entity is spawned.
        return self._create_with_ai

    @create_with_ai.setter
    def create_with_ai(self, value):
        self._create_with_ai = value

    def __ga__(self, item):
        # Shorthand for object.__getattribute__ (bypasses any overrides).
        return object.__getattribute__(self, item)

    @property
    def refresh_after_call(self):
        return self._refresh_after_call

    @refresh_after_call.setter
    def refresh_after_call(self, value):
        self._refresh_after_call = value

    # (Commented-out stubs `refresh_entity` / `get_method_details` were
    # removed; presumably superseded by InterfaceMethodBase -- confirm.)

    def initialise_from_name(self, override_name=None):
        """Look up this entity's interfaces in ENTITY_INTERFACES and
        register every method those interfaces expose."""
        name = self._name.upper() if override_name is None else override_name.upper()
        # ODDNESS - IT LOOKS LIKE CERTAIN ENTITIES HAVE THE _ (UNDERSCORE) REPLACED BY SPACE
        # if name in ENTITY_NAME_OVERRIDE:
        #     name = ENTITY_NAME_OVERRIDE[name]
        self._interfaces_manager: dict = ENTITY_INTERFACES.get(name, None)
        if self._interfaces_manager is None:
            print(f"INTERFACE NOT DEFINED FOR ENTITY: {self._name}")
            return
        for method_interface in self._interfaces_manager.method_interfaces():
            # register_valid_method_raw is presumably provided by
            # InterfaceMethodBase (a local definition was commented out
            # here) -- confirm.
            self.register_valid_method_raw(method_interface)

    @property
    def is_placeable_by_blocks(self):
        return self._is_placeable_by_blocks

    def get_type_id(self, name):
        """Return the numeric type id for *name*, or -1 if unknown."""
        return self.ENTITY_TYPE_ID_LOOKUP.get(name.upper(), -1)

    def __repr__(self):
        try:
            return '<Entity "%s" id=%d type_id=%d>' % (self._name, self.entity_id, self._type_id)
        except KeyError:
            # NOTE(review): an unspawned entity has entity_id None, which
            # raises TypeError (not KeyError) under %d -- confirm intent.
            return '<Entity "%s">' % (self._name)

    @property
    def type_id(self):
        return self._type_id

    @property
    def name(self):
        return self._name

    @property
    def entity_id(self):
        # Server-assigned id; None until the entity has been spawned.
        return self._entity_id

    @entity_id.setter
    def entity_id(self, value):
        self._entity_id = value

    @classmethod
    def entity_details_to_hash_list(cls, connection, r):
        """Parse a server response of ``id,name,x,y,z|...`` records into a
        list of dicts containing ready-made Entity instances."""
        array_of_entities = []
        for x in r.split("|"):
            if not x:
                continue
            elements = x.split(",")
            entity_id = int(elements[0])
            name = elements[1].upper()
            location = Vector(
                int(float(elements[2])),
                int(float(elements[3])),
                int(float(elements[4]))
            )
            # Set to -1 if we don't recognise the entity
            entity = Entity(name)
            entity.make_instance(connection, entity_id)
            array_of_entities.append(
                {
                    "id": entity_id,
                    "name": name,
                    "entity": entity,
                    "location": location
                }
            )
        return array_of_entities

    def get_nearby_entities(self, distance=1, entity_type="all", exclude_self=True, sort_by_proximity=True):
        """Yield entities within *distance* blocks of this one, optionally
        sorted nearest-first and excluding this entity itself."""
        entities = self.connection.transact(
            'entity.getEntities(%d,%d,%s)' % (self._entity_id, distance, entity_type)
        )
        entity_structures = self.entity_details_to_hash_list(self._connection, entities)
        if sort_by_proximity:
            my_position: Vector = self.getLocation()
            if my_position is None:
                return []
            entity_structures = sorted(
                [x for x in entity_structures if x['location'] is not None and x['entity'] is not None],
                key=lambda d: my_position.distance_to(d['location']))
        for entity_structure in entity_structures:
            entity_object = entity_structure["entity"]
            if exclude_self and entity_object.entity_id == self._entity_id:
                continue
            yield entity_object

    def describe_method(self, method_details):
        """Render one registered method as ``name(params)  - Returns <type>``."""
        signature_bits = method_details.signature.split(":")
        return_value = signature_bits[0] if signature_bits[0] != "void" else ""
        parameter_value = ", ".join([x for x in signature_bits[1:] if x not in ["void"]])
        return_details = f"- Returns a value of <{return_value}>" if method_details.get else ""
        method_and_params = f"{method_details.name}({parameter_value})"
        return f"{method_and_params:60} {return_details}"

    def document(self, print_it=False):
        """Print a human-readable description of this entity and its
        registered interfaces/methods.  (*print_it* is currently unused.)"""
        pass
        print(f"Entity of type: {self._name}")
        print(f"Entity type id: {self._type_id}")
        block_type = "Skeleton entity - A skeleton entity has not yet been placed in the minecraft world" \
            if not self.is_instance() \
            else f"Instance entity at {self._block_vector} - An instance entity has been placed in the Minecraft world and can have methods called on it"
        print(f" {block_type}")
        # NOTE(review): _valid_methods is never assigned in the visible code
        # (its initialisation is commented out in __init__) -- presumably set
        # by InterfaceMethodBase; confirm.
        if len(self._valid_methods) == 0:
            print(f" No registered methods")
        else:
            print(f" The following methods are registered:")
        for interface in self.interfaces_manager.interfaces():
            s = f"Interface: {interface.name}"
            print(f"\n{s}")
            print("-" * len(s))
            for method in interface.method_interfaces():
                print(f" {self.describe_method(method)}")
        print("\n")
class Ageable(object):
    """Mixin exposing the server's Ageable entity interface.

    Relies on the host class providing ``invoke_method`` /
    ``invoke_method_return``.
    """

    def get_age(self):
        """Return the entity's age."""
        return self.invoke_method_return("getAge")

    def is_adult(self):
        """Return whether the entity is fully grown."""
        return self.invoke_method_return("isAdult")

    def set_adult(self):
        """Make the entity an adult."""
        self.invoke_method("setAdult")

    def set_age(self, value):
        """Set the entity's age (sent to the server as a string)."""
        self.invoke_method("setAge", str(value))

    def set_baby(self):
        """Make the entity a baby."""
        self.invoke_method("setBaby")
class Damageable(object):
    """Mixin exposing the server's Damageable entity interface.

    Relies on the host class providing ``invoke_method`` /
    ``invoke_method_return``.  Method names carry an embedded
    ``name|return:params`` signature string.
    """

    def damage(self, value):
        """Apply *value* points of damage to the entity."""
        self.invoke_method("damage|void:double", value)

    def get_health(self):
        """Return the entity's current health."""
        return self.invoke_method_return("getHealth|Double:void")

    def set_health(self, value):
        """Set the entity's current health."""
        self.invoke_method("setHealth|void:double", value)

    def get_absorption_amount(self):
        """Return the entity's absorption (bonus health) amount."""
        return self.invoke_method_return("getAbsorptionAmount|Double:void")

    def set_absorption_amount(self, value):
        """Set the entity's absorption amount."""
        self.invoke_method("setAbsorptionAmount|void:double", value)
class Entity(EntityBase):
    """A concrete, spawnable Minecraft entity."""

    @classmethod
    def from_id(cls, connection, entity_id):
        """Fetch the entity with *entity_id* from the server; None if absent."""
        r = connection.transact(
            'world.getEntityById(%d)' % (entity_id)
        )
        entity_details = Entity.entity_details_to_hash_list(connection, r)
        if len(entity_details) == 0:
            return None
        else:
            return entity_details[0]["entity"]

    @classmethod
    def from_meta(cls, connection, entity_id, name):
        """Build an instance from an already-known id and name, without
        verifying against the server (saves a getEntityById round trip)."""
        # OK. This might be an error but I'm going to try to create an instance from the raw information without
        # checking that the entity exists. It will save having to do a very expensive getEntityById
        e = cls(name)
        e.make_instance(connection, entity_id)
        return e

    def spawn(self, world, vector):
        """Spawn this entity in *world* at *vector*; returns self."""
        self.spawn(world._connection, vector) if False else self.set_block(world._connection, vector)
        return self

    def set_block(self, connection, vector, block_data_cache=None, make_instance=True):
        """Spawn the entity at *vector* through the server *connection*.

        *block_data_cache* is unused; together with *make_instance* it keeps
        the signature compatible with the block-placement API so entities can
        be "set" like blocks.
        """
        # self.check_is_instance()
        # make_instance used as dummy to allow world set block routine to set entities as blocks (I hope)
        cmd = 'world.spawnEntity(%d,%d,%d,%s)' % (
            vector.x,
            vector.y,
            vector.z,
            # self._type_id
            self._name
        )
        print(f"set_block: About to create entity with this command: {cmd}")
        r = connection.transact(
            cmd
        )
        self._entity_id = int(r)
        self._connection = connection
        if make_instance:
            self.make_instance(self._connection, self._entity_id)
        # Honour the gravity/AI flags captured at construction time.
        if not self.create_with_gravity:
            self.setGravity(False)
        if not self.create_with_ai:
            self.setAI(False)
        return self

    def set_blocks(self, connection, vector_from, vector_to):
        """Spawn one entity at every position in the inclusive vector range."""
        self.check_is_instance()
        for v in vector_range(vector_from, vector_to):
            self.set_block(connection, v)
class Entities(object):
    """Registry of all entities currently present in the world, keyed by
    server entity id, populated from the server on construction."""

    def __init__(self, connection):
        # @
        self._connection = connection
        self._entities = {}  # Keyed on entity_id
        self.refresh()

    def refresh(self):
        """Re-query the server for the complete entity list."""
        self._get_entities()

    def __repr__(self):
        return '<Entities>'

    def parse_entities(self, r):
        """Parse a ``id,name,x,y,z|...`` server response into a list of
        ``{id, entity, location}`` dicts."""
        array_of_entities = []
        for x in r.split("|"):
            if not x:
                continue
            elements = x.split(",")
            id = int(elements[0])
            name = elements[1].upper()
            location = Vector(
                int(float(elements[2])),
                int(float(elements[3])),
                int(float(elements[4]))
            )
            # Set to -1 if we don't recognise the entity
            # NOTE(review): entity_type_id is computed but never used, and
            # unlike EntityBase.entity_details_to_hash_list this path sets
            # `.connection` / `.entity_id` directly instead of calling
            # make_instance -- confirm the two code paths are equivalent.
            entity_type_id = Entity.ENTITY_TYPE_ID_LOOKUP.get(name, -1)
            entity = Entity(
                name
            )
            entity.connection = self._connection
            entity.entity_id = id
            array_of_entities.append(
                {
                    "id": id,
                    "entity": entity,
                    "location": location
                }
            )
        return array_of_entities

    def _get_entities(self):
        # Fetch every entity ("all") and index the results by id.
        r = self._connection.transact(
            'world.getEntities(%s)' % ("all")
        )
        for entity_detail in self.parse_entities(r):
            self._entities[entity_detail["id"]] = entity_detail["entity"]

    def get_entities(self):
        """Return the cached entities as a list."""
        return [x for x in self._entities.values()]

    def spawn(self, entity, vector):
        """Spawn *entity* at *vector*, register it, and return it."""
        cmd = 'world.spawnEntity(%d,%d,%d,%s)' % (
            vector.x,
            vector.y,
            vector.z,
            # self._type_id
            entity.name
        )
        print(f"About to create entity with this command: {cmd}")
        r = self._connection.transact(
            cmd
        )
        entity_id = int(r)
        entity.make_instance(self._connection, entity_id)
        self._entities[entity_id] = entity
        return entity
class WolfTest(EntityBase, Ageable, Damageable):
    """Example entity: a wolf, mixing in the Ageable and Damageable APIs."""
    name = "WOLF"

    def __init__(self):
        super().__init__(self.name)
#
# DROPPED_ITEM = Entity("DROPPED_ITEM")
# EXPERIENCE_ORB = Entity("EXPERIENCE_ORB")
# AREA_EFFECT_CLOUD = Entity("AREA_EFFECT_CLOUD")
# ELDER_GUARDIAN = Entity("ELDER_GUARDIAN")
# WITHER_SKELETON = Entity("WITHER_SKELETON")
# STRAY = Entity("STRAY")
# EGG = Entity("EGG")
# LEASH_HITCH = Entity("LEASH_HITCH")
# PAINTING = Entity("PAINTING")
# ARROW = Entity("ARROW")
# SNOWBALL = Entity("SNOWBALL")
# FIREBALL = Entity("FIREBALL")
# SMALL_FIREBALL = Entity("SMALL_FIREBALL")
# ENDER_PEARL = Entity("ENDER_PEARL")
# ENDER_SIGNAL = Entity("ENDER_SIGNAL")
# THROWN_EXP_BOTTLE = Entity("THROWN_EXP_BOTTLE")
# ITEM_FRAME = Entity("ITEM_FRAME")
# WITHER_SKULL = Entity("WITHER_SKULL")
# PRIMED_TNT = Entity("PRIMED_TNT")
# HUSK = Entity("HUSK")
# SPECTRAL_ARROW = Entity("SPECTRAL_ARROW")
# SHULKER_BULLET = Entity("SHULKER_BULLET")
# DRAGON_FIREBALL = Entity("DRAGON_FIREBALL")
# ZOMBIE_VILLAGER = Entity("ZOMBIE_VILLAGER")
# SKELETON_HORSE = Entity("SKELETON_HORSE")
# ZOMBIE_HORSE = Entity("ZOMBIE_HORSE")
# ARMOR_STAND = Entity("ARMOR_STAND")
# DONKEY = Entity("DONKEY")
# MULE = Entity("MULE")
# EVOKER_FANGS = Entity("EVOKER_FANGS")
# EVOKER = Entity("EVOKER")
# VEX = Entity("VEX")
# VINDICATOR = Entity("VINDICATOR")
# ILLUSIONER = Entity("ILLUSIONER")
# MINECART_COMMAND = Entity("MINECART_COMMAND")
# BOAT = Entity("BOAT")
# MINECART = Entity("MINECART")
# MINECART_CHEST = Entity("MINECART_CHEST")
# MINECART_FURNACE = Entity("MINECART_FURNACE")
# MINECART_TNT = Entity("MINECART_TNT")
# MINECART_HOPPER = Entity("MINECART_HOPPER")
# MINECART_MOB_SPAWNER = Entity("MINECART_MOB_SPAWNER")
# CREEPER = Entity("CREEPER")
# SKELETON = Entity("SKELETON")
# SPIDER = Entity("SPIDER")
# GIANT = Entity("GIANT")
# ZOMBIE = Entity("ZOMBIE")
# SLIME = Entity("SLIME")
# GHAST = Entity("GHAST")
# PIG_ZOMBIE = Entity("PIG_ZOMBIE")
# ENDERMAN = Entity("ENDERMAN")
# CAVE_SPIDER = Entity("CAVE_SPIDER")
# SILVERFISH = Entity("SILVERFISH")
# BLAZE = Entity("BLAZE")
# MAGMA_CUBE = Entity("MAGMA_CUBE")
# ENDER_DRAGON = Entity("ENDER_DRAGON")
# WITHER = Entity("WITHER")
# BAT = Entity("BAT")
# WITCH = Entity("WITCH")
# ENDERMITE = Entity("ENDERMITE")
# GUARDIAN = Entity("GUARDIAN")
# SHULKER = Entity("SHULKER")
# PIG = Entity("PIG")
# SHEEP = Entity("SHEEP")
# COW = Entity("COW")
# CHICKEN = Entity("CHICKEN")
# SQUID = Entity("SQUID")
# WOLF = Entity("WOLF")
# MUSHROOM_COW = Entity("MUSHROOM_COW")
# SNOWMAN = Entity("SNOWMAN")
# OCELOT = Entity("OCELOT")
# IRON_GOLEM = Entity("IRON_GOLEM")
# HORSE = Entity("HORSE")
# RABBIT = Entity("RABBIT")
# POLAR_BEAR = Entity("POLAR_BEAR")
# LLAMA = Entity("LLAMA")
# LLAMA_SPIT = Entity("LLAMA_SPIT")
# PARROT = Entity("PARROT")
# VILLAGER = Entity("VILLAGER")
# ENDER_CRYSTAL = Entity("ENDER_CRYSTAL")
# DROPPED_ITEM("item", Item.class, 1, false),
# EXPERIENCE_ORB("experience_orb", ExperienceOrb.class, 2),
# AREA_EFFECT_CLOUD("area_effect_cloud", AreaEffectCloud.class, 3),
# ELDER_GUARDIAN("elder_guardian", ElderGuardian.class, 4),
# WITHER_SKELETON("wither_skeleton", WitherSkeleton.class, 5),
# STRAY("stray", Stray.class, 6),
# EGG("egg", Egg.class, 7),
# LEASH_HITCH("leash_knot", LeashHitch.class, 8),
# PAINTING("painting", Painting.class, 9),
# ARROW("arrow", Arrow.class, 10),
# SNOWBALL("snowball", Snowball.class, 11),
# FIREBALL("fireball", LargeFireball.class, 12),
# SMALL_FIREBALL("small_fireball", SmallFireball.class, 13),
# ENDER_PEARL("ender_pearl", EnderPearl.class, 14),
# ENDER_SIGNAL("eye_of_ender", EnderSignal.class, 15),
# SPLASH_POTION("potion", ThrownPotion.class, 16, false),
# THROWN_EXP_BOTTLE("experience_bottle", ThrownExpBottle.class, 17),
# ITEM_FRAME("item_frame", ItemFrame.class, 18),
# WITHER_SKULL("wither_skull", WitherSkull.class, 19),
# PRIMED_TNT("tnt", TNTPrimed.class, 20),
# FALLING_BLOCK("falling_block", FallingBlock.class, 21, false),
# FIREWORK("firework_rocket", Firework.class, 22, false),
# HUSK("husk", Husk.class, 23),
# SPECTRAL_ARROW("spectral_arrow", SpectralArrow.class, 24),
# SHULKER_BULLET("shulker_bullet", ShulkerBullet.class, 25),
# DRAGON_FIREBALL("dragon_fireball", DragonFireball.class, 26),
# ZOMBIE_VILLAGER("zombie_villager", ZombieVillager.class, 27),
# SKELETON_HORSE("skeleton_horse", SkeletonHorse.class, 28),
# ZOMBIE_HORSE("zombie_horse", ZombieHorse.class, 29),
# ARMOR_STAND("armor_stand", ArmorStand.class, 30),
# DONKEY("donkey", Donkey.class, 31),
# MULE("mule", Mule.class, 32),
# EVOKER_FANGS("evoker_fangs", EvokerFangs.class, 33),
# EVOKER("evoker", Evoker.class, 34),
# VEX("vex", Vex.class, 35),
# VINDICATOR("vindicator", Vindicator.class, 36),
# ILLUSIONER("illusioner", Illusioner.class, 37),
# MINECART_COMMAND("command_block_minecart", CommandMinecart.class, 40),
# BOAT("boat", Boat.class, 41),
# MINECART("minecart", RideableMinecart.class, 42),
# MINECART_CHEST("chest_minecart", StorageMinecart.class, 43),
# MINECART_FURNACE("furnace_minecart", PoweredMinecart.class, 44),
# MINECART_TNT("tnt_minecart", ExplosiveMinecart.class, 45),
# MINECART_HOPPER("hopper_minecart", HopperMinecart.class, 46),
# MINECART_MOB_SPAWNER("spawner_minecart", SpawnerMinecart.class, 47),
# CREEPER("creeper", Creeper.class, 50),
# SKELETON("skeleton", Skeleton.class, 51),
# SPIDER("spider", Spider.class, 52),
# GIANT("giant", Giant.class, 53),
# ZOMBIE("zombie", Zombie.class, 54),
# SLIME("slime", Slime.class, 55),
# GHAST("ghast", Ghast.class, 56),
# ZOMBIFIED_PIGLIN("zombified_piglin", PigZombie.class, 57),
# ENDERMAN("enderman", Enderman.class, 58),
# CAVE_SPIDER("cave_spider", CaveSpider.class, 59),
# SILVERFISH("silverfish", Silverfish.class, 60),
# BLAZE("blaze", Blaze.class, 61),
# MAGMA_CUBE("magma_cube", MagmaCube.class, 62),
# ENDER_DRAGON("ender_dragon", EnderDragon.class, 63),
# WITHER("wither", Wither.class, 64),
# BAT("bat", Bat.class, 65),
# WITCH("witch", Witch.class, 66),
# ENDERMITE("endermite", Endermite.class, 67),
# GUARDIAN("guardian", Guardian.class, 68),
# SHULKER("shulker", Shulker.class, 69),
# PIG("pig", Pig.class, 90),
# SHEEP("sheep", Sheep.class, 91),
# COW("cow", Cow.class, 92),
# CHICKEN("chicken", Chicken.class, 93),
# SQUID("squid", Squid.class, 94),
# WOLF("wolf", Wolf.class, 95),
# MUSHROOM_COW("mooshroom", MushroomCow.class, 96),
# SNOWMAN("snow_golem", Snowman.class, 97),
# OCELOT("ocelot", Ocelot.class, 98),
# IRON_GOLEM("iron_golem", IronGolem.class, 99),
# HORSE("horse", Horse.class, 100),
# RABBIT("rabbit", Rabbit.class, 101),
# POLAR_BEAR("polar_bear", PolarBear.class, 102),
# LLAMA("llama", Llama.class, 103),
# LLAMA_SPIT("llama_spit", LlamaSpit.class, 104),
# PARROT("parrot", Parrot.class, 105),
# VILLAGER("villager", Villager.class, 120),
# ENDER_CRYSTAL("end_crystal", EnderCrystal.class, 200),
# TURTLE("turtle", Turtle.class, -1),
# PHANTOM("phantom", Phantom.class, -1),
# TRIDENT("trident", Trident.class, -1),
# COD("cod", Cod.class, -1),
# SALMON("salmon", Salmon.class, -1),
# PUFFERFISH("pufferfish", PufferFish.class, -1),
# TROPICAL_FISH("tropical_fish", TropicalFish.class, -1),
# DROWNED("drowned", Drowned.class, -1),
# DOLPHIN("dolphin", Dolphin.class, -1),
# CAT("cat", Cat.class, -1),
# PANDA("panda", Panda.class, -1),
# PILLAGER("pillager", Pillager.class, -1),
# RAVAGER("ravager", Ravager.class, -1),
# TRADER_LLAMA("trader_llama", TraderLlama.class, -1),
# WANDERING_TRADER("wandering_trader", WanderingTrader.class, -1),
# FOX("fox", Fox.class, -1),
# BEE("bee", Bee.class, -1),
# HOGLIN("hoglin", Hoglin.class, -1),
# PIGLIN("piglin", Piglin.class, -1),
# STRIDER("strider", Strider.class, -1),
# ZOGLIN("zoglin", Zoglin.class, -1),
# PIGLIN_BRUTE("piglin_brute", PiglinBrute.class, -1),
# FISHING_HOOK("fishing_bobber", FishHook.class, -1, false),
# LIGHTNING("lightning_bolt", LightningStrike.class, -1, false),
# PLAYER("player", Player.class, -1, false),
# UNKNOWN((String)null, (Class)null, -1, false);
| [
"dannynonegoodall@gmail.com"
] | dannynonegoodall@gmail.com |
e12d6659f0e48bef47c8cf4088059561f98f6169 | e833e573c20e9f66fe713beece367b4094594827 | /seasonal_plot.py | 8bad5a88a45a0f5d762fed3c4102ce65b2ff92d2 | [] | no_license | Smrithi23/Petrol-Price-Forecasting-Using-ARIMA-Model | 28a8bb0d5006560c0065758fd1da42548c265690 | 5c701436659b751fb13cc5bfabf2bbfb8b4b1c1e | refs/heads/main | 2023-01-23T09:42:41.470536 | 2020-11-28T19:09:28 | 2020-11-28T19:09:28 | 316,778,068 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 982 | py | from dateutil.parser import parse
import matplotlib as mpl
import matplotlib.pyplot as plt
import seaborn as sns
import numpy as np
import pandas as pd
# Load the fuel price series, parsing 'date', then move the index back to a
# regular column so year/month can be derived from it below.
df = pd.read_csv('fuel_price_data.csv', parse_dates=['date'], index_col='date')
df.reset_index(inplace=True)
# Derive a numeric year and an abbreviated month name ('Jan', ...) for every
# observation; the plot draws one line per year over the month axis.
df['year'] = [d.year for d in df.date]
df['month'] = [d.strftime('%b') for d in df.date]
years = df['year'].unique()
# Pick one reproducible random XKCD colour per year.
np.random.seed(100)
colors = np.random.choice(list(mpl.colors.XKCD_COLORS.keys()), len(years), replace=False)
# Draw one line per year (note: the first year, i == 0, is skipped) and label
# each line with its year beside the last data point.
# NOTE(review): assumes the CSV has a 'value' column holding the price -- confirm.
plt.figure(figsize=(16,12))
for i, y in enumerate(years):
    if i > 0:
        plt.plot('month', 'value', data=df.loc[df.year==y, :], color=colors[i], label=y)
        plt.text(df.loc[df.year==y, :].shape[0]-.9, df.loc[df.year==y, 'value'][-1:].values[0], y, color=colors[i])
# Axis labels, tick styling and title.
plt.gca().set(ylabel='Petrol Price in $/bbl', xlabel='Month')
plt.yticks(alpha=.7)
plt.title("Seasonal Plot of Petrol Price")
plt.show()
"smrithi.prakash23@gmail.com"
] | smrithi.prakash23@gmail.com |
183fae952436a7e579eeffb6392a6c7f9aadd798 | 115520f88f888461e06b4e20da79ecb68b978593 | /point/models.py | 9b7a083fcc27e0c452048c2bbb8e6f294b46fcba | [] | no_license | koblas/notewave | c516a2fe05eb4c8f44847d4a396a78e54f477059 | f0d43a362e8f0976ec63a4df46fc81a8e536aad9 | refs/heads/master | 2021-01-18T21:26:44.034678 | 2012-05-30T18:54:17 | 2012-05-30T18:54:17 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 12,442 | py | from django.db import models
from openauth.models import User
from snippets.models import JSONField
import re
# from http://daringfireball.net/2010/07/improved_regex_for_matching_urls
LINK_RE = re.compile(r'(?i)\b((?:[a-z][\w-]+:(?:/{1,3}|[a-z0-9%])|www\d{0,3}[.]|[a-z0-9.\-]+[.][a-z]{2,4}/)(?:[^\s()<>]+|\(([^\s()<>]+|(\([^\s()<>]+\)))*\))+(?:\(([^\s()<>]+|(\([^\s()<>]+\)))*\)|[^\s`!()\[\]{};:\'".,<>?]))', re.U)
class Group(models.Model) :
    # A discussion group: users join it as Members and publish Posts into it.
    title = models.CharField(max_length=200, help_text='Group Title')
    created_at = models.DateTimeField(auto_now_add=True)
    # Bumped by post_update() whenever something is posted.
    activity_at = models.DateTimeField(auto_now_add=True)
    created_by = models.ForeignKey(User, db_index=True)
    # Opaque token embedded in the public share/accept URL.
    invite_id = models.CharField(max_length=200, db_index=True)
    # Local part of the group's inbound email address; generated on first save.
    eaddr = models.CharField(max_length=100, db_index=True, null=True, blank=True)
    # Local parts that must never be allocated to a group address.
    RESERVED_EMAIL = ('koblas','support','robot','help','service')
    # class variable
    # Maps group id -> list of one-shot callbacks fired (then cleared) on the
    # next post.  Shared across all Group instances by design.
    _callbacks = {}
    def root_post_set(self) :
        # Top-level posts only; comments carry a parent and are excluded.
        return Post.objects.filter(group=self, parent=None)
    def share_url(self) :
        from django.core.urlresolvers import reverse
        return reverse('point:accepturl', args=[self.invite_id])
    def email(self) :
        # Full inbound address, or None when no local part has been allocated.
        if not self.eaddr :
            return None
        return '%s@notewave.com' % (self.eaddr)
    def save(self) :
        # Assign the share token on first save (or whenever it is missing).
        if not self.invite_id :
            import uuid
            self.invite_id = uuid.uuid1()
        # created_at is auto_now_add, so it is still unset on the very first
        # save: allocate the email local part once, from the slugified title,
        # appending -1, -2, ... until an unused, non-reserved name is found.
        if not self.created_at :
            from django.template.defaultfilters import slugify
            base = potential = slugify(self.title)
            suffix = 0
            while not self.eaddr :
                if potential not in self.RESERVED_EMAIL and not self.__class__.objects.filter(eaddr = potential).exists() :
                    self.eaddr = potential
                else :
                    suffix += 1
                    potential = "%s-%d" % (base, suffix)
        super(Group, self).save()
    def post_update(self, post) :
        # Record activity, persist it, then fire and clear any registered
        # one-shot callbacks for this group.
        from datetime import datetime
        self.activity_at = datetime.now()
        self.save()
        if self.id in self._callbacks :
            for cb in self._callbacks[self.id] :
                cb(self, post)
            self._callbacks[self.id] = []
    def cb_register(self, func) :
        # Register a one-shot callback invoked on this group's next post.
        if self.id not in self._callbacks :
            self._callbacks[self.id] = []
        self._callbacks[self.id].append(func)
    def add_member(self, user) :
        # TODO - notify that a member joined
        # Creates the "joined" marker post first, then the Member row.
        post = Post(group=self, user=user, body={'join':1})
        post.save()
        member = Member(group=self, user=user)
        member.save()
        return member
class Invite(models.Model) :
    # An email invitation for `user` to bring `email` into `group`.
    # accepted_at stays NULL until the invite is used.
    class Meta :
        unique_together = (('user','group','email'),)
    user = models.ForeignKey(User, db_index=True)
    group = models.ForeignKey(Group, db_index=True)
    email = models.CharField(max_length=200, db_index=True)
    # Random token; presumably identifies the invite in links -- confirm
    # against the view code.
    guid = models.CharField(max_length=200, db_index=True)
    created_at = models.DateTimeField(auto_now_add=True)
    accepted_at = models.DateTimeField(blank=True, null=True)
    def save(self) :
        import uuid
        # Assign a fresh token on first save (or whenever it is missing).
        if not self.guid :
            self.guid = uuid.uuid1()
        super(Invite, self).save()
class Member(models.Model) :
    # A user's membership in a group, with per-group presentation overrides
    # (username/image) plus notification and read state.
    class Meta :
        unique_together = (('user','group'),)
    user = models.ForeignKey(User, db_index=True)
    group = models.ForeignKey(Group, db_index=True)
    created_at = models.DateTimeField(auto_now_add=True)
    activity_at = models.DateTimeField(auto_now_add=True)
    # High-water mark used by new_count() to decide what counts as "new".
    lastread_at = models.DateTimeField(auto_now_add=True)
    active = models.BooleanField(default=True)
    # When False this member is skipped by Post.watchers() for new posts.
    notify_on_post = models.BooleanField(default=True)
    # Optional per-group display name; get_username() falls back to profile.
    username = models.CharField(max_length=200, help_text='')
    image = models.ImageField(upload_to="avatars/%Y/%m-%d", blank=True, null=True)
    def get_username(self) :
        if self.username :
            return self.username
        return self.user.profile.username
    def icon24(self) :
        return self.user.profile.icon(24)
    def icon50(self) :
        return self.user.profile.icon(50)
    def new_count(self) :
        # Unread activity since lastread_at: cnt = top-level posts,
        # ccnt = comments.  Rendered as '', 'N' or 'N/M'.
        cnt = Post.objects.filter(group=self.group, parent=None, created_at__gt=self.lastread_at).count()
        ccnt = Post.objects.filter(group=self.group, created_at__gt=self.lastread_at).exclude(parent=None).count()
        if cnt == 0 and ccnt == 0 :
            return ""
        if ccnt == 0 :
            return "%d" % cnt
        return "%d/%d" % (cnt, ccnt)
    def _url(self, w, h) :
        # Avatar URL; activity_at's epoch seconds serve as a cache-buster.
        import time
        modtime = int(time.mktime(self.activity_at.timetuple()))
        return "/image/%s/%s_%dx%d.png?%d" % (self.__class__.__name__, self.user.id, w, h, modtime)
class Inbox(models.Model) :
    # A message delivered to a user's inbox, either a normal inbox message
    # or a system notification.
    TYPE_INBOX = 0
    TYPE_SYSTEM = 1
    TYPE_CHOICES = (
        (TYPE_INBOX, 'Inbox'),
        (TYPE_SYSTEM, 'System'),
    )
    body = JSONField(blank=True, null=True)
    # `user` is the recipient, `sender` the originator.
    user = models.ForeignKey(User, db_index=True, related_name='message_user')
    sender = models.ForeignKey(User, db_index=True, related_name='message_sender')
    created_at = models.DateTimeField(auto_now_add=True, db_index=True)
    is_read = models.BooleanField(default=False)
    msgtype = models.IntegerField(default=TYPE_INBOX, choices=TYPE_CHOICES)
class Post(models.Model) :
    # A message in a group.  Top-level posts have parent=None; comments point
    # at their parent post.  `body` is a JSON dict: {'text': ...} for a
    # normal post, {'join': 1} for a "user joined" marker, plus an optional
    # 'image_count' maintained by add_image().
    class Meta :
        # Newest first.  (Inner parens are redundant: this is ('-created_at',).)
        ordering = (('-created_at'),)
    body = JSONField(blank=True, null=True)
    user = models.ForeignKey(User, db_index=True)
    created_at = models.DateTimeField(auto_now_add=True, db_index=True)
    # None for a top-level post; set when this post is a comment.
    parent = models.ForeignKey('self', null=True, blank=True, default=None)
    group = models.ForeignKey(Group, db_index=True)
    likes = models.IntegerField(default=0)
    def links(self) :
        # URLs found in the post text, extracted once and memoised on the
        # instance as self._links.
        if not hasattr(self, '_links') :
            if 'text' in self.body :
                self._links = [link[0] for link in LINK_RE.findall(self.body['text'])]
            else :
                self._links = []
        return self._links
    def links_youtube(self) :
        # YouTube video ids among this post's links.
        res = []
        for link in self.links() :
            id = self._is_youtube(link)
            if id :
                res.append(id)
        return res
    @staticmethod
    def _is_youtube(link) :
        # Return the video id from a youtube.com URL's v= parameter, or None.
        exp = re.compile(r'youtube\.com/.*\bv=(\w+)')
        r = exp.search(link)
        if r :
            return r.group(1)
        return None
    @classmethod
    def _replace_html(cls, m) :
        # re.sub callback used by html(): YouTube links are dropped from the
        # text entirely; every other URL becomes an anchor tag.
        link = m.group(0)
        if cls._is_youtube(link) :
            return ""
        return u'<a href="%s">%s</a>' % (link, link)
    def html(self) :
        # HTML rendering of the post body, marked safe for templates.
        from django.utils.safestring import mark_safe
        if 'join' in self.body :
            html = "Joined the group"
        elif 'text' in self.body :
            html = LINK_RE.sub(self._replace_html, self.body['text'])
        else :
            html = ''
        return mark_safe(html)
    def add_image(self, image_bytes) :
        # Attach raw image bytes: resize/re-encode via PostImage.safesize,
        # persist a PostImage row, bump body['image_count'] and extend the
        # memoised image list.  Returns the PostImage, or None if the bytes
        # could not be decoded.  NOTE: the caller must save() this post for
        # the image_count change to persist.
        from django.core.files.uploadedfile import InMemoryUploadedFile
        name = 'img_%s_%s_%s.jpg' % (self.user.id, self.group.id, self.id)
        ofd = PostImage.safesize(image_bytes)
        if ofd == None :
            return None
        ipost = PostImage(user=self.user, post=self)
        ipost.image.save(name, InMemoryUploadedFile(ofd, None, name, 'image/jpeg', len(ofd.getvalue()), None), save=False)
        ipost.save()
        self.body['image_count'] = self.body.get('image_count', 0) + 1
        if not hasattr(self, '_images') :
            self._images = []
        self._images.append(ipost)
        return ipost
    def images(self) :
        # Memoised list of attached PostImage rows; None when there are none.
        if self.body.get('image_count', 0) == 0 :
            return None
        if not hasattr(self, '_images') :
            self._images = [img for img in self.postimage_set.all()]
        return self._images
    def member(self) :
        # The author's Member record within this post's group.
        return Member.objects.get(user=self.user, group=self.group)
    def comments(self) :
        # Replies to this post, oldest first.
        return Post.objects.filter(parent=self).order_by('created_at')
    def email_notify(self, request, member, users) :
        # Send the "new post" / "new comment" notification email to `users`,
        # linking back to this post.  `member` is the posting member.
        from snippets.email import email_template
        from django.core.urlresolvers import reverse
        from django.template import RequestContext
        url = request.build_absolute_uri(reverse('point:readpost', kwargs={'gid':self.group.id, 'pid':self.id}))
        if self.parent is None :
            subject = 'Notewave | New post by: %s' % member.get_username()
        else :
            subject = 'Notewave | Comment by: %s' % member.get_username()
        email_template(subject = subject,
                       rcpt = [user.profile.email for user in users if user.profile.email is not None],
                       sender = 'notify@notewave.com',
                       text = 'point/_email_newpost.txt',
                       html = 'point/_email_newpost.html',
                       context_instance = RequestContext(request, {
                           'member' : member,
                           'post' : self,
                           'url' : url,
                       }))
    def share(self, users) :
        # Not implemented yet.
        pass
    def save(self, notify=True) :
        """ TODO - change to signal based """
        # notify() fires only when the row is first created, and only if the
        # caller did not opt out with notify=False.
        existing = False
        if self.id :
            existing = True
        super(Post, self).save()
        if notify and not existing :
            self.notify()
    def notify(self) :
        # Tell the group something happened (updates activity, fires callbacks).
        self.group.post_update(self)
    def watchers(self, exclude_user=None) :
        # Users who should hear about activity on this post, always excluding
        # the post's author and optionally `exclude_user`.
        users = []
        if self.parent is not None :
            #
            # This post is a comment: everybody watching it -- its likers,
            # its commenters, and the parent post's author.
            #
            for obj in Like.objects.filter(post=self).all() :
                if obj.user == self.user :
                    continue
                if exclude_user and exclude_user == obj.user :
                    continue
                users.append(obj.user)
            for obj in Post.objects.filter(parent=self).all() :
                if obj.user == self.user :
                    continue
                if exclude_user and exclude_user == obj.user :
                    continue
                users.append(obj.user)
            if self.parent.user not in users and exclude_user != self.parent.user :
                users.append(self.parent.user)
        else :
            #
            # Top-level post: the group's member list, excluding the poster
            # and anybody who opted out of post notifications.
            #
            for m in self.group.member_set.all() :
                if not m.notify_on_post :
                    continue
                if m.user == self.user :
                    continue
                if exclude_user and exclude_user == m.user :
                    continue
                users.append(m.user)
        return users
class PostImage(models.Model) :
    # An image attached to a Post; the file itself lives in ImageField storage.
    user = models.ForeignKey(User, db_index=True)
    created_at = models.DateTimeField(auto_now_add=True)
    post = models.ForeignKey(Post, db_index=True)
    image = models.ImageField(upload_to="post/%Y/%m-%d", blank=True, null=True)
    def _url(self, w, h) :
        # Build the resized-image URL via the project's image helper module.
        import image
        return image.url(self, self.id, w, h)
    def icon100(self) :
        return self._url(100,100)
    def icon130(self) :
        return self._url(130,130)
    @classmethod
    def icon_image(cls, iid) :
        # Return the ImageField for the given PostImage id, or None if the
        # row does not exist (or any other lookup failure).
        try :
            image = cls.objects.get(id=iid)
            return image.image
        # Fixed: was `except Exception, e:` -- Python-2-only syntax with an
        # unused binding.  This form behaves identically on Python 2 and 3.
        except Exception :
            return None
    @staticmethod
    def safesize(bytes) :
        # Decode raw image bytes, cap the longest side at 640px, and return
        # a JPEG-encoded StringIO rewound to position 0.  Returns None when
        # the bytes are not a readable image.
        from cStringIO import StringIO
        from PIL import Image
        try :
            img = Image.open(StringIO(bytes))
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit are
        # no longer swallowed; any decode error still yields None.
        except Exception :
            return None
        w, h = img.size
        if w > 640 or h > 640 :
            # Scale down uniformly so the longest edge becomes 640.
            if w > h :
                scale = 640.0 / w
            else :
                scale = 640.0 / h
            img = img.resize((int(w * scale), int(h * scale)))
        ofd = StringIO()
        img.save(ofd, 'JPEG')
        ofd.seek(0)
        return ofd
class Like(models.Model) :
    # A user's "like" on a post; the unique constraint allows at most one
    # per (user, post) pair.
    class Meta :
        unique_together = (('user','post'),)
    user = models.ForeignKey(User, db_index=True)
    created_at = models.DateTimeField(auto_now_add=True)
    post = models.ForeignKey(Post, db_index=True)
| [
"koblas@extra.com"
] | koblas@extra.com |
2abe5d08dc261b8f084b22d90822fcbdd82d1fbe | 11c975bd35541e89f7d398dc9bb3348882e03633 | /templates/python/facebook_business/adobjects/helpers/adsinsightsmixin.py | edd5e48ed271af8ee00ba32a0464c964e28ce951 | [
"MIT",
"LicenseRef-scancode-unknown-license-reference",
"LicenseRef-scancode-facebook-software-license"
] | permissive | facebook/facebook-business-sdk-codegen | 46bd376c6e88b8f9d3b30153252b4f8b69d5baff | 21a5f587afe4fb8fe7dd6a002240ab2eb3069fbe | refs/heads/main | 2023-09-01T21:13:25.865556 | 2023-08-18T19:09:11 | 2023-08-18T19:09:11 | 167,639,042 | 75 | 64 | MIT | 2023-09-14T17:44:46 | 2019-01-26T02:03:10 | PHP | UTF-8 | Python | false | false | 1,656 | py | # Copyright 2014 Facebook, Inc.
# You are hereby granted a non-exclusive, worldwide, royalty-free license to
# use, copy, modify, and distribute this software in source code or binary
# form for use in connection with the web services and APIs provided by
# Facebook.
# As with any software that integrates with the Facebook platform, your use
# of this software is subject to the Facebook Developer Principles and
# Policies [http://developers.facebook.com/policy/]. This copyright notice
# shall be included in all copies or substantial portions of the software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
class AdsInsightsMixin:
    # Namespaces of string constants mixed into the AdsInsights ad object.
    # The lower-case attribute names mirror the upper/lower-case wire values
    # expected by the Marketing API (hence `all`/`any`/`in_` despite the
    # builtin/keyword clashes; `in_` and `none` avoid Python keywords).
    class Increment(object):
        # Reporting time-increment granularities.
        monthly = 'monthly'
        all_days = 'all_days'
    class Operator(object):
        # Comparison operators for insights filtering expressions.
        all = 'ALL'
        any = 'ANY'
        contain = 'CONTAIN'
        equal = 'EQUAL'
        greater_than = 'GREATER_THAN'
        greater_than_or_equal = 'GREATER_THAN_OR_EQUAL'
        in_ = 'IN'
        in_range = 'IN_RANGE'
        less_than = 'LESS_THAN'
        less_than_or_equal = 'LESS_THAN_OR_EQUAL'
        none = 'NONE'
        not_contain = 'NOT_CONTAIN'
        not_equal = 'NOT_EQUAL'
        not_in = 'NOT_IN'
        not_in_range = 'NOT_IN_RANGE'
| [
"facebook-github-bot@users.noreply.github.com"
] | facebook-github-bot@users.noreply.github.com |
78191de7a3552304ba7958b9dacbe8ec85ed9f5f | 457ff88e70deccdd530a2a7a826a30944eb995d0 | /bullet.py | 1615f1020dcafe2f4c1571f669e29ad43a6df26a | [] | no_license | gouthem1/Pygame_final | f4ab246e626a7d49dc2371815c34f5847b61dbf2 | 5e16efd0878e84ecfe848f31343e8f231e2453e2 | refs/heads/master | 2022-12-29T19:06:51.106355 | 2020-10-19T22:29:07 | 2020-10-19T22:29:07 | 305,512,666 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,099 | py | import pygame
from pygame.sprite import Sprite
class Bullet(Sprite):
    """A single bullet sprite that travels up the screen from the ship."""

    def __init__(self, ai_game):
        """Spawn the bullet at the ship's current muzzle position."""
        super().__init__()
        self.screen = ai_game.screen
        self.settings = ai_game.settings
        self.color = self.settings.bullet_color
        # Build the bullet's rectangle at the origin, then snap it onto the
        # top-centre of the ship.
        self.rect = pygame.Rect(
            0, 0, self.settings.bullet_width, self.settings.bullet_height
        )
        self.rect.midtop = ai_game.ship.rect.midtop
        # Track the vertical position as a float so sub-pixel speeds work.
        self.y = float(self.rect.y)

    def update(self):
        """Advance the bullet up the screen by its configured speed."""
        self.y -= self.settings.bullet_speed
        self.rect.y = self.y

    def draw_bullet(self):
        """Render the bullet onto the game surface."""
        pygame.draw.rect(self.screen, self.color, self.rect)
| [
"johndoe@example.com"
] | johndoe@example.com |
bab9fea8551790c24d825944de199aa596e1b011 | acb8e84e3b9c987fcab341f799f41d5a5ec4d587 | /langs/2/erz.py | bba8142a2aa3894ba93a388f5fe901afbc750baf | [] | no_license | G4te-Keep3r/HowdyHackers | 46bfad63eafe5ac515da363e1c75fa6f4b9bca32 | fb6d391aaecb60ab5c4650d4ae2ddd599fd85db2 | refs/heads/master | 2020-08-01T12:08:10.782018 | 2016-11-13T20:45:50 | 2016-11-13T20:45:50 | 73,624,224 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 486 | py | import sys
def printFunction(lineRemaining):
    # Implements the toy language's print command (Python 2 print statements).
    # `lineRemaining` is the token list after the 'eRZ' keyword; output is
    # produced only when it is wrapped in standalone '"' tokens.
    if lineRemaining[0] == '"' and lineRemaining[-1] == '"':
        if len(lineRemaining) > 2:
            #data to print
            lineRemaining = lineRemaining[1:-1]
            print ' '.join(lineRemaining)
        else:
            # Just the two quote tokens: emit an empty line.
            print
def main(fileName):
    # Interpret the source file line by line: lines beginning with the
    # keyword 'eRZ' are print statements; anything else aborts with ERROR.
    with open(fileName) as f:
        for line in f:
            data = line.split()
            if data[0] == 'eRZ':
                printFunction(data[1:])
            else:
                print 'ERROR'
                return

if __name__ == '__main__':
    # The source file path is the single command-line argument.
    main(sys.argv[1])
"juliettaylorswift@gmail.com"
] | juliettaylorswift@gmail.com |
de5d15d0b7c4c9c8e4113c2abaf3463af7a368af | be07637552f1089bb3e7300921f19baa8115a050 | /utils.py | afc2eb5e503cf15be6e3d186fe678a7fc6f6d006 | [] | no_license | andremourato/travian_bot | b0081f676c7e8c122fb6cc1f7a18b88319b92e86 | 0c8c1f0112127315dc8d08e4f067c99e9f17c04b | refs/heads/master | 2020-04-29T06:38:43.538761 | 2019-03-17T19:08:08 | 2019-03-17T19:08:08 | 175,923,792 | 1 | 0 | null | 2019-03-16T03:58:47 | 2019-03-16T03:58:46 | null | UTF-8 | Python | false | false | 256 | py | from datetime import datetime, timedelta
def add_seconds_to_datetime_now(seconds):
    """Return the moment `seconds` from now, formatted as 'HH:MM dd-mm-YYYY'."""
    target = datetime.now() + timedelta(seconds=seconds)
    return target.strftime('%H:%M %d-%m-%Y')
| [
"sasha750i@mail.ru"
] | sasha750i@mail.ru |
8ef4a64b0d65f4733c09394c9f18f7e7269e6b82 | 6fddcefe20d0f94d5ad058232d53d8d2dc21c3f7 | /demo/us.py | 85119b73cd2b1090bcdc7338336158606bceda8c | [
"MIT"
] | permissive | Xie-Zhibin/backend-guide | ff178ab4557ad3762945f1a47bbf609176a03bb6 | 9934688ab17afa0f21f845296c2313fc148e813f | refs/heads/master | 2020-12-11T05:24:51.298771 | 2016-07-08T21:18:16 | 2016-07-08T21:18:16 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 818 | py | from flask import Flask, request, abort
app = Flask(__name__)
@app.route("/api")
def api():
    """Serve the bundled us-api.json document verbatim as JSON."""
    with app.open_resource("us-api.json") as spec:
        payload = spec.read()
    return payload, 200, {"Content-Type": "application/json"}
@app.route("/api/user", methods=['GET', 'POST', 'PUT', 'DELETE'])
def user():
    """Dispatch /api/user by HTTP verb.

    Fixes in this revision:
    - the route previously declared no `methods`, so Flask only accepted
      GET and every branch except the first was unreachable (non-GET
      requests got a 405 before the view ran);
    - `request.method` is always upper-case, so the old 'Delete'
      comparison could never match -- now 'DELETE';
    - a duplicate, unreachable POST branch was removed.
    """
    if request.method == 'GET':
        return "user.get"
    elif request.method == 'POST':
        return "user.post"
    elif request.method == 'DELETE':
        return "user.delete"
    elif request.method == 'PUT':
        return "user.put"
    else:
        abort(404, "Not Impl")
@app.route("/api/user/me")
def user_me():
    """Return the current-user placeholder; anything but GET is rejected."""
    if request.method != 'GET':
        abort(404, "Not Impl")
    return "user.get_me"
if __name__ == '__main__':
    # Development entry point: run Flask's built-in debug server.
    app.run(debug=True)
| [
"guyskk@qq.com"
] | guyskk@qq.com |
a6d6725030ae4d13cb32a74cda9bcc4bf9fc7bda | 3b334e9ac96ba3b975f427e84d2f981cacd884ff | /oop/contacts.py | 9fa7da0437faead706944e000a0c176c99995f96 | [] | no_license | HminiL/flask-madird-titanic-docker | 38a4d797501a252dbe1579e9e53e6f70f3ffc0a9 | 3670792f41427d22f5f2e10e4f7acbe8fa86b845 | refs/heads/main | 2023-08-11T03:41:33.002213 | 2021-09-24T03:25:04 | 2021-09-24T03:25:04 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,133 | py | """
name, phone, email, address
"""
class Contacts(object):
    """A single address-book entry: name, phone, email and postal address."""

    def __init__(self, name, phone, email, address):
        self.name = name
        self.phone = phone
        self.email = email
        self.address = address

    def to_string(self):
        """Print this contact as one comma-separated line."""
        print('{}, {}, {}, {}'.format(self.name, self.phone, self.email, self.address))
def set_contact() -> object:
    """Prompt for the four fields in order and build a Contacts from them."""
    answers = [input(label) for label in ('name : ', 'phone : ', 'email : ', 'address : ')]
    return Contacts(*answers)
def get_contact(ls):
    """Print every contact in the list, one per line."""
    for entry in ls:
        entry.to_string()
def del_contact(ls, name):
    """Remove every contact whose .name equals `name`, mutating `ls` in place.

    The previous implementation deleted elements while iterating the same
    list with enumerate(); deleting shifts later elements left, so the item
    that slides into the deleted slot is skipped and adjacent duplicates
    survived.  Rebuilding via slice assignment removes all matches while
    keeping the same list object (callers hold a reference to it).
    """
    ls[:] = [contact for contact in ls if contact.name != name]
def print_menu(ls) -> int:
    """Show the numbered options on a single prompt line and return the
    user's choice as an int."""
    parts = [str(index) + '-' + label + '\t' for index, label in enumerate(ls)]
    return int(input(''.join(parts)))
def main():
    """Interactive loop: show the menu, dispatch on the chosen number."""
    contacts = []
    while True:
        choice = print_menu(['exit', 'add', 'print', 'delete'])
        if choice == 1:
            contacts.append(set_contact())
        elif choice == 2:
            get_contact(contacts)
        elif choice == 3:
            del_contact(contacts, input('Del Name : '))
        else:
            # 0 ('exit') or anything unrecognised ends the program.
            break

if __name__ == '__main__':
    main()
| [
"hmin10580@gmail.com"
] | hmin10580@gmail.com |
ec7d46baa6693cb551224bc25db5ebe267eba300 | 9e8daa9c83af977e9d414c4315d4b9503ed4fa1b | /Query_practice_geo.py | 3c3c49e3cf27e42721887ccb55a9b91c7be9fd36 | [] | no_license | PracticeMinder/practiceminder-data-utils | da2ef666e48fcdd5b8d8a12f7e6f98f2a134a078 | d56bc7c6d4991e45bdefe1b59133cae158a77ea8 | refs/heads/master | 2016-09-06T12:35:12.709491 | 2013-11-11T23:28:58 | 2013-11-11T23:28:58 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,583 | py | from pymongo import MongoClient
from pymongo import ASCENDING, DESCENDING
import re
import csv
import geojson
import json
#connection = Connection('146.185.159.107',27017)
def __connect__():
    """Open a read-only, authenticated handle to the prescription database."""
    client = MongoClient('146.185.159.107', 27017)
    db = client.prescription
    db.authenticate('nhshd','nhshd')
    return db
def __connectWrite__():
    """Open a read-write, authenticated handle to the prescription database."""
    client = MongoClient('146.185.159.107', 27017)
    db = client.prescription
    db.authenticate('nhshd-rw','nhs-m0ng0')
    return db
def getCollection(collName, write=False):
    """Return a handle to the named collection, connected read-only by
    default or read-write when `write` is True.  Unknown names raise."""
    db = __connectWrite__() if write else __connect__()
    if collName == 'system.indexes':
        return db.system.indexes
    if collName == 'practices':
        return db.practices
    if collName == 'chem_sub':
        return db.chem_sub
    if collName == 'practice_geo':
        return db.practice_geo
    if collName == 'prescriptons':
        # NB: the collection really is spelled 'prescriptons' (sic) upstream.
        return db.prescriptons
    if collName == 'system.users':
        return db.system.users
    raise Exception('Unknown database collection ' + collName)
def getAllAD():
    """Cursor over all prescriptions whose BNF code starts with 0403030
    ('AD' per the function name -- presumably antidepressants; confirm)."""
    collection = getCollection('prescriptons')
    pattern = re.compile("^0403030", re.IGNORECASE)
    return collection.find({'BNF CODE': pattern})
def sumAmounts():
    """Aggregate the total actual cost per practice for prescriptions whose
    BNF code starts with 0403030E0.
    (An earlier revision matched the broader prefix '^0403030'.)"""
    collection = getCollection('prescriptons')
    pattern = re.compile("^0403030E0", re.IGNORECASE)
    pipeline = [
        {'$match': {'BNF CODE': pattern}},
        {'$group': {'_id': "$PRACTICE", 'totalCost': {'$sum': "$ACT COST"}}},
    ]
    return collection.aggregate(pipeline)
def pushMetrics(id):
    """Debug helper: look up a practice by id and print the query result.
    (The id->metric push described by the old comment was never written.)"""
    db = getCollection('practices')
    print(db.find({'_id': id}))
def pushToPrac(dic):
    """Write each practice's metric name/value pairs into its 'metrics'
    field.  `dic` maps practice id -> {metric name: value} (Python 2
    iteritems)."""
    db = getCollection('practices', write=True)
    for practice_id, metrics in dic.iteritems():
        for metric_name, metric_value in metrics.iteritems():
            db.update({'_id': practice_id}, {'$set': {'metrics': {metric_name: metric_value}}})
def testPrac(dic):
    """Debug helper: print the stored document(s) for every practice id
    appearing as a key of `dic`."""
    db = getCollection('practices', write=False)
    for practice_id, _metrics in dic.iteritems():
        for doc in db.find({'_id': practice_id}):
            print(doc)
def main():
    # Emit a GeoJSON FeatureCollection of every geocoded practice to stdout.
    # Python 2 print statements throughout; the trailing commas suppress the
    # newline so each feature is assembled from several prints.
    cl = getCollection('practices',write=False)
    print '{ "type": "FeatureCollection",','\n';
    print '"features": [','\n'
    # Only practices that have been geocoded (i.e. carry a 'loc' field).
    for p in cl.find({ 'loc' : { '$exists' : True} }) :
        print '{ "type": "Feature",'
        print '"properties" : { '
        name = p['name']
        name = name.strip()
        post = p['post']
        post = post.strip()
        town = p['town']
        town = town.strip()
        print '"Name" : "',name,'",',
        print '"Post code" : "',post,'",',
        print '"Town" : "',town,'"',
        #print '"Quantity of Metformin HCl_Tab 500mg prescribed" : "',p['metrics']['Quanity of Metformin HCl_Tab 500mg'],'"',
        print'},\n'
        print '"geometry": { "type": "Point", "coordinates": [',p['loc']['coordinates'][0],',',p['loc']['coordinates'][1],'] }\n';
        print '},',"\n"
    print "]\n"
    print "}"
    # The commented-out blocks below are retained from earlier experiments:
    # CSV / GeoJSON file export, and per-practice prescription queries.
    #with open('data.geojson', 'w') as outfile:
    #with open("geo.practice.csv", "w") as file:
    #csv_file = csv.writer(file)
    #print p['loc']['coordinates'][0]
    #f.write(['loc']['coordinates'][0],
    #    p['loc']['coordinates'][1]
    #    ])
    #print pt;
    #json.dump(p['loc'],outfile)
    #csv_file.writerow([p['name'].lstrip(),
    #    p['town'].lstrip(),
    #    p['post'].lstrip(),
    #    p['loc']['coordinates'][0],
    #    p['loc']['coordinates'][1]
    #    ]);
    #f.close()
    #practiceID = 'D81025'
    #cl = getCollection('prescriptons',write=False)
    #print "Number of prescriptions for practice ",practiceID," : \n";
    #print cl.find({"PRACTICE": practiceID}).count()
    #for pr in cl.find({"PRACTICE": practiceID}).sort("ACT_COST",DESCENDING) :
    #    print pr

if __name__ == "__main__":
    main()
# res = getAllAD()
# for r in res:
# print r
# print sumAmounts()['result'][:5]
# tmpListMetric = [{u'totalCost': 6.99, u'_id': u'C82651'}, {u'totalCost': 39.38, u'_id': u'C82642'}, {u'totalCost': 63.82, u'_id': u'C82639'}, {u'totalCost': 126.17, u'_id': u'C82624'}, {u'totalCost': 40.97, u'_id': u'C82610'}]
# pushMetrics(id = tmpListMetric[0] )
# db = __connect__()
# print db.collection_names()
# pres = getCollection('practices')
# print pres.find_one()
| [
"ole.st@gmx.de"
] | ole.st@gmx.de |
2088dbeee2068e4dc57d1ad90cdcf24e1fb6776b | 3189215ffa8120bbb811952b60894eb4d9776618 | /source/trancost_funcs.py | 05e227e3dee05963fead4d5a71785f9cd04927b7 | [
"MIT"
] | permissive | opimal-hedging-pg/OHPG | d0c2d955d6bfb91dc4459d9cc5ff769822a043a1 | f0de2170c2154d1e94afdeb89b2f24326f06123b | refs/heads/main | 2023-05-25T22:50:44.514227 | 2021-06-11T11:53:29 | 2021-06-11T11:53:29 | 376,008,271 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 716 | py |
class BaseTrancost(object):
    """Abstract base class for transaction-cost models.

    Concrete subclasses must expose a descriptive ``name`` property and be
    callable with the traded amount, returning the cost of that trade.
    """
    @property
    def name(self):
        raise NotImplementedError()
class constant_trancost(BaseTrancost):
    """Flat transaction cost: every trade costs ``tc_para`` regardless of size."""

    def __init__(self, tc_para=0.):
        self.tcp = tc_para

    @property
    def name(self):
        # e.g. tc_para=0.5 -> "TCcont-0_5" (dots are not filesystem-friendly)
        tag = str(self.tcp).replace('.', '_')
        return f'TCcont-{tag}'

    def __call__(self, x):
        # Cost is independent of the traded amount x.
        return self.tcp
class proportional_trancost(BaseTrancost):
    """Transaction cost proportional to trade size.

    ``tc_para`` is expressed in thousandths (per-mille) of the traded amount.
    """

    def __init__(self, tc_para=3.):
        self.tcp = tc_para

    @property
    def name(self):
        # e.g. tc_para=3.0 -> "TCprop-3_0"
        tag = str(self.tcp).replace('.', '_')
        return f'TCprop-{tag}'

    def __call__(self, x):
        # abs() makes the cost sign-independent (buys and sells cost the same).
        return abs(self.tcp * x) / 1000
"noreply@github.com"
] | opimal-hedging-pg.noreply@github.com |
296f4389a7d7cba67e3405d2a9054bbd0112ef2a | b4afb44b8f483c048716fe12d778186ce68ac846 | /AutoFrameworkForAppiumPy/com/qa/automation/appium/pages/ios/ffan/location_bluetooth_page.py | a4424934bd10cece78cca0f401632ff7bf593584 | [] | no_license | liu111xiao111/UItest | 64309b2c85f6d2334d64bb0875ba9ced459ebb1e | 67e2acc9a99da81022e286e8d8ec7ccb12636ff3 | refs/heads/master | 2021-09-01T18:30:28.044296 | 2017-12-28T04:36:46 | 2017-12-28T04:36:46 | 115,585,226 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 951 | py | # -*- coding: utf-8 -*-
from com.qa.automation.appium.api.api_new import API
from com.qa.automation.appium.pages.ios.common.super_page import SuperPage
from com.qa.automation.appium.pages.ios.ffan.location_bluetooth_page_configs import LocationBluetoothPageConfigs
class LocationBluetoothPage(SuperPage):
    """Page object for the "enable Bluetooth?" prompt.

    Navigation path: Home => Plaza => Indoor map => Bluetooth-settings prompt.
    (Original author: Liu Tao.)
    """
    def __init__(self, testcase, driver, logger):
        super(LocationBluetoothPage, self).__init__(testcase=testcase,
                                                    driver=driver,
                                                    logger=logger)

    def clickOnOkBtn(self):
        """Tap the "OK" button on the Bluetooth prompt."""
        api = API()
        api.clickElementByXpath(testCase=self.testcase,
                                driver=self.driver,
                                logger=self.logger,
                                xpath=LocationBluetoothPageConfigs.xpath_ok_button)
if __name__ == '__main__':
    # Page objects are not meant to be executed directly; nothing to run.
    pass;
"tl@neusoft.com"
] | tl@neusoft.com |
23c646f97b857f315e89b4ba721d55d1e5f7dc10 | e299ad494a144cc6cfebcd45b10ddcc8efab54a9 | /test/functionalities/inferior-assert/TestInferiorAssert.py | a8ce779434fe5689895199118a548b6bdb2eb513 | [
"NCSA"
] | permissive | apple-oss-distributions/lldb | 3dbd2fea5ce826b2bebec2fe88fadbca771efbdf | 10de1840defe0dff10b42b9c56971dbc17c1f18c | refs/heads/main | 2023-08-02T21:31:38.525968 | 2014-04-11T21:20:22 | 2021-10-06T05:26:12 | 413,590,587 | 4 | 1 | null | null | null | null | UTF-8 | Python | false | false | 11,217 | py | """Test that lldb functions correctly after the inferior has asserted."""
import os, time
import unittest2
import lldb, lldbutil
from lldbtest import *
class AssertingInferiorTestCase(TestBase):
mydir = TestBase.compute_mydir(__file__)
@unittest2.skipUnless(sys.platform.startswith("darwin"), "requires Darwin")
@unittest2.expectedFailure("rdar://15367233")
def test_inferior_asserting_dsym(self):
"""Test that lldb reliably catches the inferior asserting (command)."""
self.buildDsym()
self.inferior_asserting()
@expectedFailurei386 # llvm.org/pr17384: lldb needs to be aware of linux-vdso.so to unwind stacks properly'
@unittest2.expectedFailure("rdar://15367233")
def test_inferior_asserting_dwarf(self):
"""Test that lldb reliably catches the inferior asserting (command)."""
self.buildDwarf()
self.inferior_asserting()
@unittest2.skipUnless(sys.platform.startswith("darwin"), "requires Darwin")
def test_inferior_asserting_registers_dsym(self):
"""Test that lldb reliably reads registers from the inferior after asserting (command)."""
self.buildDsym()
self.inferior_asserting_registers()
def test_inferior_asserting_register_dwarf(self):
"""Test that lldb reliably reads registers from the inferior after asserting (command)."""
self.buildDwarf()
self.inferior_asserting_registers()
@expectedFailurei386 # llvm.org/pr17384: lldb needs to be aware of linux-vdso.so to unwind stacks properly
def test_inferior_asserting_disassemble(self):
"""Test that lldb reliably disassembles frames after asserting (command)."""
self.buildDefault()
self.inferior_asserting_disassemble()
@python_api_test
def test_inferior_asserting_python(self):
"""Test that lldb reliably catches the inferior asserting (Python API)."""
self.buildDefault()
self.inferior_asserting_python()
@unittest2.skipUnless(sys.platform.startswith("darwin"), "requires Darwin")
@unittest2.expectedFailure("rdar://15367233")
def test_inferior_asserting_expr(self):
"""Test that the lldb expression interpreter can read from the inferior after asserting (command)."""
self.buildDsym()
self.inferior_asserting_expr()
@expectedFailurei386 # llvm.org/pr17384: lldb needs to be aware of linux-vdso.so to unwind stacks properly
@unittest2.expectedFailure("rdar://15367233")
def test_inferior_asserting_expr(self):
"""Test that the lldb expression interpreter can read from the inferior after asserting (command)."""
self.buildDwarf()
self.inferior_asserting_expr()
@unittest2.skipUnless(sys.platform.startswith("darwin"), "requires Darwin")
@unittest2.expectedFailure("rdar://15367233")
def test_inferior_asserting_step(self):
"""Test that lldb functions correctly after stepping through a call to assert()."""
self.buildDsym()
self.inferior_asserting_step()
@expectedFailurei386 # llvm.org/pr17384: lldb needs to be aware of linux-vdso.so to unwind stacks properly
@unittest2.expectedFailure("rdar://15367233")
def test_inferior_asserting_step(self):
"""Test that lldb functions correctly after stepping through a call to assert()."""
self.buildDwarf()
self.inferior_asserting_step()
def set_breakpoint(self, line):
lldbutil.run_break_set_by_file_and_line (self, "main.c", line, num_expected_locations=1, loc_exact=True)
def check_stop_reason(self):
stop_reason = 'stop reason = signal SIGABRT'
# The stop reason of the thread should be an abort signal or exception.
self.expect("thread list", STOPPED_DUE_TO_ASSERT,
substrs = ['stopped',
stop_reason])
return stop_reason
def setUp(self):
# Call super's setUp().
TestBase.setUp(self)
# Find the line number of the call to assert.
self.line = line_number('main.c', '// Assert here.')
def inferior_asserting(self):
"""Inferior asserts upon launching; lldb should catch the event and stop."""
exe = os.path.join(os.getcwd(), "a.out")
self.runCmd("file " + exe, CURRENT_EXECUTABLE_SET)
self.runCmd("run", RUN_SUCCEEDED)
stop_reason = self.check_stop_reason()
# And it should report a backtrace that includes the assert site.
self.expect("thread backtrace all",
substrs = [stop_reason, 'main', 'argc', 'argv'])
# And it should report the correct line number.
self.expect("thread backtrace all",
substrs = [stop_reason,
'main.c:%d' % self.line])
def inferior_asserting_python(self):
"""Inferior asserts upon launching; lldb should catch the event and stop."""
exe = os.path.join(os.getcwd(), "a.out")
target = self.dbg.CreateTarget(exe)
self.assertTrue(target, VALID_TARGET)
# Now launch the process, and do not stop at entry point.
# Both argv and envp are null.
process = target.LaunchSimple (None, None, self.get_process_working_directory())
if process.GetState() != lldb.eStateStopped:
self.fail("Process should be in the 'stopped' state, "
"instead the actual state is: '%s'" %
lldbutil.state_type_to_str(process.GetState()))
thread = lldbutil.get_stopped_thread(process, lldb.eStopReasonSignal)
if not thread:
self.fail("Fail to stop the thread upon assert")
if self.TraceOn():
lldbutil.print_stacktrace(thread)
def inferior_asserting_registers(self):
"""Test that lldb can read registers after asserting."""
exe = os.path.join(os.getcwd(), "a.out")
self.runCmd("file " + exe, CURRENT_EXECUTABLE_SET)
self.runCmd("run", RUN_SUCCEEDED)
self.check_stop_reason()
# lldb should be able to read from registers from the inferior after asserting.
self.expect("register read eax",
substrs = ['eax = 0x'])
def inferior_asserting_disassemble(self):
"""Test that lldb can disassemble frames after asserting."""
exe = os.path.join(os.getcwd(), "a.out")
# Create a target by the debugger.
target = self.dbg.CreateTarget(exe)
self.assertTrue(target, VALID_TARGET)
# Launch the process, and do not stop at the entry point.
target.LaunchSimple (None, None, self.get_process_working_directory())
self.check_stop_reason()
process = target.GetProcess()
self.assertTrue(process.IsValid(), "current process is valid")
thread = process.GetThreadAtIndex(0)
self.assertTrue(thread.IsValid(), "current thread is valid")
# lldb should be able to disassemble frames from the inferior after asserting.
for frame in thread:
self.assertTrue(frame.IsValid(), "current frame is valid")
self.runCmd("frame select " + str(frame.GetFrameID()), RUN_SUCCEEDED)
# Don't expect the function name to be in the disassembly as the assert
# function might be a no-return function where the PC is past the end
# of the function and in the next function. We also can't back the PC up
# because we don't know how much to back it up by on targets with opcodes
# that have differing sizes
self.expect("disassemble -a %s" % frame.GetPC(),
substrs = ['->'])
def check_expr_in_main(self, thread):
depth = thread.GetNumFrames()
for i in range(depth):
frame = thread.GetFrameAtIndex(i)
self.assertTrue(frame.IsValid(), "current frame is valid")
if self.TraceOn():
print "Checking if function %s is main" % frame.GetFunctionName()
if 'main' == frame.GetFunctionName():
frame_id = frame.GetFrameID()
self.runCmd("frame select " + str(frame_id), RUN_SUCCEEDED)
self.expect("p argc", substrs = ['(int)', ' = 1'])
self.expect("p hello_world", substrs = ['Hello'])
self.expect("p argv[0]", substrs = ['a.out'])
self.expect("p null_ptr", substrs = ['= 0x0'])
return True
return False
def inferior_asserting_expr(self):
"""Test that the lldb expression interpreter can read symbols after asserting."""
exe = os.path.join(os.getcwd(), "a.out")
# Create a target by the debugger.
target = self.dbg.CreateTarget(exe)
self.assertTrue(target, VALID_TARGET)
# Launch the process, and do not stop at the entry point.
target.LaunchSimple (None, None, self.get_process_working_directory())
self.check_stop_reason()
process = target.GetProcess()
self.assertTrue(process.IsValid(), "current process is valid")
thread = process.GetThreadAtIndex(0)
self.assertTrue(thread.IsValid(), "current thread is valid")
# The lldb expression interpreter should be able to read from addresses of the inferior after a call to assert().
self.assertTrue(self.check_expr_in_main(thread), "cannot find 'main' in the backtrace")
def inferior_asserting_step(self):
"""Test that lldb functions correctly after stepping through a call to assert()."""
exe = os.path.join(os.getcwd(), "a.out")
# Create a target by the debugger.
target = self.dbg.CreateTarget(exe)
self.assertTrue(target, VALID_TARGET)
# Launch the process, and do not stop at the entry point.
self.set_breakpoint(self.line)
target.LaunchSimple (None, None, self.get_process_working_directory())
self.expect("thread list", STOPPED_DUE_TO_BREAKPOINT,
substrs = ['main.c:%d' % self.line,
'stop reason = breakpoint'])
self.runCmd("next")
stop_reason = self.check_stop_reason()
# lldb should be able to read from registers from the inferior after asserting.
if "x86_64" in self.getArchitecture():
self.expect("register read rbp", substrs = ['rbp = 0x'])
if "i386" in self.getArchitecture():
self.expect("register read ebp", substrs = ['ebp = 0x'])
process = target.GetProcess()
self.assertTrue(process.IsValid(), "current process is valid")
thread = process.GetThreadAtIndex(0)
self.assertTrue(thread.IsValid(), "current thread is valid")
# The lldb expression interpreter should be able to read from addresses of the inferior after a call to assert().
self.assertTrue(self.check_expr_in_main(thread), "cannot find 'main' in the backtrace")
# And it should report the correct line number.
self.expect("thread backtrace all",
substrs = [stop_reason,
'main.c:%d' % self.line])
if __name__ == '__main__':
    import atexit
    # Initialize the lldb debugger library and make sure it is torn down
    # again when the interpreter exits, then hand over to the test runner.
    lldb.SBDebugger.Initialize()
    atexit.register(lambda: lldb.SBDebugger.Terminate())
    unittest2.main()
| [
"91980991+AppleOSSDistributions@users.noreply.github.com"
] | 91980991+AppleOSSDistributions@users.noreply.github.com |
fc44d1dec1e8f29a286c997f79b935a1eedebb9d | aa7aca873a232e4b9308bbba18dbdc2fd92fb06f | /server/tests/steps/test_moving_average.py | c5b91aa8557020f563802fead19ce15ab657f37e | [
"BSD-3-Clause"
] | permissive | ToucanToco/weaverbird | b27204c70de0d82cd88414608cb0b3e74e8e427a | d61df345248424802a78e84304a81d4f0ad9c92b | refs/heads/master | 2023-08-15T23:26:34.038972 | 2023-08-11T13:15:22 | 2023-08-11T13:15:22 | 167,165,685 | 85 | 12 | BSD-3-Clause | 2023-09-12T15:55:25 | 2019-01-23T10:40:17 | TypeScript | UTF-8 | Python | false | false | 2,046 | py | import numpy as np
import pandas as pd
from pandas import DataFrame
from tests.utils import assert_dataframes_equals
from weaverbird.backends.pandas_executor.steps.moving_average import execute_moving_average
from weaverbird.pipeline.steps import MovingAverageStep
def test_moving_average_basic():
    """Window-2 moving average over a single, ungrouped series."""
    values = [75, 80, 82, 83, 80, 86, 79, 76]
    dates = [f"2018-01-0{i}" for i in range(1, 9)]
    df = DataFrame({"date": dates, "value": values})
    df["date"] = pd.to_datetime(df["date"])

    step = MovingAverageStep(
        name="movingaverage", valueColumn="value", columnToSort="date", movingWindow=2
    )
    df_result = execute_moving_average(step, df)

    # The first row has no complete window, hence None (NaN after coercion).
    expected_result = df.assign(
        value_MOVING_AVG=[None, 77.5, 81, 82.5, 81.5, 83, 82.5, 77.5]
    )
    assert_dataframes_equals(df_result, expected_result)
def test_moving_average_with_groups():
    """Window-3 moving average computed independently within each country group."""
    france = [75, 80, 82, 83, 80, 86]
    usa = [69, 73, 73, 75, 70, 76]
    df = DataFrame(
        {
            "country": ["France"] * 6 + ["USA"] * 6,
            "date": [f"2018-01-0{i}" for i in range(1, 7)] * 2,
            "value": france + usa,
        }
    )
    df["date"] = pd.to_datetime(df["date"])

    step = MovingAverageStep(
        name="movingaverage",
        valueColumn="value",
        columnToSort="date",
        movingWindow=3,
        groups=["country"],
        newColumnName="rolling_average",
    )
    df_result = execute_moving_average(step, df)

    # Each group restarts its window: the first two rows per country are None.
    expected_result = df.assign(
        rolling_average=[None, None, 79, 81.6667, 81.6667, 83]
        + [None, None, 71.6667, 73.6667, 72.6667, 73.6667]
    )
    assert_dataframes_equals(df_result, expected_result)
def test_benchmark_moving_average(benchmark):
    """pytest-benchmark: window-3 moving average over a 1000-row frame."""
    frame = DataFrame({"value": np.random.random(1000), "id": list(range(1000))})
    step = MovingAverageStep(
        name="movingaverage",
        valueColumn="value",
        columnToSort="id",
        movingWindow=3,
        newColumnName="rolling_average",
    )
    # The benchmark fixture repeatedly invokes the step executor and times it.
    benchmark(execute_moving_average, step, frame)
| [
"noreply@github.com"
] | ToucanToco.noreply@github.com |
06c28ca540ed9665ccbe1b7f66eaf5574e18b621 | 998a6cf9ce2680abf89425724f7dcfb56a68a05a | /polls/migrations/0026_auto_20210216_1322.py | 84dd90f6349ad3a0c7275b0d5470e3127b0671e3 | [] | no_license | Pasquale012/SocialNetworkAnalytics | 740fd6bf3242836b5b178e1cddc95bd1ca746fc6 | 32697c2bb44ae5f5e0a8eebcc371b96a6207478f | refs/heads/master | 2023-03-17T16:39:09.537984 | 2021-03-03T19:41:39 | 2021-03-03T19:41:39 | 340,024,286 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 716 | py | # Generated by Django 3.1.4 on 2021-02-16 12:22
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated schema migration: relaxes the three average-sentiment
    # fields on the Post model so they may be NULL (null=True).

    dependencies = [
        ('polls', '0025_auto_20210216_1251'),
    ]

    operations = [
        migrations.AlterField(
            model_name='post',
            name='avgNegativeSentiment',
            field=models.FloatField(null=True),
        ),
        migrations.AlterField(
            model_name='post',
            name='avgNeutralSentiment',
            field=models.FloatField(null=True),
        ),
        migrations.AlterField(
            model_name='post',
            name='avgPositiveSentiment',
            field=models.FloatField(null=True),
        ),
    ]
| [
"43438431+Pasquale012@users.noreply.github.com"
] | 43438431+Pasquale012@users.noreply.github.com |
b5326b685b11454f3599295a478e33dae76bfdde | de0aee5d96ce1e255c0fef0e66450bab3aadd2e7 | /admin.py | 98bf0a40ede109bd32080740760a75bef0309b26 | [] | no_license | vinay-chauhan/Web-App-of-Employee-usingDjango-RestAPI | 516b86996f895fdc3cf34b3f6929e7b04ad06204 | 060e7521190cc1cf933737c4efe2d87ac044a836 | refs/heads/master | 2020-06-26T02:27:57.432690 | 2020-01-24T08:53:32 | 2020-01-24T08:53:32 | 199,498,175 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 124 | py | from django.contrib import admin
from.models import employees
# Expose the employees model in the Django admin interface.
admin.site.register(employees)
#Register this models here | [
"noreply@github.com"
] | vinay-chauhan.noreply@github.com |
badb5b482d846184bb34c1a22973b81bd89f926b | 9ee0b1d4ddf483ac080d72e1d05131749f95758d | /GestiondeProyectos/forms.py | 12e4f6fc4073f29dfc9de938366aeff83c6ef32f | [] | no_license | IS2fpuna2015/sgpa | 34f785a49cd47a97d36d91b4701b02f21f774725 | 0021ab0be0bfa1de1c4a39cf002922e97bbe7974 | refs/heads/master | 2016-09-05T22:32:09.504493 | 2015-08-20T02:16:57 | 2015-08-20T02:16:57 | 40,995,262 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,860 | py | from django import forms
from django.forms import ModelForm, DateInput, Textarea, ModelChoiceField, RadioSelect, CheckboxSelectMultiple, \
ModelMultipleChoiceField, CharField, TextInput, ChoiceField, Select
from GestiondeProyectos.models import Proyecto
from GestiondeUsuarios.models import Usuario
from django.contrib.auth.models import Group
class CrearProyectoForm(ModelForm,forms.Form):
    """Form for creating a Proyecto: state starts as PENDIENTE (read-only),
    scrum master / development team / client are picked from users in the
    corresponding auth groups.

    NOTE(review): the Group.objects.get(...) querysets below are evaluated at
    module import time, so newly created groups/users require a restart, and
    import fails if a group is missing -- confirm this is intended.
    NOTE(review): is_active is filtered with the string "True" rather than the
    boolean True -- verify this matches the Usuario model's field type.
    """
    # Project state is shown but not editable; new projects are PENDIENTE.
    estado_proyecto = CharField(widget=TextInput(attrs={'readonly': True,'value':'PENDIENTE'}),)
    # Single choice among active users in the "Scrum Master" group.
    scrum_master = ModelChoiceField(initial=2, queryset=Usuario.objects.filter(groups=Group.objects.get(name='Scrum Master').id).filter(is_active="True"),widget=RadioSelect)
    # Multiple choice among active users in the "Desarrollador" group.
    equipo_desarrollo = ModelMultipleChoiceField(queryset=Usuario.objects.filter(groups=Group.objects.get(name='Desarrollador').id).filter(is_active="True"), widget=CheckboxSelectMultiple)
    # Single choice among active users in the "Cliente" group.
    cliente = ModelChoiceField(initial=2,queryset=Usuario.objects.filter(groups=Group.objects.get(name='Cliente').id).filter(is_active="True"), widget=RadioSelect)

    class Meta:
        model = Proyecto
        fields = ('nombre_proyecto', 'codigo_proyecto', 'descripcion_proyecto',
                  'fecha_inicio', 'fecha_finalizacion', 'scrum_master', 'equipo_desarrollo','estado_proyecto','cliente')
        exclude= ('id_proyecto','cantidad_sprints')
        widgets = {
            'fecha_inicio': DateInput(attrs={'class': 'datepicker'}),
            'fecha_finalizacion': DateInput(attrs={'class': 'datepicker'}),
            'descripcion_proyecto': Textarea(attrs={'rows': 3, 'cols': 30})}
class Proyecto_Buscar_form(forms.Form):
    # Simple project-search form: one optional text input rendered with the
    # "special" CSS class.
    Busqueda = forms.CharField(widget=forms.TextInput(attrs={'class':'special'}), required=False)
class ModificarProyectoForm(CrearProyectoForm):
    # Uses the Meta class to mark identifying fields as read-only when editing.
    # NOTE(review): the 'readonly' attribute on a <select> (cliente) does not
    # actually prevent changes in most browsers -- confirm server-side checks.
    class Meta:
        model = Proyecto
        fields = ('nombre_proyecto', 'codigo_proyecto', 'descripcion_proyecto',
                  'fecha_inicio', 'fecha_finalizacion','scrum_master', 'equipo_desarrollo','estado_proyecto','cliente',)
        exclude= ('id_proyecto',)
        widgets = {
            'nombre_proyecto': TextInput(attrs={'readonly': 'True'}),
            'cliente': Select(attrs={'readonly': 'readonly'}),
            'codigo_proyecto': TextInput(attrs={'readonly': 'True'}),
            'fecha_inicio': DateInput(attrs={'class': 'datepicker', 'readonly': 'True'}),
            'fecha_finalizacion': DateInput(attrs={'class': 'datepicker'}),
            'descripcion_proyecto': Textarea(attrs={'rows': 3, 'cols': 30})}
    # Unlike creation, the project state becomes editable here.
    ESTADOS = (
        ('PENDIENTE','Pendiente'),
        ('ACTIVO','Activo'),
        ('CANCELADO','Cancelado'),
        ('FINALIZADO','Finalizado')
    )
    estado_proyecto = ChoiceField(choices=ESTADOS, widget=Select)
| [
"leoben1907@gmail.com"
] | leoben1907@gmail.com |
a6c552444e9d9a1eb6d0a919943c095a9c54df0a | 4f41a526bf7d617a36f8fe3fa569f1206dfebf9c | /10990.py | 2aba138f17ee51ba9d7f255ce1584b8a7496464c | [] | no_license | joonluvschipotle/baekjoon | 7e519d967bf824e0d37e49a7da4f607594ec6e60 | f8f5039467e235eee9f7f2735273efae5ee8d991 | refs/heads/master | 2020-05-27T09:42:35.616080 | 2017-07-11T08:57:49 | 2017-07-11T08:57:49 | 82,538,945 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 157 | py | a = int(input())
# Draw a hollow "V" of height a: each row has a left star drifting right and,
# from the second row on, a second star 2*i - 1 columns further right.
for i in range(a):
    row = " " * (a - i - 1) + "*"
    if i:
        row += " " * (i * 2 - 1) + "*"
    print(row)
| [
"joonluvschipotle@gmail.com"
] | joonluvschipotle@gmail.com |
0ff796b0204610ebc28f99e5ecab249d593fa3d0 | 780cf76f45bb8052d85ddefd08c3ff19748675d7 | /mysite/mysite/urls.py | 76e3be8d9d5f3f784f3aa00a0b986654e472df95 | [] | no_license | AntonioCarmonaLopez/phyon-flask-django | f86bfd843c42ed0ce9b2ab87b42b28f9bc6acd06 | 431c0ec0e93657989ba3161715757e250dc6ef5c | refs/heads/master | 2022-12-06T09:47:59.037290 | 2020-08-28T20:25:35 | 2020-08-28T20:25:35 | 291,057,307 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 797 | py | """mysite URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.1/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import include, path
# Order matters: Django resolves against the first matching pattern.
urlpatterns = [
    path('',include('aerolinea.urls')),  # site root -> aerolinea app's URLconf
    path('admin/', admin.site.urls),  # built-in Django admin
]
| [
"antocarmona@gmail.com"
] | antocarmona@gmail.com |
1dcd22e5933acc5c963914b87de070d6d05b9c51 | fd0cf40ace76aef3422ef2aa3a36294b0d54ab33 | /app.py | cb1ef7f3090147fc6dc1a76253038703df59b505 | [] | no_license | codeforpakistan/PBS-API | 6eae4abb92518ea2887fd7fd0acabe9cf3e27496 | f4dd569f70dd39d82cde1b21e86ee0860d8ea549 | refs/heads/master | 2021-01-01T15:59:44.682782 | 2015-01-11T16:28:15 | 2015-01-11T16:28:15 | 29,097,456 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 172 | py | #!api/bin/python
from flask import Flask
app = Flask(__name__)

@app.route('/')
def index():
    # Root endpoint: returns a plain-text greeting.
    return "Hello World!"

if __name__ == '__main__':
    # NOTE(review): debug=True enables Werkzeug's interactive debugger and
    # auto-reloader -- convenient locally, unsafe in production.
    app.run(debug=True)
| [
"dejavu87@gmail.com"
] | dejavu87@gmail.com |
de4cd4e8713c3f87813f40681a5133139406d84f | 82470c6c4819f8b874c92b2e036c6bb6dd1d365b | /Python program/array_integers.py | 51d1481106e7cb95212058f22beec6baa95efb5d | [] | no_license | lion7500000/python-selenium-automation | 87589891211bf327bffda50134c4da918709fd2b | a2f3701661ddf713edb9750150a97e7cd8adb967 | refs/heads/master | 2021-08-08T10:16:44.644725 | 2021-07-29T03:10:54 | 2021-07-29T03:10:54 | 210,194,357 | 0 | 0 | null | 2019-09-22T18:23:33 | 2019-09-22T18:23:32 | null | UTF-8 | Python | false | false | 511 | py | # В одномерном массиве целых чисел определить два наименьших элемента.
# The two values may be equal to each other (both being the minimum) or differ.
def array_integer(numbers):
    """Print the two smallest elements of *numbers*, separated by a space.

    The two values may be equal (both minimal) or distinct.  With fewer than
    two elements, whatever is available is printed.
    """
    # sorted() already works on a copy of the input, so the original manual
    # element-by-element copy loop was unnecessary.
    print(*sorted(numbers)[:2])


array_integer([78, 45, -1, 3, 0, 4, 6, 76])
| [
"lion7500000@gmail.com"
] | lion7500000@gmail.com |
19d57a912039dd74cdbea38c747443ac27d20906 | c4df7a430cdae38d8ff4a60d4db33dce88926dec | /genesys/genesys/codelets/compiler/transformations/__init__.py | 80f141f934e59bd6a698f9ecf124605a80f0f5c7 | [
"BSD-3-Clause"
] | permissive | ziqingzeng/public | 82b16f4ca60ba248dff68036263c76c85cf9ae32 | 4102b3bd42f43b49cf74599492d52d4f755ab7b2 | refs/heads/main | 2023-04-14T03:32:48.566475 | 2021-04-17T05:28:39 | 2021-04-17T05:28:39 | 355,981,617 | 0 | 0 | BSD-3-Clause | 2021-04-08T16:38:46 | 2021-04-08T16:38:46 | null | UTF-8 | Python | false | false | 99 | py | from .util import factors
from .transformations import split_loop, split_transfer, split_operation
| [
"sean.kinzer@gmail.com"
] | sean.kinzer@gmail.com |
94199e574ea079452bb6bd08d05345478afaf252 | 51385f1f291c8365b9307420efc877e254b9398c | /volunteer/migrations/0016_auto_20201027_1139.py | e56c6b73fc19bda871dc27c2bbe034e1fe598748 | [] | no_license | jlr9au/AllHands | 7df5e9569db1a0ee7c3b794e1adaf2e6a5f6fb28 | 3b87d7a0efd5eee33f1af05f0c170eb02f6f9261 | refs/heads/master | 2023-01-31T06:59:54.580070 | 2020-11-24T21:58:34 | 2020-11-24T21:58:34 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 635 | py | # Generated by Django 3.1.1 on 2020-10-27 15:39
import datetime
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated: adds VolunteerProfile.numofevents (default 0) and
    # rewrites the default of VolunteerEvent.event_datetime.
    # NOTE(review): the datetime default below is the timestamp frozen when
    # makemigrations ran (2020-10-27), not "now" at insert time -- a common
    # pitfall of default=datetime.datetime(...) versus a callable such as
    # django.utils.timezone.now.

    dependencies = [
        ('volunteer', '0015_auto_20201026_2345'),
    ]

    operations = [
        migrations.AddField(
            model_name='volunteerprofile',
            name='numofevents',
            field=models.IntegerField(default=0),
        ),
        migrations.AlterField(
            model_name='volunteerevent',
            name='event_datetime',
            field=models.DateTimeField(default=datetime.datetime(2020, 10, 27, 11, 39, 43, 665138)),
        ),
    ]
| [
"jlr9au@virginia.edu"
] | jlr9au@virginia.edu |
a84741cede8f96f1958e18435f820d2bca15beb8 | 6014829f05c6d651edddcd4924e434fc3e363f85 | /python/PostProcessing/Forces1706.py | 851db9482a4878d594db13c2e386d9557b503a0f | [
"MIT"
] | permissive | riemannsBeard/wigglelab | 4c846addd001b1d71aeb51f72c31a871199dc35a | e47c624db9386f13bf3020c22c05765929089f1d | refs/heads/master | 2021-05-10T14:37:55.290801 | 2018-01-09T18:14:15 | 2018-01-09T18:14:15 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,024 | py | import os.path as path
import numpy as np
from glob import glob as glob
from PostProcessingIO import isNumber
from collections import defaultdict
class Forces:
    """Load and post-process force time series written by an OpenFOAM-style
    ``force.dat`` (10 columns: time + total/pressure/viscous x/y/z).

    Fixes relative to the original:
    * ``getMaxTime`` was missing the colon on its ``def`` line (SyntaxError).
    * ``calculateAveragesStd`` and ``getForcesByTime`` called an undefined
      ``getIndices`` instead of ``self._getIndices`` (NameError).
    * ``_verbosePrint`` was defined twice; the duplicate is removed.
    * ``_getTimeIndex`` indexed the array before checking the bound, raising
      IndexError for query times beyond the last sample.
    """

    # Column indices in the raw (n, 10) force matrix.
    _TIME = 0
    _TOTAL_X = 1
    _TOTAL_Y = 2
    _TOTAL_Z = 3
    _PRESSURE_X = 4
    _PRESSURE_Y = 5
    _PRESSURE_Z = 6
    _VISCOUS_X = 7
    _VISCOUS_Y = 8
    _VISCOUS_Z = 9

    def __init__(self, inputpath, average = True, FFT = False, verbose = True):
        """Read forces from a single file or from a case directory whose
        numeric time subdirectories each contain a ``force.dat``.

        Raises IOError if *inputpath* does not exist.
        """
        self._inputpath = inputpath
        self._verbose = verbose
        if not path.exists(inputpath):
            raise IOError("could not find file: {}".format(inputpath))

        if path.isfile(inputpath):
            # Single force.dat file.
            self._rawForces = self._readForceFile(inputpath)
        elif path.isdir(inputpath):
            self._timeDirs = []
            self._rawForces = []
            # Iterate over the numeric time directories and collect forces.
            for timeDir in glob(path.join(inputpath, "*")):
                if isNumber(timeDir.split("/")[-1]):
                    self._verbosePrint("processing time dir {}".format(timeDir))
                    self._timeDirs.append(timeDir)
                    self._rawForces.append(self._readForceFile(path.join(timeDir, "force.dat")))
            # Concatenate all chunks and sort rows by time (first column).
            self._rawForces = np.concatenate(self._rawForces)
            self._rawForces = self._rawForces[self._rawForces[:, 0].argsort()]

        # Build a nested dict view: forces[type][component] -> 1-D array.
        pos = iter(range(1, 10))
        self.forces = {}
        self.forces["time"] = self._rawForces[:, 0]
        for forceType in ("total", "pressure", "viscous"):
            self.forces[forceType] = {}
            for component in ("x", "y", "z"):
                self.forces[forceType][component] = self._rawForces[:, next(pos)]

        if average:
            self.calculateAveragesStd()
        if FFT:
            raise Warning("not implemented yet!")

    def _readForceFile(self, filepath):
        """Parse one force file into an (n, 10) float array, skipping comment
        lines ('#'), blank lines and malformed rows."""
        raw = []
        with open(filepath, 'r') as filehandle:
            for line in filehandle:
                # Strip OpenFOAM's parentheses around vector components.
                tmp = [x.strip('(').strip(')') for x in line.split()]
                if len(tmp) == 0:
                    continue
                elif tmp[0] == '#':
                    continue
                elif len(tmp) != 10:
                    continue
                try:
                    raw.append([float(i) for i in tmp])
                except ValueError:
                    print("could not convert string to float in line:")
                    print("\t" + line)
                    print("in file:")
                    print("\t" + filepath)
        return np.array(raw)

    def _getTimeIndex(self, time):
        """Return the index of the first sample whose time is >= *time*
        (== len(data) if *time* is past the last sample)."""
        index = 0
        numSamples = len(self._rawForces[:, 0])
        # Bounds check must come before the element access.
        while index < numSamples and self._rawForces[index, 0] < time:
            index += 1
        return index

    def _getIndices(self, startTime = 0, endTime = 0):
        """Translate a (startTime, endTime) window into array indices;
        (0, 0) selects the full range."""
        startIndex = 0
        endIndex = len(self.forces["time"])
        if startTime == 0 and endTime == 0:
            pass
        elif startTime > 0 and endTime > 0 and startTime > endTime:
            self._verbosePrint("start time > end time, setting end time to max time: {}".format(self._rawForces[-1, self._TIME]))
            startIndex = self._getTimeIndex(startTime)
        elif startTime == 0 and endTime > 0:
            self._verbosePrint("start time is set to zero!")
            endIndex = self._getTimeIndex(endTime)
        else:
            startIndex = self._getTimeIndex(startTime)
            endIndex = self._getTimeIndex(endTime)
        self._verbosePrint("start index set to {} and end index set to {}".format(startIndex, endIndex))
        return (startIndex, endIndex)

    def calculateAveragesStd(self, startTime = 0, endTime = 0):
        """Compute per-type/per-component mean and standard deviation over the
        given time window; returns (averageForces, stdForces) dicts."""
        self.averageForces = {}
        self.stdForces = {}
        startIndex, endIndex = self._getIndices(startTime, endTime)
        for forceType in ("total", "pressure", "viscous"):
            self.averageForces[forceType] = {}
            self.stdForces[forceType] = {}
            for component in ("x", "y", "z"):
                self.averageForces[forceType][component] = np.average(self.forces[forceType][component][startIndex:endIndex])
                self.stdForces[forceType][component] = np.std(self.forces[forceType][component][startIndex:endIndex])
        return (self.averageForces, self.stdForces)

    def getMaxTime(self):
        """Return the largest time value present in the loaded data."""
        self._verbosePrint("max time is {}".format(self.forces["time"][-1]))
        return self.forces["time"][-1]

    def getForcesByTime(self, startTime = 0, endTime = 0, forceType = "total", forceComponent = "x"):
        """Return the requested force component restricted to the time window."""
        startIndex, endIndex = self._getIndices(startTime, endTime)
        return self.forces[forceType][forceComponent][startIndex:endIndex]

    def _verbosePrint(self, message):
        # Single definition (was duplicated in the original source).
        if self._verbose == True:
            print(message)
class ForceCoefficients(Forces):
    # Subclass intended to express raw forces as dimensionless coefficients.
    # NOTE(review): __init__ does not call super().__init__(), and the
    # rho/velocity/area/average/FFT parameters are accepted but not stored
    # here -- presumably consumed further down in the full source; confirm
    # against the complete file before relying on them.
    def __init__(self, inputpath, rho = 1, velocity = 1, area = 2, average = True, FFT = False, verbose = True):
        self._inputpath = inputpath
        self._verbose = verbose
        # Delegates parsing to a wrapped Forces instance.
        self._forceObject = Forces(inputpath)
| [
"aljoscha.sander@gmail.com"
] | aljoscha.sander@gmail.com |
ff0aa8ace59313d017b143bff8ce7d11063d4153 | bd12c5bb43878e798abf6a9ad62dc6cebbf1ceae | /firefighters/lib/python3.6/locale.py | a6f32e374f21eee9c01566e0671eb84811776a1c | [] | no_license | alanhovorka/queries | 0a29500fcc35993ae9039a0cf54cf8c0ffa19ee5 | 13e0c02c2c4a3b85bae9fcb581d3895c5716b645 | refs/heads/master | 2021-08-20T07:50:52.679173 | 2017-11-28T14:57:22 | 2017-11-28T14:57:22 | 108,158,671 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 48 | py | /Users/reporter/anaconda/lib/python3.6/locale.py | [
"alanhovorka94@gmail.com"
] | alanhovorka94@gmail.com |
e4f0ad33a8eb88cb40dec967701bf6563ae47890 | 7db513150b7daaf1f8ac435c41b850545156d037 | /Fort_Machine/dbstore/init_db.py | 6c2acdb08f2302fd2efa7a222c488c05ac13a7e0 | [] | no_license | tonglinge/MyProjects | bb94e141e1833dc38fe2b41d5b80fb4abc222cf0 | 002f80dcc07e3502610b0a0be1e91fe61bcfc42c | refs/heads/master | 2021-04-15T14:53:46.175035 | 2017-03-19T04:39:48 | 2017-03-19T04:39:48 | 61,760,333 | 4 | 5 | null | null | null | null | UTF-8 | Python | false | false | 4,061 | py | #!/usr/bin/env python
"""
初始化数据表文件:
groups.xml: 服务器主机组文件,包括:
GID:组编号
GNAME:组名称
hosts.xml: 主机文件,包括:
GID: 所属主机组编号
HIP: 主机IP
HNAME:主机名
HUSER:登录名
HKEY: 登录密码 (AUTH_TYPE=1则为密码,AUTH_TYPE=2则为私钥文件)
AUTH_TYPE:验证类型(1:用户名密码登录,2:密钥登录)
users.xml: 用户文件,包括:
UID:用户ID
UNAME: 用户名
UPASS: 用户密码 (加密)
UROLE: 用户权限 (user:普通用户, admin: 管理员)
GID: 管理服务器组ID (1,2)
"""
import os
import sys
from xml.etree import ElementTree
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.append(BASE_DIR)
from modules import common
groups = {
"G01": "新闻组",
"G02": "视频组",
"G03": "开发组",
"G04": "文件组"
}
hosts = {
"G01": [{"HIP": "192.168.2.128", "HNAME": "agent", "HUSER": "root", "PORT": "22", "AUTH_TYPE": 1, "HKEY": "redhat"},
{"HIP": "192.168.2.129", "HNAME": "ubuntu", "HUSER": "super", "PORT": "22", "AUTH_TYPE": 1,
"HKEY": "super"}, ],
"G02": [{"HIP": "192.168.1.128", "HNAME": "agent", "HUSER": "root", "PORT": "22", "AUTH_TYPE": 2,
"HKEY": "private_test03.rsa"}, ],
"G03": [
{"HIP": "192.168.2.129", "HNAME": "ubuntu", "HUSER": "super", "PORT": "22", "AUTH_TYPE": 1, "HKEY": "test"}, ],
"G04": [{"HIP": "192.168.2.128", "HNAME": "agent", "HUSER": "root", "PORT": "22", "AUTH_TYPE": 1, "HKEY": "redhat"}, ]
}
users = {
"admin": {"UPASS": "12345", "GID": "G01,G02,G03,G04", "UROLE": "admin", "NAME": "administrator"},
"test": {"UPASS": "12345", "GID": "G01,G02", "UROLE": "user", "NAME": "zhangsan"}
}
def init_groups(data=None, path="groups.xml"):
    """Write the server-group table to an XML file.

    Generalized from the original hard-coded version: callers may supply
    their own mapping and output path; the no-argument call is unchanged.

    Args:
        data: mapping of group id -> group name.  Defaults to the
            module-level ``groups`` table.
        path: output file name (default ``groups.xml``).
    """
    if data is None:
        data = groups
    root = ElementTree.Element("GROUPS")
    for group_id, group_name in data.items():
        group = ElementTree.SubElement(root, "GROUP", attrib={"GID": group_id})
        gname = ElementTree.SubElement(group, "GNAME")
        gname.text = group_name
    ElementTree.ElementTree(root).write(path, encoding="utf-8", xml_declaration=True)
def init_hosts(data=None, path="hosts.xml"):
    """Write the host table to an XML file.

    Generalized from the original hard-coded version: callers may supply
    their own mapping and output path; the no-argument call is unchanged.

    Args:
        data: mapping of group id -> list of host dicts with the keys
            HIP, HNAME, HUSER, AUTH_TYPE, HKEY, PORT.  Defaults to the
            module-level ``hosts`` table.
        path: output file name (default ``hosts.xml``).
    """
    if data is None:
        data = hosts
    root = ElementTree.Element("HOSTS")
    for group_id, host_list in data.items():
        group = ElementTree.SubElement(root, "GROUP", attrib={"GID": group_id})
        for host in host_list:
            node = ElementTree.SubElement(group, "HOST")
            # AUTH_TYPE is an int in the seed data, so coerce it to text.
            for tag, text in (("HIP", host["HIP"]),
                              ("HNAME", host["HNAME"]),
                              ("HUSER", host["HUSER"]),
                              ("AUTH_TYPE", str(host["AUTH_TYPE"])),
                              ("HKEY", host["HKEY"]),
                              ("PORT", host["PORT"])):
                ElementTree.SubElement(node, tag).text = text
    ElementTree.ElementTree(root).write(path, encoding="utf-8", xml_declaration=True)
def init_users():
    """Write users.xml: one USER node per account, password hashed on disk."""
    root = ElementTree.Element("USERS")
    for login, info in users.items():
        user = ElementTree.SubElement(root, "USER", attrib={"UNAME": login})
        upass = ElementTree.SubElement(user, "UPASS")
        ugid = ElementTree.SubElement(user, "GID")
        urole = ElementTree.SubElement(user, "UROLE")
        uname = ElementTree.SubElement(user, "NAME")
        # Never store the plaintext password; hash it first.
        upass.text = common.encry_sha(info["UPASS"])
        ugid.text = info["GID"]
        urole.text = info["UROLE"]
        uname.text = info["NAME"]
    ElementTree.ElementTree(root).write("users.xml", encoding="utf-8", xml_declaration=True)
if __name__ == "__main__":
    # Only the user table is (re)generated when run as a script; call
    # init_groups() / init_hosts() explicitly if those files are needed too.
    init_users()
| [
"songfreeman@sina.com"
] | songfreeman@sina.com |
59d96b29d5ed590da2ea3136fc938561be154426 | cd187d99cd5e83ca5b8409085d765e74da643496 | /system/migrations/0018_hole_page.py | 2d3cbc19b3c45219f4be94856e6cb4f6cd220311 | [] | no_license | 1701210370/pys | 541c5a910d2b06de2b54ad158e8fcf5309298d01 | b75dc6f00fed4b9c5aa6f8d966347f52a15e5cd6 | refs/heads/master | 2022-04-28T23:44:59.953714 | 2020-04-27T00:38:35 | 2020-04-27T00:38:35 | 258,520,965 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 382 | py | # Generated by Django 2.1.4 on 2020-03-27 02:23
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('system', '0017_hole'),
]
operations = [
migrations.AddField(
model_name='hole',
name='page',
field=models.IntegerField(default=0),
),
]
| [
"2428211025@qq.com"
] | 2428211025@qq.com |
a2e1e744929fe1a6c3c7212157cdcdf9aded4f67 | 82ebc84064d0f2eac9580cd2e67bf3c8c1e2de95 | /accounts/migrations/0014_auto_20200407_1328.py | 5c42140348a90e41d46748165733dc0543ff0a47 | [] | no_license | nayakji/Website-Doctor-Python-Django | 2065863fe49964d9a0562f1aa49bdc1a4b8fea7b | 0cd22daa3d7abb9b7e6777cc94c64b6394ae0183 | refs/heads/master | 2022-06-18T16:20:06.525901 | 2020-05-08T23:42:34 | 2020-05-08T23:42:34 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 367 | py | # Generated by Django 3.0.3 on 2020-04-07 10:28
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('accounts', '0013_profile_join_new'),
]
operations = [
migrations.RenameField(
model_name='profile',
old_name='join_new',
new_name='join_niw',
),
]
| [
"khaled.info223@gmail.com"
] | khaled.info223@gmail.com |
861da89bfc67bd430419abaa0467f920d3e5db7d | 10ec694b88796923d63014108ea354c29ce366fa | /L9 Python Scripts/Codec_b995_9/Codec.py | 05882801c8d6c145b4e40c1913f76675b20872d0 | [] | no_license | frequenzteiler/monomodular | b74d4a0e5c69019319b8ee7af46fbc53d88a9249 | 05b3227beb828054fa11839fbdf813b60b7c68ad | refs/heads/master | 2021-05-27T09:45:18.919848 | 2014-03-10T08:14:32 | 2014-03-10T08:14:32 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 41,467 | py | # by amounra 0413 : http://www.aumhaa.com
from __future__ import with_statement
import Live
import time
import math
""" All of the Framework files are listed below, but we are only using using some of them in this script (the rest are commented out) """
from _Framework.ButtonElement import ButtonElement # Class representing a button a the controller
from _Framework.ButtonMatrixElement import ButtonMatrixElement # Class representing a 2-dimensional set of buttons
from _Framework.ChannelStripComponent import ChannelStripComponent # Class attaching to the mixer of a given track
from _Framework.CompoundComponent import CompoundComponent # Base class for classes encompasing other components to form complex components
from _Framework.ControlElement import ControlElement # Base class for all classes representing control elements on a controller
from _Framework.ControlSurface import ControlSurface # Central base class for scripts based on the new Framework
from _Framework.ControlSurfaceComponent import ControlSurfaceComponent # Base class for all classes encapsulating functions in Live
#from _Framework.DeviceComponent import DeviceComponent # Class representing a device in Live
from _Framework.EncoderElement import EncoderElement # Class representing a continuous control on the controller
from _Framework.InputControlElement import * # Base class for all classes representing control elements on a controller
from _Framework.MixerComponent import MixerComponent # Class encompassing several channel strips to form a mixer
from _Framework.ModeSelectorComponent import ModeSelectorComponent # Class for switching between modes, handle several functions with few controls
from _Framework.NotifyingControlElement import NotifyingControlElement # Class representing control elements that can send values
from _Framework.SessionComponent import SessionComponent # Class encompassing several scene to cover a defined section of Live's session
from _Framework.TransportComponent import TransportComponent # Class encapsulating all functions in Live's transport section
from _Framework.Task import *
from _Generic.Devices import *
from Push.M4LInterfaceComponent import M4LInterfaceComponent
"""Imports from _Mono_Framework"""
from _Mono_Framework.DetailViewControllerComponent import DetailViewControllerComponent
from _Mono_Framework.CodecEncoderElement import CodecEncoderElement
from _Mono_Framework.EncoderMatrixElement import EncoderMatrixElement
from _Mono_Framework.MonoBridgeElement import MonoBridgeElement
from _Mono_Framework.MonoButtonElement import MonoButtonElement
from _Mono_Framework.MonoEncoderElement import MonoEncoderElement
from _Mono_Framework.MonomodComponent import MonomodComponent
from _Mono_Framework.Live8DeviceComponent import Live8DeviceComponent as DeviceComponent
from _Mono_Framework.LiveUtils import *
"""Custom files, overrides, and files from other scripts"""
from CodecDeviceSelectorComponent import CodecDeviceSelectorComponent
from CodecResetSendsComponent import CodecResetSendsComponent
from CodecDeviceComponent import CodecDeviceComponent
from SpecialCodecDeviceComponent import SpecialCodecDeviceComponent
from Map import *
def tracks_to_use(self):
    """Return the visible session tracks followed by the return tracks."""
    song = self.song()
    return tuple(song.visible_tracks) + tuple(song.return_tracks)
MixerComponent.tracks_to_use = tracks_to_use
""" Here we define some global variables """
factoryreset = (240,0,1,97,4,6,247)
btn_channels = (240, 0, 1, 97, 4, 19, CHANNEL, CHANNEL, CHANNEL, CHANNEL, CHANNEL, CHANNEL, CHANNEL, CHANNEL, CHANNEL, CHANNEL, CHANNEL, CHANNEL, CHANNEL, CHANNEL, CHANNEL, CHANNEL, CHANNEL, CHANNEL, CHANNEL, CHANNEL, CHANNEL, CHANNEL, CHANNEL, CHANNEL, CHANNEL, CHANNEL, CHANNEL, CHANNEL, CHANNEL, CHANNEL, CHANNEL, CHANNEL, CHANNEL, CHANNEL, CHANNEL, CHANNEL, CHANNEL, CHANNEL, CHANNEL, CHANNEL, CHANNEL, CHANNEL, CHANNEL, CHANNEL, 0, 247);
enc_channels = (240, 0, 1, 97, 4, 20, CHANNEL, CHANNEL, CHANNEL, CHANNEL, CHANNEL, CHANNEL, CHANNEL, CHANNEL, CHANNEL, CHANNEL, CHANNEL, CHANNEL, CHANNEL, CHANNEL, CHANNEL, CHANNEL, CHANNEL, CHANNEL, CHANNEL, CHANNEL, CHANNEL, CHANNEL, CHANNEL, CHANNEL, CHANNEL, CHANNEL, CHANNEL, CHANNEL, CHANNEL, CHANNEL, CHANNEL, CHANNEL, 247);
# Sysex messages (command 30) selecting the hardware encoder resolution;
# sent when shift is pressed/released to switch between fine and normal.
SLOWENCODER = (240, 0, 1, 97, 4, 30, 00, 00, 247)
NORMALENCODER = (240, 0, 1, 97, 4, 30, 02, 00, 247)
FASTENCODER = (240, 0, 1, 97, 4, 30, 04, 00, 247)
class CodecMonoButtonElement(MonoButtonElement):
    """Mono button element preconfigured with the Code's colour map."""

    def __init__(self, *a, **k):
        super(CodecMonoButtonElement, self).__init__(*a, **k)
        colors = tuple(COLOR_MAP)
        self.set_color_map(colors)
class CodecMonomodComponent(MonomodComponent):
    """Monomod host tailored to the Livid Code surface.

    Routes grid, key and dial events between the hardware and the active
    mod client, and draws the client's feedback (colors, encoder rings)
    back onto the surface.

    Review fixes in this revision:
    - `_dial_button_matrix_value`: undefined name `val` replaced by `value`
      (was a guaranteed NameError on alt+shift dial-button presses).
    - `_send_key`: inner loop no longer shadows the `index` parameter, so
      the key LED at the *passed* index is updated (previously always 7).
    - Identity tests `is True` / `is False` on `_shift_pressed` replaced by
      truthiness checks: the flag is stored as int 0/1 in some paths, and
      `0 is False` is never true, which made those branches dead code.
    - In the shift branch of `_dial_button_matrix_value`, the tautological
      `y < 8` test is corrected to `x < 8` by symmetry with the y == 0 row.
    """

    def __init__(self, *a, **k):
        super(CodecMonomodComponent, self).__init__(*a, **k)
        self._host_name = 'Code'

    def select_client(self, number, *a, **k):
        # Public wrapper around the parent's private client selector.
        super(CodecMonomodComponent, self)._select_client(number, *a, **k)

    def _matrix_value(self, value, x, y, is_momentary):
        """Handle a button-grid event from the controller.

        With shift held the grid becomes a navigation surface (client/bank
        select, channel select, 8x8 view offset); otherwise events are
        forwarded to the active client."""
        assert (self._grid != None)
        assert (value in range(128))
        assert isinstance(is_momentary, type(False))
        if (self.is_enabled()):
            if self._shift_pressed == 1:
                if value > 0:
                    if y == 0:
                        # Top row: pick the client within the current bank page.
                        self._select_client(x + (self._bank_add * 8))
                        self._display_bank()
                    elif y == 1:
                        # Second row: pick the client's MIDI channel page.
                        self._active_client._set_channel(x + (self._chan_add * 8))
                        self._display_channel()
                    elif (x == 0) and (y == 2):
                        self._bank_add = abs(self._bank_add - 1)
                        self._display_bank()
                    elif (x == 7) and (y == 2):
                        self._chan_add = abs(self._chan_add - 1)
                        self._display_channel()
                    elif (x in range(2, 4)) and (y in range(2, 4)):
                        # Four center quadrants select the 8x8 view offset.
                        self._change_offset(0, 0)
                    elif (x in range(4, 6)) and (y in range(2, 4)):
                        self._change_offset(8, 0)
                    elif (x in range(2, 4)) and (y in range(4, 6)):
                        self._change_offset(0, 8)
                    elif (x in range(4, 6)) and (y in range(4, 6)):
                        self._change_offset(8, 8)
                    elif (y == 7):
                        if self._alt_pressed > 0:
                            if (x == 7):
                                self.select_active_client()
                            else:
                                self._active_client._send_key(x, value)
            elif self._shift_pressed == 0:
                if self._locked == 1:
                    if y == 7:
                        # While locked the bottom row doubles as the key row.
                        if self._alt_pressed > 0:
                            if x == 7:
                                self.select_active_client()
                            else:
                                self._active_client._send_key(x, value)
                        else:
                            self._active_client._send_key(x, value)
                    else:
                        self._active_client._send_grid(x + self._x, y + self._y, value)
                else:
                    self._active_client._send_grid(x + self._x, y + self._y, value)

    def _alt_value(self, value):
        if self._shift_pressed == 0:
            self._alt_pressed = value != 0
            self._active_client._send('alt', int(self._alt_pressed))
            self.update()

    def _key_value(self, value, sender):
        if self.is_enabled():
            self._active_client._send_key(self._keys.index(sender), int(value != 0))

    def _send_key(self, index, value):
        """Draw a key state coming from the client onto the surface."""
        if self.is_enabled():
            if (self._shift_pressed > 0) or (self._locked > 0):
                if self._grid != None:
                    self._grid.get_button(index, 7).send_value(int(self._colors[value]))
                if self._dial_button_matrix != None:
                    # fix: loop variable used to shadow the `index` parameter,
                    # leaving it at 7 for the final key update below.
                    for button_index in range(8):
                        self._dial_button_matrix.get_button(button_index, 4).send_value(int(self._colors[self._active_client._key[button_index]]))
            if self._keys != None and len(self._keys) > index:
                self._keys[index].send_value(int(self._colors[value]))

    def on_enabled_changed(self):
        if self._active_client != None:
            if self.is_enabled():
                self._active_client._device_component.update()
                self._script.set_absolute_mode(self._active_client._absolute_mode)
                self._script.set_local_ring_control(self._active_client._local_ring_control)
            else:
                # Hand the encoders back to Live and restore default modes.
                for control in self._parameter_controls:
                    control.release_parameter()
                self._script.set_absolute_mode(1)
                self._script.set_local_ring_control(1)
        super(CodecMonomodComponent, self).on_enabled_changed()

    def _update_shift_button(self):
        if self._shift_button != None:
            if self._shift_pressed != 0:
                self._shift_button.turn_on()
            else:
                # Dim (color 7) rather than fully off so the button stays visible.
                self._shift_button.send_value(7, True)

    def update(self):
        if (self.is_enabled()) and (self._allow_updates == True) and (self._active_client != None):
            self.set_absolute_mode(self._active_client._absolute_mode)
            self.set_local_ring_control(self._active_client._local_ring_control)
            super(CodecMonomodComponent, self).update()

    def _dial_matrix_value(self, value, x, y):
        if self.is_enabled() and self._active_client != None:
            if self._script._absolute_mode == 0:
                # Relative mode: translate the inc/dec tick into the
                # client's relative value constants.
                value = RELATIVE[int(value == 1)]
            self._active_client._send_dial(x, y, value)

    def _reset_encoder(self, coord):
        self._dial_matrix.get_dial(coord[0], coord[1])._reset_to_center()

    def _dial_button_matrix_value(self, value, x, y, force):
        # fix: original compared `self._shift_pressed is False` / `is True`,
        # which is never true when the flag is stored as int 0/1.
        if (self.is_enabled()) and (not self._shift_pressed) and (self._active_client != None):
            if self._locked == 1 and y == 4:
                self._active_client._send_key(x, value)
                self._update_keys()
            else:
                self._active_client._send_dial_button(x, y, value)
        elif (self.is_enabled()) and (self._shift_pressed):
            if (y == 0) and (value > 0):
                if (x < 8):
                    self._select_client(x + (self._bank_add * 8))
                    self._display_bank()
                else:
                    self._locked = abs(self._locked - 1)
                    self.update()
            elif (y == 1):
                # fix: original tested `y < 8` (always true here); the ninth
                # column (x == 8) is meant to fall through to the alt handler.
                if (x < 8) and (value > 0):
                    self._active_client._set_channel(x + (self._chan_add * 8))
                    self._display_channel()
                else:
                    self._alt_pressed = int(value != 0)
                    self._active_client._send('alt', int(self._alt_pressed))
                    self.update()
            elif (y == 2):
                if (x == 0) and (value > 0):
                    self._bank_add = abs(self._bank_add - 1)
                    self._display_bank()
                elif (x == 7) and (value > 0):
                    self._chan_add = abs(self._chan_add - 1)
                    self._display_channel()
            elif (y == 4):
                # fix: `val` was an undefined name (NameError) in the original.
                if (self._alt_pressed > 0) and (x == 7) and (value > 0):
                    self.select_active_client()
                else:
                    self._active_client._send_key(x, value)
                    self._update_keys()

    def _send_wheel(self, column, row, wheel, parameter=None):
        """Draw one encoder's ring/feedback state sent from the client."""
        if self.is_enabled() and wheel != None:
            if column < 8 and row < 4:
                dial = self._dial_matrix.get_dial(column, row)
                if (parameter == 'value'):
                    dial._ring_value = int(wheel['value'])
                dial._ring_mode = int(wheel['mode'])
                dial._ring_green = int(wheel['green'] != 0)
                if (parameter == 'custom'):
                    dial._ring_custom = dial._calculate_custom(str(wheel['custom']))
            if self._shift_pressed:
                if row in range(2, 3) and column in range(0, 7):
                    self._dial_button_matrix.send_value(column, row, wheel['white'])
            elif self._locked:
                if row in range(0, 3):
                    self._dial_button_matrix.send_value(column, row, wheel['white'])
            else:
                self._dial_button_matrix.send_value(column, row, wheel['white'])

    def _update_wheel(self):
        # fix: was `if self._shift_pressed is False:` (dead when the flag is int 0).
        if self._dial_button_matrix != None:
            if not self._shift_pressed:
                for column in range(9):
                    for row in range(5):
                        self._send_wheel(column, row, self._active_client._wheel[column][row])

    def set_local_ring_control(self, val=1):
        self._local_ring_control = (val != 0)
        self._script.set_local_ring_control(self._local_ring_control)

    def set_absolute_mode(self, val=1):
        self._absolute_mode = (val != 0)
        self._script.set_absolute_mode(self._absolute_mode)

    # Temporary override: Live 9 handles device appointment itself.
    def set_appointed_device(self, *a, **k):
        pass

    # The Code has no navigation-box display; intentionally a no-op.
    def _send_nav_box(self):
        pass
class MonomodModeComponent(ModeSelectorComponent):
    """Two-state selector that toggles the surface in and out of mod mode.

    The component's ``update`` hook is replaced by the callback supplied
    at construction time."""

    def __init__(self, callback, script, *a, **k):
        super(MonomodModeComponent, self).__init__(*a, **k)
        assert hasattr(callback, '__call__')
        self._set_protected_mode_index(0)
        self._script = script
        self.update = callback

    def number_of_modes(self):
        return 2

    def set_mode_toggle(self, button):
        assert (button is None or isinstance(button, ButtonElement))
        old_toggle = self._mode_toggle
        if old_toggle is not None:
            old_toggle.remove_value_listener(self._toggle_value)
        self._mode_toggle = button
        if button is not None:
            button.add_value_listener(self._toggle_value)

    def set_mode_buttons(self, buttons):
        # Detach any previously assigned buttons before wiring the new set.
        for old_button in self._modes_buttons:
            old_button.remove_value_listener(self._mode_value)
        self._modes_buttons = []
        if buttons is not None:
            for button in buttons:
                assert isinstance(button, ButtonElement)
                button.add_value_listener(self._mode_value, True)
                self._modes_buttons.append(button)
            # Light only the button for the currently active mode.
            for index, button in enumerate(self._modes_buttons):
                if index == self._mode_index:
                    button.turn_on()
                else:
                    button.turn_off()
class ShiftModeComponent(ModeSelectorComponent):
    """Four-way selector for the Code's row-button modes (volume, sends,
    devices, special device).  ``update`` is replaced by the supplied
    callback."""

    def __init__(self, callback, script, *a, **k):
        super(ShiftModeComponent, self).__init__(*a, **k)
        assert hasattr(callback, '__call__')
        self._set_protected_mode_index(0)
        self._script = script
        self.update = callback

    def number_of_modes(self):
        return 4

    def set_mode_toggle(self, button):
        assert (button is None or isinstance(button, ButtonElement))
        old_toggle = self._mode_toggle
        if old_toggle is not None:
            old_toggle.remove_value_listener(self._toggle_value)
        self._mode_toggle = button
        if button is not None:
            button.add_value_listener(self._toggle_value)

    def set_mode_buttons(self, buttons):
        assert buttons is not None
        assert isinstance(buttons, tuple)
        assert len(buttons) - 1 in range(16)
        for button in buttons:
            assert isinstance(button, ButtonElement)
            button.add_value_listener(self._mode_value, True)
            self._modes_buttons.append(button)

    def _mode_value(self, *a, **k):
        # Ignore presses while the component is disabled (e.g. in mod mode).
        if self.is_enabled():
            super(ShiftModeComponent, self)._mode_value(*a, **k)
class Codec(ControlSurface):
    __module__ = __name__
    __doc__ = " MonoCode controller script "

    def __init__(self, c_instance, *a, **k):
        """Initialize state flags, build all sub-components inside the
        component guard, then perform the sysex handshake with the Code."""
        super(Codec, self).__init__(c_instance, *a, **k)
        # Version/identity strings reported to monomod clients.
        self._monomod_version = 'b995'
        self._version_check = 'b995'
        self._host_name = 'Codec'
        self._color_type = 'Monochrome'
        self._link_mixer = LINK_MIXER
        self._hosts = []
        self._linked_script = None
        self._local_ring_control = True
        self._absolute_mode = True
        self._last_device = None
        self._device_list = [None, None, None, None]
        self._device_select_buttons = None
        self._last_device_component = None
        self._timer = 0
        self._touched = 0
        self._locked = False
        self.flash_status = 1
        # Shift double-press bookkeeping (see _shift_value/_shift_timer).
        self._shift_button = None
        self._shift_pressed = 0
        self._shift_pressed_timer = 0
        self._shift_thresh = SHIFT_THRESH
        # NOTE(review): time.clock() was removed in Python 3.8; fine under
        # Live's Python 2 runtime, but would need time.perf_counter() later.
        self._shift_fix = time.clock()
        self._use_device_selector = USE_DEVICE_SELECTOR
        self._device_selection_follows_track_selection=FOLLOW
        self._leds_last = 0
        with self.component_guard():
            #self.local_ring_control(True)
            #self.set_absolute_mode(True)
            self._setup_controls()
            self._setup_monobridge()
            self._setup_device_controls()
            self._setup_special_device_control()
            self._device.append(self._special_device) #necessary for device browsing to work with special device
            self._setup_device_chooser()
            self._setup_mixer_controls()
            self._setup_monomod()
            self._setup_modes()
            self._setup_device_selector()
            self._setup_send_reset()
            self._setup_default_buttons()
            self._setup_m4l_interface()
            self.set_local_ring_control(True)
            self.song().view.add_selected_track_listener(self._update_selected_device)
            self._initialize_code()
        #self._shift_mode.set_mode(0)
        #self._monomod_mode.set_mode(0)
        self.log_message('<<<<<<<<<<<<<<<<<<<<<<<<< Codec ' + str(self._monomod_version) + ' log opened >>>>>>>>>>>>>>>>>>>>>>>>>')
        self.show_message('Codec Control Surface Loaded')
        self.request_rebuild_midi_map()
"""script initialization methods"""
def _initialize_code(self):
if FACTORY_RESET:
self._send_midi(factoryreset)
self._send_midi(btn_channels)
self._send_midi(enc_channels)
def _setup_monobridge(self):
self._monobridge = MonoBridgeElement(self)
self._monobridge.name = 'MonoBridge'
    def _setup_controls(self):
        """Instantiate every physical control (32 dials, 32 grid buttons,
        8 column buttons, 4 row buttons, the Livid button) and group the
        dials/buttons into matrix elements."""
        is_momentary = True
        self._livid = CodecMonoButtonElement(is_momentary, MIDI_NOTE_TYPE, CHANNEL, LIVID, 'Livid_Button', self)
        # 8 columns x 4 rows of encoders; note identities come from Map.CODE_DIALS.
        self._dial = [None for index in range(8)]
        for column in range(8):
            self._dial[column] = [None for index in range(4)]
            for row in range(4):
                self._dial[column][row] = CodecEncoderElement(MIDI_CC_TYPE, CHANNEL, CODE_DIALS[row][column], Live.MidiMap.MapMode.absolute, 'Dial_' + str(column) + '_' + str(row), (column + (row*8)), self) #CODE_DIALS[row][column]
                #self._dial[column][row]._report_output = True
        # 8 columns x 4 rows of encoder push-buttons.
        self._button = [None for index in range(8)]
        for column in range(8):
            self._button[column] = [None for index in range(4)]
            for row in range(4):
                self._button[column][row] = CodecMonoButtonElement(is_momentary, MIDI_NOTE_TYPE, CHANNEL, CODE_BUTTONS[row][column], 'Button_' + str(column) + '_' + str(row), self)
        self._column_button = [None for index in range(8)]
        for index in range(8):
            self._column_button[index] = CodecMonoButtonElement(is_momentary, MIDI_NOTE_TYPE, CHANNEL, CODE_COLUMN_BUTTONS[index], 'Column_Button_' + str(index), self)
        self._row_button = [None for index in range(4)]
        for index in range(4):
            self._row_button[index] = CodecMonoButtonElement(is_momentary, MIDI_NOTE_TYPE, CHANNEL, CODE_ROW_BUTTONS[index], 'Row_Button_' + str(index), self)
        # Matrix wrappers used by the monomod host for bulk addressing.
        self._dial_matrix = EncoderMatrixElement(self)
        self._dial_matrix.name = 'Encoder_Matrix'
        for row in range(4):
            dial_row = tuple([self._dial[column][row] for column in range(8)])
            self._dial_matrix.add_row(dial_row)
        # Button matrix rows carry the row button as a ninth column; the
        # final row is the column buttons plus the Livid button.
        self._button_matrix = ButtonMatrixElement()
        self._button_matrix.name = 'Button_Matrix'
        for row in range(4):
            button_row = [self._button[column][row] for column in range(8)]
            button_row.append(self._row_button[row])
            self._button_matrix.add_row(tuple(button_row))
        self._button_matrix.add_row(tuple(self._column_button + [self._livid]))
def _setup_modes(self):
self._monomod_mode = MonomodModeComponent(self._mod_mode_update, self)
self._monomod_mode.name = 'Monomod_Mode'
self.set_shift_button(self._livid)
self._shift_mode = ShiftModeComponent(self._shift_update, self)
self._shift_mode.name = 'Shift_Mode'
self._shift_mode.set_mode_buttons(tuple([self._row_button[0], self._row_button[1], self._row_button[2], self._row_button[3]]))
def _setup_transport_control(self):
self._transport = TransportComponent()
self._transport.name = 'Transport'
def _setup_monomod(self):
self._host = CodecMonomodComponent(self)
self._host.name = 'Monomod_Host'
self._host._set_dial_matrix(self._dial_matrix, self._button_matrix)
self.hosts = [self._host]
encs = []
for row in range(4):
for col in range(8):
encs.append(self._dial[col][row])
self._host._set_parameter_controls(encs)
    def _setup_mixer_controls(self):
        """Create the 8-track session + mixer pair and patch per-strip
        behaviour (mute/solo handlers, inverted mute feedback)."""
        is_momentary = True
        self._num_tracks = (8)
        self._session = SessionComponent(self._num_tracks, 0)
        self._session.name = 'Session'
        self._mixer = MixerComponent(self._num_tracks, 0, False, False)
        self._mixer.name = 'Mixer'
        # Replace the framework's track-nav handlers with this script's
        # versions (defined later in the file).
        self._mixer._next_track_value = self._mixer_next_track_value(self._mixer)
        self._mixer._prev_track_value = self._mixer_prev_track_value(self._mixer)
        self._mixer.set_track_offset(0) #Sets start point for mixer strip (offset from left)
        #for index in range(8):
            #use the bottom row of encoders for volume, so add 24 to offset the index
        #	self._mixer.channel_strip(index).set_volume_control(self._dial[index+24])
        for index in range(8):
            self._mixer.channel_strip(index).name = 'Mixer_ChannelStrip_' + str(index)
            self._mixer.channel_strip(index)._invert_mute_feedback = True
            # Custom mute/solo value handlers wrap the framework defaults.
            self._mixer.channel_strip(index)._mute_value = self._channelstrip_mute_value(self._mixer.channel_strip(index))
            self._mixer.channel_strip(index)._solo_value = self._channelstrip_solo_value(self._mixer.channel_strip(index))
            #mixer.channel_strip(index).set_select_button(ButtonElement(is_momentary, MIDI_NOTE_TYPE, CH, track_select_notes[index]))
        self.song().view.selected_track = self._mixer.channel_strip(0)._track #set the selected strip to the first track, so that we don't, for example, try to assign a button to arm the master track, which would cause an assertion error
        self._session.set_mixer(self._mixer)
def _setup_device_controls(self):
self._device = [None for index in range(4)]
for index in range(4):
self._device[index] = CodecDeviceComponent(self)
self._device[index].name = 'CodecDevice_Component_' + str(index)
device_param_controls = []
for control in range(8):
device_param_controls.append(self._dial[control][index])
self._device[index].set_on_off_button(self._button[1][index])
self._device[index].set_lock_button(self._button[2][index])
self._device[index].set_bank_nav_buttons(self._button[4][index], self._button[5][index])
self._device[index].set_nav_buttons(self._button[6][index], self._button[7][index])
self._device[index].set_parameter_controls(tuple(device_param_controls))
self.set_device_component(self._device[0])
self._last_device_component = self._device[0]
def _setup_special_device_control(self):
self._special_device = SpecialCodecDeviceComponent(self)
self._special_device.name = 'SpecialCodecDeviceComponent'
self._special_device.set_on_off_button(self._button[1][0])
self._special_device.set_lock_button(self._button[2][0])
self._special_device.set_bank_nav_buttons(self._button[4][0], self._button[5][0])
self._special_device.set_nav_buttons(self._button[6][0], self._button[7][0])
device_param_controls = []
for row in range(4):
for column in range(8):
device_param_controls.append(self._dial[column][row])
self._special_device.set_parameter_controls(tuple(device_param_controls))
def _setup_device_chooser(self):
self._selected_device = self._device[0]
self._last_selected_device = self._device[0]
self._device_select_buttons = [self._button[0][index] for index in range(4)]
for button in self._device_select_buttons:
button.add_value_listener(self._device_select_value, True)
def _setup_device_selector(self):
self._device_selector = CodecDeviceSelectorComponent(self, 'c', self._device + [self._special_device])
self._device_selector.name = 'Device_Selector'
self._device_selector.set_mode_buttons(self._column_button)
#self._device_selector.set_mode_toggle(self._livid)
def _setup_send_reset(self):
self._send_reset = CodecResetSendsComponent(self)
self._send_reset.set_buttons(self._button)
def _setup_default_buttons(self):
self._value_default = ParameterDefaultComponent(self)
buttons = []
dials = []
for column in self._button:
for button in column:
buttons.append(button)
for column in self._dial:
for dial in column:
dials.append(dial)
self._value_default.set_buttons(buttons)
self._value_default.set_dials(dials)
def _setup_m4l_interface(self):
self._m4l_interface = M4LInterfaceComponent(controls=self.controls, component_guard=self.component_guard)
self.get_control_names = self._m4l_interface.get_control_names
self.get_control = self._m4l_interface.get_control
self.grab_control = self._m4l_interface.grab_control
self.release_control = self._m4l_interface.release_control
"""multiple device support"""
    def _device_select_value(self, value, sender):
        """Handle a device-select button press (first button column).

        In device mode (shift mode 2) this appoints the corresponding row
        device; if the appointed device lives on the selected track, its
        name is flashed on the display."""
        #self.log_message('device_select_value ' + str(value) + ' ' + str(self._device_select_buttons.index(sender)))
        if not self._shift_pressed:
            if sender.is_momentary or value > 0:
                if self._shift_mode._mode_index == 2:
                    self.set_device_component(self._device[self._device_select_buttons.index(sender)])
                    self._last_device_component = self._device_component
                if self._device_component != None and isinstance(self._device_component._device, Live.Device.Device):
                    if self._device_component.find_track(self._device_component._device) == self.song().view.selected_track:
                        self._device_component.display_device()
"""livid double press mechanism"""
def set_shift_button(self, button):
assert ((button == None) or (isinstance(button, MonoButtonElement)))
if self._shift_button != None:
self._shift_button.remove_value_listener(self._shift_value)
self._shift_button = button
if self._shift_button != None:
self._shift_button.add_value_listener(self._shift_value)
    def _shift_value(self, value):
        """Shift (Livid) button handler with a double-press detector.

        Press: switch encoders to slow (fine) resolution.  Release: if a
        previous release happened within _shift_thresh ticks, toggle mod
        mode; otherwise arm the double-press timer.  Resolution returns
        to normal on release either way."""
        self._shift_pressed = int(value != 0)
        if self._shift_pressed > 0:
            self._send_midi(SLOWENCODER)
        else:
            if self._shift_pressed_timer > 0:
                #self.log_message('mod mode: ' + str(abs(self._monomod_mode._mode_index - 1)))
                # Second release inside the window: toggle mod mode (0 <-> 1).
                self._monomod_mode.set_mode(max(0, min(1, abs(self._monomod_mode._mode_index - 1))))
                self._shift_pressed_timer = 0
            else:
                if self._shift_pressed_timer == 0:
                    self._shift_pressed_timer = 1
                    self.schedule_message(int(self._shift_thresh), self._shift_timer)
            self._send_midi(NORMALENCODER)
        if self._shift_button != None:
            # Offset by 28 when mod mode is active so the LED colour differs.
            self._shift_button.send_value(self._shift_pressed + (28*self._monomod_mode._mode_index))
    def _shift_timer(self, *a, **k):
        # Double-press window expired without a second shift release.
        self._shift_pressed_timer = 0
    def _mod_mode_update(self):
        """Switch the whole surface between normal mode (0) and mod mode (1).

        Mode 0 re-enables the shift-mode mixer/device layouts; mode 1 hands
        the surface to the monomod host, or, when no client is attached,
        moves all controls to MIDI channel 1 for direct mapping."""
        if(self._monomod_mode._mode_index == 0):
            self._host._set_shift_button(None)
            self._host.set_enabled(False)
            self._dial_matrix.reset()
            self._shift_mode.set_enabled(True)
            self._shift_update()
            self.request_rebuild_midi_map()
            self._livid.turn_off()
        elif(self._monomod_mode._mode_index == 1):
            self._shift_mode.set_enabled(False)
            self._deassign_all()
            self._dial_matrix.reset()
            self._button_matrix.reset()
            self._livid.turn_on()
            if not self._host._active_client == None:
                self._host.set_enabled(True)
                self._host._set_shift_button(self._livid)
            else:
                # No mod client: expose the raw controls on channel 1.
                self._assign_alternate_mappings(1)
            self.request_rebuild_midi_map()
"""Mode Functions"""
def _shift_update(self):
if(self._shift_mode.is_enabled()):
with self.component_guard():
self.allow_updates(False)
self._deassign_all()
if(self._shift_mode._mode_index is 0):
self._assign_volume()
elif(self._shift_mode._mode_index is 1):
self._assign_sends()
elif(self._shift_mode._mode_index is 2):
self._assign_devices()
elif(self._shift_mode._mode_index is 3):
self._assign_special_device()
for index in range(self._shift_mode.number_of_modes()):
if index == self._shift_mode._mode_index:
self._shift_mode._modes_buttons[index].turn_on()
else:
self._shift_mode._modes_buttons[index].turn_off()
self.allow_updates(True)
self.request_rebuild_midi_map()
    def _deassign_all(self):
        """Return the surface to a neutral state: restore channel 0
        mappings, detach every mixer/device control, and reset all
        button LEDs, before a new mode layout is applied."""
        self._assign_alternate_mappings(0)
        self._device_selector.set_enabled(False)
        for index in range(8):
            self._mixer.channel_strip(index).set_volume_control(None)
            self._mixer.channel_strip(index).set_pan_control(None)
            self._mixer.channel_strip(index).set_send_controls(tuple([None, None, None, None]))
        for index in range(4):
            self._device[index].set_on_off_button(None)
            self._device[index].set_lock_button(None)
            self._device[index].set_bank_nav_buttons(None, None)
            self._device[index].set_nav_buttons(None, None)
            self._device[index].set_enabled(False)
            # Drop the control list directly so the encoders are freed.
            self._device[index]._parameter_controls = None
        self._special_device.set_enabled(False)
        self._special_device._parameter_controls = None
        self._device_selector.set_enabled(False)
        self._deassign_buttons()
        # Release and darken every button element owned by the script.
        for control in self.controls:
            if isinstance(control, ButtonElement):
                control.release_parameter()
                control.reset()
        self.request_rebuild_midi_map()
def _deassign_buttons(self):
for index in range(8):
self._mixer.channel_strip(index).set_select_button(None)
self._mixer.channel_strip(index).set_solo_button(None)
self._mixer.channel_strip(index).set_mute_button(None)
self._mixer.set_select_buttons(None, None)
self._send_reset.set_enabled(False)
def _assign_volume(self):
for index in range(8):
self._mixer.channel_strip(index).set_volume_control(self._dial[index][3])
self._mixer.channel_strip(index).set_pan_control(self._dial[index][2])
self._mixer.channel_strip(index).set_send_controls(tuple([self._dial[index][0], self._dial[index][1]]))
self._mixer.channel_strip(index).set_select_button(self._column_button[index])
self._mixer.channel_strip(index).set_solo_button(self._button[index][2])
self._mixer.channel_strip(index).set_mute_button(self._button[index][3])
self._mixer.set_select_buttons(self._button[7][0], self._button[6][0])
def _assign_sends(self):
for index in range(8):
self._mixer.channel_strip(index).set_send_controls(tuple([self._dial[index][0], self._dial[index][1], self._dial[index][2], self._dial[index][3]]))
self._mixer.channel_strip(index).set_select_button(self._column_button[index])
self._send_reset.set_enabled(True)
    def _assign_devices(self):
        """Device mode: each encoder row drives its own device component
        (parameters, on/off, lock, bank and device navigation)."""
        self.set_device_component(self._last_device_component)
        # Re-trigger selection so the appointed device's display refreshes.
        self._device_select_value(1, self._device_select_buttons[self._device.index(self._device_component)])
        for index in range(4):
            device_param_controls = []
            for control in range(8):
                device_param_controls.append(self._dial[control][index])
            self._device[index].set_on_off_button(self._button[1][index])
            self._device[index].set_lock_button(self._button[2][index])
            self._device[index].set_bank_nav_buttons(self._button[4][index], self._button[5][index])
            self._device[index].set_nav_buttons(self._button[6][index], self._button[7][index])
            self._device[index].set_parameter_controls(tuple(device_param_controls))
            self._device[index].set_enabled(True)
        self._device_selector.set_enabled(self._use_device_selector)
        # Without the selector, column buttons fall back to track select.
        if not self._use_device_selector:
            for index in range(8):
                self._mixer.channel_strip(index).set_select_button(self._column_button[index])
def _assign_special_device(self):
self.set_device_component(self._special_device)
device_param_controls = []
for row in range(4):
for column in range(8):
device_param_controls.append(self._dial[column][row])
self._special_device.set_parameter_controls(tuple(device_param_controls))
self._special_device.set_enabled(True)
self._device_selector.set_enabled(self._use_device_selector)
if not self._use_device_selector:
for index in range(8):
self._mixer.channel_strip(index).set_select_button(self._column_button[index])
def _assign_alternate_mappings(self, chan):
for column in self._dial:
for control in column:
control.set_channel(chan)
control.set_enabled(chan is 0)
for column in self._button:
for control in column:
control.set_channel(chan)
control.set_enabled(chan is 0)
for control in self._column_button:
control.set_channel(chan)
control.set_enabled(chan is 0)
for control in self._row_button:
control.set_channel(chan)
control.set_enabled(chan is 0)
"""general functionality"""
    def disconnect(self):
        """Tear down listeners, links and references before Live releases the script.

        Order: control listeners first, then the session link and the
        track-selection listener, then host/linked-script references, and
        finally the inherited ControlSurface.disconnect.
        """
        # Stop listening to the shift button, if one was ever assigned.
        if not self._shift_button is None:
            if self._shift_button.value_has_listener(self._shift_value):
                self._shift_button.remove_value_listener(self._shift_value)
        # Detach the device-select button listeners added during setup.
        for button in self._device_select_buttons:
            if button.value_has_listener(self._device_select_value):
                button.remove_value_listener(self._device_select_value)
        # Undo the session link with a companion script, if one is active.
        if self._session._is_linked():
            self._session._unlink()
        if self.song().view.selected_track_has_listener(self._update_selected_device):
            self.song().view.remove_selected_track_listener(self._update_selected_device)
        """for cs in self._control_surfaces():
            for host in self._hosts:
                self.log_message('installed: ' + str(cs) + ' vs. ' + str(host))
                if str(type(cs)) == str(type(host)):
                    self.log_message('disconnecting: ' + str(type(cs)))
                    cs.disconnect(cs)"""
        #self._host._set_parameter_controls(None)
        self._hosts = []
        # Break the reference cycle with the linked script so both can be collected.
        if self._linked_script != None:
            self._linked_script._update_linked_device_selection = None
            self._linked_script = None
        #self._disconnect_notifier.set_mode(0)
        self.log_message('<<<<<<<<<<<<<<<<<<<<<<<<< Codec log closed >>>>>>>>>>>>>>>>>>>>>>>>>')
        ControlSurface.disconnect(self)
        return None
def connect_script_instances(self, instanciated_scripts):
found = False
for s in instanciated_scripts:
if '_codec_version' in dir(s):
if s._codec_version == self._version_check:
if s._host_name == ('MonOhm'):
self.log_message('found codec version ' + str(s._codec_version) + ' in script ' + str(s._host_name))
found = True
self._linked_script = s
self._linked_script._update_linked_device_selection = self._update_linked_device_selection
if not self._session._is_linked() and self._link_mixer is True:
self._session.set_offsets(LINK_OFFSET[0], LINK_OFFSET[1])
self._session._link()
else:
self.log_message('version mismatch: Monomod version ' + str(self._version_check) + ' vs. Host version ' + str(s._codec_version))
if found == False:
for s in instanciated_scripts:
if '_codec_version' in dir(s):
if s._codec_version == self._version_check:
if s._host_name == 'BlockMod':
self.log_message('found codec version ' + str(s._codec_version) + ' in script ' + str(s._host_name))
self._linked_script = s
self._linked_script._update_linked_device_selection = self._update_linked_device_selection
if not self._session._is_linked() and self._link_mixer is True:
self._session.set_offsets(LINK_OFFSET[0], LINK_OFFSET[1])
self._session._link()
else:
self.log_message('version mismatch: Monomod version ' + str(self._version_check) + ' vs. Host version ' + str(s._codec_version))
#self.log_message('hosts: ' + str(self._hosts))"""
def update_display(self):
ControlSurface.update_display(self) #since we are overriding this from the inherited method, we need to call the original routine as well
self._timer = (self._timer + 1) % 256
if(self._local_ring_control is False):
self.send_ring_leds()
self.flash()
def handle_sysex(self, midi_bytes):
#self._send_midi(tuple([240, 00, 01, 97, 04, 15, 01, 247]))
#response = [long(0),long(0)]
#self.log_message(response)
pass
def flash(self):
if(self.flash_status > 0):
for control in self.controls:
if isinstance(control, MonoButtonElement):
control.flash(self._timer)
def send_ring_leds(self):
if self._host._is_enabled == True:
leds = [240, 0, 1, 97, 4, 31]
for column in range(8):
for row in range(4):
wheel = self._dial[column][row]
bytes = wheel._get_ring()
leds.append(bytes[0])
leds.append(int(bytes[1]) + int(bytes[2]))
leds.append(247)
if not leds==self._leds_last:
self._send_midi(tuple(leds))
self._leds_last = leds
def set_absolute_mode(self, val = 1):
self._absolute_mode = (val!=0)
if self._absolute_mode is True:
self._send_midi(tuple([240, 0, 1, 97, 4, 17, 0, 0, 0, 0, 0, 0, 0, 0, 247]))
else:
self._send_midi(tuple([240, 0, 1, 97, 4, 17, 127, 127, 127, 127, 127, 127, 127, 127, 247]))
def set_local_ring_control(self, val = 1):
self._local_ring_control = (val!=0)
if(self._local_ring_control is True):
#self._send_midi(tuple([240, 0, 1, 97, 4, 32, 0, 247]))
self._send_midi(tuple([240, 0, 1, 97, 4, 8, 72, 247]))
self._dial_matrix.reset()
else:
#self._send_midi(tuple([240, 0, 1, 97, 4, 32, 1, 247]))
self._send_midi(tuple([240, 0, 1, 97, 4, 8, 64, 247]))
self.schedule_message(2, self._clear_rings)
def _clear_rings(self):
self._leds_last = 1
def device_follows_track(self, val):
self._device_selection_follows_track_selection = (val == 1)
return self
"""m4l bridge"""
def generate_strip_string(self, display_string):
NUM_CHARS_PER_DISPLAY_STRIP = 12
if (not display_string):
return (' ' * NUM_CHARS_PER_DISPLAY_STRIP)
if ((len(display_string.strip()) > (NUM_CHARS_PER_DISPLAY_STRIP - 1)) and (display_string.endswith('dB') and (display_string.find('.') != -1))):
display_string = display_string[:-2]
if (len(display_string) > (NUM_CHARS_PER_DISPLAY_STRIP - 1)):
for um in [' ',
'i',
'o',
'u',
'e',
'a']:
while ((len(display_string) > (NUM_CHARS_PER_DISPLAY_STRIP - 1)) and (display_string.rfind(um, 1) != -1)):
um_pos = display_string.rfind(um, 1)
display_string = (display_string[:um_pos] + display_string[(um_pos + 1):])
else:
display_string = display_string.center((NUM_CHARS_PER_DISPLAY_STRIP - 1))
ret = u''
for i in range((NUM_CHARS_PER_DISPLAY_STRIP - 1)):
if ((ord(display_string[i]) > 127) or (ord(display_string[i]) < 0)):
ret += ' '
else:
ret += display_string[i]
ret += ' '
assert (len(ret) == NUM_CHARS_PER_DISPLAY_STRIP)
return ret
def notification_to_bridge(self, name, value, sender):
if(isinstance(sender, CodecEncoderElement)):
self._monobridge._send(sender.name, 'lcd_name', str(self.generate_strip_string(name)))
self._monobridge._send(sender.name, 'lcd_value', str(self.generate_strip_string(value)))
def touched(self):
if not self._host.is_enabled():
if self._touched is 0:
self._monobridge._send('touch', 'on')
self.schedule_message(2, self.check_touch)
self._touched +=1
def check_touch(self):
if self._touched > 5:
self._touched = 5
elif self._touched > 0:
self._touched -= 1
if self._touched is 0:
self._monobridge._send('touch', 'off')
else:
self.schedule_message(2, self.check_touch)
"""overrides"""
def allow_updates(self, allow_updates):
for component in self.components:
component.set_allow_update(int(allow_updates!=0))
    def set_device_component(self, device_component):
        """Make *device_component* the active DeviceComponent.

        Transfers the lock callback from the old component, lights only the
        select button belonging to the new one, and refreshes the device
        selection.  *device_component* must be a non-None DeviceComponent.
        """
        # Detach the lock callback from the previously active component.
        if self._device_component != None:
            self._device_component._lock_callback = None
        assert (device_component != None)
        assert isinstance(device_component, DeviceComponent)
        self._device_component = device_component
        self._device_component._lock_callback = self._toggle_lock #old: self._device_component.set_lock_callback(self._toggle_lock)
        # Light the one select button whose index matches the active component.
        if self._device_select_buttons != None:
            for button in self._device_select_buttons:
                button.send_value(self._device_select_buttons.index(button) == self._device.index(self._device_component))
            self._update_device_selection()
        return None
def _update_selected_device(self):
if self._device_selection_follows_track_selection is True:
self._update_device_selection()
return None
def _update_linked_device_selection(self, device):
#self.log_message('codec received ' + str(device.name))
if self._device_component != None and device != None:
if not self._device_component.is_locked():
self._device_component.set_device(device)
def _get_num_tracks(self):
return self.num_tracks
def _update_device_selection(self):
#self.log_message('_update_device_selection')
if self._device_component != None:
if not self._device_component.is_locked():
track = self.song().view.selected_track
device_to_select = track.view.selected_device
if ((device_to_select == None) and (len(track.devices) > 0)):
device_to_select = track.devices[0]
if (device_to_select != None):
self.song().view.select_device(device_to_select)
self._device_component.set_device(device_to_select)
def _channelstrip_mute_value(self, channelstrip):
def _mute_value(value):
if not self._shift_pressed:
self.log_message('shift not pressed')
ChannelStripComponent._mute_value(channelstrip, value)
return _mute_value
def _channelstrip_solo_value(self, channelstrip):
def _solo_value(value):
if not self._shift_pressed:
ChannelStripComponent._solo_value(channelstrip, value)
return _solo_value
def _mixer_next_track_value(self, mixer):
def _next_track_value(value):
if not self._shift_pressed:
MixerComponent._next_track_value(mixer, value)
return _next_track_value
def _mixer_prev_track_value(self, mixer):
def _prev_track_value(value):
if not self._shift_pressed:
MixerComponent._prev_track_value(mixer, value)
return _prev_track_value
class ParameterDefaultComponent(ControlSurfaceComponent):
    __module__ = __name__
    __doc__ = " MonoCode controller script "

    def __init__(self, script):
        """everything except the '_on_selected_track_changed' override and 'disconnect' runs from here"""
        ControlSurfaceComponent.__init__(self)
        self._script = script
        self._buttons = []
        self._dials = []

    def set_buttons(self, buttons):
        """Swap the listened-to button set, detaching listeners from the old buttons first."""
        for old in self._buttons:
            if old.value_has_listener(self._value_to_default):
                old.remove_value_listener(self._value_to_default)
        self._buttons = buttons
        for new in self._buttons:
            # identify_sender=True so the callback knows which button fired.
            new.add_value_listener(self._value_to_default, True)

    def set_dials(self, dials):
        """Pair each dial with the button at the same index; counts must match."""
        assert(len(dials) == len(self._buttons))
        self._dials = dials

    def _value_to_default(self, value, sender):
        """On a shift+button press, snap the paired dial's parameter back to its default."""
        if value > 0 and self._script._shift_pressed:
            dial = self._dials[self._buttons.index(sender)]
            if dial is not None:
                param = dial.mapped_parameter()
                if param is not None and hasattr(param, 'default_value'):
                    param.value = param.default_value

    def update(self):
        pass

    def disconnect(self):
        """Detach every value listener on shutdown."""
        for button in self._buttons:
            if button.value_has_listener(self._value_to_default):
                button.remove_value_listener(self._value_to_default)
#
# | [
"aumhaa@gmail.com"
] | aumhaa@gmail.com |
018af1a310cbc34dede69b67c5ca5c27d53c1d36 | 46ea1dd3bec8a47e2a83f2aafc35b009f2dd34df | /gitea_api/models/edit_repo_option.py | 28ef30975f7469b4ab78c4aa9224979dea6eede3 | [] | no_license | Mari-W/gitea-api | 6a9140c573ce3d46ff6078a70521fc751b98a961 | 41b74e49466ebf7ca1b45b452cf937699d96ff3e | refs/heads/master | 2023-08-23T20:01:51.778259 | 2021-10-25T21:43:21 | 2021-10-25T21:43:21 | 407,828,316 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 26,851 | py | # coding: utf-8
"""
Gitea API.
This documentation describes the Gitea API. # noqa: E501
OpenAPI spec version: 1.15.2
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
class EditRepoOption(object):
    """Options for editing a repository (Gitea API Swagger model).

    NOTE: the original of this class was emitted by the swagger code
    generator; this hand-written version keeps the identical public
    interface (constructor keywords, one get/set property per attribute,
    ``swagger_types``/``attribute_map``, ``to_dict``, ``to_str``,
    ``__repr__``, ``__eq__``, ``__ne__``) with far less boilerplate.
    """

    # Attribute name -> swagger type, in declaration order.
    swagger_types = {
        'allow_manual_merge': 'bool',
        'allow_merge_commits': 'bool',
        'allow_rebase': 'bool',
        'allow_rebase_explicit': 'bool',
        'allow_squash_merge': 'bool',
        'archived': 'bool',
        'autodetect_manual_merge': 'bool',
        'default_branch': 'str',
        'default_delete_branch_after_merge': 'bool',
        'default_merge_style': 'str',
        'description': 'str',
        'external_tracker': 'ExternalTracker',
        'external_wiki': 'ExternalWiki',
        'has_issues': 'bool',
        'has_projects': 'bool',
        'has_pull_requests': 'bool',
        'has_wiki': 'bool',
        'ignore_whitespace_conflicts': 'bool',
        'internal_tracker': 'InternalTracker',
        'mirror_interval': 'str',
        'name': 'str',
        'private': 'bool',
        'template': 'bool',
        'website': 'str'
    }

    # For this model every JSON key equals the attribute name.
    attribute_map = {key: key for key in swagger_types}

    def __init__(self, allow_manual_merge=None, allow_merge_commits=None, allow_rebase=None, allow_rebase_explicit=None, allow_squash_merge=None, archived=None, autodetect_manual_merge=None, default_branch=None, default_delete_branch_after_merge=None, default_merge_style=None, description=None, external_tracker=None, external_wiki=None, has_issues=None, has_projects=None, has_pull_requests=None, has_wiki=None, ignore_whitespace_conflicts=None, internal_tracker=None, mirror_interval=None, name=None, private=None, template=None, website=None):  # noqa: E501
        """EditRepoOption - a model defined in Swagger"""  # noqa: E501
        # Snapshot the keyword arguments before creating any other locals.
        supplied = dict(locals())
        del supplied['self']
        self.discriminator = None
        # Every backing field starts as None ...
        for field in self.swagger_types:
            setattr(self, '_' + field, None)
        # ... and only explicitly supplied values go through the setters.
        for field, value in supplied.items():
            if value is not None:
                setattr(self, field, value)

    def to_dict(self):
        """Returns the model properties as a dict"""
        def _convert(value):
            # Recursively serialize nested models inside lists and dicts.
            if isinstance(value, list):
                return [item.to_dict() if hasattr(item, 'to_dict') else item
                        for item in value]
            if hasattr(value, 'to_dict'):
                return value.to_dict()
            if isinstance(value, dict):
                return {k: (v.to_dict() if hasattr(v, 'to_dict') else v)
                        for k, v in value.items()}
            return value

        result = {attr: _convert(getattr(self, attr))
                  for attr in self.swagger_types}
        if issubclass(EditRepoOption, dict):
            for key, value in self.items():
                result[key] = value
        return result

    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal"""
        return isinstance(other, EditRepoOption) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        return not self == other


def _edit_repo_accessor(field):
    """Build the property behind ``EditRepoOption.<field>``: a plain get/set of ``_<field>``."""
    storage = '_' + field

    def _get(self):
        return getattr(self, storage)

    def _set(self, value):
        setattr(self, storage, value)

    return property(_get, _set, doc='The %s of this EditRepoOption.' % field)


# Install one identical accessor pair per model attribute.
for _field in EditRepoOption.swagger_types:
    setattr(EditRepoOption, _field, _edit_repo_accessor(_field))
| [
"weidner@cs.uni-freiburg.de"
] | weidner@cs.uni-freiburg.de |
dbb2290f7d66af10b8f53b033c52e9ac8f65550c | efc6aacf1fb6585a51f2da5a4f0a8c5c8bf51309 | /bees/urls.py | e9c8d52de564cdec60b575d1d185e208afb30e72 | [] | no_license | redlolgeerf/diary_bees | b7d078985c6d9656e109658fb487044d5bc3cd13 | 298bb8b1888170d44241543090b0483cef016de7 | refs/heads/master | 2021-01-22T02:08:40.647155 | 2014-11-28T16:14:31 | 2014-11-28T16:14:31 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 443 | py | from django.conf.urls import patterns, url
from bees.views import index, HiveView, SettingsView, authorize
# URL routes for the bees app.
# NOTE(review): `patterns()` was deprecated in Django 1.8 and removed in
# 1.10; a plain list of url() entries works from 1.8 on -- confirm the
# project's Django version before migrating.
# NOTE(review): both /hive/ routes share the name 'hive-detail', so
# reverse('hive-detail') can only resolve one of them -- confirm intent.
urlpatterns = patterns('',
    url(r'^beekeeper/$', SettingsView.as_view(), name='beekeeper-settings'),
    url(r'^hive/(?P<d_id>\d+)/$', HiveView.as_view(), name='hive-detail'),
    url(r'^hive/$', HiveView.as_view(), name='hive-detail'),
    url(r'^authorize/$', authorize, name='authorize'),
    url(r'^$', index, name='index'),
)
| [
"mihanne@list.ru"
] | mihanne@list.ru |
89e02d0b19555283c8fd6c2acd38e65070c2d821 | 3830901da6fbe95b5bdb27600a09290795d65526 | /ex35_4.py | 746302348364caa61d717aca87bb471c71139390 | [] | no_license | SergeyErmolaev/A-game | 0a4b2b9ef181182f9403c15b540020f06f4ecf2b | 38f5f0baad5b491b753a4b3886e5827e69f7bb32 | refs/heads/master | 2020-07-16T11:13:51.115606 | 2019-09-02T04:53:40 | 2019-09-02T04:53:40 | 205,779,075 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 684 | py | from sys import exit
def wonderland():
    """Branch taken when the player drinks from the bottle."""
    print("You see a white rabbit with a clock.")
    print("will you follow it or walk your ways?")
def trough_looking_glass():
    """Branch taken when the player eats the pie (placeholder: prints an
    empty line only)."""
    print("")
def dead(why):
    """Print a game-over message and quit.

    Defined here because start() referenced dead() without any definition
    in this file (it would have raised NameError at runtime).
    """
    print(why)
    exit(0)


def start():
    """Entry point: describe the room and branch on the player's choice."""
    print("You are in a strange room with a lot of doors. All of them are closed")
    print("There is a table in the room with a key, that fits only to 15 inches door, a small bottle with unknown liquid and pie.")
    print("Which one will you take: bottle or pie?")
    choice = input("> ")
    if choice == "bottle":
        wonderland()
    elif choice == "pie":  # fixed: missing ':' was a SyntaxError
        # fixed: original called through_looking_glass(), which is not
        # defined -- the function above is named trough_looking_glass.
        trough_looking_glass()
    else:
        dead("You stumble around the room until you starve.")


start()
| [
"noreply@github.com"
] | SergeyErmolaev.noreply@github.com |
84fc828592eda1cf264622c467dbddf6b11861f2 | 65b15e9da9b5b88e9ce248f06bb011663ebdf22b | /get_post_id.py | 228b1f350ae5d86548addfd112a399ad8e28bcb7 | [] | no_license | JYOTITHAKUROSEM/instabot | 8ad7f82d602edd03efd83204ee50a17947b57651 | 0043aadc78345183e7d82ca0b4576b29373af9ac | refs/heads/master | 2020-12-02T16:14:32.582068 | 2017-07-11T09:39:12 | 2017-07-11T09:39:12 | 96,522,634 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 803 | py | import requests
from get_user_id import get_user_id
from constants import BASE_URL,APP_ACCESS_TOKEN
def get_post_id(insta_user_name):
    """Return the media id of the user's most recent Instagram post.

    Exits the process when the user does not exist, has no recent posts,
    or the API responds with a non-200 meta code.

    :param insta_user_name: Instagram username to look up
    :return: id string of the most recent media item
    """
    user_id = get_user_id(insta_user_name)
    # `is None` instead of `== None`; print() call form works on both
    # Python 2 and Python 3 (the original used py2-only print statements).
    if user_id is None:
        print("user does not exist")
        exit()
    request_url = (BASE_URL + 'users/%s/media/recent/?access_token=%s') % (user_id, APP_ACCESS_TOKEN)
    print('GET request url : %s' % (request_url))
    user_media = requests.get(request_url).json()
    if user_media['meta']['code'] == 200:
        if len(user_media['data']):
            return user_media['data'][0]['id']
        else:
            print('There is no recent post of the user')
            exit()
    else:
        print("Status code other than 200 received!")
        exit()
#get_post_id(insta_user_name="sharmatanu9878") | [
"jyotithakur15111@gmail.com"
] | jyotithakur15111@gmail.com |
66b89f7251d24d6189abc0efdf2662fcd772dbfb | 440a833e8a2f87a433ce980ea9774a7e56e9a99f | /handlers/admin.py | df2b323410463111f2b63d1ecdb86530cf486515 | [] | no_license | lonelywolf1981/reset_pass_bot | f5f603cc12b6f867b9c603dff16991ca437ada31 | b5a5802aa021bf539a3f27514ab39d54f21e4251 | refs/heads/master | 2023-08-02T18:49:35.383027 | 2021-10-06T04:15:30 | 2021-10-06T04:15:30 | 413,770,678 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,096 | py | from aiogram.dispatcher.filters import Text
from aiogram.dispatcher import FSMContext
from aiogram.dispatcher.filters.state import State, StatesGroup
from aiogram import types, Dispatcher
from create_bot import dp
from connection.connection import connect_to_ad, user_search_ad, reset_pass
class FSMAdmin(StatesGroup):
    # Finite-state machine for the password-reset dialog:
    # admin login -> admin password -> target username.
    admin_user = State()    # waiting for the AD admin's login
    admin_passwd = State()  # waiting for the AD admin's password
    user = State()          # waiting for the username whose password is reset
# @dp.message_handler(commands='сброс', state=None)
async def cm_reset(message: types.Message):
    """Start the reset dialog: enter the first FSM state and ask for the
    admin login (registration happens in register_handlers_admin)."""
    await FSMAdmin.admin_user.set()
    await message.reply('Введите логин админа')
@dp.message_handler(state="*", commands='отмена')
@dp.message_handler(Text(equals='отмена', ignore_case=True), state="*")
async def cancel_handler(message: types.Message, state: FSMContext):
    """Abort the dialog from any state ('отмена' as command or plain text)."""
    current_state = await state.get_state()
    if current_state is None:
        # Nothing to cancel: the user is not inside the FSM dialog.
        return
    await state.finish()
    await message.reply('OK')
# @dp.message_handler(state=FSMAdmin.admin_user)
async def admin_login(message: types.Message, state: FSMContext):
    """State admin_user: store the admin login and advance to the
    password state."""
    async with state.proxy() as data:
        data['admin_user'] = message.text
    await FSMAdmin.next()
    await message.reply('Теперь введи пароль админа')
# @dp.message_handler(state=FSMAdmin.admin_passwd)
async def admin_passwd(message: types.Message, state: FSMContext):
    """State admin_passwd: store the admin password and advance to the
    target-user state.

    NOTE(review): the password is kept in FSM storage in plain text for
    the lifetime of the dialog -- confirm this is acceptable.
    """
    async with state.proxy() as data:
        data['admin_passwd'] = message.text
    await FSMAdmin.next()
    await message.reply('Введи логин юзера для сброса пароля')
# @dp.message_handler(state=FSMAdmin.user)
async def user_search(message: types.Message, state: FSMContext):
    """Final state: take the target username, bind to AD with the stored
    admin credentials and reset that user's password.

    Finishes the FSM on every path (success or failure).
    """
    async with state.proxy() as data:
        data['user'] = message.text
        await message.reply('Все данные приняты')
        conn = connect_to_ad(data['admin_user'], data['admin_passwd'])
        if conn is None or not conn.bind():
            # No connection or bind failed (e.g. wrong admin credentials).
            await message.answer('Нет соединения с сервером AD')
            await state.finish()
        else:
            await message.answer('Соединение с сервером установлено')
            user = user_search_ad(data['user'], conn)
            if not user:
                await message.answer('Нет такого юзера')
                await state.finish()
            else:
                await message.answer(f'Пользователь найден\n{user}')
                if reset_pass(user, conn):
                    await state.finish()
                    await message.answer('Пароль сброшен')
                else:
                    await state.finish()
                    await message.answer('Что-то пошло не так')
def register_handlers_admin(dp: Dispatcher):
    """Attach the reset-dialog handlers to the dispatcher.

    One registration per FSM state; cancel_handler is registered via its
    module-level decorators instead.
    """
    dp.register_message_handler(cm_reset, commands=['сброс'], state=None)
    dp.register_message_handler(admin_login, state=FSMAdmin.admin_user)
    dp.register_message_handler(admin_passwd, state=FSMAdmin.admin_passwd)
    dp.register_message_handler(user_search, state=FSMAdmin.user)
| [
"andrey.lw@gmail.com"
] | andrey.lw@gmail.com |
f92b6125584546b73186b46cb9b7c5b388fb3f55 | 81734061c92b631789b00730e6cfae95a03aec73 | /src/helpcrew/helpdesk/migrations/0037_auto_20171029_2242.py | 415453cddc118df0c69dd3afd70854873c1264fd | [] | no_license | slagovskiy/helpcrew.ru | 311105b395b4c80a4d2f95929d75f993c03fae34 | 9ab2e68ee2a2f0afe406cffb3b805756e25a6386 | refs/heads/master | 2023-01-11T07:42:04.693325 | 2021-08-04T04:45:15 | 2021-08-04T04:45:15 | 96,231,296 | 1 | 0 | null | 2023-01-04T05:03:06 | 2017-07-04T15:21:36 | JavaScript | UTF-8 | Python | false | false | 784 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-10-29 15:42
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('helpdesk', '0036_auto_20171029_2229'),
]
operations = [
migrations.AddField(
model_name='crewusers',
name='task_filter',
field=models.BooleanField(default=True),
),
migrations.AddField(
model_name='crewusers',
name='task_page_size',
field=models.IntegerField(default=100),
),
migrations.AddField(
model_name='crewusers',
name='task_paging',
field=models.BooleanField(default=True),
),
]
| [
"slagovskiy@gmail.com"
] | slagovskiy@gmail.com |
0fe026e4a9fe70a6b17ede1085a5c7c08da2b81d | 4fae5a8f65d0cc656b769799585e5463c9a87851 | /pipeline.py | 690670e39acfb5803d17d9f011a133158f342b4c | [] | no_license | davidmaamoaix/traffic-flow-det | 1fec1023c2d5c472dc66c4a5aaa372a839089966 | 43298fa34035fadf89093781ac3d51efab835adb | refs/heads/master | 2023-05-09T22:48:11.676460 | 2021-06-05T09:57:54 | 2021-06-05T09:57:54 | 342,734,991 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,594 | py | import os
import cv2
import numpy as np
from PIL import Image
import config
from misc import output_stream, load_yolo, reverse_projection, distance
from tools import generate_detections
from deep_sort import nn_matching, tracker, detection
from filter import FilterPID
# Pixel coordinates of four road markers used for the inverse perspective
# projection (assumed order: top-left, top-right, bottom-left,
# bottom-right -- TODO confirm against the calibration frame).
MARKERS = (
    (689, 412),
    (766, 408),
    (713, 517),
    (836, 512)
)

# Pixel distance between the two top markers, used as the road-width scale.
ROAD_WIDTH = distance(MARKERS[0], MARKERS[1])
# Removed leftover debug call: print(reverse_projection(10, 10, MARKERS))
# printed to stdout every time this module was imported.
def run(video_path):
    """Process a traffic video: detect, track and speed-annotate vehicles.

    Reads frames from ``video_path``, runs YOLO detection plus Deep SORT
    tracking on each frame and writes the annotated frames to the output
    stream. Press ESC to stop early when frames are displayed.

    :param video_path: path to the input video file
    """
    reader = cv2.VideoCapture(video_path)
    writer = output_stream(reader)
    fps = reader.get(cv2.CAP_PROP_FPS)
    if not os.path.exists('output'):
        os.mkdir('output')
    model = load_yolo(
        config.YOLO_WEIGHTS,
        config.YOLO_CONFIG,
        config.YOLO_CLASSES,
        {}
    )
    # Deep SORT pieces: appearance metric, tracker, feature encoder.
    metric = nn_matching.NearestNeighborDistanceMetric('cosine', 0.3)
    tracking = tracker.Tracker(metric)
    encoder = generate_detections.create_box_encoder(
        config.ENCODER_PATH,
        batch_size=1
    )
    conf = {
        'show_img': True,
        'fps': fps
    }
    session = Session(model, encoder, tracking, writer, conf)
    # NOTE(review): this debug snapshot consumes the first frame, so frame 0
    # is never processed by the loop below -- confirm this is intentional.
    cv2.imwrite("frame1.jpg", reader.read()[1])
    while True:
        ret, frame = reader.read()
        if not ret:
            break
        process_frame(frame, session)
        # Allow early exit with ESC (key code 27) when frames are shown.
        if session.conf.get('show_img', False) and cv2.waitKey(1) & 0xFF == 27:
            break
    reader.release()
    writer.release()
def process_frame(frame, session):
    """Detect, track and speed-annotate vehicles on a single frame.

    Updates ``session`` in place (tracker state, per-track speed/PID
    state, frame counter) and writes the annotated frame to the output.

    :param frame: BGR image from cv2.VideoCapture
    :param session: shared Session state for this run
    """
    boxes, labels = session.model.forward(frame)
    #for box, label in zip(boxes, labels):
    #    x, y, w, h = box
    #    cv2.rectangle(frame, (x, y), (x + w, y + h), (255, 255, 255), 1)
    extracts = session.encoder(frame, boxes)
    # TODO: retain confidence from yolo
    session.tracking.predict()
    # Every detection gets a fixed 0.75 confidence (see TODO above).
    session.tracking.update([
        detection.Detection(box, 0.75, encoded)
        for box, encoded in zip(boxes, extracts)
    ])
    color = (255, 255, 255)
    for tracked in session.tracking.tracks:
        x, y, x_end, y_end = tuple(map(int, tracked.to_tlbr()))
        cv2.rectangle(frame, (x, y), (x_end, y_end), color, 1)
        cv2.putText(frame, str(tracked.track_id), (x, y), 0, 0.5, color, 1)
        # Project the box centre from image space to road-plane coordinates.
        x_world, y_world = reverse_projection(
            (x + x_end) / 2,
            (y + y_end) / 2,
            MARKERS
        )
        # First sighting of this track: seed position and a PID smoother.
        if tracked.track_id not in session.speed:
            controller = FilterPID(0.52, 0.103, 50)
            session.speed[tracked.track_id] = (x_world, y_world, controller)
        x_prev, y_prev, pid_controller = session.speed[tracked.track_id]
        session.speed[tracked.track_id] = x_world, y_world, pid_controller
        dist = distance((x_prev, y_prev), (x_world, y_world))
        # NOTE(review): session.last_frame is never written anywhere, so this
        # .get() always falls back to counter-1 and delta_time is always 1 --
        # confirm whether last_frame was meant to be updated per track.
        prev_frame = session.last_frame.get(
            tracked.track_id,
            session.counter - 1
        )
        delta_time = session.counter - prev_frame
        color = (255, 0, 0)
        # 3.6 converts m/s to km/h; the 500 divisor is presumably a
        # pixel-to-metre scale factor -- TODO confirm calibration.
        speed = 3.6 * dist * session.conf.get('fps', 30) / delta_time / 500
        speed = pid_controller.update(speed)
        cv2.putText(frame, str(speed), (x + 30, y), 0, 0.5, color, 1)
    if session.conf.get('show_img', False):
        cv2.imshow('img', frame)
    session.counter += 1
    session.writer.write(frame)
class Session:
    """Mutable bag of per-run state shared by run() and process_frame()."""

    def __init__(self, model, encoder, tracking, writer, conf):
        # Injected collaborators.
        self.model = model          # YOLO detector
        self.encoder = encoder      # Deep SORT appearance encoder
        self.tracking = tracking    # Deep SORT tracker
        self.writer = writer        # output video stream
        self.conf = conf            # runtime options ('fps', 'show_img', ...)
        # Per-run mutable state.
        self.speed = {}             # track_id -> (x_world, y_world, PID controller)
        self.last_frame = {}        # track_id -> frame index last seen
        self.filter = {}            # used for PID
        self.counter = 0            # frames processed so far
"davidmaamoaix@gmail.com"
] | davidmaamoaix@gmail.com |
3693f031ca378e06b7c5b4fea8e8476491558a80 | 3640947f56ad3416ebf1559cdb30a3953f17fc25 | /Blog/Blog/dbsqlite/migrations/0002_auto_20190929_0902.py | 9d5397d62d0637016d33e71a466688ececc8ccd6 | [] | no_license | jiaopanxin/projecttest | 7e08b3812cc7998b7931b7704fd105867ea4e900 | d0a0a8e0b2bdbe6babe1209031b6b2c78b99a730 | refs/heads/master | 2020-08-10T09:09:54.638359 | 2019-12-02T11:50:51 | 2019-12-02T11:50:51 | 214,312,932 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 616 | py | # Generated by Django 2.1.5 on 2019-09-29 09:02
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('dbsqlite', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='usersprofile',
name='email',
field=models.CharField(max_length=128, unique=True, verbose_name='邮箱'),
),
migrations.AlterField(
model_name='usersprofile',
name='mobile',
field=models.CharField(max_length=11, unique=True, verbose_name='手机号'),
),
]
| [
"j991114@126.com"
] | j991114@126.com |
48c3cade80e8099c3b1dace297f77e3af95e8b87 | 6827c370a0ac861700b187db7a0d46d2c12ed706 | /test_selenuim/test_login/test_shelve.py | f703c0977a86f87b37bcca1fc2b719d463b12992 | [] | no_license | cyg2695249540/WebWeWork | 202a4bcd0339e1207ff0c0416e36cef0bf163990 | 957599dca6c1b86fa3f4e9cbdf288767265e3492 | refs/heads/master | 2022-12-30T06:35:53.244259 | 2020-10-20T07:14:13 | 2020-10-20T07:14:13 | 304,596,576 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,551 | py | # !/usr/bin/env Python3
# -*- coding: utf-8 -*-
# @FILE : test_shelve.py
# @Author : Pluto.
# @Time : 2020/10/17 11:47
"""
使用shelve小型数据库实现cookie持久化存储
"""
import shelve
from time import sleep
from selenium import webdriver
from selenium.webdriver.chrome.options import Options
from selenium.webdriver.common.by import By
class TestShelve():
    """Selenium tests that persist WeWork admin cookies in a `shelve` DB
    and reuse them to skip the QR-code login."""

    def setup_method(self):
        # # reuse an already-open browser session to harvest cookies:
        # option = Options()
        # option.debugger_address = "127.0.0.1:9222"
        # self.driver = webdriver.Chrome(options=option)
        self.driver =webdriver.Chrome()
        # global implicit wait
        self.driver.implicitly_wait(5)

    def teardown_method(self):
        self.driver.quit()

    def test_importContacts(self):
        """Upload the contacts spreadsheet and verify the chosen file name."""
        # click "import contacts"
        self.driver.find_element(By.CSS_SELECTOR, ".index_service_cnt_itemWrap:nth-child(2)").click()
        # attach the file to the hidden upload input
        self.driver.find_element(By.XPATH,
                                 "//input[@class='ww_fileImporter_fileContainer_uploadInputMask']").send_keys(
            "C:/Users/uiui/Desktop/txl.xlsx")
        # verify the uploaded file name is displayed
        assert "txl.xlsx" == self.driver.find_element(By.XPATH,
                                                      "//div[@class='ww_fileImporter_fileContainer_fileNames']").text
        sleep(3)

    def test_shelve(self):
        """Load cookies from the shelve DB, inject them into the browser
        and re-run the contacts-import flow while logged in."""
        # # # harvest and persist cookies (one-time step, kept for reference):
        # cookies = self.driver.get_cookies()
        # db = shelve.open("./mydbs/cookies")
        # db["cookie"] = cookies
        # db.close()
        # NOTE(review): db is never closed on this path.
        db = shelve.open("./mydbs/cookies")
        cookies=db["cookie"]
        self.driver.get("https://work.weixin.qq.com/wework_admin/frame")
        for cookie in cookies:
            # add_cookie rejects the "expiry" key, drop it first
            if "expiry" in cookie.keys():
                cookie.pop("expiry")
            self.driver.add_cookie(cookie)
        self.driver.get("https://work.weixin.qq.com/wework_admin/frame")
        # click "import contacts"
        self.driver.find_element(By.CSS_SELECTOR, ".index_service_cnt_itemWrap:nth-child(2)").click()
        # attach the file to the hidden upload input
        self.driver.find_element(By.XPATH, "//input[@class='ww_fileImporter_fileContainer_uploadInputMask']").send_keys(
            "C:/Users/uiui/Desktop/txl.xlsx")
        # verify the uploaded file name is displayed
        assert "txl.xlsx" == self.driver.find_element(By.XPATH,
                                                      "//div[@class='ww_fileImporter_fileContainer_fileNames']").text
        sleep(3)
| [
"2695249540@qq.com"
] | 2695249540@qq.com |
20a11dfee662c80836131743d05ada307a58b06d | 8de3115a720f4534d59f0b8f438fce49996d114d | /problem002/002.py | a267f262bf85ceb41cd38e1513444a0a3fd06406 | [] | no_license | rferdowsi/learn-python | 19ee6952d63f408b91a20ea5224e1bd4244715de | c1da13bbc97da864bf02e2fe1b5887ffbea08814 | refs/heads/master | 2022-12-12T10:49:54.671859 | 2019-11-21T17:42:20 | 2019-11-21T17:42:20 | 217,283,474 | 0 | 0 | null | 2022-12-08T06:55:33 | 2019-10-24T11:32:46 | Python | UTF-8 | Python | false | false | 1,779 | py | import pymysql
import datetime
import settings
import os
def _open_db_connection():
# Connect to the database
connection = pymysql.connect(host=os.getenv("DB_HOST"),
user=os.getenv("DB_USER"),
password=os.getenv("DB_PASSWORD"),
db=os.getenv("DB_DBNAME"),
charset='utf8mb4',
cursorclass=pymysql.cursors.DictCursor)
return connection
def _close_db_connection(connection):
connection.close()
def show_all_coupons():
    """Print every row of the `coupons` table to stdout."""
    connection = _open_db_connection()
    try:
        with connection.cursor() as cursor:
            cursor.execute("SELECT * FROM `coupons`")
            for row in cursor.fetchall():
                print(row)
    finally:
        _close_db_connection(connection)
"""
reads coupon details from the database.
"""
def read_one(coupon_code):
connection = _open_db_connection()
try:
with connection.cursor() as cursor:
sql = "SELECT * FROM `coupons` WHERE code = %s"
cursor.execute(sql, (coupon_code))
row = cursor.fetchone()
return row
finally:
_close_db_connection(connection)
def is_valid_coupon(coupon_code):
    """Return True when today falls within the coupon's validity window.

    :param coupon_code: coupon code to check
    :raises Exception: when the coupon code does not exist
    :return: bool
    """
    coupon = read_one(coupon_code)
    # `is None` instead of `== None` (identity check for the sentinel).
    if coupon is None:
        raise Exception('coupon_code does not exist')
    today = datetime.datetime.now()
    # Chained comparison replaces the original bitwise `&` on booleans:
    # same result here, but short-circuiting and idiomatic.
    return coupon['start_date'] <= today <= coupon['end_date']
#================================
# load_dotenv()
# show_all_coupons()
# print(read_one('blah'))
print(read_one('code1'))
# print(is_valid_coupon('code1'))
# print(is_valid_coupon('code2'))
# is_valid_coupon('code3')
| [
"rudaba.ferdowsi@gmail.com"
] | rudaba.ferdowsi@gmail.com |
7d73bde2f3ea9249ed760f365bd508f49df2c1b9 | c50083e356b976006639c96f801fa658c472e293 | /src/skills/models.py | a7fccbdf2ee54cc6bc99e5e7195290baca7e3f18 | [] | no_license | vladstan/QuestHunt | 486d2bb5326e6e4747a6c33b4714b4ea5064995a | 5d45ef12eea3e9b0e773032afcd0265136196d7f | refs/heads/master | 2021-06-15T18:01:36.748689 | 2017-03-02T12:13:49 | 2017-03-02T12:13:49 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 344 | py | from django.db import models
# Create your models here.
class Skill(models.Model):
    # NOTE(review): the original file left every field unassigned
    # (`name =`), which is a SyntaxError.  Minimal plausible field types
    # are filled in below -- confirm against the intended schema before
    # generating migrations.
    name = models.CharField(max_length=255)
    description = models.TextField(blank=True)
    slug = models.SlugField(unique=True)
    icon = models.ImageField(upload_to='skill_icons/', blank=True)
class TribePointsAward(models.Model):
    # NOTE(review): `User` is referenced but never imported in this file,
    # and the remaining fields were left unassigned (`quest =`), which is
    # a SyntaxError.  Plausible placeholders below -- confirm the schema.
    hero = models.ForeignKey(User)
    quest = models.CharField(max_length=255)
    points = models.IntegerField(default=0)
    tribe = models.CharField(max_length=255)
class DestinationPointsAward(models.Model):
    # NOTE(review): `User` is referenced but never imported in this file,
    # and the remaining fields were left unassigned (`quest =`), which is
    # a SyntaxError.  Plausible placeholders below -- confirm the schema.
    hero = models.ForeignKey(User)
    quest = models.CharField(max_length=255)
    points = models.IntegerField(default=0)
    tribe = models.CharField(max_length=255)
| [
"vlad@futuristico.io"
] | vlad@futuristico.io |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.