blob_id
stringlengths 40
40
| directory_id
stringlengths 40
40
| path
stringlengths 3
281
| content_id
stringlengths 40
40
| detected_licenses
listlengths 0
57
| license_type
stringclasses 2
values | repo_name
stringlengths 6
116
| snapshot_id
stringlengths 40
40
| revision_id
stringlengths 40
40
| branch_name
stringclasses 313
values | visit_date
timestamp[us] | revision_date
timestamp[us] | committer_date
timestamp[us] | github_id
int64 18.2k
668M
⌀ | star_events_count
int64 0
102k
| fork_events_count
int64 0
38.2k
| gha_license_id
stringclasses 17
values | gha_event_created_at
timestamp[us] | gha_created_at
timestamp[us] | gha_language
stringclasses 107
values | src_encoding
stringclasses 20
values | language
stringclasses 1
value | is_vendor
bool 2
classes | is_generated
bool 2
classes | length_bytes
int64 4
6.02M
| extension
stringclasses 78
values | content
stringlengths 2
6.02M
| authors
listlengths 1
1
| author
stringlengths 0
175
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
cec42bc5df865c7e99d23024fa4c71a6f7db99d8
|
32fb6fd06b496b4c9ceabe578dceed265950cc37
|
/homework/core/models/meta/base.py
|
b45e9d7de1fe1813e6d37480dcef6702e9545bf9
|
[] |
no_license
|
rach/homework
|
8167d3930d4313818e306fb0965ffbd6402bf12b
|
aca450753445caa188675d637300ead443d15525
|
refs/heads/master
| 2021-01-10T04:50:53.857108
| 2016-01-11T21:13:38
| 2016-01-11T21:13:38
| 49,445,928
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 841
|
py
|
from sqlalchemy.ext.declarative import declared_attr
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker, scoped_session
from sqlalchemy import event
from sqlalchemy import (
Column,
Integer
)
import re
_underscorer1 = re.compile(r'(.)([A-Z][a-z]+)')
_underscorer2 = re.compile('([a-z0-9])([A-Z])')
def _camel_to_snake(s):
subbed = _underscorer1.sub(r'\1_\2', s)
return _underscorer2.sub(r'\1_\2', subbed).lower()
class Base(object):
    """Mixin for the declarative base: gives every model a snake_case table
    name derived from its class name, plus an integer surrogate primary key."""

    @declared_attr
    def __tablename__(cls):
        # e.g. class UserAccount -> table "user_account"
        return _camel_to_snake(cls.__name__)

    # Surrogate primary key column shared by all models.
    id = Column(Integer, primary_key=True)


# Rebind: from here on, Base is the real declarative base built on the mixin.
Base = declarative_base(cls=Base)
def create_dbsession(engine):
    """Build a thread-local (scoped) session factory bound to *engine*.

    Also binds the declarative metadata to the engine so metadata operations
    can run without an explicit connection.
    """
    session_factory = sessionmaker(bind=engine)
    dbsession = scoped_session(session_factory)
    Base.metadata.bind = engine
    return dbsession
|
[
"rachid.belaid@gmail.com"
] |
rachid.belaid@gmail.com
|
f6c327232f55a5253a539568cc9c8d10d656384d
|
ca7aa979e7059467e158830b76673f5b77a0f5a3
|
/Python_codes/p02686/s642611525.py
|
914bb9607791cee5d353d156d9afb343faf395b3
|
[] |
no_license
|
Aasthaengg/IBMdataset
|
7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901
|
f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8
|
refs/heads/main
| 2023-04-22T10:22:44.763102
| 2021-05-13T17:27:22
| 2021-05-13T17:27:22
| 367,112,348
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 981
|
py
|
def main():
N = int(input())
up_lines = []
down_lines = []
for i in range(N):
s = input()
height = 0
bottom = 0
for c in s:
if c == "(":
height += 1
else:
height -= 1
bottom = min(bottom, height)
if height > 0:
up_lines.append((bottom, height))
else:
down_lines.append((bottom-height, -height))
up_lines.sort(reverse=True, key=lambda line: line[0])
down_lines.sort(reverse=True, key=lambda line: line[0])
left = 0
for bottom, height in up_lines:
if left + bottom < 0:
print("No")
return
left += height
right = 0
for bottom, height in down_lines:
if right + bottom < 0:
print("No")
return
right += height
if left == right:
print("Yes")
else:
print("No")
if __name__ == "__main__":
main()
|
[
"66529651+Aastha2104@users.noreply.github.com"
] |
66529651+Aastha2104@users.noreply.github.com
|
92a5da607045107bbee7fd23ac0e9e1ec54453da
|
0d2811497b377bc3530c3ddc10f4e948ef3ab902
|
/launcher/common/Keys.py
|
c7b1c24ad592f0a93219e61683c4b41f827cacc2
|
[
"BSD-3-Clause"
] |
permissive
|
liblit/sampler
|
a475b44d2a257bc9a2cf93bb5d04e94abc9d15b2
|
eaedba51ee8367b9b355e6f85a6c677878160d49
|
refs/heads/master
| 2021-04-24T21:30:53.227637
| 2018-10-08T02:00:59
| 2018-10-08T02:00:59
| 117,015,273
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 287
|
py
|
# GSettings key names used by the launcher.
ASKED = 'asked'
MASTER = 'enabled'


def settings():
    """Return the Gio.Settings object for the edu.wisc.cs.cbi schema."""
    import gi
    # always use "schema_id" once Fedora 20 is no longer supported
    keyword = 'schema' if gi.version_info < (3, 14) else 'schema_id'
    from gi.repository import Gio
    return Gio.Settings(**{keyword: 'edu.wisc.cs.cbi'})
|
[
"liblit@cs.wisc.edu"
] |
liblit@cs.wisc.edu
|
d7dc7236ef3c00feb2d661cbb1f257e5ad610ec1
|
7a93b7817b343b9da49662948e922684b94d1d40
|
/jstorm-client/src/main/py/storm/Nimbus-remote
|
2aae8a4fa0862207f6473a1653963e9e2abfbd00
|
[
"Apache-2.0"
] |
permissive
|
caiyifeng/learn_jstorm
|
c2aeeb9dd6d29ddf4c7593347b537e049fa57b4a
|
61d27beea51a5c37e00193ad4c464b58ded156c1
|
refs/heads/master
| 2020-12-26T21:49:50.380442
| 2015-05-03T14:43:19
| 2015-05-03T14:43:19
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,717
|
#!/usr/bin/env python
#
# Autogenerated by Thrift Compiler (0.7.0)
#
# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
#
import sys
import pprint
from urlparse import urlparse
from thrift.transport import TTransport
from thrift.transport import TSocket
from thrift.transport import THttpClient
from thrift.protocol import TBinaryProtocol
import Nimbus
from ttypes import *
# No arguments (or "--help"): print the callable Nimbus RPC signatures and exit.
# NOTE: this is Thrift-autogenerated Python 2 code -- do not hand-edit logic.
if len(sys.argv) <= 1 or sys.argv[1] == '--help':
  print ''
  print 'Usage: ' + sys.argv[0] + ' [-h host:port] [-u url] [-f[ramed]] function [arg1 [arg2...]]'
  print ''
  print 'Functions:'
  print ' void submitTopology(string name, string uploadedJarLocation, string jsonConf, StormTopology topology)'
  print ' void submitTopologyWithOpts(string name, string uploadedJarLocation, string jsonConf, StormTopology topology, SubmitOptions options)'
  print ' void killTopology(string name)'
  print ' void killTopologyWithOpts(string name, KillOptions options)'
  print ' void activate(string name)'
  print ' void deactivate(string name)'
  print ' void rebalance(string name, RebalanceOptions options)'
  print ' void metricMonitor(string name, MonitorOptions options)'
  print ' void beginLibUpload(string libName)'
  print ' string beginFileUpload()'
  print ' void uploadChunk(string location, string chunk)'
  print ' void finishFileUpload(string location)'
  print ' string beginFileDownload(string file)'
  print ' string downloadChunk(string id)'
  print ' string getNimbusConf()'
  print ' ClusterSummary getClusterInfo()'
  print ' TopologyInfo getTopologyInfo(string id)'
  print ' SupervisorWorkers getSupervisorWorkers(string host)'
  print ' string getTopologyConf(string id)'
  print ' StormTopology getTopology(string id)'
  print ' StormTopology getUserTopology(string id)'
  print ' TopologyMetricInfo getTopologyMetric(string id)'
  print ''
  sys.exit(0)
pp = pprint.PrettyPrinter(indent = 2)
# Connection defaults; overridden by the -h / -u / -f flags below.
host = 'localhost'
port = 9090
uri = ''
framed = False
http = False
argi = 1
# -h host:port selects a plain socket endpoint.
if sys.argv[argi] == '-h':
  parts = sys.argv[argi+1].split(':')
  host = parts[0]
  port = int(parts[1])
  argi += 2
# -u url switches to the HTTP transport (port defaults to 80 if omitted).
if sys.argv[argi] == '-u':
  url = urlparse(sys.argv[argi+1])
  parts = url[1].split(':')
  host = parts[0]
  if len(parts) > 1:
    port = int(parts[1])
  else:
    port = 80
  uri = url[2]
  if url[4]:
    uri += '?%s' % url[4]
  http = True
  argi += 2
# -f / -framed wraps the socket in a framed transport.
if sys.argv[argi] == '-f' or sys.argv[argi] == '-framed':
  framed = True
  argi += 1
# Remaining argv: the RPC name followed by its arguments.
cmd = sys.argv[argi]
args = sys.argv[argi+1:]
# Build the Thrift client: HTTP or socket (framed/buffered), binary protocol.
if http:
  transport = THttpClient.THttpClient(host, port, uri)
else:
  socket = TSocket.TSocket(host, port)
  if framed:
    transport = TTransport.TFramedTransport(socket)
  else:
    transport = TTransport.TBufferedTransport(socket)
protocol = TBinaryProtocol.TBinaryProtocol(transport)
client = Nimbus.Client(protocol)
transport.open()
# Dispatch: each branch checks arity, invokes the RPC, pretty-prints the
# result. Struct-typed arguments are parsed with eval() -- acceptable only
# for a trusted operator CLI; never feed it untrusted input.
if cmd == 'submitTopology':
  if len(args) != 4:
    print 'submitTopology requires 4 args'
    sys.exit(1)
  pp.pprint(client.submitTopology(args[0],args[1],args[2],eval(args[3]),))
elif cmd == 'submitTopologyWithOpts':
  if len(args) != 5:
    print 'submitTopologyWithOpts requires 5 args'
    sys.exit(1)
  pp.pprint(client.submitTopologyWithOpts(args[0],args[1],args[2],eval(args[3]),eval(args[4]),))
elif cmd == 'killTopology':
  if len(args) != 1:
    print 'killTopology requires 1 args'
    sys.exit(1)
  pp.pprint(client.killTopology(args[0],))
elif cmd == 'killTopologyWithOpts':
  if len(args) != 2:
    print 'killTopologyWithOpts requires 2 args'
    sys.exit(1)
  pp.pprint(client.killTopologyWithOpts(args[0],eval(args[1]),))
elif cmd == 'activate':
  if len(args) != 1:
    print 'activate requires 1 args'
    sys.exit(1)
  pp.pprint(client.activate(args[0],))
elif cmd == 'deactivate':
  if len(args) != 1:
    print 'deactivate requires 1 args'
    sys.exit(1)
  pp.pprint(client.deactivate(args[0],))
elif cmd == 'rebalance':
  if len(args) != 2:
    print 'rebalance requires 2 args'
    sys.exit(1)
  pp.pprint(client.rebalance(args[0],eval(args[1]),))
elif cmd == 'metricMonitor':
  if len(args) != 2:
    print 'metricMonitor requires 2 args'
    sys.exit(1)
  pp.pprint(client.metricMonitor(args[0],eval(args[1]),))
elif cmd == 'beginLibUpload':
  if len(args) != 1:
    print 'beginLibUpload requires 1 args'
    sys.exit(1)
  pp.pprint(client.beginLibUpload(args[0],))
elif cmd == 'beginFileUpload':
  if len(args) != 0:
    print 'beginFileUpload requires 0 args'
    sys.exit(1)
  pp.pprint(client.beginFileUpload())
elif cmd == 'uploadChunk':
  if len(args) != 2:
    print 'uploadChunk requires 2 args'
    sys.exit(1)
  pp.pprint(client.uploadChunk(args[0],args[1],))
elif cmd == 'finishFileUpload':
  if len(args) != 1:
    print 'finishFileUpload requires 1 args'
    sys.exit(1)
  pp.pprint(client.finishFileUpload(args[0],))
elif cmd == 'beginFileDownload':
  if len(args) != 1:
    print 'beginFileDownload requires 1 args'
    sys.exit(1)
  pp.pprint(client.beginFileDownload(args[0],))
elif cmd == 'downloadChunk':
  if len(args) != 1:
    print 'downloadChunk requires 1 args'
    sys.exit(1)
  pp.pprint(client.downloadChunk(args[0],))
elif cmd == 'getNimbusConf':
  if len(args) != 0:
    print 'getNimbusConf requires 0 args'
    sys.exit(1)
  pp.pprint(client.getNimbusConf())
elif cmd == 'getClusterInfo':
  if len(args) != 0:
    print 'getClusterInfo requires 0 args'
    sys.exit(1)
  pp.pprint(client.getClusterInfo())
elif cmd == 'getTopologyInfo':
  if len(args) != 1:
    print 'getTopologyInfo requires 1 args'
    sys.exit(1)
  pp.pprint(client.getTopologyInfo(args[0],))
elif cmd == 'getSupervisorWorkers':
  if len(args) != 1:
    print 'getSupervisorWorkers requires 1 args'
    sys.exit(1)
  pp.pprint(client.getSupervisorWorkers(args[0],))
elif cmd == 'getTopologyConf':
  if len(args) != 1:
    print 'getTopologyConf requires 1 args'
    sys.exit(1)
  pp.pprint(client.getTopologyConf(args[0],))
elif cmd == 'getTopology':
  if len(args) != 1:
    print 'getTopology requires 1 args'
    sys.exit(1)
  pp.pprint(client.getTopology(args[0],))
elif cmd == 'getUserTopology':
  if len(args) != 1:
    print 'getUserTopology requires 1 args'
    sys.exit(1)
  pp.pprint(client.getUserTopology(args[0],))
elif cmd == 'getTopologyMetric':
  if len(args) != 1:
    print 'getTopologyMetric requires 1 args'
    sys.exit(1)
  pp.pprint(client.getTopologyMetric(args[0],))
else:
  print 'Unrecognized method %s' % cmd
  sys.exit(1)
transport.close()
|
[
"songtk@msn.com"
] |
songtk@msn.com
|
|
ebcfe501255bb644caa92394017c550197a10ee4
|
d27d98fc322ea3b29b77fdf0a8751bc6a6e4355a
|
/python/20/ccc20j4.py
|
09d8866cfee2b5c41d870b92254d90a5edccd997
|
[
"MIT"
] |
permissive
|
sadmanca/ccc-solutions
|
9e48279879241b94e746c7ce1c202d66ce5c46f4
|
1ab0d1d7a59eaf0f6b231208a5f42a5eb364caaa
|
refs/heads/master
| 2023-03-05T17:19:18.776037
| 2021-02-17T00:40:30
| 2021-02-17T00:40:30
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 316
|
py
|
# COMPLETE
t = input()
s = input()
l = len(s)
shifts = set()
shifts.add(s)
for i in range(l-1):
s = s[1:]+s[0]
shifts.add(s)
cyc = False
for string in shifts:
for i in range(0, len(t)-l+1):
if string == t[i:i+l]:
cyc = True
if cyc == True:
print("yes")
else:
print("no")
|
[
"41028402+Sadman-Hossain@users.noreply.github.com"
] |
41028402+Sadman-Hossain@users.noreply.github.com
|
5d00e7dd24ff76d035474abbf3f113bf88deb4cc
|
cb82718999848e7ab557b6877d40c079916d065a
|
/gen_trips.py
|
b17191fbed4c9fd60f31a3d9421639b2a40469c9
|
[
"Apache-2.0"
] |
permissive
|
divergent63/simple_shortest_routing
|
e84d1b6659a7f19436f3d9125534075b39a274e1
|
f6719ad1fb0a0fdd5916bece62edbed82a0ef899
|
refs/heads/master
| 2020-05-20T22:29:29.099605
| 2019-05-15T12:26:26
| 2019-05-15T12:26:26
| 185,783,420
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,456
|
py
|
#!/usr/bin/env python
# coding=utf-8
"""
input: OD information(vehicle information)
output: SUMO Trips
"""
from lxml import etree
from pathlib import Path
import os
import pandas as pd
def gen_trips(od):
    """Convert an OD (origin-destination) table into a SUMO trips XML file.

    od: DataFrame with at least 'start_time' and 'route' columns; each route
    cell is a string containing quoted edge ids (e.g. "['e1', 'e2']").
    Writes conf/test0_trips.trips.xml under the current working directory.
    """
    start_time = od['start_time'].values
    root = etree.Element("routes")
    for i in range(len(od.values)):
        veh_id = str(i)
        # Split on single quotes; fragments longer than 3 characters are the
        # edge ids, the rest is list punctuation ("[", ", ", "]").
        # NOTE(review): assumes every real edge id is > 3 chars -- confirm.
        fragments = od['route'].values[i].split("'")
        route_list = [frag for frag in fragments if len(frag) > 3]
        # Join all edge ids with spaces. The original handled only routes of
        # 2-4 edges (other lengths passed a raw list to lxml and crashed);
        # joining generalizes to any length with identical output for 2-4.
        route = ' '.join(str(edge) for edge in route_list)
        vehicle = etree.SubElement(root, "vehicle", id=str(veh_id), depart=str(start_time[i] * 10))
        etree.SubElement(
            vehicle, "route", edges=route
        )
    with open(Path(Path(os.getcwd()) / 'conf' / Path('test0_trips.trips.xml')), 'w') as e_data:
        print(etree.tostring(root, pretty_print=True, encoding='unicode'), file=e_data)
if __name__ == '__main__':
    # Load the vehicle OD table (dropping incomplete rows) and emit the trips file.
    csv_path = Path(os.getcwd()) / 'conf' / 'veh_info.csv'
    od_table = pd.read_csv(csv_path).dropna()
    gen_trips(od_table)
|
[
"634184805@qq.com"
] |
634184805@qq.com
|
6924a180ef02d2c2fcdab06a4084459706c24ff6
|
ed8bfb44c7d8fd2ef3d0d493d39eba6d7b729aec
|
/decode.py
|
05c18d5140f05ddeb6b1977cef37d543fe16147e
|
[] |
no_license
|
omri123/text_lord
|
5cedeb8fb53c69977b21ec25fe77e2d598614d97
|
59f8095784f702891d24c771281c03ec0402e4f0
|
refs/heads/master
| 2022-04-09T11:39:20.561554
| 2020-03-12T22:05:04
| 2020-03-12T22:05:04
| 243,927,064
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,893
|
py
|
import torchtext
from restorant_dataset import START, END
from model import NoEncoderFConvDecoderModel
from archive.utils import load_checkpoint, vocab_to_dictionary
from restorant_dataset import get_dataset
import os
import pickle
import copy
def gready_decode_single(model: NoEncoderFConvDecoderModel, vocab: torchtext.vocab,
                         stars: int, sample_id: int, start_token=START, end_token=END, device='cpu'):
    """Greedy (argmax) decoding of one review, token by token.

    Repeatedly feeds the growing prefix through the model and appends the
    highest-scoring next token, stopping once end_token appears or more than
    25 tokens have been generated. Returns the space-joined token sequence.
    """
    max_length = 25
    src_tokens = torch.tensor([[sample_id, stars]], dtype=torch.int64, device=device)
    src_lengths = torch.full((1, 1), 5, device=device)
    prefix = torch.tensor([[vocab.stoi[start_token]]], dtype=torch.int64, device=device)
    tokens = [start_token]
    steps = 0
    while end_token not in tokens:
        logits, _ = model(src_tokens, src_lengths, prefix)
        next_scores = logits[0, -1, :]  # (batch, seq, vocab): scores at the last position
        tokens.append(vocab.itos[torch.argmax(next_scores).item()])
        steps += 1
        if steps > max_length:
            break
        prefix = torch.tensor([[vocab.stoi[w] for w in tokens]], dtype=torch.int64, device=device)
    return ' '.join(tokens)
def gready_decode(model, vocab, src_tokens, src_lengths, start_token, end_token):
    """Batch greedy decoding -- unimplemented placeholder."""
    pass
# NOTE(review): these imports sit mid-module; the functions defined above only
# call torch at runtime (after the whole module has loaded), so this works,
# but they belong at the top of the file.
import torch
from queue import PriorityQueue

# Module default device; the decode functions also take their own device arg.
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
class BeamSearchNode(object):
    """A partial beam-search hypothesis: token list, accumulated
    log-probability, and hypothesis length."""

    def __init__(self, sentence, logProb, length):
        '''
        :param sentence: a list of tokens!!!
        :param logProb: logp sum
        :param length:
        '''
        self.sent = sentence
        self.logp = logProb
        self.leng = length

    def eval(self, alpha=1.0):
        """Priority-queue score: lower is better (negated, length-normalized
        log-probability plus an optional shaping reward)."""
        shaping_reward = 0  # hook for a future reward-shaping term
        normalizer = float(self.leng - 1 + 1e-6)  # epsilon avoids divide-by-zero
        return -(self.logp / normalizer + alpha * shaping_reward)

    def __lt__(self, other):
        # NOTE(review): uses <=, so equal-score nodes compare as "less than";
        # kept as-is to preserve PriorityQueue tie behavior.
        return self.eval() <= other.eval()
def beam_decode_single(model, vocab, sample_id, stars, beam_width=10, topk=1, device='cpu', SOS_token='<s>', EOS_token='</s>', MAX_LENGTH=50):
    """
    decode single example using beam search.

    Best-first search over a PriorityQueue of BeamSearchNode hypotheses;
    expands the current best node by its beam_width most likely next tokens
    until topk hypotheses end in EOS_token or the queue exceeds 2000 entries.
    Returns a list of space-joined decoded sentences.

    :param decoder: a NoEncoderFConvDecoderModel object
    :param vocab:
    :param sample_id:
    :param stars:
    :param beam_width:
    :param SOS_token:
    :param EOS_token:
    :param MAX_LENGTH: unused in the current implementation
    :return:
    """
    src_tokens = torch.tensor([[sample_id, stars]], dtype=torch.int64, device=device)
    src_lengths = torch.full((1, 1), 5, device=device)
    review = [SOS_token]
    # review = torch.tensor([[vocab.stoi[SOS_token]]], dtype=torch.int64)
    solutions = []
    nodes = PriorityQueue()
    node = BeamSearchNode(review, 0, 1)
    nodes.put(node)
    qsize = 1
    while True:
        # finished
        if len(solutions) == topk: break
        # give up when decoding takes too long
        if qsize > 2000:
            for i in range(topk - len(solutions)):
                solutions.append(nodes.get())
            break
        # fetch the best node
        node = nodes.get()
        review = node.sent
        review_int = [vocab.stoi[w] for w in review]
        review_torch = torch.tensor([review_int], dtype=torch.int64, device=device)
        if review[-1] == EOS_token:
            solutions.append(node)
            continue
        logits, _ = model(src_tokens, src_lengths, review_torch)
        predictions = torch.log_softmax(logits, dim=2)
        log_probs, indexes = torch.topk(predictions, beam_width)
        for new_k in range(beam_width):
            word_index = indexes[0, len(review)-1, new_k].item()
            word = vocab.itos[word_index]
            review_new = copy.deepcopy(node.sent)
            review_new.append(word)
            # NOTE(review): log_probs is indexed at sequence position 0 while
            # indexes uses len(review)-1 -- these look inconsistent; confirm
            # which position the score should come from before changing.
            new_word_log_p = log_probs[0, 0, new_k].item()
            # NOTE(review): node.leng is passed through unchanged, so the
            # length normalization in eval() never grows -- confirm intent.
            new_node = BeamSearchNode(review_new, node.logp + new_word_log_p, node.leng)
            nodes.put(new_node)
            qsize += 1
    return [' '.join(s.sent) for s in solutions]
def main():
    """Load a trained checkpoint and its vocab, then print greedy and
    beam-search decodings for the first 10 dataset samples (each greedily
    decoded with the original and the flipped star rating)."""
    # Hard-coded experiment directory; swap in the commented line for the other run.
    foldername = '/cs/labs/dshahaf/omribloch/data/text_lord/restorant/train/note_tiny_no_noise_dim_32_ntokens_5_nconv_10_nsamples_102400_content_noise_0.001/'
    # foldername = '/cs/labs/dshahaf/omribloch/data/text_lord/restorant/train/note_EM_no_noise_dim_32_ntokens_10_nconv_4_nsamples_1024_content_noise_0.0/'
    vocab_path = os.path.join(foldername, 'vocab.pickle')
    model_ckpt_path = os.path.join(foldername, 'last_checkpoint.ckpt')
    with open(vocab_path, 'rb') as file:
        vocab = pickle.load(file)
    print('vocab was loaded')
    decoder_dictionary = vocab_to_dictionary(vocab)
    device = 'cpu'
    # Hyperparameters must match the ones the checkpoint was trained with
    # (they mirror the values encoded in the folder name above).
    nsamples = 102400
    ntokens = 5
    dim = 32
    content_noise = 0.001
    dropout = 0
    nconv = 10
    model = load_checkpoint(model_ckpt_path, 'cpu',
                            device, nsamples, decoder_dictionary.pad(),
                            ntokens, dim, content_noise, dropout,
                            decoder_dictionary, 50, nconv)
    print('model loaded')
    model.eval()
    dataset, vocab = get_dataset(10000, '/cs/labs/dshahaf/omribloch/data/text_lord/restorant/', vocab)
    for i in range(10):
        sid = dataset[i].id
        stars = dataset[i].stars
        # stars = 1
        review_sentence = ' '.join(dataset[i].review)
        print(review_sentence)
        decoded_sentence = gready_decode_single(model, vocab, stars, sid)
        print(decoded_sentence)
        # Decode again with the star rating flipped (1-stars) for contrast.
        decoded_sentence = gready_decode_single(model, vocab, 1-stars, sid)
        print(decoded_sentence)
        print('-------------')
        decoded_sentence = beam_decode_single(model, vocab, sid, stars, topk=10, beam_width=4)
        for d in decoded_sentence:
            print(d)
        print('==============================')


if __name__=='__main__':
    main()
|
[
"omri.bloch@mail.huji.ac.il"
] |
omri.bloch@mail.huji.ac.il
|
d8b9910b487bbb5eb2e7b49068c6d3b302e6cd43
|
7043eec45732b05686dd7da397f922339efad693
|
/TA.py
|
892b74546802ad34a83a52569521a4b2a44c031a
|
[] |
no_license
|
rtgrimm/SpectralKineticsODE
|
7fd35f924ad64aac08d76dd1141cf3b38051b82f
|
68b3f16372a8294a714dd7df5a6029d1183fd9ae
|
refs/heads/master
| 2023-07-15T06:11:15.368814
| 2021-08-09T17:15:56
| 2021-08-09T17:15:56
| 392,377,367
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,619
|
py
|
import numpy as np
from dataclasses import dataclass
from typing import *
from scipy.constants import Planck, speed_of_light, elementary_charge
from scipy.integrate import odeint, solve_ivp
import matplotlib.pyplot as plt
from kinetics import Scaler, Parameters, run
from style import set_style
@dataclass
class TAParams:
    """Inputs for a transient-absorption (TA) simulation."""
    pump_spectrum : np.ndarray    # spectral profile of the pump pulse
    probe_spectrum : np.ndarray   # spectral profile of the probe pulse
    time : np.ndarray             # time grid handed to the kinetics solver
    tau_list : np.ndarray         # pump-probe delays to scan
    probe_time_env : Callable[[Scaler], Callable[[Scaler], Scaler]]  # delay -> time envelope
    pump_time_env: Callable[[Scaler], Callable[[Scaler], Scaler]]    # delay -> time envelope
    make_params : Any             # factory: excitation function -> kinetics Parameters
    # NOTE: no annotation, so this is a shared class attribute, NOT a
    # dataclass field -- it never appears in __init__.
    probe_scale = 0.1
@dataclass
class TAResults:
    """Output of run_TA: one spectrum per pump-probe delay."""
    # Stacked spectra, one row per tau in TAParams.tau_list.
    spectra : np.ndarray
def run_TA(TA_params : TAParams) -> TAResults:
    """Simulate a transient-absorption experiment over all delays tau.

    First computes a probe-only baseline A_0 at the earliest delay, then for
    each tau in tau_list runs the kinetics model with combined pump+probe
    excitation and records the summed spectral flux. Returns TAResults with
    one spectrum per tau.
    """
    # Baseline: probe alone at the earliest delay. Uses the declared
    # TAParams.probe_scale (default 0.1) instead of the former duplicated
    # literal, so per-run overrides are honored consistently.
    params = TA_params.make_params(
        lambda t: TA_params.probe_time_env(
            np.min(TA_params.tau_list))(t) * TA_params.probe_spectrum * TA_params.probe_scale)
    pump_spectrum = TA_params.pump_spectrum
    probe_spectrum = TA_params.probe_spectrum
    result_A_0 = run(params, TA_params.time)
    A_0 = np.sum(result_A_0.spectral_fluxes, axis=0)
    spectra = []
    pump = TA_params.pump_time_env(np.min(TA_params.tau_list))
    for tau in TA_params.tau_list:
        # Progress indicator as a percentage of the largest delay.
        print(f"{(tau / np.max(TA_params.tau_list)) * 100.0}%")
        # NOTE(review): this uses pump_time_env for the *probe* envelope;
        # probe_time_env looks intended -- confirm before changing, since
        # fixing it would alter the numerical results.
        probe = TA_params.pump_time_env(tau)
        exc_total = lambda t: pump(t) * pump_spectrum + probe(t) * probe_spectrum * TA_params.probe_scale
        params = TA_params.make_params(exc_total)
        results = run(params, TA_params.time)
        # NOTE(review): "- A_0 * 0" zeroes out the baseline subtraction;
        # presumably a debugging leftover -- confirm intent.
        spectra.append(np.sum(results.spectral_fluxes, axis=0) - A_0 * 0)
    spectra = np.array(spectra)
    return TAResults(spectra)
|
[
"rygr1645@colorado.edu"
] |
rygr1645@colorado.edu
|
4acb6cd3c096137fa35422140bd2fb23a7a5a1ca
|
b16abb62b283064bd2fa4819a711578658759c7b
|
/zhihu.py
|
901c9bd5f90425194b7b793230c9c22f6df0872f
|
[] |
no_license
|
pfDou/insects-of-zhihu-hot-topics
|
5bfa66f9775de98b4c6dc58abbe1620f0bbd5a95
|
0f338ec287d1832a1792ad613fb65858329982e7
|
refs/heads/master
| 2021-01-23T22:10:46.731370
| 2015-05-09T14:36:01
| 2015-05-09T14:36:01
| 32,391,701
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,082
|
py
|
# -*- coding: utf-8 -*-
import urllib.request
from bs4 import BeautifulSoup
import re
import io
import sys
# Force UTF-8 stdout so Chinese text prints regardless of the console encoding.
sys.stdout = io.TextIOWrapper(sys.stdout.buffer,encoding="utf8")
main_page = "http://www.zhihu.com/explore#daily-hot"
# Fetch the explore page.
urllines = urllib.request.urlopen(main_page) #<class 'http.client.HTTPResponse'>
page_data = urllines.read() #<class 'bytes'>
urllines.close()
soup = BeautifulSoup(page_data) #<class 'bs4.BeautifulSoup'>
#f = open("zhihu.txt","w")
# The daily-hot list lives inside a <div data-type="daily"> container.
hot_topics = soup.find('div', attrs = {"data-type":"daily"}).children
output = []
for item in list(hot_topics):
    if item.string:
        pass # navigableString type, maybe space line in the source page
    else:
        # NOTE(review): the dicts appended to output and q_index are never
        # used afterwards -- looks like unfinished bookkeeping.
        output.append({})
        q_index = int(item["data-offset"])-1
        print(item["data-offset"])
        href = item.h2.a["href"]
        question = item.h2.a.string
        print("Question:", question)
        #answer page's url
        url = "http://www.zhihu.com" + href
        print("answer address:",url)
        #open answer page get the answer
        sub_urllines = urllib.request.urlopen(url) #<class 'http.client.HTTPResponse'>
        sub_page_data = sub_urllines.read() #<class 'bytes'>
        sub_urllines.close()
        sub_soup = BeautifulSoup(sub_page_data)
        # print(sub_soup.title)
        favorer_num = sub_soup.find("span", class_="count").text
        print("favorer_num:",favorer_num)
        brief_Q = sub_soup.find("div", class_="zm-editable-content").text
        print("Question's brief:",brief_Q)
        # test = sub_soup.find_all("div", attrs={"class":"zm-editable-content"})
        # for i in test:
        # print(i["class"])
        # NOTE(review): the chained next_sibling hops below assume a fixed
        # markup layout around the avatar link -- fragile against site changes.
        answer_head = sub_soup.find("div", class_="answer-head")
        author = sub_soup.find("a", class_="zm-item-link-avatar").next_sibling.next_sibling.string
        print("author:", author)
        author_qg = sub_soup.find("a", class_="zm-item-link-avatar").next_sibling.next_sibling.next_sibling.next_sibling.string
        print("author's qg:", author_qg)
        #answer = sub_soup.find_all("div", attrs={"class":"zm-editable-content"})[2].text#get_text()
        answer = sub_soup.find("div", class_=" zm-editable-content clearfix").text
        print("Answer:", answer)
|
[
"372167676@qq.com"
] |
372167676@qq.com
|
0019c8ec5dd7461f3fd5921e275c326e9f78ab39
|
66149a32aca9d64f08d3ba111ede8613cd1cfb25
|
/11-06-2020_SAMBIT_EMWAVES.py
|
a99ed74f578c7afa73afd522b853a49bd88672d6
|
[
"MIT"
] |
permissive
|
MAVIN-07/EM-WAVES-SAMBIT
|
ad3180ffe6a278f5ac55fe369832f5639f26d3f3
|
c7c72111eed14a2aaa551306811fa1f26ea04f22
|
refs/heads/master
| 2022-12-11T08:17:21.756078
| 2020-06-17T22:04:04
| 2020-06-17T22:04:04
| 273,086,413
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 15,778
|
py
|
#CREATED AND MAINTAINED BY: SAMBIT KUMAR MISHRA
#Email-ID: sambitmishra1968@gmail.com
import math
import cmath
import matplotlib.pyplot as plt
import matplotlib.image as img
#===========================================================================================================================================================
# Defining lists to use through this program
Eo = []
Q = []
dp = [] #[permittivity1,permeability1,permittivity2,permeability2]
n = [0,0]
B = [0,0]
ba = [0,0] #[perpendicular,parallel]
arr_phase = [0,0] #[transmission,reflection]
arr_magnitude = [0,0] #[transmission,reflection]
critical_angle = 0
#===========================================================================================================================================================
#Printing on the screen- Assumptions and sign conventions to be used.
print("ASSUMPTIONS-NOTE THESE BEFORE PROCEEDING")
print("_________________________________________________________________________________________________________________________________________________")
print("")
print("Assume that the complex Electric field is given by the equation: E = ((a+jb)x^ + (c+jd)y^ + (m+jn)z^)e*exp(-j*B*(xcos(Qx)+ycos(Qy)+zcos(Qz)))")
print("Where ^ means cap, eg. x^ means unit vector in direction of x")
print("j = complex imaginary number analogous to i in maths")
print("e = natural log base")
print("a,c,m are coefficients of real part whereas b,d,n are coefficients of imaginary part of Eo")
print("Qx,Qy,Qz represent the angle made by the incident electric field wrt x,y,z axes respectively")
print("Qi is the incident angle of wave wrt to the normal to the plane of incidence")
print("_________________________________________________________________________________________________________________________________________________")
print("")
#===========================================================================================================================================================
# TAKING THE INPUTS FROM THE USER
try:
Eo = list(map(float, input("Enter the values of a,b,c,d,m,n in this exact order separated by space= ").split()))
# w = float(input("Enter the frequency of the EM Wave in Hz= "))#We dont have to use because in B value it will get cancelled Nr and Dr
Q = list(map(float, input("Enter the values of Qx,Qy,Qz,Qi in this exact order separated by spaces= ").split()))
# Enter 0 if degrees and 1 if radians
unit = int(input("Enter the unit of angle chosen:- TYPE '0' for degrees, TYPE '1' for radians "))
if unit == 0:
Q[0] = math.radians(Q[0])
Q[1] = math.radians(Q[1])
Q[2] = math.radians(Q[2])
Q[3] = math.radians(Q[3])
# If the input type is chosen as radians then leave the value as it is.
#This loop will exit only if the user enters proper values of dielectric properties
while 1:
dp = list(map(float, input(
"Enter the values of permittivity_medium1, permeability_medium1, permittivity_medium2, permeability_medium2 in "
"this same order separated by space= ").split()))
if dp[3]*dp[2] == dp[1]*dp[0]:
print("ERROR: Enter the values as per assumptions")
else:
break
print("")
print("For the following two categories:- TYPE '1' for XY, TYPE '2' for YZ, TYPE '3' for XZ")
print("")
poi = int(input("Enter the plane of interface= "))
pod = int(input("Enter the plane of dielectric= "))
#===========================================================================================================================================================
#CALCULATION OF POLARISATION OF WAVE
polarisation = 0
#Declaration of polarisation variable for using in program.
if poi == 1:
if math.cos(Q[2]) == 0:
polarisation = 1 # Perpendicular polarisation
elif math.sin(Q[2]) == 0:
polarisation = 0 # Parallel polarisation
elif poi == 2:
if math.cos(Q[0]) == 0:
polarisation = 1 # Perpendicular polarisation
elif math.sin(Q[0]) == 0:
polarisation = 0 # Parallel polarisation
elif poi == 3:
if math.cos(Q[1]) == 0:
polarisation = 1 # Perpendicular polarisation
elif math.sin(Q[1]) == 0:
polarisation = 0 # Parallel polarisation
#===============================================================================================================================================================
#Calculation of the magnitude of Incident Electric Field: Absolute value of Eo
Ei=0
#Declaration of Ei variable
for i in range(0, 6):
Ei += Eo[i]**2 # We have to take square root of this value(Ei) to obtain Magnitude of incident electric field.
#===============================================================================================================================================================
#CALCULATION OF BREWSTER'S ANGLE
#The square root value might come out to be negative hence complex square root must be taken into account so this step can be postponed
# reading png image file
im = img.imread('EMWAVE_2.png')
# show image
plt.imshow(im)
#For Perpendicular Polarisation
if ((dp[3]/dp[1])*(((dp[3]*dp[0])-(dp[1]*dp[2]))/((dp[3]*dp[2])-(dp[1]*dp[0])))) >= 0:
ba[0] = math.atan(math.sqrt((dp[3]/dp[1])*(((dp[3]*dp[0])-(dp[1]*dp[2]))/((dp[3]*dp[2])-(dp[1]*dp[0])))))
else:
print("BREWSTER ANGLE IS NOT POSSIBLE FOR THIS CASE")
#For Parallel Polarisation
if ((dp[2]/dp[0])*(((dp[1]*dp[2])-(dp[3]*dp[0]))/((dp[3]*dp[2])-(dp[1]*dp[0])))) >= 0:
ba[1] = math.atan(math.sqrt((dp[2]/dp[0])*(((dp[1]*dp[2])-(dp[3]*dp[0]))/((dp[3]*dp[2])-(dp[1]*dp[0])))))
else:
print("BREWSTER ANGLE IS NOT POSSIBLE FOR THIS CASE")
#=====================================================================================================================================================================
#The case when incident wave just grazes through the plane of dielectric interface
#In this case no reflection or transmission of wave is possible. This is an exceptional case.
if math.cos(Q[3]) == 0:
print("NO TRANSMISSION OR REFLECTION POSSIBLE IN THIS CASE: BECAUSE THE WAVE GRAZES THROUGH THE PLANE OF INTERFACE")
#ACTUAL CALCULATION BEGINS HERE
else:
n[0] = (120*math.pi)/(math.sqrt((dp[1])/(dp[0]))) #For medium 1
n[1] = (120*math.pi)/(math.sqrt((dp[3])/(dp[2]))) #For medium 2
B[0] = math.sqrt(dp[1]*dp[0]) #For medium 1
B[1] = math.sqrt(dp[3]*dp[2]) #For medium 2
b1 = n[1] * math.cos(Q[3])
b2 = n[0] * math.cos(Q[3])
#==================================================================================================================================================================
#CASE-1: When the incident wave is at an angle greater than the critical Angle and medium 2 is rarer than medium 1
if ((B[0] / B[1]) * math.sin(Q[3])) >= 1 and (dp[3]*dp[2])<=(dp[1]*dp[0]):
#==================================================================================================================================================================
print("THIS IS THE CASE OF TOTAL INTERNAL REFLECTION")
print("")
critical_angle = math.asin(B[1]/B[0])
# reading png image file
im = img.imread('EMWAVE_TIR.png')
# show image
plt.imshow(im)
if (B[0] / B[1])*math.sin(Q[3]) > 1:
if polarisation == 1:
reflection_coefficient = (b1 - 1j*(n[0]*math.sqrt((((B[0]/B[1])*math.sin(Q[3]))**2)-1))) / (b1 + 1j*(n[0]*math.sqrt((((B[0]/B[1])*math.sin(Q[3]))**2)-1)))
arr_phase[1] = (-2)*math.atan((n[0]*math.sqrt((((B[0]/B[1])*math.sin(Q[3]))**2)-1)) / b1)
elif polarisation == 0:
reflection_coefficient = (b2 - 1j*(n[1]*math.sqrt((((B[0]/B[1])*math.sin(Q[3]))**2)-1))) / (b2 + 1j*(n[1]*math.sqrt((((B[0]/B[1])*math.sin(Q[3]))**2)-1)))
arr_phase[1] = (-2)*math.atan((n[1]*math.sqrt((((B[0]/B[1])*math.sin(Q[3]))**2)-1)) / b2)
#Calculation of magnitude
arr_magnitude[0] = "N/A"
arr_phase[0] = "N/A"
arr_magnitude[1] = reflection_coefficient * math.sqrt(Ei)
elif (B[0] / B[1]) * math.sin(Q[3]) == 1:
try:
if Q[3] == critical_angle:
if polarisation == 1:
reflection_coefficient = (b1 - 1j*(n[0]*math.sqrt((((B[0]/B[1])*math.sin(Q[3]))**2)-1))) / (b1 + 1j*(n[0]*math.sqrt((((B[0]/B[1])*math.sin(Q[3]))**2)-1)))
arr_phase[1] = (-2)*math.atan((n[0]*math.sqrt((((B[0]/B[1])*math.sin(Q[3]))**2)-1)) / b1)
elif polarisation == 0:
reflection_coefficient = (b2 - 1j*(n[1]*math.sqrt((((B[0]/B[1])*math.sin(Q[3]))**2)-1))) / (b2 + 1j*(n[1]*math.sqrt((((B[0]/B[1])*math.sin(Q[3]))**2)-1)))
arr_phase[1] = (-2)*math.atan((n[1]*math.sqrt((((B[0]/B[1])*math.sin(Q[3]))**2)-1)) / b2)
#Calculation of magnitude
arr_magnitude[0] = "N/A"
arr_phase[0] = "N/A"
arr_magnitude[1] = reflection_coefficient * math.sqrt(Ei)
else:
print("ERROR: DISCREPANCY IN ANALYTICAL AND INPUT VALUES")
except:
print("ERROR!")
else:
print("ERROR: Please re-enter practical values in input")
#==================================================================================================================================================================
#CASE-2: When the wave is incident at Brewster's Angle
elif Q[3]==ba[0] or Q[3]==ba[1]:
#==================================================================================================================================================================
#No reflection will take place in this case
arr_magnitude[1] = "N/A"
arr_phase[1] = "N/A"
# reading png image file
im = img.imread('EMWAVE_BA.png')
# show image
plt.imshow(im)
a1 = n[0] * math.cos(transmitted_angle)
a2 = n[1] * math.cos(transmitted_angle)
#Case of perpendicular polarisation
#--------------------------------------------------------
if polarisation == 1:
transmission_coefficient = (2 * b1) / (b1 + a1)
if transmission_coefficient >= 0:
arr_phase[0] = (2) * math.atan((n[1] * math.sqrt(1 - math.pow((B[0] / B[1]) * math.cos(Q[3]), 2))) / (n[0] * math.sin(Q[3])))
else:
arr_phase[0] = (-2) * math.atan((n[1] * math.sqrt(1 - math.pow((B[0] / B[1]) * math.cos(Q[3]), 2))) / (n[0] * math.sin(Q[3]))) # A phase change of Pi
#Calculation of magnitude
arr_magnitude[0] = transmission_coefficient * math.sqrt(Ei)
#Case of parallel polarisation
#--------------------------------------------------------
elif polarisation == 0:
transmission_coefficient = (2 * b1) / (b2 + a2)
if transmission_coefficient >= 0:
arr_phase[0] = (2) * math.atan((n[1] * math.sqrt(1 - math.pow((B[0] / B[1]) * math.cos(Q[3]), 2))) / (n[0] * math.sin(Q[3])))
else:
arr_phase[0] = (-2) * math.atan((n[1] * math.sqrt(1 - math.pow((B[0] / B[1]) * math.cos(Q[3]), 2))) / (n[0] * math.sin(Q[3]))) # A phase change of Pi
#Calculation of magnitude
arr_magnitude[0] = transmission_coefficient * math.sqrt(Ei)
#==================================================================================================================================================================
#CASE-3: The general case of reflection and transmission
#if ((B[0] / B[1]) * math.sin(Q[3])) < 1:
else:
#==================================================================================================================================================================
transmitted_angle = math.asin(B[0]/B[1]) * math.sin(Q[3])
a1 = n[0] * math.cos(transmitted_angle)
a2 = n[1] * math.cos(transmitted_angle)
# reading png image file
im = img.imread('EMWAVE_2.png')
# show image
plt.imshow(im)
# For the case of perpendicular polarisation
if polarisation == 1:
#----------------------------------------------
reflection_coefficient = (b1 - a1) / (b1 + a1)
transmission_coefficient = (2 * b1) / (b1 + a1)
if reflection_coefficient >= 0:
#Calculation of phase of wave after reflection or transmission is a bit tricky: Need to look into that
arr_phase[1] = (2) * math.atan((n[1] * math.sqrt(1 - math.pow((B[0] / B[1]) * math.sin(Q[3]), 2))) / (n[0] * math.cos(Q[3])))
else:
arr_phase[1] = (-2) * math.atan((n[1] * math.sqrt(1 - math.pow((B[0] / B[1]) * math.sin(Q[3]), 2))) / (n[0] * math.cos(Q[3]))) # A phase change of Pi
if transmission_coefficient >= 0:
arr_phase[0] = (2) * math.atan((n[1] * math.sqrt(1 - math.pow((B[0] / B[1]) * math.cos(Q[3]), 2))) / (n[0] * math.sin(Q[3])))
else:
arr_phase[0] = (-2) * math.atan((n[1] * math.sqrt(1 - math.pow((B[0] / B[1]) * math.cos(Q[3]), 2))) / (n[0] * math.sin(Q[3]))) # A phase change of Pi
#Calculation of magnitude
arr_magnitude[0] = transmission_coefficient * math.sqrt(Ei)
arr_magnitude[1] = reflection_coefficient * math.sqrt(Ei)
#----------------------------------------------------------------------------------------------------------------------------------
# For the case of parallel polarisation
elif polarisation == 0:
#-------------------------------------------
reflection_coefficient = (b2 - a2) / (b2 + a2)
transmission_coefficient = (2 * b1) / (b2 + a2)
if reflection_coefficient >= 0:
arr_phase[1] = (2) * math.atan((n[1] * math.sqrt(1 - math.pow((B[0] / B[1]) * math.sin(Q[3]), 2))) / (n[0] * math.cos(Q[3])))
else:
arr_phase[1] = (-2) * math.atan((n[1] * math.sqrt(1 - math.pow((B[0] / B[1]) * math.sin(Q[3]), 2))) / (n[0] * math.cos(Q[3]))) # A phase change of Pi
if transmission_coefficient >= 0:
arr_phase[0] = (2) * math.atan((n[1] * math.sqrt(1 - math.pow((B[0] / B[1]) * math.cos(Q[3]), 2))) / (n[0] * math.sin(Q[3])))
else:
arr_phase[0] = (-2) * math.atan((n[1] * math.sqrt(1 - math.pow((B[0] / B[1]) * math.cos(Q[3]), 2))) / (n[0] * math.sin(Q[3]))) # A phase change of Pi
#Calculation of magnitude
arr_magnitude[0] = transmission_coefficient * math.sqrt(Ei)
arr_magnitude[1] = reflection_coefficient * math.sqrt(Ei)
#======================================================================================================================================================================
# The final required values using the input values are printed below
print("__________________________________________________________________________")
print("The phase of transmitted wave is " + str(arr_phase[0]))
print("The phase of reflected wave is " + str(arr_phase[1]))
print("The magnitude of transmitted wave is " + str(arr_phase[0]))
print("The magnitude of reflected wave is " + str(arr_phase[1]))
print("")
print("These were your final results. THANK YOU")
print("__________________________________________________________________________")
#======================================================================================================================================================================
except:
# reading png image file
im = img.imread('SORRY.png')
# show image
plt.imshow(im)
print("__________________________________________")
print("PLEASE RECHECK THE VALUES YOU HAVE ENTERED")
print("__________________________________________")
#THE END
#FOR QUERIES/REMARKS CONTACT: SAMBIT KUMAR MISHRA (sambitmishra1968@gmail.com)
|
[
"noreply@github.com"
] |
noreply@github.com
|
442831645ae4c6f34df075f4576fd9a948e313c2
|
649c930c4c5c89b48673b438c8d42f70e6b00cfd
|
/day 03/作业.py
|
d60c1b0df8e1db4ae550c6e5524337bfd22e6c18
|
[] |
no_license
|
zhangchenghao0617/Learn
|
a0f1ebb9b6248c83e0c87cb756d19b2243722679
|
32cbaacc697d7e5992e077f368c8c80c5900f96e
|
refs/heads/master
| 2023-01-12T07:20:57.911545
| 2020-10-25T09:40:04
| 2020-10-25T09:40:04
| 306,071,934
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,515
|
py
|
# # 1.有变量name = "aleX leNb" 完成如下操作:
# name = "aleX leNb"
# # - 移除 name 变量对应的值两边的空格,并输出处理结果
# print(name.strip())
# # - 判断 name 变量是否以 "al" 开头,并输出结果
# print(name.startswith('al'))
# # - 判断name变量是否以"Nb"结尾,并输出结果
# print(name.endswith('Nb'))
# # - 将 name 变量对应的值中的 所有的"l" 替换为 "p",并输出结果
# print(name.replace('l','p'))
# # - 将name变量对应的值中的第一个"l"替换成"p",并输出结果
# print(name.replace('l','p',1))
# # - 将 name 变量对应的值根据 所有的"l" 分割,并输出结果。
# print(name.split('l'))
# # - 将name变量对应的值根据第一个"l"分割,并输出结果。
# print(name.split('l',1))
# # - 将 name 变量对应的值变大写,并输出结果
# print(name.upper())
# # - 将 name 变量对应的值变小写,并输出结果
# print(name.lower())
# # - 判断name变量对应的值字母"l"出现几次,并输出结果
# print(name.count('l'))
# # - 如果判断name变量对应的值前四位"l"出现几次,并输出结果
# print(name.count('l',0,4))
# # - 请输出 name 变量对应的值的第 2 个字符?
# print(name[1])
# # - 请输出 name 变量对应的值的前 3 个字符?
# print(name[0:3])
# # - 请输出 name 变量对应的值的后 2 个字符?
# print(name[-2:])
#
# # 2.有字符串s = "123a4b5c"
# s = "123a4b5c"
# # - 通过对s切片形成新的字符串s1,s1 = "123"
# print(s[0:3])
# # - 通过对s切片形成新的字符串s2,s2 = "a4b"
# print(s[3:6])
# # - 通过对s切片形成新的字符串s3,s3 = "1345"
# print(s[0:7:2])
# # - 通过对s切片形成字符串s4,s4 = "2ab"
# print(s[1:6:2])
# # - 通过对s切片形成字符串s5,s5 = "c"
# print(s[-1:])
# # - 通过对s切片形成字符串s6,s6 = "ba2"
# print(s[-3:-8:-2])
#
# # 3.使用while和for循环分别打印字符串s="asdfer"中每个元素。
# s="asdfer"
# while:
# i = 0
# while i < len(s):
# print(s[i])
# i+=1
# for
# for i in s:
# print(i)
#
# # 4.使用for循环对s="asdfer"进行循环,但是每次打印的内容都是"asdfer"。
# s="asdfer"
# for i in s:
# print(s)
#
# # 5.使用for循环对s="abcdefg"进行循环,每次打印的内容是每个字符加上sb, 例如:asb, bsb,csb,...gsb。
# s="abcdefg"
# for i in s:
# print(i.join('sb'))
#
# for i in s:
# print(i + 'sb')
#
# # 6.使用for循环对s="321"进行循环,打印的内容依次是:"倒计时3秒","倒计时2秒","倒计时1秒","出发!"。
# s="321"
# for i in s:
# print('倒计时'+ i +'秒!')
# print('出发!')
#
# # 7.实现一个整数加法计算器(两个数相加):如:content = input("请输入内容:") 用户输入:5+9或5+ 9或5 + 9,然后进行分割再进行计算。
# content = input("请输入内容:")
# content_space = content.replace(' ','')
# print(content_space)
# list = content_space.split('+')
# print(list)
# sum = 0
# for i in list:
# sum = sum + int(i)
# print(sum)
#
# # 8.实现一个整数加法计算器(多个数相加):如:content = input("请输入内容:") 用户输入:5+9+6 +12+ 13,然后进行分割再进行计算。
# content = input("请输入内容:")
# content_space = content.replace(' ','')
# print(content_space)
# list = content_space.split('+')
# sum = 0
# for i in list:
# sum = sum + int(i)
# print(sum)
# # 9.计算用户输入的内容中有几个整数(以个位数为单位)。如:content = input("请输入内容:") # 如fhdal234slfh98769fjdla
# str = '1234a'
# content = 0
# for i in str:
# if i.isdecimal() :
# content +=1
# print(content)
#
#
#
# # 10.选做题**:写代码,完成下列需求:用户可持续输入(用while循环),用户使用的情况:
# # 输入A,则显示走大路回家,然后在让用户进一步选择:
# # 是选择公交车,还是步行?
# # 选择公交车,显示10分钟到家,并退出整个程序。
# # 选择步行,显示20分钟到家,并退出整个程序。
# # 输入B,则显示走小路回家,并退出整个程序。
# # 输入C,则显示绕道回家,然后在让用户进一步选择:
# # 是选择游戏厅玩会,还是网吧?
# # 选择游戏厅,则显示 ‘一个半小时到家,爸爸在家,拿棍等你。’并让其重新输入A,B,C选项。
# # 选择网吧,则显示‘两个小时到家,妈妈已做好了战斗准备。’并让其重新输入A,B,C选项。
# while 1 :
# print('A:走大路,B:走小路,C:绕道')
# choice = input('请输入:')
# choice_upper = choice.upper()
# if choice_upper == 'A':
# print('你选择了走大路')
# choice1 = input('请选择A:坐公交,B:走路:')
# choice_upper1 = choice1.upper()
# if choice_upper1 == 'A':
# print('坐公交,十分钟后到家')
# break
# else:
# print('走路回家,二十分钟后到家')
# break
# elif choice_upper == 'B':
# print('你选择了走小路')
# break
# else:
# print('你选择了绕道')
# choice2 = input('请选择A:去游戏厅,B:去网吧:')
# choice_upper2 = choice2.upper()
# if choice_upper2 == 'A':
# print('一个半小时到家,爸爸在家,拿棍等你。')
# else:
# print('两个小时到家,妈妈已做好了战斗准备。')
#
# # 1.写代码:计算1 - 2 + 3 - 4 + 5 - 6... + 99中除了88以外所有数的总和?
# s1 = 0
# s2 = 0
# i = 1
# while i <= 99 :
# if i % 2 == 0:
# if i == 88:
# i += 1
# continue
# else:
# s1 -= i
# else:
# s2 += i
# i += 1
# print(s1+s2)
#
# # 2. ** 选做题: ** 选做题:判断一句话是否是回文.回文: 正着念和反着念是一样的.例如, 上海自来水来自海上
# str = input('请输入:')
# if str[-1::-1] == str:
# print("是")
# else:
# print('不是')
#
# # 3.制作趣味模板程序需求:等待用户输入名字、地点、爱好,根据用户的名字和爱好进行任意现实,如:敬爱可亲的xxx,最喜欢在xxx地方干xxx
# your_name = input('请输入姓名')
# your_place = input('请输入地点')
# your_hobby = input('请输入爱好')
# msg = '可亲的{name},最喜欢在{place}地方干{hobby}'.format(name = your_name,place = your_place,hobby = your_hobby)
# print(msg)
|
[
"zhangchenghao0617@qq.com"
] |
zhangchenghao0617@qq.com
|
ef967c7df544cf14fd81acac69d9aa53ed4449d0
|
9cd4bd2d3c43d14a78c4f72dd2d8c6900aec56c4
|
/TabuadaPython.py
|
7f386b00ecc7d585dacc937cfee125710246f99e
|
[] |
no_license
|
paulorod07/Tabuada
|
937005686f0a9fb1c5b8a37dc49ae7178477c97f
|
14ced3e51ec0ce2c9c7c92bdf6696ce24a956a45
|
refs/heads/master
| 2020-04-27T16:22:05.703246
| 2019-03-08T06:41:19
| 2019-03-08T06:41:19
| 174,481,856
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 152
|
py
|
# Multiplication-table ("tabuada") mini script: ask for a number and print
# its products with the factors 0 through 10.
print('Bem Vindo a Tabuada!')
numero = int(input('Digite um numero para a tabuada: '))
fator = 0
while fator <= 10:
    print(numero, 'x', fator, '=', numero * fator)
    fator += 1
|
[
"noreply@github.com"
] |
noreply@github.com
|
0847d046a1cea08f4fceb4e24d0e9848c3efd510
|
e1bcef91c724a3b89f442079ab51977ce5a6bc2f
|
/quote/forms.py
|
1d803b8df3d24b45e9b8c59cb8586deddbaa426e
|
[] |
no_license
|
mahdistt/CRM
|
48550b1bb8adbd012b254e42edefc69a572a33fc
|
a1aa33df4ef2e342268f8965d43bf026be61db37
|
refs/heads/master
| 2023-07-09T05:11:23.497844
| 2021-08-11T18:45:56
| 2021-08-11T18:45:56
| 390,491,640
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 476
|
py
|
from django import forms
from . import models
class QuoteCreateViewForm(forms.ModelForm):
    """ModelForm for creating a single QuoteItem line.

    Exposes the quote reference plus the product, its price, quantity and
    discount. Numeric fields are rendered as number inputs and the quote
    as a select box.
    """

    class Meta:
        # Backing model for this form.
        model = models.QuoteItem
        # Editable fields, in display order.
        fields = (
            'quote',
            'product',
            'price',
            'quantity',
            'discount',
        )
        # Widget overrides: numeric entry boxes for amounts, a dropdown
        # for the parent quote.
        widgets = {
            'quantity': forms.NumberInput,
            'discount': forms.NumberInput,
            'price': forms.NumberInput,
            'quote': forms.Select,
        }
|
[
"80247767+mahdistt@users.noreply.github.com"
] |
80247767+mahdistt@users.noreply.github.com
|
4a24d53b5b6eb6b7db4ed323a7726a5e0b69cd7b
|
16cb142fb04798313c43a073cf1e0ddd6f4bf13b
|
/timing/resources/time.py
|
978a0750832068ee69e9c5e493fb89f1c059e468
|
[] |
no_license
|
iRhonin/timing
|
46d3b080333fe4b169252cdb37f3d3194fc00072
|
91b7d86ba053cf46df6a3edbdf273947d8a37ae3
|
refs/heads/master
| 2023-04-30T21:02:26.861632
| 2021-05-14T20:16:12
| 2021-05-14T20:16:12
| 279,684,898
| 0
| 0
| null | 2021-05-14T20:16:13
| 2020-07-14T20:19:47
|
Python
|
UTF-8
|
Python
| false
| false
| 2,027
|
py
|
from datetime import datetime
import falcon
from sqlalchemy import func
from timing.authorization import Authorization
from timing.models.time import Time, DATETIME_FORMAT
from timing.paginate import Paginate
from timing.schemas.time import TimeInSchema
class TimesResource(object):
    """Falcon resource for creating, listing and summing time entries.

    All responders require authorization (the ``Authorization`` hook sets
    ``user_id`` on ``req.context``); listing additionally applies the
    ``Paginate`` hook, which provides ``limit``/``offset``.
    """

    @falcon.before(Authorization())
    def on_post(self, req, resp):
        """Create a Time entry for the authorized user.

        Optional ``t`` query parameter overrides the creation timestamp;
        ``hours`` is taken from the request body.
        """
        data = TimeInSchema(
            user_id=req.context['user_id'],
            createdAt=req.params.get('t', None),
            hours=req.media['hours'],
        )
        # exclude_unset: only fields actually supplied reach the model,
        # so model-side defaults (e.g. created_at) still apply.
        time = Time(**data.dict(exclude_unset=True))
        req.context.db_session.add(time)
        req.context.db_session.commit()
        resp.media = time.to_dict()

    @falcon.before(Authorization())
    @falcon.before(Paginate())
    def on_get(self, req, resp):
        """List the user's time entries, paginated and ordered by creation.

        Total (pre-pagination) row count is exposed via the X-COUNT header.
        """
        time_query = self._get_times(req, query_on=Time)
        total_count = time_query.count()
        time_query = time_query \
            .order_by(Time.created_at) \
            .limit(req.context['limit']) \
            .offset(req.context['offset'])
        resp.media = [t.to_dict() for t in time_query]
        resp.set_header('X-COUNT', total_count)

    @falcon.before(Authorization())
    def on_get_calculator(self, req, resp):
        """Return the sum of hours in the (optionally date-filtered) range,
        wrapped in a Time-shaped payload stamped with the current UTC time.
        """
        sum_times = self._get_times(req, query_on=func.sum(Time.hours)).one()
        # SUM over an empty set yields NULL -> fall back to 0.
        sum_times = int(sum_times[0] or 0)
        resp.media = Time(
            created_at=datetime.utcnow(),
            hours=sum_times,
        ).to_dict()

    def _get_times(self, req, query_on):
        """Build the base query for the authorized user, applying optional
        ``from``/``to`` date-range filters (parsed with DATETIME_FORMAT).
        """
        time_query = req.context.db_session.query(query_on) \
            .filter(Time.user_id == req.context.user_id)
        if from_ := req.params.get('from'):
            time_query = time_query.filter(
                Time.created_at >= datetime.strptime(from_, DATETIME_FORMAT)
            )
        if to_ := req.params.get('to'):
            time_query = time_query.filter(
                Time.created_at <= datetime.strptime(to_, DATETIME_FORMAT)
            )
        return time_query
|
[
"fatahzade@gmail.com"
] |
fatahzade@gmail.com
|
c19012af2e5fe52651cc00b9775abc1d3e4e6ea1
|
a71d5838e292e2c0c7371f7fc7870c7018820ae1
|
/day03/03_pie.py
|
71c8ec39a03c52234f30d2660394d2f3d37a995f
|
[] |
no_license
|
skywalkerqwer/DataScience
|
be91541c3da383d15ee52d0101d2dbb0289c2fde
|
4cfd42f3a9795e295393cdb045852d46e99b6e59
|
refs/heads/master
| 2020-06-17T11:41:40.113864
| 2019-07-15T09:49:40
| 2019-07-15T09:49:40
| 195,913,553
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 449
|
py
|
"""
绘制饼状图
"""
import numpy as np
import matplotlib.pyplot as mp
labels = ['Python', 'JavaScript', 'C++', 'Java', 'PHP']
values = [26, 17, 21, 29, 11]
spaces = [0.05, 0.01, 0.01, 0.01, 0.01]
colors = ['dodgerblue', 'orangered', 'limegreen', 'violet', 'gold']
mp.figure('Pie Chart', facecolor='lightgray')
mp.title('Languages PR')
mp.pie(values, spaces, labels, colors, '%.1f%%', shadow=True, startangle=0, radius=1)
mp.legend()
mp.show()
|
[
"15902162780@163.com"
] |
15902162780@163.com
|
34d1d295d4ea0e1a1589db374ba5f46c1c017507
|
6e922c94dc20370de2ad34cd501bdfa824d20515
|
/analysis/planner_2D.py
|
2b2768b17cc60f1162e12d1aba42f3ad169c6da1
|
[] |
no_license
|
funhere/auto-medical-detection
|
0dc24c6e1a7ecc98cb33a37876c31c4678e17dfc
|
653154b338bb844e73fa2ba931144d39db6f0174
|
refs/heads/master
| 2021-08-10T11:56:22.009012
| 2020-06-06T00:42:32
| 2020-06-06T00:42:32
| 188,710,527
| 4
| 2
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 9,737
|
py
|
import shutil, os
from analysis.planner_3D import Planner
from bins.analyze_and_preprocess import get_lists_of_splitted_dataset
from preprocessing.preprocessor import Preprocessor2D
from config.default_configs import *
from utils.files_utils import *
from net_architecture.generic_UNet import Generic_UNet
import numpy as np
from utils.analysis_utils import get_pool_and_conv_props
class Planner2D(Planner):
    """Experiment planner for the 2D U-Net configuration.

    Derives patch size, batch size and network topology from the dataset
    fingerprint collected during cropping, stores the result as a plans
    pickle, and drives the 2D preprocessor.
    """

    def __init__(self, folder_of_cropped_data, preprocessing_out_folder):
        super(Planner2D, self).__init__(folder_of_cropped_data,
                                        preprocessing_out_folder)
        # Tag written into the plans so preprocessed data can be identified.
        self.data_identifier = "UNet_2D"
        # Axis permutations applied before/after the network (identity until
        # plan_exps picks the largest-spacing axis as the slicing axis).
        self.transpose_forward = [0, 1, 2]
        self.transpose_backward = [0, 1, 2]
        self.plans_fname = join(self.preprocessing_out_folder, default_plans_identifier + "_plans_2D.pkl")

    def load_plans(self):
        """Restore a previously saved plans pickle into instance attributes."""
        self.plans = load_pickle(self.plans_fname)
        self.plans_per_stage = self.plans['plans_per_stage']
        self.dataset_properties = self.plans['dataset_properties']
        self.transpose_forward = self.plans['transpose_forward']
        self.transpose_backward = self.plans['transpose_backward']

    def plan_exps(self):
        """Compute the (single-stage) 2D training plan and save it.

        Chooses the slicing axis, estimates patch/batch sizes from the
        dataset fingerprint and GPU memory model, and writes everything
        via save_plans().
        """
        def get_stage_properties(current_spacing, original_spacing, original_shape, num_cases, transpose_forward,
                                 num_modalities, num_classes):
            # In-plane spacing only: the first (slicing) axis is dropped
            # because the network operates on 2D slices.
            current_spacing_transposed = np.array([current_spacing[i] for i in transpose_forward])[1:]
            new_median_shape = np.round(original_spacing / current_spacing * original_shape).astype(int)
            dataset_num_voxels = np.prod(new_median_shape) * num_cases
            input_patch_size = new_median_shape[transpose_forward][1:]
            net_numpool, net_pool_kernel_sizes, net_conv_kernel_sizes, input_patch_size, \
                shape_must_be_divisible_by = get_pool_and_conv_props(current_spacing_transposed, input_patch_size,
                                                                     FEATUREMAP_MIN_EDGE_LENGTH_BOTTLENECK,
                                                                     Generic_UNet.MAX_NUMPOOL_2D)
            estimated_gpu_ram_consumption = Generic_UNet.compute_vram_consumption(input_patch_size,
                                                                                  net_numpool,
                                                                                  Generic_UNet.BASE_NUM_FEATURES_2D,
                                                                                  Generic_UNet.MAX_FILTERS_2D,
                                                                                  num_modalities, num_classes,
                                                                                  net_pool_kernel_sizes)
            # Scale the reference batch size by the estimated VRAM budget.
            batch_size = int(np.floor(Generic_UNet.use_this_for_batch_size_computation_2D /
                                      estimated_gpu_ram_consumption * Generic_UNet.DEFAULT_BATCH_SIZE_2D))
            if batch_size < dataset_min_batch_size_cap:
                raise RuntimeError("Unsupported patches size. patch-based solution will be implemented later.")
            # check if batch size is too large (more than 5 % of dataset)
            max_batch_size = np.round(batch_size_covers_max_percent_of_dataset * dataset_num_voxels /
                                      np.prod(input_patch_size)).astype(int)
            batch_size = min(batch_size, max_batch_size)
            plan = {
                'batch_size': batch_size,
                'num_pool_per_axis': net_numpool,
                'patch_size': input_patch_size,
                'median_patient_size_in_voxels': new_median_shape,
                'current_spacing': current_spacing,
                'original_spacing': original_spacing,
                'pool_op_kernel_sizes': net_pool_kernel_sizes,
                'conv_kernel_sizes': net_conv_kernel_sizes,
                'do_dummy_2D_data_aug': False
            }
            return plan

        use_nonzero_mask_for_normalization = self.use_norm_mask()
        print("Are you using the nonzero maks for normalizaion?", use_nonzero_mask_for_normalization)
        spacings = self.dataset_properties['all_spacings']
        sizes = self.dataset_properties['all_sizes']
        all_classes = self.dataset_properties['all_classes']
        modalities = self.dataset_properties['modalities']
        num_modalities = len(list(modalities.keys()))
        target_spacing = self.get_target_spacing()
        # Voxel shapes after resampling every case to the target spacing.
        new_shapes = np.array([np.array(i) / target_spacing * np.array(j) for i, j in zip(spacings, sizes)])
        # Slice along the axis with the largest spacing; it becomes axis 0.
        max_spacing_axis = np.argmax(target_spacing)
        remaining_axes = [i for i in list(range(3)) if i != max_spacing_axis]
        self.transpose_forward = [max_spacing_axis] + remaining_axes
        self.transpose_backward = [np.argwhere(np.array(self.transpose_forward) == i)[0][0] for i in range(3)]
        new_shapes = new_shapes[:, self.transpose_forward]
        # Calculations are based on the median shape of the datasets
        median_shape = np.median(np.vstack(new_shapes), 0)
        print("the median shape of the dataset is ", median_shape)
        max_shape = np.max(np.vstack(new_shapes), 0)
        print("the max shape in the dataset is ", max_shape)
        min_shape = np.min(np.vstack(new_shapes), 0)
        print("the min shape in the dataset is ", min_shape)
        print("Don't want feature maps smaller than ", FEATUREMAP_MIN_EDGE_LENGTH_BOTTLENECK, " in the bottleneck")
        # how many stages will the image pyramid have? (2D: exactly one)
        self.plans_per_stage = []
        self.plans_per_stage.append(get_stage_properties(target_spacing, target_spacing, median_shape,
                                                         num_cases=len(self.list_of_cropped_npz_files),
                                                         transpose_forward=self.transpose_forward,
                                                         num_modalities=num_modalities,
                                                         num_classes=len(all_classes) + 1))
        print(self.plans_per_stage)
        # Reverse (no-op for one stage) and index stages by number.
        self.plans_per_stage = self.plans_per_stage[::-1]
        self.plans_per_stage = {i: self.plans_per_stage[i] for i in range(len(self.plans_per_stage))}  # convert to dict
        normalization_schemes = self.do_normalization_scheme()
        store_largest_connected_component, min_size_per_class, min_region_size_per_class = \
            self.do_postprocessing()
        # these are independent of the stage
        # NOTE(review): 'base_num_features' uses BASE_NUM_FEATURES_3D inside
        # this 2D planner while the VRAM estimate above uses the 2D constant —
        # confirm this asymmetry is intentional.
        plans = {'num_stages': len(list(self.plans_per_stage.keys())), 'num_modalities': num_modalities,
                 'modalities': modalities, 'normalization_schemes': normalization_schemes,
                 'dataset_properties': self.dataset_properties, 'list_of_npz_files': self.list_of_cropped_npz_files,
                 'original_spacings': spacings, 'original_sizes': sizes,
                 'preprocessing_data_folder': self.preprocessing_out_folder, 'num_classes': len(all_classes),
                 'all_classes': all_classes, 'base_num_features': Generic_UNet.BASE_NUM_FEATURES_3D,
                 'use_mask_for_norm': use_nonzero_mask_for_normalization,
                 'keep_only_largest_region': store_largest_connected_component,
                 'min_region_size_per_class': min_region_size_per_class, 'min_size_per_class': min_size_per_class,
                 'transpose_forward': self.transpose_forward, 'transpose_backward': self.transpose_backward,
                 'data_identifier': self.data_identifier, 'plans_per_stage': self.plans_per_stage}
        self.plans = plans
        self.save_plans()

    def do_preprocessing(self, num_threads):
        """Copy ground-truth segmentations and run the 2D preprocessor
        with the spacings chosen by plan_exps()/load_plans().
        """
        # Refresh gt_segmentations in the output folder (remove stale copy).
        if os.path.isdir(join(self.preprocessing_out_folder, "gt_segmentations")):
            shutil.rmtree(join(self.preprocessing_out_folder, "gt_segmentations"))
        shutil.copytree(join(self.folder_of_cropped_data, "gt_segmentations"), join(self.preprocessing_out_folder,
                                                                                    "gt_segmentations"))
        normalization_schemes = self.plans['normalization_schemes']
        use_nonzero_mask_for_normalization = self.plans['use_mask_for_norm']
        intensityproperties = self.plans['dataset_properties']['intensityproperties']
        preprocessor = Preprocessor2D(normalization_schemes, use_nonzero_mask_for_normalization,
                                      intensityproperties, self.transpose_forward[0])
        target_spacings = [i["current_spacing"] for i in self.plans_per_stage.values()]
        preprocessor.run(target_spacings, self.folder_of_cropped_data, self.preprocessing_out_folder,
                         self.plans['data_identifier'], num_threads)
if __name__ == "__main__":
t = "Task_BoneSeg"
print("\n\n\n", t)
cropped_out_dir = os.path.join(cropped_output_dir, t)
preprocessing_out_dir = os.path.join(preprocessing_output_dir, t)
splitted_4D_out_dir_task = os.path.join(splitted_4D_out_dir, t)
lists, modalities = get_lists_of_splitted_dataset(splitted_4D_out_dir_task)
# need to be careful with RAM usage
if t in ["Task_LITS", "Task_Liver", "Task_BoneSegOrigs", "Task_BoneSeg"]:
threads = 3
elif t in ["Task_LungIntern", "Task_FibroticLungSeg", "Task_Lung", "Task_HepaticVessel"]:
threads = 6
else:
threads = 8
print("number of threads: ", threads, "\n")
print("\n\n\n", t)
exp_planner = Planner2D(cropped_out_dir, preprocessing_out_dir, threads)
exp_planner.plan_exps()
exp_planner.do_preprocessing()
|
[
"noreply@github.com"
] |
noreply@github.com
|
0139a90daa7aba09474a438506fda874d445904a
|
71df0a69bcfba49c7a5d0bff5fcd314942f10541
|
/viewStatsPage.py
|
7dc99083a1166f8666a42bbbede0f46c6385f17f
|
[] |
no_license
|
linzinha/lvlUP
|
24afde28362e62e04ef5c41ffcbd4f44fa4c4ad8
|
17fca44c02c92a8a8a25520b794187c19b6c7ada
|
refs/heads/main
| 2023-05-29T12:36:53.635989
| 2021-06-13T13:36:22
| 2021-06-13T13:36:22
| 376,544,097
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,358
|
py
|
#########
# Imports
#########
import json
import time
import tkinter as tk
from tkinter import *

################
# Open JSON File
# ##############
# Load the persisted stats; the `with` block already closes the file, so the
# explicit close() below is redundant (harmless no-op on a closed file).
with open('stats.json', 'r') as string:
    my_dict = json.load(string)
string.close()

####################
# Global Variables
####################
# Top-level sections of the stats JSON: point definitions and two players.
p0 = my_dict['points']
p1 = my_dict['player1']
p2 = my_dict['player2']
items = list(p0.items())
# Grid row just below the last activity row (used for the button area).
bottom = len(items) + 1
# Timestamp used to name archived stats snapshots.
timeStr = time.strftime("%Y-%m-%d_%H%M")
lxVarList = []
activityList = []

# Player 1
# NOTE(review): stats are read positionally from the player dict — assumes a
# fixed key order (name, ?, week, month, totals); verify against stats.json.
p1JSON = list(p1.items())
p1WeekStats = p1JSON[2]
p1MonthStats = p1JSON[3]
p1TotalJSON = p1JSON[4]
p1statListWeekScores = p1WeekStats[1]
p1statListMonthScores = p1MonthStats[1]
p1Totals = p1TotalJSON[1]
p1TotalWeek = p1Totals['total week']

# Player 2 (same positional layout as player 1)
p2JSON = list(p2.items())
p2WeekStats = p2JSON[2]
p2MonthStats = p2JSON[3]
p2TotalJSON = p2JSON[4]
p2statListWeekScores = p2WeekStats[1]
p2statListMonthScores = p2MonthStats[1]
p2Totals = p2TotalJSON[1]
p2TotalWeek = p2Totals['total week']
def viewStats():
    """Open the stats window: a grid of per-activity week/month scores for
    both players, with buttons to clear week/month (persisting + archiving
    the JSON) and to navigate back or quit.
    """
    viewS = tk.Tk()
    viewS.configure(bg="turquoise")
    viewS.geometry("545x700")
    viewS.title("Lvl^")
    # No parent given: attaches to the default root (viewS, the only Tk here).
    addPGreeting = tk.Label(text="Current Stats:", bg="mediumturquoise", fg="black", font="10", width=60, pady=20)
    addPGreeting.grid(row=0, columnspan=6, sticky=EW)

    ####################
    # Columns
    ####################
    # Players
    p1PlayerOne = tk.Label(viewS, text=p1['name'], width=6)
    p1PlayerOne.grid(row=1, column=1, columnspan=2, sticky=EW)
    p2PlayerTwo = tk.Label(viewS, text=p2['name'], width=6)
    p2PlayerTwo.grid(row=1, column=3, columnspan=2, sticky=EW)
    # Player1 column headers
    p1Week = tk.Label(viewS, text="Week", width=6)
    p1Week.grid(row=2, column=1, sticky=EW)
    p1Month = tk.Label(viewS, text="Month", width=6)
    p1Month.grid(row=2, column=2, sticky=EW)
    # Player2 column headers
    p2Week = tk.Label(viewS, text="Week", width=6)
    p2Week.grid(row=2, column=3, sticky=EW)
    p2Month = tk.Label(viewS, text="Month", width=6)
    p2Month.grid(row=2, column=4, sticky=EW)

    #############################
    # Populates the Activity List (row labels, column 0)
    #############################
    row = 3
    for category in p1statListWeekScores:
        labelEntry = tk.Label(viewS, text=category, width=14)
        labelEntry.grid(row=row, column=0, sticky=EW, pady=3, padx=18)
        row += 1

    def changeWindow():
        # Close this window and return to the main page (local import to
        # avoid a circular import with main.py at module load time).
        viewS.destroy()
        from main import mainPage
        mainPage()

    ################
    # Player 1 Stats
    ################
    # week (column 1)
    row = 3
    for stat in p1statListWeekScores:
        point = str(p1statListWeekScores[stat])
        p1WeekPoints = tk.Label(viewS, text=point, width=6)
        p1WeekPoints.grid(row=row, column=1, sticky=EW)
        row += 1
    # month (column 2)
    row = 3
    for stat in p1statListMonthScores:
        point = str(p1statListMonthScores[stat])
        p1WeekPoints = tk.Label(viewS, text=point, width=6)
        p1WeekPoints.grid(row=row, column=2, sticky=EW)
        row += 1

    ################
    # Player 2 Stats
    ################
    # week (column 3)
    row = 3
    for stat in p2statListWeekScores:
        point = str(p2statListWeekScores[stat])
        p2WeekPoints = tk.Label(viewS, text=point, width=6)
        p2WeekPoints.grid(row=row, column=3, sticky=EW)
        row += 1
    # month (column 4)
    row = 3
    for stat in p2statListMonthScores:
        point = str(p2statListMonthScores[stat])
        p2WeekPoints = tk.Label(viewS, text=point, width=6)
        p2WeekPoints.grid(row=row, column=4, sticky=EW)
        row += 1

    ############
    # Clear Week
    ############
    def cWeek():
        # Zero both players' weekly scores (and weekly totals), refresh the
        # on-screen labels, then persist to stats.json and a timestamped copy.
        row = 3
        for stat in p1statListWeekScores:
            p1statListWeekScores[stat] = 0
            p1WeekPoints = tk.Label(viewS, text=p1statListWeekScores[stat], width=6)
            p1WeekPoints.grid(row=row, column=1, sticky=EW)
            row += 1
        p1Totals['total week'] = 0
        row = 3
        for stat in p2statListWeekScores:
            p2statListWeekScores[stat] = 0
            p2WeekPoints = tk.Label(viewS, text=p2statListWeekScores[stat], width=6)
            p2WeekPoints.grid(row=row, column=3, sticky=EW)
            row += 1
        p2Totals['total week'] = 0
        a_file = open("stats.json", "w")
        json.dump(my_dict, a_file)
        a_file.close()
        archive = open("stats" + timeStr + ".json", "w")
        json.dump(my_dict, archive)
        archive.close()

    #############
    # Clear Month
    #############
    def cMonth():
        # Same as cWeek but for the monthly scores/totals (columns 2 and 4).
        row = 3
        for stat in p1statListMonthScores:
            p1statListMonthScores[stat] = 0
            p1MonthPoints = tk.Label(viewS, text=p1statListMonthScores[stat], width=6)
            p1MonthPoints.grid(row=row, column=2, sticky=EW)
            row += 1
        p1Totals['total month'] = 0
        row = 3
        for stat in p2statListMonthScores:
            p2statListMonthScores[stat] = 0
            p2MonthPoints = tk.Label(viewS, text=p2statListMonthScores[stat], width=6)
            p2MonthPoints.grid(row=row, column=4, sticky=EW)
            row += 1
        p2Totals['total month'] = 0
        a_file = open("stats.json", "w")
        json.dump(my_dict, a_file)
        a_file.close()
        archive = open("stats" + timeStr + ".json", "w")
        json.dump(my_dict, archive)
        archive.close()

    ##########################
    # Go Back and Quit Buttons
    ##########################
    em1 = tk.Label(viewS, text="", width=40, bg="turquoise")
    em1.grid(row=bottom + 1, columnspan=5, sticky=EW, pady=3, padx=18)
    clearWeek = Button(viewS, text="Clear Week", command=cWeek, width=12)
    clearWeek.grid(row=bottom + 2, column=1, columnspan=2, sticky=EW, padx=6)
    clearMonth = Button(viewS, text="Clear Month", command=cMonth, width=12)
    clearMonth.grid(row=bottom + 2, column=3, columnspan=2, sticky=EW, padx=6)
    em2 = tk.Label(viewS, text="", width=40, bg="turquoise")
    em2.grid(row=bottom + 3, columnspan=5, sticky=EW, pady=3, padx=18)
    goBack = Button(viewS, text="Go back", command=lambda *args: changeWindow(), width=30)
    goBack.grid(row=bottom + 4, column=1, columnspan=4, sticky=EW, padx=10)
    quitG = Button(viewS, text="Quit", command=viewS.destroy, width=30)
    quitG.grid(row=bottom + 5, column=1, columnspan=4, sticky=EW, padx=10)
    viewS.mainloop()
|
[
"noreply@github.com"
] |
noreply@github.com
|
f15414bf81d05d0860a3345a7f7b0679f6cbba74
|
db35cba13f89601467b4bb4553d2b648c18fc3fb
|
/stores/topshop.py
|
8e7c906ff53b33b1ba4e7916231b878688d31d1a
|
[] |
no_license
|
jitsejan/outfitter
|
e6d73fcc2ec46c5236207c8bb8e1e72fc8929c38
|
53faab3e30e312bbdbc4ca0154efe35592708a8b
|
refs/heads/master
| 2023-01-02T20:15:12.519289
| 2016-09-15T14:07:00
| 2016-09-15T14:07:00
| 309,139,169
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,113
|
py
|
################################################################################
# Application: Outfitter
# File: topshop.py
# Goal: topshop.py will retrieve specific information from a
# given Topshop link and save the data to a variable
# Input: url of website
# Output: Target data
# Example: info = crawl("http://eu.topman.com/en/tmeu/product/clothing-617800/mens-jumpers-cardigans-617811/charcoal-shawl-cardigan-4656120?bi=0&ps=20")
#
# History: 2015-09-28 - JJ Creation of the file
# 2015-10-29 - JJ Added functionality to retrieve all images
#
################################################################################
################################################################################
# Imports
################################################################################
from product import Product
################################################################################
# Definitions
################################################################################
author = "JJ"
appName = "Topshop Crawler"
################################################################################
# Functions
################################################################################
################################################################################
# Classes
################################################################################
class TopshopProduct(Product):
    def __init__(self, *args, **kwargs):
        """Initialize the product with the fixed store name 'Topshop';
        all other arguments are forwarded to the Product base class.
        """
        super(TopshopProduct,self).__init__('Topshop', *args, **kwargs)
################################################################################
# Function: _get_image
# Input: tree
# Output: URL of the image
# Goal: Find the image URL and return it
# Targets: Use a css selecter to find the image URL
# Example: Find <meta property="og:image" content="http://media.topman.com/wcsstore/TopManEU/images/catalog/81F22KCHR_normal.jpg"/>
def _get_images(self):
images = []
try:
# Use css to select image
image_data = self._tree.cssselect('meta[property*=\'og:image\']')
# Save the image link from the content field to the variable image
images.append(image_data[0].attrib['content'])
except:
pass
# Return the link to the image
return images
################################################################################
# Function: _get_price
# Input: tree
# Output: Price in euro
# Goal: Find the price and return it
# Targets: Use a css selecter to find the price
# Example: Find <span itemprop="offers"><meta itemprop="price" content="E 39,95">
def _get_price(self):
price = ""
try:
# Use css to select the meta-tag with name equal to twitter:data1
price_meta = self._tree.cssselect('meta[property*=\'og:price:amount\']')
# Save the price from the content field to the variable price
price = price_meta[0].attrib['content']
except:
pass
# Return the price
return price
def _get_currency(self):
currency = ""
try:
currency_meta = self._tree.cssselect('meta[property*=\'og:price:currency\']')
currency = currency_meta[0].attrib['content']
except:
pass
# Return the currency
return currency
################################################################################
# Function: _get_color
# Input: tree
# Output: Color
# Goal: Find the color and return it
# Targets: Use a css selecter to find the color
# Example: Find <ul class="product_summary"><li class="product_colour">Colour: <span>GREY</span></li>
def _get_color(self):
color = ""
try:
# Use css to select to find the color
color_meta = self._tree.cssselect('ul[class*=\'product_summary\'] li[class*=\'product_colour\'] span')
# Save the price from the content field to the variable price
color = color_meta[0].text_content().strip()
except:
pass
# Return the color
return color
################################################################################
# Function: _get_title
# Input: tree
# Output: Title
# Goal: Find the title and return it
# Targets: Use a css selecter to find the title
# Example: Find <div class="title"><h1>ASOS Jumper<h1>
def _get_title(self):
title = ""
try:
# Use css to select the meta-tag with name equal to description
title_data = self._tree.cssselect('meta[property*=\'og:description\']')
# Retrieve the text from h1 and strip unwanted characters
title = title_data[0].attrib['content']
except:
pass
# Return the title
return title
################################################################################
# Function: _get_category
# Input: self
# Output: Title
# Goal: Find the title and return it
# Targets: Use a css selecter to find the title
# Example: Find last word of description. There is no category explicitly
def _get_category(self):
category = ""
try:
# Use css to select the meta-tag with name equal to description
category_data = self._tree.cssselect('meta[property*=\'og:description\']')
# Retrieve the text and take the last word
category = category_data[0].attrib['content'].rsplit(' ')[-1]
except:
pass
# Return the title
return category
def _get_brand(self):
brand = ""
return brand
################################################################################
# main
################################################################################
def get_product(url):
    """Build and return a TopshopProduct scraped from *url*."""
    return TopshopProduct(url)
def get_price(url):
    """Convenience wrapper: scrape *url* and return only its price."""
    return TopshopProduct(url).get_price()
|
[
"jitsejan@gmail.com"
] |
jitsejan@gmail.com
|
e33487b216736e6059584d1fa1c040ace6df1cc7
|
5bfbc89974b7cb29e476c5c8e18d6e363019aacf
|
/Example/sentiment_lstm_regression.py
|
83642cf1479c949bec11858edf1decc1c74e671f
|
[
"MIT"
] |
permissive
|
lagleki/BayesianRNN
|
1cec3f39bb4bc41a965d0443f7b01e1fd4186b9a
|
0426c503438aa9106c676e8f68e85aa9f16bd05b
|
refs/heads/master
| 2021-09-06T07:27:11.933093
| 2018-02-03T19:56:02
| 2018-02-03T19:56:02
| 120,124,468
| 0
| 0
| null | 2018-02-03T19:55:26
| 2018-02-03T19:55:26
| null |
UTF-8
|
Python
| false
| false
| 5,025
|
py
|
# Train a Bayesian LSTM on the IMDB sentiment classification task.
# To use the GPU:
# THEANO_FLAGS=mode=FAST_RUN,device=gpu,floatX=float32 python imdb_lstm_regression.py
# To speed up Theano, create a ram disk:
# mount -t tmpfs -o size=512m tmpfs /mnt/ramdisk
# Then add flag THEANO_FLAGS='base_compiledir=/mnt/ramdisk'
# NOTE(review): Python 2 script (uses `xrange` below) against the Keras 1.x
# API (`W_regularizer`, `dropout_W`, `nb_epoch`); it will not run unmodified
# on Python 3 / Keras 2.
from __future__ import absolute_import
from __future__ import print_function
import numpy as np
import sys
import theano
from callbacks import ModelTest
from dataset import loader
from keras.optimizers import SGD, RMSprop, Adagrad
from keras.models import Sequential
from keras.layers.core import Dense, Dropout
from keras.layers.embeddings import Embedding
from keras.layers.recurrent import LSTM, GRU, SimpleRNN
from keras.regularizers import l2
# Process inputs: dropout probabilities, weight decay, batch size, cutoff.
if len(sys.argv) == 1:
    print("Expected args: p_W, p_U, p_dense, p_emb, weight_decay, batch_size, maxlen")
    print("Using default args:")
    # sys.argv = ["", "0.", "0.", "0.", "0.", "1e-4", "128", "200"]
    sys.argv = ["", "0.25", "0.25", "0.25", "0.25", "1e-4", "128", "200"]
args = [float(a) for a in sys.argv[1:]]
print(args)
p_W, p_U, p_dense, p_emb, weight_decay, batch_size, maxlen = args
batch_size = int(batch_size)
maxlen = int(maxlen)
folder = "/scratch/home/Projects/rnn_dropout/exps/"
filename = ("sa_DropoutLSTM_pW_%.2f_pU_%.2f_pDense_%.2f_pEmb_%.2f_reg_%f_batch_size_%d_cutoff_%d_epochs"
            % (p_W, p_U, p_dense, p_emb, weight_decay, batch_size, maxlen))
print(filename)
# Global params:
nb_words = 20000
skip_top = 0
test_split = 0.2
init_seed = 0
global_seed = 0
# Load data:
print("Loading data...")
dataset = loader(init_seed, maxlen, nb_words, skip_top, test_split)
X_train, X_test, Y_train, Y_test = dataset.X_train, dataset.X_test, dataset.Y_train, dataset.Y_test
mean_y_train, std_y_train = dataset.mean_y_train, dataset.std_y_train
# Set seed:
np.random.seed(global_seed)
# Build model: Embedding -> LSTM -> Dropout -> Dense(1) regression head,
# with L2 weight decay and dropout everywhere (the Bayesian-RNN recipe).
print('Build model...')
model = Sequential()
model.add(Embedding(nb_words + dataset.index_from, 128, W_regularizer=l2(weight_decay),
                    dropout=p_emb, input_length=maxlen, batch_input_shape=(batch_size, maxlen)))
model.add(LSTM(128, W_regularizer=l2(weight_decay), U_regularizer=l2(weight_decay),
               b_regularizer=l2(weight_decay), dropout_W=p_W, dropout_U=p_U))
model.add(Dropout(p_dense))
model.add(Dense(1, W_regularizer=l2(weight_decay), b_regularizer=l2(weight_decay)))
#optimiser = SGD(lr=0.1, decay=1e-6, momentum=0.9, nesterov=False)
optimiser = 'adam'
model.compile(loss='mean_squared_error', optimizer=optimiser)
# Potentially load weights
# model.load_weights("path")
# Train model
print("Train...")
# Theano
# ModelTest callbacks periodically evaluate predictions during training.
modeltest_1 = ModelTest(X_train[:100],
                        mean_y_train + std_y_train * np.atleast_2d(Y_train[:100]).T,
                        test_every_X_epochs=1, verbose=0, loss='euclidean',
                        mean_y_train=mean_y_train, std_y_train=std_y_train)
modeltest_2 = ModelTest(X_test, np.atleast_2d(Y_test).T, test_every_X_epochs=1,
                        verbose=0, loss='euclidean',
                        mean_y_train=mean_y_train, std_y_train=std_y_train)
model.fit(X_train, Y_train, batch_size=batch_size, nb_epoch=250,
          callbacks=[modeltest_1, modeltest_2])
# # Tensorflow
# modeltest_1 = ModelTest(X_train[:batch_size],
#                         mean_y_train + std_y_train * np.atleast_2d(Y_train[:batch_size]).T,
#                         test_every_X_epochs=1, verbose=0, loss='euclidean',
#                         mean_y_train=mean_y_train, std_y_train=std_y_train, batch_size=batch_size)
# tensorflow_test_size = batch_size * (len(X_test) / batch_size)
# modeltest_2 = ModelTest(X_test[:tensorflow_test_size], np.atleast_2d(Y_test[:tensorflow_test_size]).T,
#                         test_every_X_epochs=1, verbose=0, loss='euclidean',
#                         mean_y_train=mean_y_train, std_y_train=std_y_train, batch_size=batch_size)
# tensorflow_train_size = batch_size * (len(X_train) / batch_size)
# model.fit(X_train[:tensorflow_train_size], Y_train[:tensorflow_train_size],
#           batch_size=batch_size, nb_epoch=250, callbacks=[modeltest_1, modeltest_2])
# Potentially save weights
# model.save_weights("path", overwrite=True)
# Evaluate model
# Dropout approximation for training data (RMSE, de-standardised):
standard_prob = model.predict(X_train, batch_size=500, verbose=1)
print(np.mean(((mean_y_train + std_y_train * np.atleast_2d(Y_train).T)
               - (mean_y_train + std_y_train * standard_prob))**2, 0)**0.5)
# Dropout approximation for test data:
standard_prob = model.predict(X_test, batch_size=500, verbose=1)
print(np.mean((np.atleast_2d(Y_test).T - (mean_y_train + std_y_train * standard_prob))**2, 0)**0.5)
# MC dropout for test data: average T stochastic forward passes.
T = 50
prob = np.array([modeltest_2.predict_stochastic(X_test, batch_size=500, verbose=0)
                 for _ in xrange(T)])
prob_mean = np.mean(prob, 0)
print(np.mean((np.atleast_2d(Y_test).T - (mean_y_train + std_y_train * prob_mean))**2, 0)**0.5)
|
[
"yaringal@gmail.com"
] |
yaringal@gmail.com
|
09f920dba59ee54359a4cf44bf01235cf2d05171
|
122779e414685ac95076f440f2809d314800e510
|
/src/tests/integration/test_cli.py
|
2f9573fd5a48bf3689f6afadd0a0a6530705bcbd
|
[
"BSD-2-Clause"
] |
permissive
|
alexseitsinger/package-controller
|
3fb7a09d2b965630fe11203f6f5c47e664826c7d
|
0ee896986cfa17a96bf9fb6afff35dd97f0b1211
|
refs/heads/master
| 2022-12-11T18:00:17.922442
| 2019-08-21T02:24:35
| 2019-08-21T02:24:35
| 185,510,280
| 2
| 0
|
BSD-2-Clause
| 2022-12-08T05:50:25
| 2019-05-08T02:10:23
|
Python
|
UTF-8
|
Python
| false
| false
| 238
|
py
|
import pytest
from click.testing import CliRunner
from package_controller.cli import main
def test_add():
    # TODO: exercise the `add` subcommand via click's CliRunner.
    pass
def test_build():
    # TODO: verify `build` produces the expected distribution artifacts.
    pass
def test_commit():
    # TODO: verify `commit` creates the expected git commit.
    pass
def test_release():
    # TODO: verify the full `release` flow end to end.
    pass
def test_version():
    # TODO: verify `version` reports/bumps the package version.
    pass
|
[
"16756928+alexseitsinger@users.noreply.github.com"
] |
16756928+alexseitsinger@users.noreply.github.com
|
268f77595526ce94d83bcd97375dc506662f676b
|
309da681f1ce8d119f2e44580ba196094d03bd92
|
/project.py
|
1dbaa8cec2329e4e1555049d01b2d79a0b6f0710
|
[] |
no_license
|
aditya6116/catalog
|
bd9da4c8f8ec2c95728b66a8914d04e759c7ddb0
|
e8247118cde31d92327a8df82f766bb0f218999f
|
refs/heads/master
| 2021-01-21T21:32:36.111228
| 2017-06-20T07:01:09
| 2017-06-20T07:01:09
| 94,858,288
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 14,369
|
py
|
from flask import Flask, render_template
from flask import request, redirect, jsonify, url_for, flash
from sqlalchemy import create_engine, asc
from sqlalchemy.orm import sessionmaker
from database_setup import Base, Restaurant, MenuItem, User
from flask import session as login_session
import random
import string
from oauth2client.client import flow_from_clientsecrets
from oauth2client.client import FlowExchangeError
import httplib2
import json
from flask import make_response
import requests
app = Flask(__name__)
# Connect to Database and create database session
engine = create_engine('sqlite:///restaurantmenuwithusers.db')
Base.metadata.bind = engine
DBSession = sessionmaker(bind=engine)
# NOTE(review): a single module-level session shared by every request is
# not thread-safe; a scoped_session (or per-request session) would be safer.
session = DBSession()
# OAuth client id taken from the downloaded Google client_secrets.json.
CLIENT_ID = json.loads(
    open('client_secrets.json', 'r').read())['web']['client_id']
APPLICATION_NAME = "Restaurant Menu Application"
def createUser(login_session):
    """Persist a new User built from the OAuth session and return its id."""
    session.add(User(name=login_session['username'],
                     email=login_session['email'],
                     picture=login_session['picture']))
    session.commit()
    created = session.query(User).filter_by(email=login_session['email']).one()
    return created.id
def getUserInfo(user_id):
    """Fetch and return the User row with primary key *user_id*."""
    return session.query(User).filter_by(id=user_id).one()
def getUserID(email):
    """Return the id of the User with *email*, or None when no row matches.

    ``query(...).one()`` raises when zero (or several) rows match; that
    lookup failure is mapped to None.
    """
    try:
        user = session.query(User).filter_by(email=email).one()
        return user.id
    # Narrowed from BaseException so KeyboardInterrupt/SystemExit propagate.
    except Exception:
        return None
@app.route('/login')
def showLogin():
    """Render the login page with a fresh anti-forgery state token."""
    # 32 random uppercase letters/digits; `xrange` makes this Python 2 only.
    # NOTE(review): the `random` module is not cryptographically secure;
    # os.urandom (or py3's `secrets`) would be preferable for this token.
    state = ''.join(random.choice(string.ascii_uppercase + string.digits)
                    for x in xrange(32))
    login_session['state'] = state
    return render_template("login.html", STATE=state)
# gconnect
@app.route('/gconnect', methods=['POST'])
def gconnect():
    """Complete the Google OAuth2 sign-in handshake.

    Exchanges the one-time auth code POSTed by the client for credentials,
    validates the resulting access token against Google, stores the user's
    identity in login_session (creating a DB user on first login), and
    returns a small HTML welcome snippet.
    """
    # Validate state token
    if request.args.get('state') != login_session['state']:
        response = make_response(json.dumps('Invalid state parameter.'), 401)
        response.headers['Content-Type'] = 'application/json'
        return response
    # Obtain authorization code
    code = request.data
    try:
        # Upgrade the authorization code into a credentials object
        oauth_flow = flow_from_clientsecrets('client_secrets.json', scope='')
        oauth_flow.redirect_uri = 'postmessage'
        credentials = oauth_flow.step2_exchange(code)
    except FlowExchangeError:
        response = make_response(
            json.dumps('Failed to upgrade the authorization code.'), 401)
        response.headers['Content-Type'] = 'application/json'
        return response
    # Check that the access token is valid.
    access_token = credentials.access_token
    url = ('https://www.googleapis.com/oauth2/v1/tokeninfo?access_token=%s'
           % access_token)
    h = httplib2.Http()
    result = json.loads(h.request(url, 'GET')[1])
    # If there was an error in the access token info, abort.
    if result.get('error') is not None:
        response = make_response(json.dumps(result.get('error')), 500)
        response.headers['Content-Type'] = 'application/json'
        return response
    # Verify that the access token is used for the intended user.
    gplus_id = credentials.id_token['sub']
    if result['user_id'] != gplus_id:
        response = make_response(
            json.dumps("Token's user ID doesn't match given user ID."), 401)
        response.headers['Content-Type'] = 'application/json'
        return response
    # Verify that the access token is valid for this app.
    if result['issued_to'] != CLIENT_ID:
        response = make_response(
            json.dumps("Token's client ID does not match app's."), 401)
        print "Token's client ID does not match app's."
        response.headers['Content-Type'] = 'application/json'
        return response
    # Short-circuit when this user is already signed in.
    stored_access_token = login_session.get('access_token')
    stored_gplus_id = login_session.get('gplus_id')
    if stored_access_token is not None and gplus_id == stored_gplus_id:
        response = make_response(
            json.dumps('Current user is already connected.'), 200)
        response.headers['Content-Type'] = 'application/json'
        return response
    # Store the access token in the session for later use.
    login_session['access_token'] = credentials.access_token
    login_session['gplus_id'] = gplus_id
    # Get user info
    userinfo_url = "https://www.googleapis.com/oauth2/v1/userinfo"
    params = {'access_token': credentials.access_token, 'alt': 'json'}
    answer = requests.get(userinfo_url, params=params)
    data = answer.json()
    login_session['username'] = data['name']
    login_session['picture'] = data['picture']
    login_session['email'] = data['email']
    # Create a local user record the first time this Google account logs in.
    user_id = getUserID(login_session['email'])
    if not user_id:
        user_id = createUser(login_session)
    login_session['user_id'] = user_id
    print login_session['username']
    # Build the welcome snippet returned to the login page's AJAX call.
    output = ''
    output += '<h1>Welcome, '
    output += login_session['username']
    output += '!</h1>'
    output += '<img src="'
    output += login_session['picture']
    output += """ " style = "width: 300px; height: 300px;border-radius: 150px;
    -webkit-border-radius: 150px;-moz-border-radius: 150px;"> """
    flash("you are now logged in as %s" % login_session['username'])
    print "done!"
    return output
# DISCONNECT - Revoke a current user's token and reset their login_session
@app.route('/gdisconnect')
def gdisconnect():
access_token = login_session['access_token']
print 'In gdisconnect access token is %s', access_token
print 'User name is: '
print login_session['username']
if access_token is None:
print 'Access Token is None'
response = make_response(
json.dumps('Current user not connected.'), 401)
response.headers['Content-Type'] = 'application/json'
return response
url = """https://accounts.google.com/o/oauth2/revoke?
token=%s""" %login_session['access_token']
h = httplib2.Http()
result = h.request(url, 'GET')[0]
print 'result is '
print result
if result['status'] == '404':
del login_session['access_token']
del login_session['gplus_id']
del login_session['username']
del login_session['email']
del login_session['picture']
response = make_response(json.dumps('Successfully disconnected.'), 200)
response.headers['Content-Type'] = 'application/json'
return response
else:
response = make_response(
json.dumps(
'Failed to revoke token for given user.',
400))
response.headers['Content-Type'] = 'application/json'
return response
# JSON APIs to view Restaurant Information
@app.route('/restaurant/<int:restaurant_id>/menu/JSON')
def restaurantMenuJSON(restaurant_id):
    """Return every menu item of one restaurant as JSON."""
    # Existence check: .one() raises (-> HTTP 500) when the id is unknown.
    # The previous unused local binding `restaurant` was dropped.
    session.query(Restaurant).filter_by(id=restaurant_id).one()
    items = session.query(MenuItem).filter_by(
        restaurant_id=restaurant_id).all()
    return jsonify(MenuItems=[i.serialize for i in items])
@app.route('/restaurant/<int:restaurant_id>/menu/<int:menu_id>/JSON')
def menuItemJSON(restaurant_id, menu_id):
    """Serialize a single menu item as JSON."""
    item = session.query(MenuItem).filter_by(id=menu_id).one()
    return jsonify(Menu_Item=item.serialize)
@app.route('/restaurant/JSON')
def restaurantsJSON():
    """Serialize every restaurant as JSON."""
    all_restaurants = session.query(Restaurant).all()
    return jsonify(restaurants=[row.serialize for row in all_restaurants])
# Show all restaurants
@app.route('/')
@app.route('/restaurant/')
def showRestaurants():
    """List every restaurant; anonymous visitors get the public template."""
    restaurants = session.query(Restaurant).order_by(asc(Restaurant.name))
    template = ('restaurants.html' if 'username' in login_session
                else 'publicrestaurant.html')
    return render_template(template, restaurants=restaurants)
# Create a new restaurant
@app.route('/restaurant/new/', methods=['GET', 'POST'])
def newRestaurant():
    """Create a restaurant owned by the logged-in user (GET shows the form)."""
    if 'username' not in login_session:
        return redirect("/login")
    if request.method == 'POST':
        newRestaurant = Restaurant(
            name=request.form['name'],
            user_id=login_session['user_id'])
        session.add(newRestaurant)
        flash('New Restaurant %s Successfully Created' % newRestaurant.name)
        session.commit()
        return redirect(url_for('showRestaurants'))
    else:
        return render_template('newRestaurant.html')
# Edit a restaurant
@app.route('/restaurant/<int:restaurant_id>/edit/', methods=['GET', 'POST'])
def editRestaurant(restaurant_id):
    """Rename a restaurant; only its owner may edit it."""
    if 'username' not in login_session:
        return redirect("/login")
    editedRestaurant = session.query(
        Restaurant).filter_by(id=restaurant_id).one()
    if login_session['user_id'] != editedRestaurant.user_id:
        flash('edit your restaurant')
        return redirect(url_for('showRestaurants'))
    else:
        if request.method == 'POST':
            # NOTE(review): a POST with an empty name falls through and the
            # view returns None (HTTP 500); the rename is also never
            # explicitly session.add()ed/committed here -- verify that it
            # actually persists.
            if request.form['name']:
                editedRestaurant.name = request.form['name']
                flash(
                    'Restaurant Successfully Edited %s' %
                    editedRestaurant.name)
                return redirect(url_for('showRestaurants'))
        else:
            return render_template(
                'editRestaurant.html',
                restaurant=editedRestaurant)
# Delete a restaurant
@app.route('/restaurant/<int:restaurant_id>/delete/', methods=['GET', 'POST'])
def deleteRestaurant(restaurant_id):
    """Delete a restaurant; only its owner may delete it (GET confirms)."""
    if 'username' not in login_session:
        return redirect("/login")
    else:
        restaurantToDelete = session.query(
            Restaurant).filter_by(id=restaurant_id).one()
        if login_session['user_id'] != restaurantToDelete.user_id:
            flash('You can Delete only your restaurant')
            return redirect(url_for('showRestaurants'))
        if request.method == 'POST':
            session.delete(restaurantToDelete)
            flash('%s Successfully Deleted' % restaurantToDelete.name)
            session.commit()
            return redirect(
                url_for(
                    'showRestaurants',
                    restaurant_id=restaurant_id))
        else:
            return render_template(
                'deleteRestaurant.html',
                restaurant=restaurantToDelete)
# Show a restaurant menu
@app.route('/restaurant/<int:restaurant_id>/')
@app.route('/restaurant/<int:restaurant_id>/menu/')
def showMenu(restaurant_id):
    """Show a restaurant's menu; the owner gets the editable template."""
    restaurant = session.query(Restaurant).filter_by(id=restaurant_id).one()
    items = session.query(MenuItem).filter_by(
        restaurant_id=restaurant_id).all()
    creator = getUserInfo(restaurant.user_id)
    # Anonymous visitors and non-owners see the read-only public menu.
    if "username" not in \
            login_session or login_session['user_id'] != creator.id:
        return render_template(
            'publicmenu.html',
            items=items,
            restaurant=restaurant,
            creator=creator)
    else:
        return render_template(
            'menu.html',
            items=items,
            restaurant=restaurant,
            creator=creator)
# Create a new menu item
@app.route(
    '/restaurant/<int:restaurant_id>/menu/new/',
    methods=[
        'GET',
        'POST'])
def newMenuItem(restaurant_id):
    """Create a menu item for a restaurant (GET renders the form).

    NOTE(review): unlike the edit/delete views below, this view does not
    check that the logged-in user owns the restaurant -- any logged-in
    user can add items; confirm whether that is intended.
    """
    if 'username' not in login_session:
        return redirect("/login")
    restaurant = session.query(Restaurant).filter_by(id=restaurant_id).one()
    if request.method == 'POST':
        newItem = MenuItem(
            name=request.form['name'],
            description=request.form['description'],
            price=request.form['price'],
            course=request.form['course'],
            restaurant_id=restaurant_id,
            user_id=restaurant.user_id)
        session.add(newItem)
        session.commit()
        flash('New Menu %s Item Successfully Created' % (newItem.name))
        return redirect(url_for('showMenu', restaurant_id=restaurant_id))
    else:
        return render_template('newmenuitem.html', restaurant_id=restaurant_id)
# Edit a menu item
@app.route(
    '/restaurant/<int:restaurant_id>/menu/<int:menu_id>/edit',
    methods=[
        'GET',
        'POST'])
def editMenuItem(restaurant_id, menu_id):
    """Edit a menu item; only the restaurant owner may edit.

    Empty form fields are skipped, so a blank input leaves that field
    unchanged.
    """
    if 'username' not in login_session:
        return redirect("/login")
    editedItem = session.query(MenuItem).filter_by(id=menu_id).one()
    restaurant = session.query(
        Restaurant).filter_by(id=restaurant_id).one()
    if login_session['user_id'] != restaurant.user_id:
        flash('edit your restaurant Menu')
        return redirect(url_for('showRestaurants'))
    if request.method == 'POST':
        if request.form['name']:
            editedItem.name = request.form['name']
        if request.form['description']:
            editedItem.description = request.form['description']
        if request.form['price']:
            editedItem.price = request.form['price']
        if request.form['course']:
            editedItem.course = request.form['course']
        session.add(editedItem)
        session.commit()
        flash('Menu Item Successfully Edited')
        return redirect(url_for('showMenu', restaurant_id=restaurant_id))
    else:
        return render_template(
            'editmenuitem.html',
            restaurant_id=restaurant_id,
            menu_id=menu_id,
            item=editedItem)
# Delete a menu item
@app.route(
    '/restaurant/<int:restaurant_id>/menu/<int:menu_id>/delete',
    methods=[
        'GET',
        'POST'])
def deleteMenuItem(restaurant_id, menu_id):
    """Delete a menu item; only the restaurant owner may delete (GET confirms)."""
    if 'username' not in login_session:
        return redirect("/login")
    else:
        restaurant = session.query(
            Restaurant).filter_by(id=restaurant_id).one()
        itemToDelete = session.query(MenuItem).filter_by(id=menu_id).one()
        if login_session['user_id'] != restaurant.user_id:
            flash('edit your restaurant')
            return redirect(url_for('showRestaurants'))
        if request.method == 'POST':
            session.delete(itemToDelete)
            session.commit()
            flash('Menu Item Successfully Deleted')
            return redirect(url_for('showMenu', restaurant_id=restaurant_id))
        else:
            return render_template('deleteMenuItem.html', item=itemToDelete)
if __name__ == '__main__':
    # NOTE(review): a hard-coded secret key and debug=True are fine for a
    # class project but must not ship to production.
    app.secret_key = 'super_secret_key'
    app.debug = True
    app.run(host='0.0.0.0', port=5000)
|
[
"gurusharan2@gmail.com"
] |
gurusharan2@gmail.com
|
1e5237044f42dfe836143cd43c09e4bc38b7033a
|
c12d0e0935aadc52c6ae03d901fcb52e2b8fe4d7
|
/task1/solution.py
|
f3614fd0a3446ed63f3aa00630f3e45ebe2df4f7
|
[] |
no_license
|
skeleta/python-retrospective
|
c19ec29867ea34ff96cd86c7c69fc0e1108d7f1c
|
e808e007f375fccbbb0ae7082bab0d39b93189fe
|
refs/heads/master
| 2021-01-18T03:14:27.664850
| 2013-04-28T20:18:35
| 2013-04-28T20:18:35
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 402
|
py
|
SIGNS = ("Козирог", "Водолей", "Риби", "Овен", "Телец", "Близнаци",
"Рак", "Лъв", "Дева", "Везни", "Скорпион", "Стрелец")
RANGES = {1: 19, 2: 18, 3: 20, 4: 20, 5: 20, 6: 20,
7: 21, 8: 22, 9: 22, 10: 22, 11: 21, 12: 21}
def what_is_my_sign(day, month):
return SIGNS[month - 12 - (day <= RANGES[month])]
|
[
"ivankapukaranov@gmail.com"
] |
ivankapukaranov@gmail.com
|
2be7b229988ee87da8b2f46796797fd123be5e00
|
20722df255492f591b1a988c7499e1eab6c71a90
|
/Ch1-Image_Recognition/stacked_autoencoders_for_mnist_classification.py
|
b7276e8c60f5905ae6df8947b5ada8ac112426d5
|
[] |
no_license
|
solaris33/TensorFlow_Examples
|
551f721aa4c97b4735496d5a1aecf742de081fa9
|
122116b268badf27b605d7a3857215474ab99e6a
|
refs/heads/master
| 2020-12-24T19:13:20.622985
| 2018-11-01T12:44:41
| 2018-11-01T12:44:41
| 58,610,121
| 4
| 1
| null | 2016-05-12T05:27:23
| 2016-05-12T05:20:56
| null |
UTF-8
|
Python
| false
| false
| 6,234
|
py
|
# -*- coding: utf-8 -*-
# Stacked AutoEncoder example for MNIST digit classification
# (uses the legacy TF1 API: placeholders, Session, initialize_all_variables)
# Absolute-import settings
from __future__ import division, print_function, absolute_import
# Import the required libraries
import tensorflow as tf
import numpy as np
import matplotlib.pyplot as plt
# Download the MNIST data.
from tensorflow.examples.tutorials.mnist import input_data
mnist = input_data.read_data_sets("MNIST_data/", one_hot=True)
# Parameter settings
learning_rate_RMSProp = 0.01
learning_rate_Gradient_Descent = 0.5
training_epochs = 400 # number of epochs (iterations)
batch_size = 256
display_step = 1 # print a log line every this many epochs
examples_to_show = 10 # how many reconstructed images to display
n_hidden_1 = 200 # number of nodes in the first hidden layer
n_hidden_2 = 200 # number of nodes in the second hidden layer
n_input = 784 # MNIST data input (image size: 28*28)
# Parameters for the stacked autoencoder
# Input data placeholder
X = tf.placeholder("float", [None, n_input])
# Weights and biases for hidden layer 1
Wh_1 = tf.Variable(tf.random_normal([n_input, n_hidden_1]))
bh_1 = tf.Variable(tf.random_normal([n_hidden_1]))
h_1 = tf.nn.sigmoid(tf.matmul(X, Wh_1) +bh_1) # activation of hidden layer 1 (sigmoid)
# Weights and biases for hidden layer 2
Wh_2 = tf.Variable(tf.random_normal([n_hidden_1, n_hidden_2]))
bh_2 = tf.Variable(tf.random_normal([n_hidden_2]))
h_2 = tf.nn.sigmoid(tf.matmul(h_1, Wh_2) +bh_2) # activation of hidden layer 2 (sigmoid)
# Weights and biases for the output (reconstruction) layer
Wo = tf.Variable(tf.random_normal([n_hidden_2, n_input]))
bo = tf.Variable(tf.random_normal([n_input]))
y_pred = tf.nn.sigmoid(tf.matmul(h_2,Wo) + bo) # activation of the output layer (sigmoid)
# The true output equals the input (reconstruction target)
y_true = X
# Parameters for the softmax classifier
W = tf.Variable(tf.zeros([n_hidden_2, 10]))
b = tf.Variable(tf.zeros([10]))
y = tf.nn.softmax(tf.matmul(h_2, W) + b) # predicted output: hidden layer 2's activations feed the classifier
y_ = tf.placeholder(tf.float32, [None, 10]) # true output (one-hot labels)
# Parameters for optimization
# Autoencoder optimization
cost = tf.reduce_mean(tf.pow(y_true - y_pred, 2)) # squared-error loss
optimizer = tf.train.RMSPropOptimizer(learning_rate_RMSProp).minimize(cost)
# Softmax classifier optimization
cross_entropy = tf.reduce_mean(-tf.reduce_sum(y_ * tf.log(y), reduction_indices=[1])) # cross-entropy loss
train_step = tf.train.GradientDescentOptimizer(learning_rate_Gradient_Descent).minimize(cross_entropy)
# Fine-tuning optimization
finetuning_cost = tf.reduce_mean(-tf.reduce_sum(y_ * tf.log(y), reduction_indices=[1])) + tf.reduce_mean(tf.pow(y_true - y_pred, 2)) # cross-entropy loss + squared-error loss
finetuning_train_step = tf.train.GradientDescentOptimizer(learning_rate_Gradient_Descent).minimize(finetuning_cost)
# Step 1: Stacked autoencoder pre-training
# Initialize the variables.
init = tf.initialize_all_variables()
# Run the graph.
sess = tf.Session()
sess.run(init)
total_batch = int(mnist.train.num_examples/batch_size)
# Start training.
for epoch in range(training_epochs):
    # Loop over all batches.
    for i in range(total_batch):
        batch_xs, batch_ys = mnist.train.next_batch(batch_size)
        # Train on this batch (unsupervised: labels unused here).
        _, cost_value = sess.run([optimizer, cost], feed_dict={X: batch_xs})
    # Print a log line every display_step epochs.
    if epoch % display_step == 0:
        print("Epoch:", '%04d' % (epoch+1), "cost=", "{:.9f}".format(cost_value))
print("Stacked Autoencoder pre-training Optimization Finished!")
# Step 2: reconstruct the test set with the autoencoder.
reconstructed_image = sess.run(y_pred, feed_dict={X: mnist.test.images[:examples_to_show]})
# Compare the original images with the reconstructed ones.
f, a = plt.subplots(2, 10, figsize=(10, 2))
for i in range(examples_to_show):
    a[0][i].imshow(np.reshape(mnist.test.images[i], (28, 28)))
    a[1][i].imshow(np.reshape(reconstructed_image[i], (28, 28)))
f.show()
plt.draw()
#plt.waitforbuttonpress() # pause until a button is pressed
f.savefig('reconstructed_mnist_image.png') # save the reconstruction result as a png
# Step 3: train the softmax classifier.
for i in range(1000):
    batch_xs, batch_ys = mnist.train.next_batch(100)
    sess.run(train_step, feed_dict={X: batch_xs, y_: batch_ys})
print("Softmax Classifier Optimization Finished!")
# Step 4: print the model accuracy (before fine-tuning).
correct_prediction = tf.equal(tf.argmax(y,1), tf.argmax(y_,1))
accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))
print("Accuracy(before fine-tuning): ") # Accuracy ~ 0.9282
print(sess.run(accuracy, feed_dict={X: mnist.test.images, y_: mnist.test.labels}))
# Step 5: fine-tune the softmax model
# Start training.
for epoch in range(training_epochs):
    # Loop over all batches.
    for i in range(total_batch):
        batch_xs, batch_ys = mnist.train.next_batch(batch_size)
        # Train on this batch (joint reconstruction + classification loss).
        _, cost_value = sess.run([finetuning_train_step, finetuning_cost], feed_dict={X: batch_xs, y_: batch_ys})
    # Print a log line every display_step epochs.
    if epoch % display_step == 0:
        print("Epoch:", '%04d' % (epoch+1), "cost=", "{:.9f}".format(cost_value))
print("Fine-tuning softmax model Optimization Finished!")
# Step 6: print the model accuracy (after fine-tuning).
print("Accuracy(after fine-tuning): ") # Accuracy ~ 0.9714
print(sess.run(accuracy, feed_dict={X: mnist.test.images, y_: mnist.test.labels}))
|
[
"jinhoyang@snu.ac.kr"
] |
jinhoyang@snu.ac.kr
|
595635a1e4beaf535e5f7640bd2b7ca17a517b2c
|
b205909a015abf08dc15133797c465fd58a48352
|
/sdes.py
|
4d2868d7e9e2d273c541caee44e96f0f1bba4ee5
|
[] |
no_license
|
AlexanderFrancoletti/CryptographyHW1
|
2648f5b52053504ac6cbceb7964a596d91c1a4bb
|
2d0b728046b91d099c3f6af9996c805db0df566e
|
refs/heads/master
| 2020-03-29T05:20:02.226714
| 2018-09-20T17:02:00
| 2018-09-20T17:02:00
| 149,577,970
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,547
|
py
|
#Define keys initially
# S-DES bit-position tables: each entry is a 1-based index into the input
# bit string, listed in output order.
IP = [2, 6, 3, 1, 4, 8, 5, 7]  # initial permutation (8 bits)
EP = [4, 1, 2, 3, 2, 3, 4, 1]  # expansion permutation (4 -> 8 bits)
IP_inverse = [4, 1, 3, 5, 7, 2, 8, 6]  # inverse of IP
P10 = [3, 5, 2, 7, 4, 10, 1, 9, 8, 6]  # key permutation (10 bits)
P8 = [6, 3, 7, 4, 8, 5, 10, 9]  # subkey selection (10 -> 8 bits)
P4 = [2, 4, 3, 1]  # 4-bit permutation inside the round function fk
# S-boxes: the outer bits of a 4-bit input pick the row, the inner bits
# pick the column (see sboxLookup below).
sbox0 = [[1, 0, 3, 2],
         [3, 2, 1, 0],
         [0, 2, 1, 3],
         [3, 1, 3, 2]]
sbox1 = [[0, 1, 2, 3],
         [2, 0, 1, 3],
         [3, 0, 1, 0],
         [2, 1, 0, 3]]
default_key = '1010000010'  # 10-bit master key
#Function which reorders a string of bits according to a permutation table
def permutate(original, key):
    """Return *original* reordered by the 1-based positions listed in *key*."""
    picked = [original[position - 1] for position in key]
    return ''.join(picked)
#Returns the left half of bits in any key
def keyLeftHalf(bits):
    """Return the first half of *bits*.

    Uses floor division (//) so the slice index is an int: the original
    ``len(bits) / 2`` yields a float under Python 3 and slicing then
    raises TypeError.
    """
    return bits[:(len(bits) // 2)]
#Returns the right half of bits in any key
def keyRightHalf(bits):
    """Return the second half of *bits*.

    Uses floor division (//) so the slice index is an int: the original
    ``len(bits) / 2`` yields a float under Python 3 and slicing then
    raises TypeError.
    """
    return bits[(len(bits) // 2):]
#Rotate each half of the bit string left by one position and rejoin them
def shift(bits):
    """Circularly left-shift each half of *bits* by one and concatenate."""
    left = keyLeftHalf(bits)
    right = keyRightHalf(bits)
    return left[1:] + left[0] + right[1:] + right[0]
#This function generates the key from the first round of shifts
def generateKey1():
    """Derive subkey K1: P10 permutation, one circular shift, then P8."""
    shifted_once = shift(permutate(default_key, P10))
    return permutate(shifted_once, P8)
#This function generates the key from the second round of shifts
def generateKey2():
    """Derive subkey K2: P10 permutation, three circular shifts, then P8."""
    shifted = permutate(default_key, P10)
    for _ in range(3):
        shifted = shift(shifted)
    return permutate(shifted, P8)
#XOR function: combines two equal-length bit strings position by position
def xor(bits, key):
    """Return the bitwise XOR of two equal-length bit strings."""
    return ''.join('1' if a != b else '0' for a, b in zip(bits, key))
#This performs the non-linear sbox operations, which will transform the bits used as input
#based on the sbox tables defined above
def sboxLookup(bits, sbox):
    """Map a 4-bit string through *sbox*: outer bits pick the row, inner the column."""
    row_index = int(bits[0] + bits[3], 2)
    col_index = int(bits[1] + bits[2], 2)
    return format(sbox[row_index][col_index], '02b')
#This is the function which will actually perform the DES algorithm by using the
#helper functions above
def fk(bits, key):
    """One S-DES round: expand/XOR/S-box/permute the right half, then XOR
    with the left half.  Takes 8 bits plus an 8-bit subkey, returns 4 bits."""
    #Split the bits into left and right halves
    left = keyLeftHalf(bits)
    right = keyRightHalf(bits)
    #Permutate the right half (expand 4 -> 8 bits)
    bits = permutate(right, EP)
    # Mix in the round subkey
    bits = xor(bits, key)
    #Transform the bits using the sbox table (8 -> 4 bits, non-linear step)
    bits = sboxLookup(keyLeftHalf(bits), sbox0) + sboxLookup(keyRightHalf(bits), sbox1)
    #Permutate the resulting bits using the 4 bit key
    bits = permutate(bits, P4)
    #Perform xor operation on the left half to produce the round output
    return xor(bits, left)
#Encryption call takes 8 bit inputs and calls the DES functions in order
def encrypt(plainText):
    """Encrypt an 8-bit string with S-DES using the module's default_key."""
    #bits = stringToBits(plainText)
    #Cipher the bits according to the IP key
    bits = permutate(plainText, IP)
    #Store the first round's 4-bit output
    temp = fk(bits, generateKey1())
    #Swap: the second round runs on (right half || first-round output)
    bits = keyRightHalf(bits) + temp
    bits = fk(bits, generateKey2())
    #Return the resulting encrypted string of bits (inverse IP of round2 || round1)
    return permutate(bits + temp, IP_inverse)
#Decryption call has identical operations as encrypt, with the exception of
#calling the Key2 function first
def decrypt(cipherText):
bits = permutate(cipherText, IP)
temp = fk(bits, generateKey2())
bits = keyRightHalf(bits) + temp
bits = fk(bits, generateKey1())
return permutate(bits + temp, IP_inverse)
|
[
"alexander.francoletti@gmail.com"
] |
alexander.francoletti@gmail.com
|
45deb898113df9ae83c65ce0424014c7fb729b1e
|
5ee088a8573a305ea30bfc6d083c4ec6a1112d1f
|
/doule_exit/InitialPedestrian.py
|
d22aa771df9f3d3e6f99b9247685e1904db665b2
|
[] |
no_license
|
Poloma07/CA_Multi_Exits
|
f176d3c68625e6ab571b08c20229416960931659
|
c1736861c81a4328d9344522cc530bda7a8f67a3
|
refs/heads/master
| 2020-04-18T16:57:45.466170
| 2019-01-13T12:24:39
| 2019-01-13T12:24:39
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,499
|
py
|
import random
import numpy as np
import Data, Block
import matplotlib.pyplot as plt
def creatPeople():
'''
产生随机行人
:return:行人列表
'''
allBlock = [] # 用于存放格子
allPeople = [] # 用于存放行人
# 将所有格子全部存入列表'
for i in range(1, Data.ROOM_M):
for j in range(1, Data.ROOM_N):
b = Block.Block()
b.x = i
b.y = j
if random.random() > 0.5: # 顺时针和逆时针行人各占一半
b.clock_wise = True
else:
b.clock_wise = False
#----------初始化行人收益-------------
b.income_inertia = np.zeros(9)
b.income_wall = np.zeros(9)
b.income_exit = np.zeros(9)
b.income_memory = np.zeros(9)
b.income_all = np.zeros(9)
allBlock.append(b) # 添加行人
random.shuffle(allBlock) # 随机排序
allPeople = allBlock[:Data.PEOPLE_NUMBER] # 取前N个 可有效防止无限产生随机数
return allPeople
def creatAppointPeo():
'''
产生指定行人
:return: 行人列表
'''
allPeople = []
b3 = Block.Block()
b3.x = 3
b3.y = 10
b3.type = False
b3.clock_wise = False
b3.income_inertia = np.zeros(9)
b3.income_wall = np.zeros(9)
b3.income_exit = np.zeros(9)
b3.income_memory = np.zeros(9)
b3.income_all = np.zeros(9)
allPeople.append(b3)
return allPeople
|
[
"1279010287@qq.com"
] |
1279010287@qq.com
|
e714abb10599586f922ae28ac3e229eafdfbbc3b
|
711ca7d1996f3eab0c9f8338fd49c9067274d5e5
|
/image_augment-master/test_detect_multi_process.py
|
34ce758c0c9c30786eaa30c6bbdf0b7112d8c2c3
|
[] |
no_license
|
zj463261929/tools
|
408ccdbe93ae00d4825b74d6e37cc2bd77208504
|
47545fbbb6779378ad833a7b84c89cc91848e345
|
refs/heads/master
| 2020-03-20T13:01:10.970807
| 2019-07-22T02:36:12
| 2019-07-22T02:36:12
| 137,446,532
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,139
|
py
|
#coding=utf-8
import codecs
import random
import cv2, os
import sys #sys是Python内建标准库
sys.path.append(os.getcwd())
from function import *
import function as FUN
import math
import numpy as np
import time
from test_detect_single import img_aug
from multiprocessing import Process
image_dir = "/opt/zhangjing/Detectron/data/oil_vehicle_person_10cls/VOCdevkit2007/VOC2007/aug/img/" #原始图片路径
xml_dir = "/opt/zhangjing/Detectron/data/oil_vehicle_person_10cls/VOCdevkit2007/VOC2007/aug/xml/" #图片对应xml的路径
image_txt1 = "/opt/zhangjing/Detectron/data/oil_vehicle_person_10cls/VOCdevkit2007/VOC2007/aug/minitruck.txt"#原始图片 标签对应的txt
image_txt2 = "/opt/zhangjing/Detectron/data/oil_vehicle_person_10cls/VOCdevkit2007/VOC2007/aug/forklift.txt"#原始图片 标签对应的txt
image_txt3 = "/opt/zhangjing/Detectron/data/oil_vehicle_person_10cls/VOCdevkit2007/VOC2007/aug/digger.txt"#原始图片 标签对应的txt
'''image_txt4 = "/opt/oil_vehicle_person_10cls/VOCdevkit2007/VOC2007/ImageSets_8558/Main/train_4.txt"#原始图片 标签对应的txt
image_txt5 = "/opt/oil_vehicle_person_10cls/VOCdevkit2007/VOC2007/ImageSets_8558/Main/train_5.txt"#原始图片 标签对应的txt
image_txt6 = "/opt/oil_vehicle_person_10cls/VOCdevkit2007/VOC2007/ImageSets_8558/Main/train_6.txt"#原始图片 标签对应的txt
'''
save_img_dir = "/opt/zhangjing/Detectron/data/oil_vehicle_person_10cls/VOCdevkit2007/VOC2007/aug/img_aug/"
save_xml_dir = "/opt/zhangjing/Detectron/data/oil_vehicle_person_10cls/VOCdevkit2007/VOC2007/aug/xml_aug/" #处理后保存的路径,每个增强方法会保存在对应文件夹下
if not os.path.exists(save_xml_dir):
os.mkdir(save_xml_dir)
if not os.path.exists(save_img_dir):
os.mkdir(save_img_dir)
if __name__ == "__main__":
print time.strftime('%Y-%m-%d %H:%M:%S',time.localtime(time.time()))
print 'Parent process %s starts.' % os.getpid()
#创建线程
my_process1 = Process(target = img_aug, args=(image_dir,xml_dir,image_txt1,save_img_dir,save_xml_dir) , name= 'ys_process1')
my_process2 = Process(target = img_aug, args=(image_dir,xml_dir,image_txt2,save_img_dir,save_xml_dir) , name= 'ys_process2')
my_process3 = Process(target = img_aug, args=(image_dir,xml_dir,image_txt3,save_img_dir,save_xml_dir) , name= 'ys_process3')
'''
my_process4 = Process(target = img_aug, args=(image_dir,xml_dir,image_txt4,save_img_dir,save_xml_dir) , name= 'ys_process4')
my_process5 = Process(target = img_aug, args=(image_dir,xml_dir,image_txt5,save_img_dir,save_xml_dir) , name= 'ys_process5')
my_process6 = Process(target = img_aug, args=(image_dir,xml_dir,image_txt6,save_img_dir,save_xml_dir) , name= 'ys_process6')
'''
#等待2s
time.sleep(2)
#启动线程
my_process1.start()
my_process2.start()
my_process3.start()
'''my_process4.start()
my_process5.start()
my_process6.start()'''
#等待线程结束
my_process1.join()
my_process2.join()
my_process3.join()
'''my_process4.join()
my_process5.join()
my_process6.join()'''
print time.strftime('%Y-%m-%d %H:%M:%S',time.localtime(time.time()))
print 'process %s ends.' % os.getpid()
|
[
"zhagnjing1@mail.e-u.cn"
] |
zhagnjing1@mail.e-u.cn
|
7bed90a14fc2ce416d14e56c5bf265e8b646487f
|
7d3b096f803d1a47ad71a5c8aab30ba3aa67828c
|
/chibi_file/__init__.py
|
fe22184683cfdc5c75ca908282fad7a086a9d2bc
|
[] |
no_license
|
dem4ply/chibi_file
|
462244dac712d88915f2b931c5f0822f6d1fa937
|
d27cef794512014b1602486edd0235052b38087a
|
refs/heads/master
| 2020-12-03T05:09:15.825690
| 2017-08-23T09:36:57
| 2017-08-23T09:36:57
| 95,737,905
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,835
|
py
|
import mmap
import os
import shutil
def current_dir():
return os.getcwd()
def inflate_dir( src ):
if '~' in src:
return os.path.expanduser( src )
else:
return os.path.abspath( src )
def is_dir( src ):
return os.path.isdir( src )
def is_file( src ):
return os.path.isfile( src )
def ls( src=None ):
if src is None:
src = current_dir()
return ( name for name in os.listdir( src ) )
def ls_only_dir( src=None ):
return ( name for name in ls( src ) if is_dir( name ) )
def join( *patch ):
return os.path.join( *patch )
def exists( file_name ):
return os.path.exists( file_name )
def copy( source, dest ):
shutil.copy( source, dest )
class Chibi_file:
def __init__( self, file_name ):
self._file_name = file_name
if not self.exists:
self.touch()
self.reread()
@property
def file_name( self ):
return self._file_name
def __del__( self ):
self._file_content.close()
def find( self, string_to_find ):
if isinstance( string_to_find, str ):
string_to_find = string_to_find.encode()
return self._file_content.find( string_to_find )
def reread( self ):
with open( self._file_name, 'r' ) as f:
self._file_content = mmap.mmap( f.fileno(), 0,
prot=mmap.PROT_READ )
def __contains__( self, string ):
return self.find( string ) >= 0
def append( self, string ):
with open( self._file_name, 'a' ) as f:
f.write( string )
self.reread()
@property
def exists( self ):
return exists( self.file_name )
def touch( self ):
open( self.file_name, 'a' ).close()
def copy( self, dest ):
copy( self.file_name, dest )
|
[
"dem4ply@gmail.com"
] |
dem4ply@gmail.com
|
b7e89b7513c6151d39dc8adad4fee33e8afcf8f1
|
09cc8367edb92c2f02a0cc1c95a8290ff0f52646
|
/ipypublish_plugins/example_new_plugin.py
|
2fe177802ec9fd3259ca9ac9ac002ef160f3c1f2
|
[
"BSD-3-Clause"
] |
permissive
|
annefou/ipypublish
|
7e80153316ab572a348afe26d309c2a9ee0fb52b
|
917c7f2e84be006605de1cf8851ec13d1a163b24
|
refs/heads/master
| 2020-04-13T16:08:59.845707
| 2018-07-30T18:26:12
| 2018-07-30T18:26:12
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,612
|
py
|
"""html in standard nbconvert format, but with
- a table of contents
- toggle buttons for showing/hiding code & output cells
- converts or removes (if no converter) latex tags (like \cite{abc}, \ref{})
"""
from ipypublish.filters.replace_string import replace_string
from ipypublish.html.create_tpl import create_tpl
from ipypublish.html.ipypublish import latex_doc
# from ipypublish.html.standard import inout_prompt
from ipypublish.html.ipypublish import toc_sidebar
from ipypublish.html.ipypublish import toggle_buttons
from ipypublish.html.standard import content
from ipypublish.html.standard import content_tagging
from ipypublish.html.standard import document
from ipypublish.html.standard import mathjax
from ipypublish.html.standard import widgets
from ipypublish.preprocessors.latex_doc_captions import LatexCaptions
from ipypublish.preprocessors.latex_doc_html import LatexDocHTML
from ipypublish.preprocessors.latex_doc_links import LatexDocLinks
from ipypublish.preprocessors.latextags_to_html import LatexTagsToHTML
from ipypublish.preprocessors.split_outputs import SplitOutputs
oformat = 'HTML'
config = {'TemplateExporter.filters': {'replace_string': replace_string},
'Exporter.filters': {'replace_string': replace_string},
'Exporter.preprocessors': [SplitOutputs, LatexDocLinks, LatexDocHTML, LatexTagsToHTML, LatexCaptions]}
template = create_tpl([
document.tpl_dict,
content.tpl_dict, content_tagging.tpl_dict,
mathjax.tpl_dict, widgets.tpl_dict,
# inout_prompt.tpl_dict,
toggle_buttons.tpl_dict, toc_sidebar.tpl_dict,
latex_doc.tpl_dict
])
|
[
"chrisj_sewell@hotmail.com"
] |
chrisj_sewell@hotmail.com
|
3a319731bfbf4784b70ae3333efef1452e0f0744
|
bb0af8077407b27f8c3b787d32bdd9c4b37e977a
|
/fitmodel_densitysplit/do_fitting/modelfitfullk.py
|
82cbe8d7dac400a6d8e1c6ddf94bc83a4de38ec0
|
[] |
no_license
|
JulianWack/IfA-SummerStudent-2022
|
94948169acc830da5b2b1bb5d227fb4e782dbe1c
|
d1c6ebe48e0a527fe6138c73adc35464538e2e04
|
refs/heads/main
| 2023-04-15T02:51:59.098422
| 2022-09-10T10:52:38
| 2022-09-10T10:52:38
| 508,616,897
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 9,407
|
py
|
# Fit Kaiser with FoG term over largest possible k range
# Identical to modelfit.py excepet for storing paths and line 94
import numpy as np
import matplotlib.pyplot as plt
from scipy.special import legendre, erf
from datetime import timedelta
import time
from os import mkdir, listdir
import helper_funcs as hf
from astropy.cosmology import Planck18 as Planck18_astropy
import camb
import zeus
from nbodykit.lab import *
from nbodykit import style
plt.style.use(style.notebook)
### MCMC functions###
def logprior(theta, i, kmax):
''' The natural logarithm of the prior probability. Assume parameters independent such that log priors add.
Note that normalization is irrelevant for MCMC.'''
lp = 0.
b1, beta, sigma = theta
sigma_min, sigma_max = 1, 5
sigma_max = 5 if kmax < 0.075 else 60
lp_sigma = 0. if sigma_min < sigma < sigma_max else -np.inf
b1_min, b1_max = 0, 3
if i == 0:
beta_min, beta_max = -3, 3
else:
beta_min, beta_max = 0, 3
lp_b1 = 0. if b1_min < b1 < b1_max else -np.inf
lp_beta = 0. if beta_min < beta < beta_max else -np.inf
return lp_b1 + lp_beta + lp_sigma
def loglike(theta, data_multipoles, k, C_inv):
'''Return logarithm of likelihood i.e. -0.5*chi2.
data_multipoles must be an array of shape (len(ells), len(k)). theta is parameter vector: [b1, beta, sigma].'''
ells = [0,2]
model_multipoles = np.empty((len(ells), len(k)))
b1, beta, sigma = theta
model_multipoles[0] = ( 1/(2*(k*sigma)**5) * (np.sqrt(2*np.pi)*erf(k*sigma/np.sqrt(2))*(3*beta**2+(k*sigma)**4+2*beta*(k*sigma)**2) +
np.exp(-0.5*(k*sigma)**2)*(-2*beta*(beta+2)*(k*sigma)**3-6*beta**2*k*sigma) ) ) * b1**2 * Plin(k)
model_multipoles[1] = ( -5/(4*(k*sigma)**7) * (np.sqrt(2*np.pi)*erf(k*sigma/np.sqrt(2))*(-45*beta**2+(k*sigma)**6+(2*beta-3)*(k*sigma)**4+3*(beta-6)*beta*(k*sigma)**2) +
np.exp(-0.5*(k*sigma)**2)*((4*beta*(beta+2)+6)*(k*sigma)**5+12*beta*(2*beta+3)*(k*sigma)**3+90*beta**2*k*sigma) ) ) * b1**2 * Plin(k)
D_M = (data_multipoles - model_multipoles).flatten()
return -0.5*D_M@(C_inv @ D_M)
def logpost(theta, i, data_multipoles, k, C_inv):
'''Returns the logarithm of the posterior. By Bayes' theorem, this is just the sum of the log prior and log likelihood (up
to a irrelavant constant).
Uses values for theta from pre-analysis step to inform prior
'''
return logprior(theta, i, k[-1]) + loglike(theta, data_multipoles, k, C_inv)
#####################
### Set up MCMC ###
LOS = [0,0,1]
redshift = 0.2
BoxSize = 2000
cosmo = cosmology.Cosmology.from_astropy(Planck18_astropy)
Plin = cosmology.LinearPower(cosmo, redshift, transfer='CLASS')
sigma8_lin = Plin.sigma_r(8)
# load Planck18 data for CAMB and find f*sigma8 at redshift
# follows https://camb.readthedocs.io/en/latest/CAMBdemo.html
pars=camb.read_ini('../planck_2018.ini')
_ = pars.set_matter_power(redshifts=[redshift], kmax=1.4)
pars.NonLinear = camb.model.NonLinear_none
results = camb.get_results(pars)
fs8_true = results.get_fsigma8()[0]
ptile_labels = [r'$0^{th}$', r'$1^{st}$', r'$2^{nd}$', r'$3^{rd}$', r'$4^{th}$', r'$5^{th}$', r'$6^{th}$', r'$7^{th}$', r'$8^{th}$', r'$9^{th}$']
dk = 0.01
ells = [0,2]
# load computed power spectra to deduce multipoles in each bin and P(k,mu) from data
# need to pass location location of folder containing stored power spectra which lies one level lower in this case
k_full, shotnoise, n_ptile, Pk_ells_full = hf.load_power_data('../', ells, get_data_Pkmus=False)
# for given BoxSize, k is NaN above 0.034
possible_kmax = k_full[k_full<=0.343][1:] # ignore first k bin
kmax_range = possible_kmax
Nkmax = len(kmax_range)
b1_fits, beta_fits, sigma_fits, delta_fs8 = np.full((n_ptile, Nkmax), np.nan), np.full((n_ptile, Nkmax), np.nan), np.full((n_ptile, Nkmax), np.nan), np.full((n_ptile, Nkmax), np.nan)
b1_stds, beta_stds, sigma_stds, delta_fs8_stds = np.full((n_ptile, Nkmax), np.nan), np.full((n_ptile, Nkmax), np.nan), np.full((n_ptile, Nkmax), np.nan), np.full((n_ptile, Nkmax), np.nan)
reduced_chi2 = np.full((n_ptile, Nkmax), np.nan)
nsteps = 2500
ndim = 3
nwalkers = 8
start_b1 = 0.5 + 1*np.random.random(nwalkers)
start_beta = 0.5 + 1*np.random.random(nwalkers)
start_sigma = 1 + 4*np.random.random(nwalkers)
start = np.column_stack([start_b1, start_beta, start_sigma])
###################
### Run MCMC ###
root_path = '../fit_results/FoG_fullk/'
print("Fitting up to kmax=%.3f"%kmax_range[-1])
for i in range(n_ptile):
store_path = root_path+'chains_ptile%d/'%i
if 'chains_ptile%d'%i not in listdir(root_path):
mkdir(store_path)
cov_mat = np.loadtxt('../bruteforce_covmat/covariance_matricies/cov_ptile_%d.txt'%i)
t1 = time.time()
for j,kmax in enumerate(kmax_range):
if 'k%d.npy'%j in listdir(store_path):
continue
# slice up to increasingly large kmax and find delta_fs8 for each bin
mask = np.full(len(k_full), False)
mask = k_full <= kmax
mask[0] = False
k_sliced = k_full[mask]
Pk_ells_i = Pk_ells_full[:,:,mask][i]
C_inv = hf.mock_cov_mat_inv(cov_mat, k_full, kmax)
sampler = zeus.EnsembleSampler(nwalkers, ndim, logpost, maxiter=1e5, verbose=False, args=[i, Pk_ells_i, k_sliced, C_inv])
sampler.run_mcmc(start, nsteps)
chain = sampler.get_chain(flat=True, discard=nsteps//2)
# save chain without burn-in
np.save(store_path+'k%d'%j, chain)
b1_fits[i][j], b1_stds[i][j] = np.mean(chain[:,0]), np.std(chain[:,0])
# parameter space is sym about b1=0 for Kaiser model. To get non negative fs8 assure that b1 and beta have the same sign
if i == 0:
b1_fits[i][j] *= -1
beta_fits[i][j], beta_stds[i][j] = np.mean(chain[:,1]), np.std(chain[:,1])
delta_fs8[i][j] = 1 - sigma8_lin*(beta_fits[i][j]*b1_fits[i][j])/fs8_true
delta_fs8_stds[i][j] = np.abs(sigma8_lin/fs8_true*(beta_stds[i][j]*b1_fits[i][j]+beta_fits[i][j]*b1_stds[i][j]))
sigma_fits[i][j], sigma_stds[i][j] = np.mean(chain[:,2]), np.std(chain[:,2])
reduced_chi2[i][j] = -2*loglike([b1_fits[i][j], beta_fits[i][j], sigma_fits[i][j]], Pk_ells_i, k_sliced, C_inv) / (len(ells)*len(k_sliced)-ndim)
t2 = time.time()
print('Fitted %d-th percentile in %s'%(i,str(timedelta(seconds=t2-t1))))
################
### Store fit result ###
np.savetxt(root_path+'b1_fits.txt', b1_fits)
np.savetxt(root_path+'b1_stds.txt', b1_stds)
np.savetxt(root_path+'beta_fits.txt', beta_fits)
np.savetxt(root_path+'beta_stds.txt', beta_stds)
np.savetxt(root_path+'delta_fs8.txt', delta_fs8)
np.savetxt(root_path+'delta_fs8_stds.txt', delta_fs8_stds)
np.savetxt(root_path+'sigma_fits.txt', sigma_fits)
np.savetxt(root_path+'sigma_stds.txt', sigma_stds)
np.savetxt(root_path+'reduced_chi2.txt', reduced_chi2)
########################
### Make fs8 plot ###
fig = plt.figure(figsize=(20,8))
for i in range(n_ptile):
plt.plot(kmax_range, delta_fs8[i], label=ptile_labels[i])
plt.fill_between(kmax_range, delta_fs8[i]-delta_fs8_stds[i,:], delta_fs8[i]+delta_fs8_stds[i,:], alpha=0.1)
plt.title(r'$\Delta f\sigma_8$ at $z=%.3f$'%redshift)
plt.xlabel(r'$k_{max}$ [$h \ \mathrm{Mpc}^{-1}$]')
plt.ylabel(r'$1 - (\sigma_8^{lin}*\beta*b_1) \ / \ (f\sigma_8)^{true}$')
handles, labels = plt.gca().get_legend_handles_labels()
fig.legend(handles, labels, loc='upper center', bbox_to_anchor=(0.5, -0.05), ncol=n_ptile)
fig.savefig("../plots/KaiserFoG_fullk_dfs8_vs_kmax.pdf")
#####################
### Make fit plot ###
fig = plt.figure(figsize=(26,18))
ax_b1 = plt.subplot(2,3,1)
ax_beta = plt.subplot(2,3,2)
ax_sigma = plt.subplot(2,3,3)
ax_chi2 = plt.subplot(2,3,(4,6))
for i in range(n_ptile):
ax_b1.plot(kmax_range, b1_fits[i], label=ptile_labels[i])
ax_b1.fill_between(kmax_range, b1_fits[i]-b1_stds[i], b1_fits[i]+b1_stds[i], alpha=0.1)
ax_beta.plot(kmax_range, beta_fits[i], label=ptile_labels[i])
ax_beta.fill_between(kmax_range, beta_fits[i]-beta_stds[i], beta_fits[i]+beta_stds[i], alpha=0.1)
ax_sigma.plot(kmax_range, sigma_fits[i], label=ptile_labels[i])
ax_sigma.fill_between(kmax_range, sigma_fits[i]-sigma_stds[i], sigma_fits[i]+sigma_stds[i], alpha=0.1)
ax_chi2.plot(kmax_range[1:], reduced_chi2[i][1:], label=ptile_labels[i]) # first element negative, s.t. not shown on log scale
ax_b1.set_title(r'$b_1$ mean and $1\sigma$ interval')
ax_b1.set_xlabel(r'$k_{max}$ [$h \ \mathrm{Mpc}^{-1}$]')
ax_b1.set_ylabel(r'$b_1$')
ax_beta.set_title(r'$\beta$ mean and $1\sigma$ interval')
ax_beta.set_xlabel(r'$k_{max}$ [$h \ \mathrm{Mpc}^{-1}$]')
ax_beta.set_ylabel(r'$\beta$')
ax_sigma.set_title(r'$\sigma$ mean and $1\sigma$ interval')
ax_sigma.set_xlabel(r'$k_{max}$ [$h \ \mathrm{Mpc}^{-1}$]')
ax_sigma.set_ylabel(r'$\sigma$ [$h^{-1} \ \mathrm{Mpc}$]')
ax_chi2.set_title(r'reduced $\chi^2$')
ax_chi2.set_yscale('log')
ax_chi2.set_xlabel(r'$k_{max}$ [$h \ \mathrm{Mpc}^{-1}$]')
ax_chi2.set_ylabel(r'$\chi^2 / dof$')
handles, labels = plt.gca().get_legend_handles_labels()
fig.legend(handles, labels, loc='upper center', bbox_to_anchor=(0.5, +0.05), ncol=n_ptile)
fig.savefig("../plots/KaiserFoG_fullk_fits.pdf")
#####################
|
[
"jwack@cuillin.roe.ac.uk"
] |
jwack@cuillin.roe.ac.uk
|
49bee32f7d8ddec8715ce98a577f088ab50e9d45
|
a961a54e8d466b1fb98aee86d437ed6872e98d18
|
/shows/management/commands/get_english_names.py
|
403bad44964aea456ad293b48eb19dc7b5d768de
|
[] |
no_license
|
akaram94/animemelody
|
6bcc75e2746def0f3638b0e6acf6cf5b0e2b6f7c
|
2adf79f232d06733bbc8af4def3778ea55838efa
|
refs/heads/master
| 2023-08-19T00:19:43.688266
| 2020-07-27T16:24:56
| 2020-07-27T16:24:56
| 268,275,539
| 0
| 0
| null | 2021-09-22T19:08:05
| 2020-05-31T12:29:51
|
Python
|
UTF-8
|
Python
| false
| false
| 1,233
|
py
|
from django.core.management.base import BaseCommand, CommandError
from shows.models import Show, Theme
import datetime
import requests
import json
import time
class Command(BaseCommand):
help = '''
Used to import english show names from the Jikan API (rate limit of 2s)
'''
def handle(self, *args, **options):
feed_url = 'https://api.jikan.moe/v3/anime/'
message = 'Getting english names...'
shows = Show.objects.filter(english_name='')
print(message)
show_count = 0
# Retrieve all data from each endpoint
for show in shows:
current_url = feed_url + str(show.mal_id)
time.sleep(2)
r = requests.get(current_url)
data = r.json()
if data['title_english']:
title_english = str(data['title_english'])
else:
title_english = None
show.english_name = title_english
show.save()
show_count = show_count + 1
print(show_count)
print('----------------------------------------------------------')
print('Show Count: ' + str(show_count))
print('Successfully updated.')
|
[
"root@DESKTOP-FIFE14Q.localdomain"
] |
root@DESKTOP-FIFE14Q.localdomain
|
96f4d811c08062451b1b929e346ee171461170de
|
18aee5d93a63eab684fe69e3aa0abd1372dd5d08
|
/test/legacy_test/test_poisson_op.py
|
ee66d578014c70395ec3525f8118d2780886458c
|
[
"Apache-2.0"
] |
permissive
|
Shixiaowei02/Paddle
|
8d049f4f29e281de2fb1ffcd143997c88078eadb
|
3d4d995f26c48f7792b325806ec3d110fc59f6fc
|
refs/heads/develop
| 2023-06-26T06:25:48.074273
| 2023-06-14T06:40:21
| 2023-06-14T06:40:21
| 174,320,213
| 2
| 1
|
Apache-2.0
| 2022-12-28T05:14:30
| 2019-03-07T10:09:34
|
C++
|
UTF-8
|
Python
| false
| false
| 8,481
|
py
|
# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import math
import unittest
import numpy as np
from eager_op_test import OpTest
import paddle
paddle.enable_static()
paddle.seed(100)
def output_hist(out, lam, a, b):
prob = []
bin = []
for i in range(a, b + 1):
prob.append((lam**i) * math.exp(-lam) / math.factorial(i))
bin.append(i)
bin.append(b + 0.1)
hist, _ = np.histogram(out, bin)
hist = hist.astype("float32")
hist = hist / float(out.size)
return hist, prob
class TestPoissonOp1(OpTest):
def setUp(self):
self.op_type = "poisson"
self.python_api = paddle.tensor.poisson
self.config()
self.attrs = {}
self.inputs = {'X': np.full([2048, 1024], self.lam, dtype=self.dtype)}
self.outputs = {'Out': np.ones([2048, 1024], dtype=self.dtype)}
def config(self):
self.lam = 10
self.a = 5
self.b = 15
self.dtype = "float64"
def verify_output(self, outs):
hist, prob = output_hist(np.array(outs[0]), self.lam, self.a, self.b)
np.testing.assert_allclose(hist, prob, rtol=0.01)
def test_check_output(self):
self.check_output_customized(self.verify_output)
def test_check_grad_normal(self):
self.check_grad(
['X'],
'Out',
user_defined_grads=[np.zeros([2048, 1024], dtype=self.dtype)],
user_defined_grad_outputs=[
np.random.rand(2048, 1024).astype(self.dtype)
],
)
class TestPoissonOp2(TestPoissonOp1):
def config(self):
self.lam = 5
self.a = 1
self.b = 8
self.dtype = "float32"
class TestPoissonAPI(unittest.TestCase):
def test_static(self):
with paddle.static.program_guard(
paddle.static.Program(), paddle.static.Program()
):
x_np = np.random.rand(10, 10)
x = paddle.static.data(name="x", shape=[10, 10], dtype='float64')
y = paddle.poisson(x)
exe = paddle.static.Executor()
y_np = exe.run(
paddle.static.default_main_program(),
feed={"x": x_np},
fetch_list=[y],
)
self.assertTrue(np.min(y_np) >= 0)
def test_dygraph(self):
with paddle.fluid.dygraph.base.guard():
x = paddle.randn([10, 10], dtype='float32')
y = paddle.poisson(x)
self.assertTrue(np.min(y.numpy()) >= 0)
x = paddle.randn([10, 10], dtype='float32')
x.stop_gradient = False
y = paddle.poisson(x)
y.backward()
self.assertTrue(np.min(y.numpy()) >= 0)
np.testing.assert_array_equal(np.zeros_like(x), x.gradient())
def test_fixed_random_number(self):
# Test GPU Fixed random number, which is generated by 'curandStatePhilox4_32_10_t'
if not paddle.is_compiled_with_cuda():
return
print("Test Fixed Random number on GPU------>")
paddle.disable_static()
paddle.set_device('gpu')
paddle.seed(2021)
x = paddle.full([32, 3, 1024, 768], 10.0, dtype="float32")
y = paddle.poisson(x)
y_np = y.numpy()
expect = [
13.0,
13.0,
11.0,
8.0,
12.0,
6.0,
9.0,
15.0,
16.0,
6.0,
13.0,
12.0,
9.0,
15.0,
17.0,
8.0,
11.0,
16.0,
11.0,
10.0,
]
np.testing.assert_array_equal(y_np[0, 0, 0, 0:20], expect)
expect = [
15.0,
7.0,
12.0,
8.0,
14.0,
10.0,
10.0,
11.0,
11.0,
11.0,
21.0,
6.0,
9.0,
13.0,
13.0,
11.0,
6.0,
9.0,
12.0,
12.0,
]
np.testing.assert_array_equal(y_np[8, 1, 300, 200:220], expect)
expect = [
10.0,
15.0,
9.0,
6.0,
4.0,
13.0,
10.0,
10.0,
13.0,
12.0,
9.0,
7.0,
10.0,
14.0,
7.0,
10.0,
8.0,
5.0,
10.0,
14.0,
]
np.testing.assert_array_equal(y_np[16, 1, 600, 400:420], expect)
expect = [
10.0,
9.0,
14.0,
12.0,
8.0,
9.0,
7.0,
8.0,
11.0,
10.0,
13.0,
8.0,
12.0,
9.0,
7.0,
8.0,
11.0,
11.0,
12.0,
5.0,
]
np.testing.assert_array_equal(y_np[24, 2, 900, 600:620], expect)
expect = [
15.0,
5.0,
11.0,
13.0,
12.0,
12.0,
13.0,
16.0,
9.0,
9.0,
7.0,
9.0,
13.0,
11.0,
15.0,
6.0,
11.0,
9.0,
10.0,
10.0,
]
np.testing.assert_array_equal(y_np[31, 2, 1023, 748:768], expect)
x = paddle.full([16, 1024, 1024], 5.0, dtype="float32")
y = paddle.poisson(x)
y_np = y.numpy()
expect = [
4.0,
5.0,
2.0,
9.0,
8.0,
7.0,
4.0,
7.0,
4.0,
7.0,
6.0,
3.0,
10.0,
7.0,
5.0,
7.0,
2.0,
5.0,
5.0,
6.0,
]
np.testing.assert_array_equal(y_np[0, 0, 100:120], expect)
expect = [
1.0,
4.0,
8.0,
11.0,
6.0,
5.0,
4.0,
4.0,
7.0,
4.0,
4.0,
7.0,
11.0,
6.0,
5.0,
3.0,
4.0,
6.0,
3.0,
3.0,
]
np.testing.assert_array_equal(y_np[4, 300, 300:320], expect)
expect = [
7.0,
5.0,
4.0,
6.0,
8.0,
5.0,
6.0,
7.0,
7.0,
7.0,
3.0,
10.0,
5.0,
10.0,
4.0,
5.0,
8.0,
7.0,
5.0,
7.0,
]
np.testing.assert_array_equal(y_np[8, 600, 600:620], expect)
expect = [
8.0,
6.0,
7.0,
4.0,
3.0,
0.0,
4.0,
6.0,
6.0,
4.0,
3.0,
10.0,
5.0,
1.0,
3.0,
8.0,
8.0,
2.0,
1.0,
4.0,
]
np.testing.assert_array_equal(y_np[12, 900, 900:920], expect)
expect = [
2.0,
1.0,
14.0,
3.0,
6.0,
5.0,
2.0,
2.0,
6.0,
5.0,
7.0,
4.0,
8.0,
4.0,
8.0,
4.0,
5.0,
7.0,
1.0,
7.0,
]
np.testing.assert_array_equal(y_np[15, 1023, 1000:1020], expect)
paddle.enable_static()
if __name__ == "__main__":
unittest.main()
|
[
"noreply@github.com"
] |
noreply@github.com
|
f6cc157fcac65c81a3809080db0f1aac9d4e2f7d
|
82f40f23ea87d8ed09974a6ce87418cb7934b14b
|
/Logistic Regression & MultiLayer Perceptron/Code/logistic_kclass.py
|
83abd0da19f525bde30fef8e517fe84551510fd0
|
[] |
no_license
|
priyankpshah/DataAnalysis_MachineLearning
|
22a69bea1426d3291461177a4ddfd67a9f19741b
|
68c7a27a84b341a2a19d7481410536c23750fa24
|
refs/heads/master
| 2021-06-08T14:51:05.167801
| 2016-12-15T21:37:28
| 2016-12-15T21:37:28
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,061
|
py
|
from __future__ import division
from sklearn.datasets import load_iris, fetch_mldata
from sklearn.cross_validation import KFold
import numpy as np
def indicator(class_fact, class_val):
ind = []
for label in class_fact:
if label == class_val:
ind.append(1)
else:
ind.append(0)
return np.asarray(ind)
def Hypo(theta, X, thetas):
den = exp_sum(thetas, X)
hypo = np.exp(np.dot(X, theta))
hypo /= den
return hypo
def exp_sum(thetas, X):
sum = 0
m, n = np.shape(thetas)
for i in range(n):
sum += np.exp(np.dot(X, thetas[:, i]))
return sum
def Find_Theta(X, Y, estimate, iterations):
calssval = [0,1,2]
x, y = np.shape(X)
mul_theta = np.ones((y, len(calssval)))
for j in range(iterations):
for i, c in enumerate(calssval):
theta = mul_theta[:, i]
temp_hypo = Hypo(theta, X, mul_theta)
ind = indicator(Y, c)
theta_i = estimate * (np.sum((temp_hypo - ind).reshape(len(temp_hypo), 1) * X, axis=0))
theta_i = theta_i.reshape(theta.shape)
theta = theta - theta_i
mul_theta[:, i] = theta
return mul_theta
def confuide_mat(ytest, ypredict):
cm = []
clab = [0,1,2]
for i in clab:
tmp = [0] * len(clab)
for j in range(len(ytest)):
if ytest[j] == i and ytest[j] == ypredict[j]:
tmp[clab.index(i)] += 1
elif ytest[j] == i and ytest[j] != ypredict[j]:
tmp[clab.index(ypredict[j])] += 1
cm.append(tmp)
return np.array(cm)
def predict(X_Test, thetas):
Y_prediction = []
thetas = thetas.T
#print thetas
for x in X_Test:
h = -np.inf
for i, theta in enumerate(thetas):
h_hat = np.dot(x, theta)
#print h_hat
if h_hat > h:
h = h_hat
label = i
Y_prediction.append(label)
return Y_prediction
def confusion_mat(cm):
precesion = np.zeros(2)
recall = np.zeros(2)
f1measure = np.zeros(2)
accuracy = 0
tot = np.sum(confusion_mat)
for i in range(0,2):
for j in range(0,2):
precesion[i] += cm[j][i]
recall[i] += cm[i][j]
if(i==j):
accuracy = accuracy + cm[i][j]
precesion[i] = cm[i][i]/precesion[i]
recall[i] = cm[i][i]/recall[i]
f1measure[i] = ((2*precesion[i]*recall[i])/(precesion[i]+recall[i]))
accuracy = float(accuracy)/tot
return precision,recall,f_measure,accuracy
if __name__ == "__main__":
mnist = fetch_mldata('MNIST original')
X, Y = mnist.data / 255., mnist.target
matrix = np.concatenate((X[Y == 0], X[Y == 1], X[Y == 2]), axis=0)
y = np.concatenate((Y[Y == 0], Y[Y == 1], Y[Y == 2]), axis=0)
kf = KFold(X.shape[0], n_folds=10, shuffle=True)
accuracy = 0.0
precision = np.zeros(3)
recall = np.zeros(3)
f_measure = np.zeros(3)
for train,test in kf:
X_Train, X_Test = X[train], X[test]
Y_Train, Y_Test = Y[train], Y[test]
thetas = Find_Theta(X_Train, Y_Train, 0.001, 2500)
Y_Prediction = predict(X_Test, thetas)
cm = confuide_mat(Y_Test, Y_Prediction)
pre, rec, f1, acc = confusion_mat(cm)
precision = np.add(precision, pre)
recall = np.add(recall, rec)
f_measure = np.add(f_measure, f1)
accuracy = accuracy + acc
precision = map(lambda x: x/10, precision)
recall = map(lambda x: x/10, recall)
f1measure = map(lambda x: x/10, f_measure)
accuracy /= 10
print " Confusion Matrix:"+ str(cm[0])
print "\t\t\t\t "+ str(cm[1])
print " Precesion: Recall: F-1 Measures"
print "Class 0: " + str(round(precision[0],3))+" "+str(round(recall[0],3))+" "+str(round(f1measure[0],3))
print "Class 1: " + str(round(precision[1],3))+" "+str(round(recall[1],3))+" "+str(round(f1measure[1],3))
print "\nAccuracy: "+str(round(accuracy,3)*100)+ "%"
|
[
"priyank687@gmail.com"
] |
priyank687@gmail.com
|
0da39b2b6595f0a25f70e3735197ce8c382da45b
|
c7522a46908dfa0556ed6e2fe584fd7124ee5cdc
|
/ApplicationUsers/views.py
|
80dd9c405729e423ad243becbd6d5c57ca1b5930
|
[] |
no_license
|
stheartsachu/Eventlee
|
461cf35961a7f294229d6c611e58a09d9f4e1eb5
|
6b67dfc873203f1322c16664923ffe5a760d50ed
|
refs/heads/master
| 2022-11-13T14:48:39.097718
| 2020-06-30T04:54:45
| 2020-06-30T04:54:45
| 276,000,638
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,806
|
py
|
from django.shortcuts import render,HttpResponse,redirect,HttpResponseRedirect
from ApplicationUsers.form import ApplicationuserForm
# Create your views here.
from ApplicationUsers.models import users
def home(request):
return render(request,"index.html")
def Contact(request):
return render(request,"contact.html")
def gallery(request):
return render(request,"gallery.html")
def signup(request):
if request.method == 'POST':
form = ApplicationuserForm(request.POST)
f = form.save(commit=False)
f.first_name = request.POST['fn']
f.last_name = request.POST['ln']
f.email = request.POST['email']
if request.POST['p1'] == request.POST['p2']:
f.password = request.POST['p2']
else:
return HttpResponse("<h1> Password and Confirm password is not same</h1>")
f.status = True
f.save()
return HttpResponse("User is created sucessfully now, can login to website")
return render(request, 'registration.html')
def login(request):
    """Authenticate a user against the users table.

    GET renders the login form; POST checks the submitted email/password
    pair and reports failures via template context flags
    (emailerror / activeerror / passworderror).
    """
    if request.method == "POST":
        un = request.POST["email"]
        up = request.POST["password"]
        try:
            data = users.objects.get(email=un)
        # Bug fix: was a bare ``except:``, which also swallowed
        # SystemExit/KeyboardInterrupt. Keep the same "unknown email"
        # fallback but stop catching interpreter-level exits.
        except Exception:
            return render(request, "login.html", {'emailerror': True})
        dp = data.password
        active = data.status
        # NOTE(review): ``== False`` deliberately kept — a None/0 status
        # falls through to the password check, as in the original.
        if (active == False):
            return render(request, "login.html", {'activeerror': True})
        else:
            # SECURITY: passwords are compared in plain text; use Django's
            # check_password / auth framework instead.
            if (dp == up):
                request.session['emailid'] = un
                request.session['Authentication'] = True
                return HttpResponse("You are sucessfullly login")
            else:
                return render(request, "login.html", {'passworderror': True})
    return render(request, "login.html")
|
[
"seartsachu@gmail.com"
] |
seartsachu@gmail.com
|
cdcf3cbc200241606b4cc9140c33f8aa85da216f
|
2c332ae49c8130ab88ee9be7b092f66e4f88324a
|
/MyOwnSolution/compare_images2.py
|
91e3c8d21c645f96e8bf20b90a1abd0d7e2fdea6
|
[] |
no_license
|
lalona/parking-spot-classifier
|
fbc34b385d3620513126c91ecc106c3727a0be63
|
835ae0a514daecf623ba456b2ee5cab26123f939
|
refs/heads/master
| 2020-04-07T09:44:28.396823
| 2019-05-02T19:51:14
| 2019-05-02T19:51:14
| 158,263,531
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,367
|
py
|
"""
La idea es simple: con una imagen donde el espacio se encuentre vacio se compara con otra imagenes y dependiendo las diferencias
se concluye si está ocupado o vacio
"""
import cv2
import os
from skimage.measure import compare_ssim as ssim
from skimage.measure import compare_nrmse as nrmse
import pickle
import argparse
from operator import itemgetter
from itertools import groupby
import matplotlib.pyplot as plt
import numpy as np
from tqdm import tqdm
import ntpath
import json
path_pklot = 'C:\\Eduardo\\ProyectoFinal\\Datasets\\PKLot'
def getGrayscaleImage(filepath):
    """
    Load an image from disk and return it converted to grayscale.
    :param filepath: the path to the image
    :return: the image in grayscale
    """
    loaded = cv2.imread(filepath)
    return cv2.cvtColor(loaded, cv2.COLOR_BGR2GRAY)
def extractUniqueItemsByKey(list, key):
    """Return the sorted, de-duplicated values of ``key`` across ``list``.

    Each element of ``list`` must be a mapping containing ``key``.

    Bug fix: the original called ``list.sort`` and therefore reordered the
    caller's list as a side effect; we now sort a copy and leave the
    argument untouched.
    """
    # NOTE: the parameter name ``list`` shadows the builtin; kept as-is
    # for backward compatibility with keyword-argument callers.
    ordered = sorted(list, key=lambda item: item[key])
    # groupby collapses consecutive equal keys, which is exactly the
    # de-duplication we need after sorting.
    return [k for k, _ in groupby(ordered, key=lambda item: item[key])]
def getNewImageInfo(image_info):
    """Rewrite ``image_info['filepath']`` to an absolute path rooted at the
    PKLot dataset directory and return the (mutated) dict."""
    absolute = os.path.join(path_pklot, image_info['filepath'],
                            image_info['filename'])
    image_info['filepath'] = absolute
    return image_info
def mse(imageA, imageB):
    """Mean squared per-pixel difference between two images.

    The two images must share the same dimensions. The lower the value,
    the more similar the images are.
    """
    diff = imageA.astype("float") - imageB.astype("float")
    pixel_count = float(imageA.shape[0] * imageA.shape[1])
    return np.sum(diff ** 2) / pixel_count
def main():
    """Compare each parking-space snapshot against a reference image of the
    same space when empty, using SSIM as the similarity measure, and dump
    per-space error statistics to a JSON file.

    Expects ``-f/--filename`` pointing at a pickled list of image-info
    dicts (keys used here: 'parkinglot', 'space', 'state', 'filepath',
    'filename'). State '0' means empty, '1' occupied.
    """
    parser = argparse.ArgumentParser(description='Select the type of reduced.')
    parser.add_argument("-f", "--filename", type=str, required=True,
                        help='Path to the file the contains the dictionary with the info of the dataset reduced.')
    args = vars(parser.parse_args())
    info_filename = args["filename"]

    # test set
    with open(info_filename, "rb") as fp:  # Unpickling
        images_info = pickle.load(fp)

    grouper = itemgetter('parkinglot', 'space')
    images_info = sorted(images_info, key=grouper)
    parkinglots = extractUniqueItemsByKey(images_info, 'parkinglot')
    images_info_by_patkinglot = {}
    for parkinglot in parkinglots:
        image_info_parkinglot = [i for i in images_info if i['parkinglot'] == parkinglot]
        spaces_parkinglot = extractUniqueItemsByKey(image_info_parkinglot, 'space')
        images_info_by_spaces = {}
        for space in spaces_parkinglot:
            images_info_by_spaces[space] = [getNewImageInfo(i) for i in image_info_parkinglot if i['space'] == space]
        images_info_by_patkinglot[parkinglot] = images_info_by_spaces

    # At this point we have a dict keyed by parking lot whose values are
    # dicts keyed by space. For each space: take the first empty snapshot
    # as a reference, compare every snapshot against it, and accumulate
    # classification-error stats for a fixed SSIM threshold of 0.4.
    empty_space_filepath = ''
    errors = []
    for parkinglot, images_info_by_spaces in images_info_by_patkinglot.items():
        for space, images_info_of_space in images_info_by_spaces.items():
            error_count_empty = 0
            error_count_occupied = 0
            error_empty = 0
            error_occupied = 0
            empty_space_filepath = ''
            example_list = images_info_of_space
            # Find the first snapshot labelled empty and use it as the
            # reference image for this space.
            for example in tqdm(example_list):
                if example['state'] == '0' and len(empty_space_filepath) == 0:
                    empty_space_filepath = example['filepath']
                    img_empty_space = getGrayscaleImage(empty_space_filepath)
                    break
            for example in tqdm(example_list):
                comparision_space_filepath = example['filepath']
                img_comparision_space = getGrayscaleImage(comparision_space_filepath)
                try:
                    sim = ssim(img_empty_space, img_comparision_space)
                # Bug fix: was a bare ``except:``. compare_ssim raises
                # ValueError when the two images differ in shape, which is
                # the only case the resize-and-retry below can repair.
                except ValueError:
                    height1, width1 = img_empty_space.shape
                    img_comparision_space = cv2.resize(img_comparision_space, (width1, height1))
                    sim = ssim(img_empty_space, img_comparision_space)
                nm = nrmse(img_empty_space, img_comparision_space)
                # m = mse(img_empty_space, img_comparision_space)
                space_comparing_name = 'state: {} sim: {} nrmse: {}'.format(example['state'], sim, nm)
                # Below 0.4 we predict "occupied": an empty label here is an
                # error, weighted by how far below the threshold we are.
                if sim < 0.4 and example['state'] == '0':
                    error_count_empty += 1
                    error_empty += abs(0.4 - sim)
                # At/above 0.4 we predict "empty": an occupied label is an error.
                if sim >= 0.4 and example['state'] == '1':
                    error_count_occupied += 1
                    error_occupied += abs(sim - 0.4)
                # Very similar snapshot: adopt it as the new reference so the
                # reference tracks lighting changes over the day.
                if sim > 0.7:
                    empty_space_filepath = example['filepath']
                    img_empty_space = img_comparision_space
                """
                fig = plt.figure('title')
                plt.suptitle(space_comparing_name)
                # show first image
                ax = fig.add_subplot(1, 2, 1)
                plt.imshow(img_empty_space, cmap=plt.cm.gray)
                plt.axis("off")
                # show the second image
                ax = fig.add_subplot(1, 2, 2)
                plt.imshow(img_comparision_space, cmap=plt.cm.gray)
                plt.axis("off")
                # show the images
                plt.show()
                """
            # Average the accumulated error magnitudes, guarding div-by-zero.
            error_occupied = 0 if error_count_occupied == 0 else (error_occupied / error_count_occupied)
            error_empty = 0 if error_count_empty == 0 else (error_empty / error_count_empty)
            print('In the space {} in a total of {} there was an error of occupied {} {} empty {} {}'.format(space, len(
                example_list), error_count_occupied, error_occupied, error_count_empty, error_empty))
            errors.append({'parkinglot': parkinglot, 'space': space, 'total': len(example_list),
                           'error_count_occupied': error_count_occupied,
                           'error_occupied': error_occupied,
                           'error_count_empty': error_count_empty, 'error_empty': error_empty})
    info = {'dataset': info_filename, 'threshold': 0.4, 'comparision_method': 'sim', 'errors': errors}
    dataset_name = ntpath.basename(info_filename).split('.')[0]
    feedback_filename = '{}_{}_{}.json'.format(dataset_name, 0.4, 'sim')
    with open(feedback_filename, 'w') as outfile:
        json.dump(info, outfile)
    # s = ssim(grayscale_selected_image, grayscale_current_image)
# s = ssim(grayscale_selected_image, grayscale_current_image)
if __name__ == "__main__":
main()
|
[
"sebastianxiro@gmail.com"
] |
sebastianxiro@gmail.com
|
a9c4497452165d55ce911de902394c5c85022e33
|
1d1cb14e6d0b2a439348d6677eb9e8e72390d39f
|
/01_LiniarRegression/Live/linearRegressionWGradientDescent.py
|
9be2c569d79cf3a532fbfc0bbbbd1a1c066e0416
|
[] |
no_license
|
EduardoFAFernandes/MyDeepLearningIntro
|
2c7b2278ed1cf446c9f3656ae9dd421c22648933
|
a35a43f0690ddfa499097335d9b8aa058d1db021
|
refs/heads/master
| 2021-09-05T06:51:33.479757
| 2018-01-25T01:07:54
| 2018-01-25T01:07:54
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,514
|
py
|
from numpy import *
def compute_error_for_given_points(m, b, points):
    """Return the mean squared error of the line y = m*x + b over points.

    points: sequence of (x, y) pairs (list of 2-lists or an Nx2 array).

    Bug fix: the mean must divide by the number of points. The original
    divided by ``numpy.size(points)``, which counts every scalar (2*N for
    N points) and halved the reported error.
    """
    totalError = 0
    for point in points:
        totalError += (point[1] - (m * point[0] + b)) ** 2
    return totalError / float(len(points))
def step_gradient(current_b, current_m, points, learning_rate):
    """Perform one batch gradient-descent step for the line y = m*x + b.

    Accumulates the MSE-cost gradient over all points, then moves both
    coefficients a ``learning_rate``-sized step against it.
    Returns [new_b, new_m].
    """
    n = float(len(points))
    grad_b = 0
    grad_m = 0
    for point in points:
        x = point[0]
        y = point[1]
        residual = y - ((current_m * x) + current_b)
        grad_m += -(2 / n) * x * residual
        grad_b += -(2 / n) * residual
    stepped_b = current_b - (learning_rate * grad_b)
    stepped_m = current_m - (learning_rate * grad_m)
    return [stepped_b, stepped_m]
def gradient_descent_runner(points, initial_b, initial_m,
                            learning_rate, num_iterations):
    """Iterate step_gradient ``num_iterations`` times from the given start
    coefficients and return the final [b, m]."""
    b, m = initial_b, initial_m
    for _ in range(num_iterations):
        b, m = step_gradient(b, m, array(points), learning_rate)
    return [b, m]
def run():
    """Load data.csv and fit y = m*x + b by batch gradient descent,
    printing the error before and after training."""
    points = genfromtxt('data.csv', delimiter = ',')

    #hyperparameters
    learning_rate = 0.0001

    #y = mx + b
    initial_b = 0
    initial_m = 0
    num_iterations = 1000

    # Bug fix: compute_error_for_given_points takes (m, b, points); the
    # original passed (b, m) and so reported the error of the wrong line.
    print("Starting gradient descent at b = {0}, m = {1}, error = {2}".format(
        initial_b, initial_m,
        compute_error_for_given_points(initial_m, initial_b, points)))
    print("Running...")

    [b, m] = gradient_descent_runner(points, initial_b, initial_m,
                                     learning_rate, num_iterations)

    print("After {0} iterations b = {1}, m = {2}, error = {3}".format(
        num_iterations, b, m, compute_error_for_given_points(m, b, points)))
if __name__ == '__main__':
run()
|
[
"nomeaocalhas3@gmail.com"
] |
nomeaocalhas3@gmail.com
|
b19eb4c4a4c8e2182848c6efdc592bdd73bafd78
|
7f004635957d252b7e8d7d6e0aee47e24aca0b82
|
/catkin_ws/src/sound_play/src/sound_play/libsoundplay.py
|
12c62a885fc10f5d991711f131baaf324d3e99cd
|
[] |
no_license
|
Pontusun/CIS700_Team2
|
75bef2305f5ccb7992f152b9fca20ea475077833
|
17b125b652d3be41884d6f625b7424b00ca0a6dd
|
refs/heads/master
| 2021-01-10T16:02:19.327233
| 2016-02-11T22:20:46
| 2016-02-11T22:20:46
| 44,488,662
| 1
| 3
| null | 2015-11-29T19:54:32
| 2015-10-18T17:35:24
|
CMake
|
UTF-8
|
Python
| false
| false
| 13,215
|
py
|
#!/usr/bin/env python
#***********************************************************
#* Software License Agreement (BSD License)
#*
#* Copyright (c) 2009, Willow Garage, Inc.
#* All rights reserved.
#*
#* Redistribution and use in source and binary forms, with or without
#* modification, are permitted provided that the following conditions
#* are met:
#*
#* * Redistributions of source code must retain the above copyright
#* notice, this list of conditions and the following disclaimer.
#* * Redistributions in binary form must reproduce the above
#* copyright notice, this list of conditions and the following
#* disclaimer in the documentation and/or other materials provided
#* with the distribution.
#* * Neither the name of the Willow Garage nor the names of its
#* contributors may be used to endorse or promote products derived
#* from this software without specific prior written permission.
#*
#* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
#* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
#* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
#* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
#* COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
#* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
#* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
#* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
#* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
#* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
#* ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
#* POSSIBILITY OF SUCH DAMAGE.
#***********************************************************
# Author: Blaise Gassend
import rospy
import roslib
import actionlib
import os, sys
from sound_play.msg import SoundRequest
from sound_play.msg import SoundRequestGoal
from sound_play.msg import SoundRequestAction
## \brief Handle to a single sound managed by the sound_play node.
##
## A Sound bundles the client used to reach the sound_play node, the sound
## type/identifier, its argument (text to say or file to play) and a
## playback volume, and exposes play/repeat/stop operations on it. Sound
## instances are normally created via SoundClient.voiceSound, .waveSound
## or .builtinSound.
class Sound(object):
    def __init__(self, client, snd, arg, volume=1.0):
        ## Client used to deliver SoundRequest messages.
        self.client = client
        ## Sound type or built-in sound identifier.
        self.snd = snd
        ## Sound argument (text to say, or file to play).
        self.arg = arg
        ## Playback volume in [0, 1].
        self.vol = volume

    ## \brief Play the Sound once.
    def play(self, **kwargs):
        command = SoundRequest.PLAY_ONCE
        self.client.sendMsg(self.snd, command, self.arg,
                            vol=self.vol, **kwargs)

    ## \brief Play the Sound repeatedly until stop() is called.
    def repeat(self, **kwargs):
        command = SoundRequest.PLAY_START
        self.client.sendMsg(self.snd, command, self.arg,
                            vol=self.vol, **kwargs)

    ## \brief Stop any playback of this Sound.
    def stop(self):
        self.client.sendMsg(self.snd, SoundRequest.PLAY_STOP, self.arg)
## This class is a helper class for communicating with the sound_play node
## via the \ref sound_play.SoundRequest message. There is a one-to-one mapping
## between methods and invocations of the \ref sound_play.SoundRequest message.
class SoundClient(object):

    def __init__(self, blocking=False):
        """
        The SoundClient can send SoundRequests in two modes: non-blocking mode
        (by publishing a message to the soundplay_node directly) which will
        return as soon as the sound request has been sent, or blocking mode (by
        using the actionlib interface) which will wait until the sound has
        finished playing completely.

        The blocking parameter here is the standard behavior, but can be
        over-ridden.  Each say/play/start/repeat method can take in an optional
        `blocking=True|False` argument that will over-ride the class-wide
        behavior. See soundclient_example.py for an example of this behavior.

        :param blocking: Used as the default behavior unless over-ridden,
            (default = false)
        """
        self._blocking = blocking

        # NOTE: only one of these will be used at once, but we need to create
        # both the publisher and actionlib client here.
        self.actionclient = actionlib.SimpleActionClient(
            'sound_play', SoundRequestAction)
        self.pub = rospy.Publisher('robotsound', SoundRequest, queue_size=5)

    ## \brief Create a voice Sound.
    ##
    ## Creates a Sound corresponding to saying the indicated text.
    ##
    ## \param s Text to say
    def voiceSound(self, s, volume=1.0):
        return Sound(self, SoundRequest.SAY, s, volume=volume)

    ## \brief Create a wave Sound.
    ##
    ## Creates a Sound corresponding to indicated file.
    ##
    ## \param s File to play. Should be an absolute path that exists on the
    ## machine running the sound_play node.
    def waveSound(self, sound, volume=1.0):
        if sound[0] != "/":
            rootdir = os.path.join(roslib.packages.get_pkg_dir('sound_play'),'sounds')
            sound = rootdir + "/" + sound
        return Sound(self, SoundRequest.PLAY_FILE, sound, volume=volume)

    ## \brief Create a builtin Sound.
    ##
    ## Creates a Sound corresponding to indicated builtin wave.
    ##
    ## \param id Identifier of the sound to play.
    def builtinSound(self, id, volume=1.0):
        return Sound(self, id, "", volume)

    ## \brief Say a string
    ##
    ## Send a string to be said by the sound_node. The vocalization can be
    ## stopped using stopSaying or stopAll.
    ##
    ## \param text String to say
    def say(self,text, voice='', volume=1.0, **kwargs):
        self.sendMsg(SoundRequest.SAY, SoundRequest.PLAY_ONCE, text, voice,
                     volume, **kwargs)

    ## \brief Say a string repeatedly
    ##
    ## The string is said repeatedly until stopSaying or stopAll is used.
    ##
    ## \param text String to say repeatedly
    def repeat(self,text, volume=1.0, **kwargs):
        self.sendMsg(SoundRequest.SAY, SoundRequest.PLAY_START, text,
                     vol=volume, **kwargs)

    ## \brief Stop saying a string
    ##
    ## Stops saying a string that was previously started by say or repeat. The
    ## argument indicates which string to stop saying.
    ##
    ## \param text Same string as in the say or repeat command
    def stopSaying(self,text):
        self.sendMsg(SoundRequest.SAY, SoundRequest.PLAY_STOP, text)

    ## \brief Plays a WAV or OGG file
    ##
    ## Plays a WAV or OGG file once. The playback can be stopped by stopWave or
    ## stopAll.
    ##
    ## \param sound Filename of the WAV or OGG file. Must be an absolute path valid
    ## on the computer on which the sound_play node is running
    def playWave(self, sound, volume=1.0, **kwargs):
        if sound[0] != "/":
            rootdir = os.path.join(roslib.packages.get_pkg_dir('sound_play'),'sounds')
            sound = rootdir + "/" + sound
        self.sendMsg(SoundRequest.PLAY_FILE, SoundRequest.PLAY_ONCE, sound,
                     vol=volume, **kwargs)

    ## \brief Plays a WAV or OGG file repeatedly
    ##
    ## Plays a WAV or OGG file repeatedly until stopWave or stopAll is used.
    ##
    ## \param sound Filename of the WAV or OGG file. Must be an absolute path valid
    ## on the computer on which the sound_play node is running.
    def startWave(self, sound, volume=1.0, **kwargs):
        if sound[0] != "/":
            rootdir = os.path.join(roslib.packages.get_pkg_dir('sound_play'),'sounds')
            sound = rootdir + "/" + sound
        self.sendMsg(SoundRequest.PLAY_FILE, SoundRequest.PLAY_START, sound,
                     vol=volume, **kwargs)

    ## \brief Stop playing a WAV or OGG file
    ##
    ## Stops playing a file that was previously started by playWave or
    ## startWave.
    ##
    ## \param sound Same string as in the playWave or startWave command
    def stopWave(self,sound):
        if sound[0] != "/":
            # Bug fix: was ``roslib.package`` (AttributeError); every other
            # method here uses ``roslib.packages``.
            rootdir = os.path.join(roslib.packages.get_pkg_dir('sound_play'),'sounds')
            sound = rootdir + "/" + sound
        self.sendMsg(SoundRequest.PLAY_FILE, SoundRequest.PLAY_STOP, sound)

    ## \brief Plays a WAV or OGG file
    ##
    ## Plays a WAV or OGG file once. The playback can be stopped by stopWaveFromPkg or
    ## stopAll.
    ##
    ## \param package Package name containing the sound file.
    ## \param sound Filename of the WAV or OGG file. Must be an path relative to the package valid
    ## on the computer on which the sound_play node is running
    def playWaveFromPkg(self, package, sound, volume=1.0, **kwargs):
        self.sendMsg(SoundRequest.PLAY_FILE, SoundRequest.PLAY_ONCE, sound, package,
                     volume, **kwargs)

    ## \brief Plays a WAV or OGG file repeatedly
    ##
    ## Plays a WAV or OGG file repeatedly until stopWaveFromPkg or stopAll is used.
    ##
    ## \param package Package name containing the sound file.
    ## \param sound Filename of the WAV or OGG file. Must be an path relative to the package valid
    ## on the computer on which the sound_play node is running
    def startWaveFromPkg(self, package, sound, volume=1.0, **kwargs):
        self.sendMsg(SoundRequest.PLAY_FILE, SoundRequest.PLAY_START, sound,
                     package, volume, **kwargs)

    ## \brief Stop playing a WAV or OGG file
    ##
    ## Stops playing a file that was previously started by playWaveFromPkg or
    ## startWaveFromPkg.
    ##
    ## \param package Package name containing the sound file.
    ## \param sound Filename of the WAV or OGG file. Must be an path relative to the package valid
    ## on the computer on which the sound_play node is running
    def stopWaveFromPkg(self,sound, package):
        self.sendMsg(SoundRequest.PLAY_FILE, SoundRequest.PLAY_STOP, sound, package)

    ## \brief Play a buildin sound
    ##
    ## Starts playing one of the built-in sounds. built-ing sounds are documented
    ## in \ref SoundRequest.msg. Playback can be stopped by stopall.
    ##
    ## \param sound Identifier of the sound to play.
    def play(self,sound, volume=1.0, **kwargs):
        self.sendMsg(sound, SoundRequest.PLAY_ONCE, "", vol=volume, **kwargs)

    ## \brief Play a buildin sound repeatedly
    ##
    ## Starts playing one of the built-in sounds repeatedly until stop or
    ## stopall is used. Built-in sounds are documented in \ref SoundRequest.msg.
    ##
    ## \param sound Identifier of the sound to play.
    def start(self,sound, volume=1.0, **kwargs):
        self.sendMsg(sound, SoundRequest.PLAY_START, "", vol=volume, **kwargs)

    ## \brief Stop playing a built-in sound
    ##
    ## Stops playing a built-in sound started with play or start.
    ##
    ## \param sound Same sound that was used to start playback
    def stop(self,sound):
        self.sendMsg(sound, SoundRequest.PLAY_STOP, "")

    ## \brief Stop all currently playing sounds
    ##
    ## This method stops all speech, wave file, and built-in sound playback.
    def stopAll(self):
        self.stop(SoundRequest.ALL)

    def sendMsg(self, snd, cmd, s, arg2="", vol=1.0, **kwargs):
        """
        Internal method that publishes the sound request, either directly as a
        SoundRequest to the soundplay_node or through the actionlib interface
        (which blocks until the sound has finished playing).

        The blocking behavior is nominally the class-wide setting unless it has
        been explicitly specified in the play call.
        """

        # Use the passed-in argument if it exists, otherwise fall back to the
        # class-wide setting.
        blocking = kwargs.get('blocking', self._blocking)

        msg = SoundRequest()
        msg.sound = snd
        # Threshold volume between 0 and 1.
        msg.volume = max(0, min(1, vol))
        msg.command = cmd
        msg.arg = s
        msg.arg2 = arg2
        rospy.logdebug('Sending sound request with volume = {}'
                       ' and blocking = {}'.format(msg.volume, blocking))

        # Defensive check for the existence of the correct communicator.
        # Bug fix: the two conditions were swapped -- non-blocking mode uses
        # the publisher and blocking mode uses the actionlib client, but the
        # original required the publisher when blocking (and vice versa).
        if not blocking and not self.pub:
            rospy.logerr('Publisher for SoundRequest must exist')
            return
        if blocking and not self.actionclient:
            rospy.logerr('Action client for SoundRequest does not exist.')
            return

        if not blocking:  # Publish message directly and return immediately
            self.pub.publish(msg)
            if self.pub.get_num_connections() < 1:
                rospy.logwarn("Sound command issued, but no node is subscribed"
                              " to the topic. Perhaps you forgot to run"
                              " soundplay_node.py?")
        else:  # Block until result comes back.
            assert self.actionclient, 'Actionclient must exist'
            rospy.logdebug('Sending action client sound request [blocking]')
            self.actionclient.wait_for_server()
            goal = SoundRequestGoal()
            goal.sound_request = msg
            self.actionclient.send_goal(goal)
            self.actionclient.wait_for_result()
            rospy.logdebug('sound request response received')

        return
|
[
"genesis@nuc"
] |
genesis@nuc
|
003e7a2f14683e4f88eca45a7c30f8fd449eba92
|
aafedf44cbb6a2fd0616f01311e59bc55d19a87a
|
/todo/notes.py
|
42ee723a6e0d7170fed945cff77a7390a0c070dc
|
[
"MIT"
] |
permissive
|
rakeshsingh/todo.txt
|
57ae6b86a9815945eebe9a2f25894ae5df060326
|
6d9d13aae99a4714bfa67c1c7b9d7d11b1ce6460
|
refs/heads/master
| 2021-08-16T22:13:24.960790
| 2021-06-30T20:37:53
| 2021-06-30T20:37:53
| 144,942,401
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,986
|
py
|
import argparse
from datetime import datetime
import os
import re
import logging
from .log import setup_logging
from .exceptions import (
InvalidNotesFile,
)
from .utils import(
print_colorful,
set_file,
get_file,
)
from .constants import (
DEFAULT_FILE_SEPERATOR,
DEFAULT_FILE_SUFFIX,
DEFAULT_NOTES_PREFIX,
DEFAULT_NOTES_CATEGORY
)
logger = logging.getLogger(__name__)
'''
- Label it with the current month (it's best to start this at the beginning of a month if you can). Put contact information on the inside cover in case you lose it.
- Start a new page every day. Put the date at the top of the page.
- Metadata about the day goes in the margins. I use this to keep a log of who is front as well as taking medicine.
- Write prose freely.
- TODO items start with a -. Those represent things you need to do but haven't done yet.
- When the item is finished, put a vertical line through the - to make it a +.
- If the item either can't or won't be done, cross out the - to make it into a *.
- If you have to put off a task to a later date, turn the - into a ->. If there is room, put a brief description of why it needs to be moved or when it is moved to. If there's no room feel free to write it out in prose form at the end of your page.
- Notes start with a middot (·). They differ from prose as they are not complete sentences. If you need to, you can always turn them into TODO items later.
- Write in pencil so you can erase mistakes. Erase carefully to avoid ripping the paper, You hardly need to use any force to erase things.
- There is only one action, appending. Don't try and organize things by topic as you would on a computer. This is not a computer, this is paper. Paper works best when you append only. There is only one direction, forward.
- If you need to relate a bunch of notes or todo items with a topic, skip a line and write out the topic ending with a colon. When ending the topical notes, skip another line.
- Don't be afraid to write in it. If you end up using a whole notebook before the month is up, that is a success. Record insights, thoughts, feelings and things that come to your mind. You never know what will end up being useful later.
- At the end of the month, look back at the things you did and summarize/index them in the remaining pages. Discover any leftover items that you haven't completed yet so you can either transfer them over to next month or discard them. It's okay to not get everything done. You may also want to scan it to back it up into the cloud. You may never reference these scans, but backups never hurt.
'''
class NotesManager:
    """Browse the note files stored under the local ``./data/`` directory.

    Note file format: YYYYMMDD_[notes].txt
    examples:
        20210101_notes.txt(default)
        20180101-meeting-generic.txt
        20180102_one-on-one_rohit.txt
        20180102_interview_shiva-sundaram.txt
    """
    def __init__(self):
        # Root directory holding every note file.
        self.BASEDIR = './data/'

    def list(self, category=None):
        """Print every note filename containing ``category`` (all files when
        no category is given)."""
        pattern = '' if category is None else category
        for entry in os.listdir(self.BASEDIR):
            if pattern in entry:
                print(entry)
class Note:
    """A single note file.

    Note file format: YYYYMMDD_[notes].txt
    examples:
        20210101_notes.txt(default)
        20180101-meeting-generic.txt
        20180102_one-on-one_rohit.txt
        20180102_interview_shiva-sundaram.txt
    """
    def __init__(self, notes_date: str = None,
                 notes_prefix: str = DEFAULT_NOTES_PREFIX,
                 notes_category: str = DEFAULT_NOTES_CATEGORY):
        # Bug fix: the original default (datetime.today() in the signature)
        # was evaluated once at import time, freezing the date for the whole
        # process. Compute it per call instead.
        if notes_date is None:
            notes_date = datetime.today().strftime('%Y%m%d')
        self.notes_date = notes_date
        self.notes_category = notes_category
        # Bug fixes: the separator constant was misspelled (DEAFULT_...)
        # and str.join takes a single iterable, not positional arguments.
        # TODO(review): confirm DEFAULT_FILE_SUFFIX really belongs inside
        # the join (giving e.g. 20210101_notes_txt) rather than being
        # appended as a ".txt" extension.
        self.notes_file_name = DEFAULT_FILE_SEPERATOR.join(
            [notes_date, notes_prefix, notes_category, DEFAULT_FILE_SUFFIX])
        # Create the backing file on first use. (Bug fix: the original
        # referenced bare ``notes_file_name``, a NameError.)
        if not get_file(self.notes_file_name):
            set_file(self.notes_file_name)

    # Bug fix for all methods below: they were defined without ``self`` and
    # raised TypeError when called on an instance.
    def edit(self, line: int):
        """Edit the given line of the note (not yet implemented)."""
        pass

    def list_todos(self):
        """List the TODO items in the note (not yet implemented)."""
        pass

    def list_notes(self):
        """List the note entries in the note (not yet implemented)."""
        pass

    def list_categories(self):
        """List the categories used in the note (not yet implemented)."""
        pass

    def list_people(self):
        """List the people mentioned in the note (not yet implemented)."""
        pass

    def get_context(self):
        '''Provides summary of the note. What all people are mentioned in the note, what are the action items, and their statuses, what are the related topics'''
        pass
pass
# CLI entry point: list existing notes (-l) or create a new one (-n),
# optionally filtered by category (-c) with extra detail (-d).
if __name__ =='__main__':
    nm = NotesManager()
    parser = argparse.ArgumentParser()
    parser.add_argument("-n", "--new", action="store_true", help="create a new note\n usage: notes.py -c category detail")
    parser.add_argument("-l", "--list", action="store_true", help="list most recent notes")
    parser.add_argument("-c", "--category", help="category of the notes, e.g. meeting, todo, oneonone")
    parser.add_argument("-d", "--detail", help="additional details for the notes e.g. meeting subject, 1_1 person")
    args = parser.parse_args()
    if args.list:
        nm.list(args.category)
    elif args.new:
        # NOTE(review): NotesManager defines no create() method, so the
        # ``-n`` path raises AttributeError -- confirm the intended API
        # (perhaps Note(...) was meant here).
        nm.create(args.category, args.detail)
        nm.list(args.category)
    else:
        # Default action (no flags): list everything.
        nm.list(args.category)
|
[
"kumar.rakesh@gmail.com"
] |
kumar.rakesh@gmail.com
|
424d47cd8ca4fd8aff8087b739e35b6c874f1161
|
35ce7e974e1651b1031841d84bce190be73be196
|
/autonomous_car/system/controller/pid_controller.py
|
7da59857bae3bd7f590eaffa8b93d5e6a45e0ef8
|
[] |
no_license
|
th-sato/py-graduate-final-project
|
459b515b1ba4629fb55ca0f028f0bb92ab715167
|
48d7afb0c0c49ed98a1263c9efdfcf26e99673cd
|
refs/heads/master
| 2022-01-25T15:46:28.268045
| 2019-07-07T03:48:02
| 2019-07-07T03:48:02
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,583
|
py
|
class PIDController:
    """Proportional-only controller mapping lane offset / curvature to
    (speed, steering angle) for the autonomous car.

    The integral and derivative terms were disabled (commented out) by the
    original author; only the P term is active, and speed is fixed.
    """
    def __init__(self):
        # Controller gains, keyed by the controlled variable.
        # Proportional
        self.KP = {'angle': 250.0, 'speed': 5.0}
        # Integral
        # self.KI = {'angle': 30.0, 'speed': 1.0}
        # Derivative
        # self.KD = {'angle': 1.0, 'speed': 1.0}
        # Saturation bounds applied to every controller term.
        self.max_error = 100000.0
        self.min_error = -100000.0
        # self.previous_i_error = {'angle': 0.0, 'speed': 0.0}
        # self.previous_d_error = {'angle': 0.0, 'speed': 0.0}
        # self.previous_time = 0.0

    def reset(self, now_time):
        """Reset accumulated state (a no-op while the I/D terms are
        disabled; ``now_time`` is kept for interface compatibility)."""
        # self.previous_i_error = {'angle': 0, 'speed': 0}
        # self.previous_d_error = {'angle': 0, 'speed': 0}
        # self.previous_time = now_time
        return

    def proportional(self, error, variable):
        """P term: KP[variable] * error, clamped to the saturation bounds."""
        proporcional_controller = self.KP[variable] * error
        return self.__set_max(proporcional_controller, self.min_error, self.max_error)

    # def integral(self, error, time_interval, variable):
    #     actual_error = error * time_interval
    #     integral_part = self.previous_i_error[variable] + actual_error
    #     self.previous_i_error[variable] = integral_part
    #     integral_controller = self.KI[variable] * integral_part
    #     return self.__set_max(integral_controller, self.min_error, self.max_error)

    # def derivative(self, error, time_interval, variable):
    #     derivative_part = (error - self.previous_d_error[variable]) / time_interval
    #     self.previous_d_error[variable] = error
    #     derivative_controller = self.KD[variable] * derivative_part
    #     return self.__set_max(derivative_controller, self.min_error, self.max_error)

    def p_controller(self, error, variable, interval=0.01):
        """Combined controller output; currently just the P term
        (``interval`` is unused while I/D are disabled)."""
        p = self.proportional(error, variable)
        # i = self.integral(error, interval, variable)
        # d = self.derivative(error, interval, variable)
        return p

    def output(self, distance_center, radius_curvature, run_time):
        """Map the lane-center offset to a steering angle; speed is fixed
        at 45. Returns (0, 0) if anything raises.

        Bug fix: ``print str(e)`` is Python-2-only syntax; ``print(str(e))``
        behaves identically on both Python 2 and 3.
        """
        try:
            # interval = run_time - self.previous_time
            # self.previous_time = run_time
            angle = self.p_controller(distance_center, 'angle')
            # speed = self.pid_controller(radius_curvature, interval)
            speed = 45
            return speed, angle
        except Exception as e:
            print(str(e))
            return 0, 0

    @staticmethod
    def __set_max(value, min_value, max_value):
        """Clamp ``value`` into [min_value, max_value]."""
        if value > 0 and value > max_value:
            return max_value
        if value < 0 and value < min_value:
            return min_value
        return value
|
[
"thiagosato22@hotmail.com"
] |
thiagosato22@hotmail.com
|
d6e8faee78b555a964bcdabf9d7b434fba09a3c0
|
b96f1bad8a74d31d8ff79bc955813bfcd17d7b26
|
/24. Swap Nodes in Pairs.py
|
75e6d9a0451fd14aadd62f665ddbd922cfa44910
|
[] |
no_license
|
brianhu0716/LeetCode-Solution
|
e7177af15e84e833ce8ab05027683ed4ac489643
|
158a4359c90b723545b22c4898047274cc1b80a6
|
refs/heads/main
| 2023-07-11T05:29:56.783795
| 2021-08-28T12:53:14
| 2021-08-28T12:53:14
| 374,991,658
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 444
|
py
|
# -*- coding: utf-8 -*-
"""
Created on Sun Apr 11 15:06:14 2021
@author: Brian
"""
'''
交換相鄰兩node的值即可
'''
class Solution:
    def swapPairs(self, head: 'ListNode') -> 'ListNode':
        """Swap every two adjacent nodes by exchanging their values;
        returns the (unchanged) head node.

        Bug fix: the annotations are now string forward references --
        ``ListNode`` is never defined or imported in this file, so the
        bare names raised NameError as soon as the class body executed.
        """
        # Empty list or a single node: nothing to swap.
        if not head or not head.next:
            return head
        ptr = head
        while ptr and ptr.next:
            # Swap the payloads of the pair; the links are untouched.
            ptr.val, ptr.next.val = ptr.next.val, ptr.val
            ptr = ptr.next.next
        return head
|
[
"85205343+brianhu0716@users.noreply.github.com"
] |
85205343+brianhu0716@users.noreply.github.com
|
6846461a15b491de3c42e18d6aa4d646d87bad7a
|
4bd5e9b67d98bfcc9611bd8b774c9ab9f4f4d446
|
/Python基础笔记/13/代码/3.多继承.py
|
1693fc8f7b66401a95f44f287cfcb7d4c149f841
|
[] |
no_license
|
zhenguo96/test1
|
fe21510aea7feb674e52fd7a86d4177666f841c5
|
0d8de7e73e7e635d26462a0bc53c773d999498be
|
refs/heads/master
| 2020-05-03T13:09:53.592103
| 2019-04-06T07:08:47
| 2019-04-06T07:08:47
| 178,646,627
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 623
|
py
|
# # 多继承
# class Base1:
# def __init__(self,name):
# self.name = name
# def t1(self):
# print("Base1")
#
# class Base2:
# def __init__(self,name):
# self.name = name
# def t2(self):
# print("Base2")
#
# class Base3:
# def __init__(self, name):
# self.name = name
# def t3(self):
# print("Base3")
#
# # 多继承的子类
# class Child(Base1,Base2,Base3):
# pass
# child = Child('tom')
# print(child.__dict__)
# child.t1()
# child.t2()
# # 继承顺序
# print(Child.mro())
# print(Child.__mro__)
#
|
[
"1148421588@qq.com"
] |
1148421588@qq.com
|
a911b9df049a048325fc20b414321d00fb1d10f4
|
64d36f801ff52465bb04b6783fab26359ffc6177
|
/python-basic/meta10.4.py
|
27ee8518f5623e3e896ed81b39d4010990855314
|
[] |
no_license
|
asiachrispy/tensor
|
3ee3257e4c7cc518677395f81158a92339d410a2
|
29bbcd6350d3e4ce8363b25e86d708bce6612eac
|
refs/heads/master
| 2021-01-11T20:19:14.381846
| 2018-05-15T09:33:13
| 2018-05-15T09:33:13
| 79,088,526
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 919
|
py
|
# early stopping
# Train an XGBClassifier on the Pima Indians diabetes dataset with a
# held-out evaluation set and early stopping, then report test accuracy.
from numpy import loadtxt
from xgboost import XGBClassifier
# NOTE(review): sklearn.cross_validation was removed in scikit-learn 0.20;
# modern versions need sklearn.model_selection instead -- confirm the
# pinned scikit-learn version before migrating.
from sklearn.cross_validation import train_test_split
from sklearn.metrics import accuracy_score
# load data
dataset = loadtxt('pima-indians-diabetes.csv', delimiter=",")
# split data into X and y (columns 0-7 are features, column 8 is the
# 0/1 diabetes label)
X = dataset[:,0:8]
Y = dataset[:,8]
# split data into train and test sets
seed = 7
test_size = 0.33
X_train, X_test, y_train, y_test = train_test_split(X, Y, test_size=test_size,
    random_state=seed)
# fit model no training data
model = XGBClassifier()
eval_set = [(X_test, y_test)]
# Stop adding boosting rounds once logloss on the eval set fails to
# improve for 10 consecutive rounds.
model.fit(X_train, y_train, early_stopping_rounds=10, eval_metric="logloss",
    eval_set=eval_set, verbose=True)
# make predictions for test data
y_pred = model.predict(X_test)
predictions = [round(value) for value in y_pred]
# evaluate predictions
accuracy = accuracy_score(y_test, predictions)
print("Accuracy: %.2f%%" % (accuracy * 100.0))
|
[
"huangzhong@jd.com"
] |
huangzhong@jd.com
|
7374ce7e683ccf1d4913b6f64fb04fb50b016df7
|
6c686d118e6d3072b3694c02c684a6619d4dd03e
|
/rsdns/tests/test_client.py
|
cb34bcfaef1aa74df689f00debfbff8959f697df
|
[
"Apache-2.0"
] |
permissive
|
masthalter/reddwarf
|
02e7b78e1e61178647fe8d98ab53eadfabe66e7f
|
72cf41d573cd7c35a222d9b7a8bfaad937f17754
|
HEAD
| 2016-11-08T16:12:16.783829
| 2012-04-26T22:26:56
| 2012-04-26T22:26:56
| 2,387,563
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,033
|
py
|
import httplib2
import mox
import unittest
from eventlet import pools
from novaclient.client import HTTPClient
from novaclient import exceptions
from rsdns.client.dns_client import DNSaasClient
ACCOUNT_ID = 1155
USERNAME = "test_user"
API_KEY="key"
AUTH_URL="urly"
MANAGEMENT_BASE_URL="mgmter"
class FakeResponse(object):
def __init__(self, status):
self.status = status
class WhenDNSaasClientConnectsSuccessfully(unittest.TestCase):
def setUp(self):
self.mox = mox.Mox()
def tearDown(self):
self.mox.VerifyAll()
def fake_auth(self, *args, **kwargs):
self.auth_called = True
def create_mock_client(self, fake_request_method):
"""
Creates a mocked DNSaasClient object, which calls "fake_request_method"
instead of httplib2.request.
"""
class FakeHttpLib2(object):
pass
FakeHttpLib2.request = fake_request_method
mock_client = self.mox.CreateMock(DNSaasClient)
mock_client.http_pool = pools.Pool()
mock_client.http_pool.create = FakeHttpLib2
mock_client.auth_token = 'token'
return mock_client
def test_make_request(self):
kwargs = {
'headers': {},
'body': "{}"
}
def fake_request(self, *args, **kwargs):
return FakeResponse(200), '{"hi":"hello"}'
mock_client = self.create_mock_client(fake_request)
resp, body = DNSaasClient.request(mock_client, **kwargs)
self.assertEqual(200, resp.status)
self.assertEqual({"hi":"hello"}, body)
def test_make_request_with_old_token(self):
kwargs = {
'headers': {},
'body': '{"message":"Invalid authentication token. Please renew."}'
}
def fake_request(self, *args, **kwargs):
return FakeResponse(401), \
'{"message":"Invalid authentication token. Please renew."}'
mock_client = self.create_mock_client(fake_request)
mock_client.authenticate()
mock_client.authenticate()
mock_client.authenticate()
self.mox.ReplayAll()
self.assertRaises(exceptions.Unauthorized, DNSaasClient.request,
mock_client, **kwargs)
def test_make_request_with_old_token_2(self):
kwargs = {
'headers': {},
'body': "{}"
}
self.count = 0
def fake_request(_self, *args, **kwargs):
self.count += 1
if self.count > 1:
return FakeResponse(200), '{"hi":"hello"}'
else:
return FakeResponse(401), \
'{"message":"Invalid authentication token. ' \
'Please renew."}'
mock_client = self.create_mock_client(fake_request)
mock_client.authenticate()
self.mox.ReplayAll()
resp, body = DNSaasClient.request(mock_client, **kwargs)
self.assertEqual(200, resp.status)
self.assertEqual({"hi":"hello"}, body)
|
[
"tim.simpson@rackspace.com"
] |
tim.simpson@rackspace.com
|
0153dcd0356625cc5eeb56abefd0fa3a6ed54d56
|
1f0837796a613fcf1788b2361fe4d8d5943bcc47
|
/Tarea Nº1/Ejer7.py
|
5bd8720ac4dfec14680de5faa08d98c167e2b592
|
[] |
no_license
|
LeonardoArroba/Tarea-de-Estructura
|
557f47206c007dad6a7ed5ecc8e29f73c1ab895c
|
d37e9e9fdf1a562c8f5d29c5fee8e836d87a32c9
|
refs/heads/main
| 2023-06-02T12:36:43.153125
| 2021-06-30T02:53:46
| 2021-06-30T02:53:46
| 381,556,550
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,180
|
py
|
#Determinar la cantidad de dinero que recibirá un trabajador por concepto de las horas extras trabajadas en una empresa,
# sabiendo que cuando las horas de trabajo exceden de 40, el resto se consideran horas extras y que éstas se pagan al doble
# de una hora normal cuando no exceden de 8; si las horas extras exceden de 8 se pagan las primeras 8 al doble de lo que
# se paga por una hora normal y el resto al triple
"""Ejercicio 7"""
class Trabajador:
def __init__(self):
pass
def calcularJornada(self):
ht, he, het=0,0,0
ph, phe, pt, ph8=0,0,0,0
ht = int(input("Ingrese horas trabajadas: "))
ph = float(input("Ingrese valor hora: "))
if ht > 40:
he = ht-40
if he > 8:
het = he-8
ph8 = 8*ph*2
ph8 =het*ph*3
else:
ph8 = he*ph*2
pt = 40*ph+phe+ph8
else:
pt = ht*ph
print("Sobretiempo<8:{} Sobretiempo>8:{} Jornada:{} ".format(ph8,phe,pt))
tarea = Trabajador()
tarea.calcularJornada()
|
[
"noreply@github.com"
] |
noreply@github.com
|
919cfaed50624e9fb97a068432bd5441d59ed4a1
|
ff9c646af161bc4d1d38093380d8e2deb54f42d1
|
/MiniAODAnalysis2/MiniAODAnalysis2/python/ConfFile_cfg.py
|
e4fe4e7ad2a2e67f021cd0c065696543022fb7a7
|
[] |
no_license
|
MatthiasHamer/CMS
|
3ab99a9008324e104913c03c2aa322a816a2d582
|
3594ba61e5d794917cf5e0136da743fd5e6190f9
|
refs/heads/master
| 2016-09-10T03:55:30.810030
| 2015-07-27T16:45:40
| 2015-07-27T16:45:40
| 22,997,971
| 0
| 2
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 475
|
py
|
import FWCore.ParameterSet.Config as cms
process = cms.Process("Demo")
process.load("FWCore.MessageService.MessageLogger_cfi")
process.maxEvents = cms.untracked.PSet( input = cms.untracked.int32(-1) )
process.source = cms.Source("PoolSource",
# replace 'myfile.root' with the source file you want to use
fileNames = cms.untracked.vstring(
'file:myfile.root'
)
)
process.demo = cms.EDAnalyzer('MiniAODAnalysis2'
)
process.p = cms.Path(process.demo)
|
[
"Matthias.Hamer@cern.ch"
] |
Matthias.Hamer@cern.ch
|
89b926a667374d24e238ffecadfdddc4cc558e8b
|
79164d3ac85ab44b8b29dbea8ff7395b1bf15cba
|
/configurations/configurations.py
|
8b3390c66948a3f2bb2856b3d26bbda53e6ce345
|
[] |
no_license
|
ReubenBagtas/pytestFramework
|
60cb3daa5a8613ef0bca4f21d3282cdfa2cfd6a0
|
cceca23007aab71447949d54949a0c306a4281f1
|
refs/heads/master
| 2020-06-02T04:06:45.139851
| 2019-06-09T16:34:38
| 2019-06-09T16:34:38
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 101
|
py
|
ALPHA = {}
DELTA = {}
PROD = {}
DEMO = {
'google-homepage': 'https://www.google.com'
}
DEFAULT = {}
|
[
"reubenbagtas.com"
] |
reubenbagtas.com
|
c7293eeb3014b78e8398f1c4818bd0ae1e72f0a8
|
7ddae3bb754cadccae7bb9a9e11986c5d3bc06b2
|
/sample/getweather.py
|
d99bd9b98f4d400b82dc3ae5c90f9567613ae096
|
[
"MIT"
] |
permissive
|
joembis/weather_texter
|
b9fa1449ad9097a292362556c26e4d2700713ed2
|
046fdf6811847072662b52e3873fae8bd45a8e85
|
refs/heads/main
| 2023-03-04T09:16:44.055293
| 2021-02-15T10:42:17
| 2021-02-15T10:42:17
| 339,039,341
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,469
|
py
|
import requests
from datetime import datetime as dt
from datetime import timedelta
def request_weather(url):
"""request the weather from openweathermap.org API. Returns a dict of the json file"""
response = requests.get(url)
response_dict = response.json()
return response_dict
def parse_weather(weather_data_raw):
"""parse the useful weather data from dict of the openweathermap.org json data.
returns another, slimmed down dict with the chosen elements"""
parsed_weather = {}
parsed_weather['sunrise'] = dt.fromtimestamp(weather_data_raw.get("city").get("sunrise")).time()
parsed_weather['sunset'] = dt.fromtimestamp(weather_data_raw.get("city").get("sunset")).time()
for period in weather_data_raw['list']:
# limiting the parsed weather data to weather for the next day
if dt.fromtimestamp(period.get("dt")).date() == dt.today().date() + timedelta(days=1):
time_period = dt.fromtimestamp(period.get("dt"))
# the dict key for each period is a 2-dight 24-hour time, e.g 15 for 3.00pm
parsed_weather[str(time_period.time())[:2]] = [
str(time_period.time())[:2],
round(period.get("main").get("temp")),
period.get("weather")[0].get("main").center(15),
str(period.get("clouds").get("all")).zfill(3),
str(round(period.get("wind").get("speed"))).zfill(3)
]
return parsed_weather
|
[
"76848685+jobororo@users.noreply.github.com"
] |
76848685+jobororo@users.noreply.github.com
|
964887bf2513e38eadfe80819f983c5826f676de
|
0b9f6534a99ff551f0006df78a24e8af30340580
|
/Source/BackupFiles/HDAPI/hdapi/API 문서/python_sample.py
|
08a1d2e3fc1c3952637678e603a976500b276f82
|
[] |
no_license
|
jayirum/ChartProjSvr
|
9bac49865d1e081de0cbd7559d0d2cbdd26279c2
|
d69edfcb3ac3698e1bdfcf5862d5e63bb305cb52
|
refs/heads/master
| 2020-03-24T03:22:18.176911
| 2019-07-05T01:38:03
| 2019-07-05T01:38:03
| 142,416,528
| 1
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 130,254
|
py
|
#-*- coding: utf-8 -*-
# Form implementation generated from reading ui file './python_sample.ui'
#
# Created by: PyQt4 UI code generator 4.11.4
#
# WARNING! All changes made in this file will be lost!
# Python version 2.7
from PyQt4 import QtCore, QtGui
#from PyQt4.QAxContainer import *
#from PyQt4.QtCore import *
from PyQt4.QtGui import *
from PyQt4.QAxContainer import *
import time
import os
import sys
reload(sys)
sys.setdefaultencoding('utf-8')
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
def _fromUtf8(s):
return s
try:
_encoding = QtGui.QApplication.UnicodeUTF8
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig)
class Ui_MainWindow(object):
CONST_FO_SERIES = "101L6000"
CONST_FO_SISE_51 = "51" # 국내선물호가
CONST_FO_SISE_65 = "65" # 국내선물체결
CONST_FO_SISE_58 = "58" # 국내상품선물호가
CONST_FO_SISE_71 = "71" # 국내상품선물체결
CONST_FO_ORDER_181 = "181" # 국내주문실시간접수
CONST_FO_ORDER_182 = "182" # 국내주문실시간미체결
CONST_FO_ORDER_183 = "183" # 국내주문실시간미결제
CONST_FO_ORDER_184 = "184" # 국내주문실시간잔고
CONST_FO_ORDER_185 = "185" # 국내주문실시간체결
CONST_FR_SERIES = "6AH16"
CONST_FR_SISE_76 = "76" # 해외선물호가
CONST_FR_SISE_82 = "82" # 해외선물체결
CONST_FR_ORDER_196 = "196" # 해외주문실시간접수
CONST_FR_ORDER_186 = "186" # 해외주문실시간미체결
CONST_FR_ORDER_187 = "187" # 해외주문실시간미결제
CONST_FR_ORDER_188 = "188" # 해외주문실시간잔고
CONST_FR_ORDER_189 = "189" # 해외주문실시간체결
CONST_FX_SERIES = "EUR/USD"
CONST_FX_SISE_171 = "171"
#attributes
m_AccListFO = {}
m_AccListFR = {}
m_AccListFX = {}
m_TrQueryListFO = {} # 국내 조회
m_FidQueryListFO = {} # 국내 FID조회
m_TrOrderListFO = {} # 국내 주문
m_TrQueryListFR = {}
m_FidQueryListFR = {} # 해외 FID조회
m_TrOrderListFR = {}
m_TrQueryListFX = {}
m_FidQueryListFX = {} # 해외 FID조회
m_TrOrderListFX = {}
# 실시간
m_RealSiseListFO = {}
m_RealOrderListFO = {}
m_RealSiseListFR = {}
m_RealOrderListFR = {}
m_RealSiseListFX = {}
m_RealOrderListFX = {}
m_bUseComboTrChange = True
def setupUi(self, MainWindow):
_TOP_Pos = 50
MainWindow.setObjectName(_fromUtf8("MainWindow"))
MainWindow.resize(800, 600)
self.centralwidget = QtGui.QWidget(MainWindow)
self.centralwidget.setObjectName(_fromUtf8("centralwidget"))
self.label_6 = QtGui.QLabel(self.centralwidget)
self.label_6.setGeometry(QtCore.QRect(10, 10, 56, 12))
self.label_6.setObjectName(_fromUtf8("label_6"))
self.Edit_Path = QtGui.QLineEdit(self.centralwidget)
self.Edit_Path.setGeometry(QtCore.QRect(70, 7, 400, 20))
self.Edit_Path.setObjectName(_fromUtf8("Edit_Path"))
self.BTN_Connect = QtGui.QPushButton(self.centralwidget)
self.BTN_Connect.setGeometry(QtCore.QRect(10, _TOP_Pos+10, 121, 23))
self.BTN_Connect.setObjectName(_fromUtf8("BTN_Connect"))
self.BTN_DisConnect = QtGui.QPushButton(self.centralwidget)
self.BTN_DisConnect.setGeometry(QtCore.QRect(150, _TOP_Pos+10, 111, 23))
self.BTN_DisConnect.setObjectName(_fromUtf8("BTN_DisConnect"))
self.label = QtGui.QLabel(self.centralwidget)
self.label.setGeometry(QtCore.QRect(10, _TOP_Pos+50, 56, 12))
self.label.setObjectName(_fromUtf8("label"))
self.label_2 = QtGui.QLabel(self.centralwidget)
self.label_2.setGeometry(QtCore.QRect(200, _TOP_Pos+50, 70, 12))
self.label_2.setObjectName(_fromUtf8("label_2"))
self.label_3 = QtGui.QLabel(self.centralwidget)
self.label_3.setGeometry(QtCore.QRect(401, _TOP_Pos+50, 111, 16))
self.label_3.setObjectName(_fromUtf8("label_3"))
self.Edit_UserID = QtGui.QLineEdit(self.centralwidget)
self.Edit_UserID.setGeometry(QtCore.QRect(70, _TOP_Pos+45, 113, 20))
self.Edit_UserID.setObjectName(_fromUtf8("Edit_UserID"))
self.Edit_UserPW = QtGui.QLineEdit(self.centralwidget)
self.Edit_UserPW.setGeometry(QtCore.QRect(280, _TOP_Pos+46, 113, 20))
self.Edit_UserPW.setEchoMode(QtGui.QLineEdit.Password)
self.Edit_UserPW.setObjectName(_fromUtf8("Edit_UserPW"))
self.Edit_CertPW = QtGui.QLineEdit(self.centralwidget)
self.Edit_CertPW.setGeometry(QtCore.QRect(510, _TOP_Pos+47, 113, 20))
self.Edit_CertPW.setEchoMode(QtGui.QLineEdit.Password)
self.Edit_CertPW.setObjectName(_fromUtf8("Edit_CertPW"))
self.Btn_Login = QtGui.QPushButton(self.centralwidget)
self.Btn_Login.setGeometry(QtCore.QRect(390, _TOP_Pos+80, 75, 23))
self.Btn_Login.setObjectName(_fromUtf8("Btn_Login"))
self.Btn_Logout = QtGui.QPushButton(self.centralwidget)
self.Btn_Logout.setGeometry(QtCore.QRect(550, _TOP_Pos+80, 75, 23))
self.Btn_Logout.setObjectName(_fromUtf8("Btn_Logout"))
# 국내/해외/FX
self.groupBox1 = QtGui.QGroupBox(self.centralwidget)
self.groupBox1.setGeometry(QtCore.QRect(10, _TOP_Pos+110, 200, 41))
self.groupBox1.setObjectName(_fromUtf8("groupBox"))
self.BtnRadio1 = QtGui.QRadioButton(self.groupBox1)
self.BtnRadio1.setGeometry(QtCore.QRect(10, 10, 111, 22))
self.BtnRadio1.setObjectName(_fromUtf8("BtnRadio1"))
self.BtnRadio2 = QtGui.QRadioButton(self.groupBox1)
self.BtnRadio2.setGeometry(QtCore.QRect(70, 10, 111, 22))
self.BtnRadio2.setObjectName(_fromUtf8("BtnRadio2"))
self.BtnRadio3 = QtGui.QRadioButton(self.groupBox1)
self.BtnRadio3.setGeometry(QtCore.QRect(140, 10, 111, 22))
self.BtnRadio3.setObjectName(_fromUtf8("BtnRadio3"))
# Tr조회, Fid조회, 주문
self.groupBox2 = QtGui.QGroupBox(self.centralwidget)
self.groupBox2.setGeometry(QtCore.QRect(251, _TOP_Pos+110, 200, 41))
self.groupBox2.setObjectName(_fromUtf8("groupBox2"))
self.BtnRadioQry1 = QtGui.QRadioButton(self.groupBox2)
self.BtnRadioQry1.setGeometry(QtCore.QRect(10, 10, 111, 22))
self.BtnRadioQry1.setObjectName(_fromUtf8("BtnRadioQry1"))
self.BtnRadioQry2 = QtGui.QRadioButton(self.groupBox2)
self.BtnRadioQry2.setGeometry(QtCore.QRect(70, 10, 111, 22))
self.BtnRadioQry2.setObjectName(_fromUtf8("BtnRadioQry2"))
self.BtnRadioQry3 = QtGui.QRadioButton(self.groupBox2)
self.BtnRadioQry3.setGeometry(QtCore.QRect(140, 10, 111, 22))
self.BtnRadioQry3.setObjectName(_fromUtf8("BtnRadioQry3"))
#해외 종목코드 수신
self.Btn_ReqJMCodeFR = QtGui.QPushButton(self.centralwidget)
self.Btn_ReqJMCodeFR.setGeometry(QtCore.QRect(460, _TOP_Pos+120, 120, 23))
self.Btn_ReqJMCodeFR.setObjectName(_fromUtf8("Btn_ReqJMCodeFR"))
# 계좌 콤보
self.label_4 = QtGui.QLabel(self.centralwidget)
self.label_4.setGeometry(QtCore.QRect(10, _TOP_Pos+165, 56, 12))
self.label_4.setObjectName(_fromUtf8("label_4"))
self.ComboAcc = QtGui.QComboBox(self.centralwidget)
self.ComboAcc.setGeometry(QtCore.QRect(70, _TOP_Pos+160, 111, 22))
self.ComboAcc.setObjectName(_fromUtf8("ComboAcc"))
self.label_acctNm = QtGui.QLabel(self.centralwidget)
self.label_acctNm.setGeometry(QtCore.QRect(450, _TOP_Pos+165, 56, 12))
self.label_acctNm.setObjectName(_fromUtf8("label_acctNm"))
#계좌비번
self.label_7 = QtGui.QLabel(self.centralwidget)
self.label_7.setGeometry(QtCore.QRect(200, _TOP_Pos+165, 56, 12))
self.label_7.setObjectName(_fromUtf8("label_7"))
self.Edit_AcctPW = QtGui.QLineEdit(self.centralwidget)
self.Edit_AcctPW.setGeometry(QtCore.QRect(280, _TOP_Pos+160, 113, 20))
self.Edit_AcctPW.setEchoMode(QtGui.QLineEdit.Password)
self.Edit_AcctPW.setObjectName(_fromUtf8("Edit_AcctPW"))
#서비스 콤부
self.label_5 = QtGui.QLabel(self.centralwidget)
self.label_5.setGeometry(QtCore.QRect(10, _TOP_Pos+200, 56, 12))
self.label_5.setObjectName(_fromUtf8("label_5"))
self.ComboTr = QtGui.QComboBox(self.centralwidget)
self.ComboTr.setGeometry(QtCore.QRect(70, _TOP_Pos+195, 200, 22))
self.ComboTr.setObjectName(_fromUtf8("ComboTr"))
self.label_8 = QtGui.QLabel(self.centralwidget)
self.label_8.setGeometry(QtCore.QRect(280, _TOP_Pos+200, 60, 12))
self.label_8.setObjectName(_fromUtf8("label_8"))
self.label_tr = QtGui.QLabel(self.centralwidget)
self.label_tr.setGeometry(QtCore.QRect(340, _TOP_Pos+200, 150, 12))
self.label_tr.setObjectName(_fromUtf8("label_tr"))
# 입력 부분
self.label_10 = QtGui.QLabel(self.centralwidget)
self.label_10.setGeometry(QtCore.QRect(10, _TOP_Pos+230, 60, 12))
self.label_10.setObjectName(_fromUtf8("label_10"))
self.Edit_Input = QtGui.QLineEdit(self.centralwidget)
self.Edit_Input.setGeometry(QtCore.QRect(70, _TOP_Pos+225, 450, 20))
self.Edit_Input.setObjectName(_fromUtf8("Edit_Input"))
self.Edit_Input_FID = QtGui.QLineEdit(self.centralwidget)
self.Edit_Input_FID.setGeometry(QtCore.QRect(525, _TOP_Pos+225, 120, 20))
self.Edit_Input_FID.setObjectName(_fromUtf8("Edit_Input_FID"))
self.BTN_Query = QtGui.QPushButton(self.centralwidget)
self.BTN_Query.setGeometry(QtCore.QRect(650, _TOP_Pos+224, 60, 22))
self.BTN_Query.setObjectName(_fromUtf8("BTN_Query"))
# 실시간
self.groupBox3 = QtGui.QGroupBox(self.centralwidget)
self.groupBox3.setGeometry(QtCore.QRect(10, _TOP_Pos+250, 200, 41))
self.groupBox3.setObjectName(_fromUtf8("groupBox"))
self.label_11 = QtGui.QLabel(self.groupBox3)
self.label_11.setGeometry(QtCore.QRect(10, 10, 111, 22))
self.label_11.setObjectName(_fromUtf8("label_11"))
self.BtnRadioSiseReal = QtGui.QRadioButton(self.groupBox3)
self.BtnRadioSiseReal.setGeometry(QtCore.QRect(70, 10, 111, 22))
self.BtnRadioSiseReal.setObjectName(_fromUtf8("BtnRadioSiseReal"))
self.BtnRadioOrderReal = QtGui.QRadioButton(self.groupBox3)
self.BtnRadioOrderReal.setGeometry(QtCore.QRect(140, 10, 111, 22))
self.BtnRadioOrderReal.setObjectName(_fromUtf8("BtnRadioOrderReal"))
# 실시간 등록/해지
self.groupBox4 = QtGui.QGroupBox(self.centralwidget)
self.groupBox4.setGeometry(QtCore.QRect(251, _TOP_Pos+250, 150, 41))
self.groupBox4.setObjectName(_fromUtf8("groupBox"))
self.BtnRadioRealRegister = QtGui.QRadioButton(self.groupBox4)
self.BtnRadioRealRegister.setGeometry(QtCore.QRect(10, 10, 111, 22))
self.BtnRadioRealRegister.setObjectName(_fromUtf8("BtnRadioRealRegister"))
self.BtnRadioRealUnRegister = QtGui.QRadioButton(self.groupBox4)
self.BtnRadioRealUnRegister.setGeometry(QtCore.QRect(70, 10, 111, 22))
self.BtnRadioRealUnRegister.setObjectName(_fromUtf8("BtnRadioRealUnRegister"))
self.ComboReal = QtGui.QComboBox(self.centralwidget)
self.ComboReal.setGeometry(QtCore.QRect(420, _TOP_Pos+260, 200, 22))
self.ComboReal.setObjectName(_fromUtf8("ComboReal"))
self.label_12 = QtGui.QLabel(self.centralwidget)
self.label_12.setGeometry(QtCore.QRect(10, _TOP_Pos+300, 60, 12))
self.label_12.setObjectName(_fromUtf8("label_12"))
self.Edit_Input_Real_Key = QtGui.QLineEdit(self.centralwidget)
self.Edit_Input_Real_Key.setGeometry(QtCore.QRect(70, _TOP_Pos+295, 50, 20))
self.Edit_Input_Real_Key.setObjectName(_fromUtf8("Edit_Input_Real_Key"))
self.Edit_Input_Real_Val = QtGui.QLineEdit(self.centralwidget)
self.Edit_Input_Real_Val.setGeometry(QtCore.QRect(130, _TOP_Pos+295, 250, 20))
self.Edit_Input_Real_Val.setObjectName(_fromUtf8("Edit_Input_Real_Val"))
self.BTN_Query_Real = QtGui.QPushButton(self.centralwidget)
self.BTN_Query_Real.setGeometry(QtCore.QRect(380, _TOP_Pos+294, 60, 22))
self.BTN_Query_Real.setObjectName(_fromUtf8("BTN_Query_Real"))
self.multiline = QtGui.QPlainTextEdit(self.centralwidget)
self.multiline.setGeometry(QtCore.QRect(10, _TOP_Pos+330, 781, 191))
self.multiline.setObjectName(_fromUtf8("listView"))
MainWindow.setCentralWidget(self.centralwidget)
self.menubar = QtGui.QMenuBar(MainWindow)
self.menubar.setGeometry(QtCore.QRect(0, 0, 800, 21))
self.menubar.setObjectName(_fromUtf8("menubar"))
MainWindow.setMenuBar(self.menubar)
self.statusbar = QtGui.QStatusBar(MainWindow)
self.statusbar.setObjectName(_fromUtf8("statusbar"))
MainWindow.setStatusBar(self.statusbar)
self.retranslateUi(MainWindow)
self.SetupTabOrder()
self.SetupSignal()
def SetupSignal(self):
QtCore.QObject.connect(self.BTN_Connect, QtCore.SIGNAL(_fromUtf8("clicked()")), self.OnConnect)
QtCore.QObject.connect(self.BTN_DisConnect, QtCore.SIGNAL(_fromUtf8("clicked()")), self.OnDisConnect)
QtCore.QObject.connect(self.Btn_Login, QtCore.SIGNAL(_fromUtf8("clicked()")), self.OnLogin)
QtCore.QObject.connect(self.Btn_Logout, QtCore.SIGNAL(_fromUtf8("clicked()")), self.OnLogout)
QtCore.QObject.connect(self.BtnRadio1, QtCore.SIGNAL(_fromUtf8("clicked()")), self.OnRadioAcctTpChange)
QtCore.QObject.connect(self.BtnRadio2, QtCore.SIGNAL(_fromUtf8("clicked()")), self.OnRadioAcctTpChange)
QtCore.QObject.connect(self.BtnRadio3, QtCore.SIGNAL(_fromUtf8("clicked()")), self.OnRadioAcctTpChange)
QtCore.QObject.connect(self.BtnRadioQry1, QtCore.SIGNAL(_fromUtf8("clicked()")), self.OnRadioTrChange)
QtCore.QObject.connect(self.BtnRadioQry2, QtCore.SIGNAL(_fromUtf8("clicked()")), self.OnRadioTrChange)
QtCore.QObject.connect(self.BtnRadioQry3, QtCore.SIGNAL(_fromUtf8("clicked()")), self.OnRadioTrChange)
QtCore.QObject.connect(self.Btn_ReqJMCodeFR, QtCore.SIGNAL(_fromUtf8("clicked()")), self.OnReqJmCodeFr)
QtCore.QObject.connect(self.ComboAcc, QtCore.SIGNAL(_fromUtf8("currentIndexChanged(QString)")), self.OnComboAccChange)
QtCore.QObject.connect(self.ComboTr, QtCore.SIGNAL(_fromUtf8("currentIndexChanged(QString)")), self.OnComboTrChange)
QtCore.QObject.connect(self.BTN_Query, QtCore.SIGNAL(_fromUtf8("clicked()")), self.OnQuery)
QtCore.QObject.connect(self.BtnRadioSiseReal, QtCore.SIGNAL(_fromUtf8("clicked()")), self.OnRadioRealChange)
QtCore.QObject.connect(self.BtnRadioOrderReal, QtCore.SIGNAL(_fromUtf8("clicked()")), self.OnRadioRealChange)
QtCore.QObject.connect(self.ComboReal, QtCore.SIGNAL(_fromUtf8("currentIndexChanged(QString)")), self.OnComboReal)
QtCore.QObject.connect(self.BTN_Query_Real, QtCore.SIGNAL(_fromUtf8("clicked()")), self.OnRealRegister)
def SetupTabOrder(self):
QtCore.QMetaObject.connectSlotsByName(MainWindow)
MainWindow.setTabOrder(self.BTN_Connect, self.Edit_UserID)
MainWindow.setTabOrder(self.Edit_UserID, self.Edit_UserPW)
MainWindow.setTabOrder(self.Edit_UserPW, self.Edit_CertPW)
MainWindow.setTabOrder(self.Edit_CertPW, self.Btn_Login)
MainWindow.setTabOrder(self.Btn_Login, self.ComboAcc)
MainWindow.setTabOrder(self.ComboAcc, self.ComboTr)
MainWindow.setTabOrder(self.ComboTr, self.Btn_Logout)
MainWindow.setTabOrder(self.Btn_Logout, self.BTN_DisConnect)
def SetupHDFOcx(self, MainWindow):
self.HDF = QAxWidget("HDFCOMMAGENT.HDFCommAgentCtrl.1")
self.HDF.connect(self.HDF, QtCore.SIGNAL("OnDataRecv(QString, int)"), self.OnDataRecv)
self.HDF.connect(self.HDF, QtCore.SIGNAL("OnGetBroadData(QString, int)"), self.OnGetBroadData)
self.HDF.connect(self.HDF, QtCore.SIGNAL("OnGetMsgWithRqId(int, QString, QString)"), self.OnGetMsgWithRqId)
# 실행위치 설정
strPath = os.getcwd()
self.Edit_Path.setText(_translate("MainWindow", strPath, None))
self.HDF.dynamicCall("CommSetOCXPath(" + strPath + ")")
def HD_CommGetRepeatCnt(self, strTrCode, nRealType, strFildNm):
nRqID = self.HDF.dynamicCall("CommGetRepeatCnt(QString, int, QString)", strTrCode, nRealType, strFildNm)
return range(nRqID.toLongLong()[0])
def HD_CommRqData(self, strTrCode, strInputData, nInputLength, strPrevOrNext):
nRqID = self.HDF.dynamicCall("CommRqData(QString, QString, LONG, QString)", strTrCode, strInputData, nInputLength, strPrevOrNext)
return nRqID.toLongLong()[0]
def HD_CommFIDRqData(self, strTrCode, strInputData, sReqFidList, sPrevOrNext):
#BSTR sFidCode, BSTR sInputData, BSTR sReqFidList, LONG nLength, BSTR sPrevOrNext
nRqID = self.HDF.dynamicCall("CommFIDRqData(QString, QString, QString, LONG, QString)", strTrCode, strInputData, sReqFidList, len(strInputData), sPrevOrNext)
return nRqID.toLongLong()[0]
def HD_CommJumunSvr(self, strTrCode, strInputData):
nRqID = self.HDF.dynamicCall("CommJumunSvr(QString, QString)", strTrCode, strInputData)
return nRqID.toLongLong()[0]
def HD_CommGetData(self, strTrCode, nRealType, strRecNm, nIndex, strFieldNm):
strFieldNm = unicode(str(strFieldNm).strip(), 'utf-8')
strData = self.HDF.dynamicCall("CommGetData(QString, int, QString, int, QString)", strTrCode, nRealType, strRecNm, nIndex, strFieldNm)
return strData.toString()
def HD_RegReal(self, bReg, strValue, nKeyType):
if ( bReg == True ):
nRet = self.HDF.dynamicCall("CommSetBroad(QString, LONG)", strValue, nKeyType)
else:
nRet = self.HDF.dynamicCall("CommRemoveBroad(QString, LONG)", strValue, nKeyType)
return nRet
def HD_CommGetAccInfo(self):
strData = self.HDF.dynamicCall("CommGetAccInfo()")
return strData.toString()
def HD_Login(self, strUserID, strUserWd, strCertPw):
ret = self.HDF.dynamicCall("CommLogin(QString, QString, QString)", strUserID, strUserWd, strCertPw)
def HD_Logout(self, strUserID):
ret = self.HDF.dynamicCall("CommLogout(QString)", strUserID)
def HD_CommReqMakeCod(self):
ret = self.HDF.dynamicCall("CommReqMakeCod(QString, BOOL)", "JMCODE", False)
def retranslateUi(self, MainWindow):
MainWindow.setWindowTitle(_translate("MainWindow", "MainWindow", None))
self.label_6.setText(_translate("MainWindow", "실행위치", None))
self.BTN_Connect.setText(_translate("MainWindow", "통신관리자 실행", None))
self.BTN_DisConnect.setText(_translate("MainWindow", "통신관리자 종료", None))
self.label.setText(_translate("MainWindow", "사용자 ID", None))
self.label_2.setText(_translate("MainWindow", "HTS비번", None))
self.label_3.setText(_translate("MainWindow", "공인인증 비밀번호", None))
self.Btn_Login.setText(_translate("MainWindow", "로그인", None))
self.Btn_Logout.setText(_translate("MainWindow", "로그아웃", None))
self.label_4.setText(_translate("MainWindow", "계좌번호", None))
self.label_7.setText(_translate("MainWindow", "계좌비번", None))
self.label_5.setText(_translate("MainWindow", "조회유형", None))
self.label_8.setText(_translate("MainWindow", "TRCode : ", None))
self.label_10.setText(_translate("MainWindow", "Input : ", None))
self.BTN_Query.setText(_translate("MainWindow", "조회", None))
self.BtnRadio1.setText(_translate("MainWindow", "국내", None))
self.BtnRadio2.setText(_translate("MainWindow", "해외", None))
self.BtnRadio3.setText(_translate("MainWindow", "FX", None))
self.BtnRadio1.setChecked(True)
self.BtnRadioQry1.setText(_translate("MainWindow", "TR", None))
self.BtnRadioQry2.setText(_translate("MainWindow", "FID", None))
self.BtnRadioQry3.setText(_translate("MainWindow", "주문", None))
self.BtnRadioQry1.setChecked(True)
self.Btn_ReqJMCodeFR.setText(_translate("MainWindow", "해외종목코드 수신", None))
self.label_11.setText(_translate("MainWindow", "실시간", None))
self.BtnRadioSiseReal.setText(_translate("MainWindow", "시세", None))
self.BtnRadioOrderReal.setText(_translate("MainWindow", "주문", None))
self.BtnRadioSiseReal.setChecked(True)
self.BtnRadioRealRegister.setText(_translate("MainWindow", "등록", None))
self.BtnRadioRealUnRegister.setText(_translate("MainWindow", "해지", None))
self.BtnRadioRealRegister.setChecked(True)
self.label_12.setText(_translate("MainWindow", "실시간", None))
self.BTN_Query_Real.setText(_translate("MainWindow", "등록/해지", None))
self.Edit_UserID.setText(u"sivas99")
self.Edit_UserPW.setText(u"qwe123")
self.Edit_AcctPW.setText(u"1234")
self.SetTrToDictionary_FO()
self.SetTrToDictionary_FR()
self.SetTrToDictionary_FX()
self.SetRealToDictionary_FO()
self.SetRealToDictionary_FR()
self.SetRealToDictionary_FX()
self.OnRadioTrChange()
self.SetComboReal()
pass
def SetTrToDictionary_FO(self):
self.m_TrQueryListFO.update({u"01.국내미체결주문조회" :[u"g11002.DQ0104&",u"계좌번호,11,딜러번호,3,계좌비번,8"]})
self.m_TrQueryListFO.update({u"02.국내체결내역리스트" :[u"g11002.DQ0107&",u"계좌번호,11,딜러번호,3,계좌비번,8"]})
self.m_TrQueryListFO.update({u"03.국내미결제약정조회" :[u"g11002.DQ0110&",u"계좌번호,11,딜러번호,3,계좌비번,8"]})
self.m_TrQueryListFO.update({u"04.국내고객예탁금조회" :[u"g11002.DQ0242&",u"계좌번호,11,계좌비번,8" ]})
self.m_TrQueryListFO.update({u"05.국내종목명조회" :[u"g11002.DQ0622&",u"exchtp,1,exchid,2,fotp,1"]})
self.m_TrQueryListFO.update({u"06.야간CME미체결주문조회":[u"g11002.DQ0116&",u"계좌번호,11,딜러번호,3,계좌비번,8"]})
self.m_TrQueryListFO.update({u"07.야간CME체결내역리스트":[u"g11002.DQ0119&",u"계좌번호,11,딜러번호,3,계좌비번,8"]})
self.m_TrQueryListFO.update({u"08.야간CME미결제약정조회":[u"g11002.DQ0122&",u"계좌번호,11,딜러번호,3,계좌비번,8"]})
self.m_FidQueryListFO.update({u"국내선물옵션마스터" :[u"s20001",u"종목,8,조회모드,1,조회건수,4",u"000075051057"]})
self.m_FidQueryListFO.update({u"국내주식선물마스터" :[u"s30001",u"종목,8,조회모드,1,조회건수,4",u"000075051057"]})
self.m_FidQueryListFO.update({u"야간CME선물옵션마스터" :[u"s21001",u"종목,8,조회모드,1,조회건수,4",u"000075051057"]})
self.m_TrOrderListFO.update({u"01.국내신규주문" :[u"g12001.DO1601&",u"계좌번호,11,계좌비번,8,종목,32,매매구분,1,체결구분,1,체결조건,1,주문가격,13,주문수량,5"]})
self.m_TrOrderListFO.update({u"02.국내정정주문" :[u"g12001.DO1901&",u"계좌번호,11,계좌비번,8,종목,32,매매구분,1,체결구분,1,체결조건,1,주문가격,13,주문수량,5,주문번호,7"]})
self.m_TrOrderListFO.update({u"03.국내취소주문" :[u"g12001.DO1701&",u"계좌번호,11,계좌비번,8,종목,32,매매구분,1,체결구분,1,체결조건,1,주문가격,13,주문수량,5,주문번호,7"]})
self.m_TrOrderListFO.update({u"04.CME국내신규주문" :[u"g12001.DO2201&",u"계좌번호,11,계좌비번,8,종목,32,매매구분,1,체결구분,1,체결조건,1,주문가격,13,주문수량,5"]})
self.m_TrOrderListFO.update({u"05.CME국내정정주문" :[u"g12001.DO2101&",u"계좌번호,11,계좌비번,8,종목,32,매매구분,1,체결구분,1,체결조건,1,주문가격,13,주문수량,5,주문번호,7"]})
self.m_TrOrderListFO.update({u"06.CME국내취소주문" :[u"g12001.DO2001&",u"계좌번호,11,계좌비번,8,종목,32,매매구분,1,체결구분,1,체결조건,1,주문가격,13,주문수량,5,주문번호,7"]})
pass
def SetTrToDictionary_FR(self):
self.m_TrQueryListFR.update({u"01.해외미체결주문내역" :[u"g11004.AQ0401%",u"조회유형,1,사용자ID,8,계좌번호,6,계좌비번,8"]})
self.m_TrQueryListFR.update({u"02.해외체결주문내역" :[u"g11004.AQ0402%",u"조회유형,1,사용자ID,8,계좌번호,6,계좌비번,8"]})
self.m_TrQueryListFR.update({u"03.해외미결제주문내역" :[u"g11004.AQ0403%",u"조회유형,1,사용자ID,8,계좌번호,6,계좌비번,8"]})
self.m_TrQueryListFR.update({u"04.해외예탁자산조회" :[u"g11004.AQ0607%",u"조회유형,1,사용자ID,8,계좌번호,6,계좌비번,8,groupnm,20,통화코드,3"]})
self.m_TrQueryListFR.update({u"05.해외일주월분틱" :[u"o44005",u"keyvalue,18,코드,32,조회일자,8,조회시간,10,다음구분,1,데이터구분,1,시간구분,3,데이터건수,5,장구분,1,허봉실봉구분,1"]})
self.m_FidQueryListFR.update({u"01.해외선물옵션마스터" :[u"o51000",u"종목,32", u"000001002003004005006007008009010011012013014015016017018019020021022023024025026027028029030031032033034035036037"]})
self.m_FidQueryListFR.update({u"02.해외호가정보" :[u"o51010",u"종목,32", u"000001002003004005006007"]})
self.m_FidQueryListFR.update({u"03.해외종목정보" :[u"o51210",u"종목,32", u"000001002003004005006007008009010011012013014015016017018019020021"]})
self.m_TrOrderListFR.update({u"01.해외신규주문" :[u"g12003.AO0401%",u"계좌번호,6,계좌비번,8,종목,32,매매구분,1,체결구분,1,체결조건,1,주문가격,15,주문수량,10,strtp,1,stopp,15,minqty,10"]})
self.m_TrOrderListFR.update({u"02.해외정정주문" :[u"g12003.AO0402%",u"계좌번호,6,계좌비번,8,종목,32,매매구분,1,체결구분,1,체결조건,1,주문가격,15,주문수량,10,주문번호,10,strtp,1,stopp,15,minqty,10"]})
self.m_TrOrderListFR.update({u"03.해외취소주문" :[u"g12003.AO0403%",u"계좌번호,6,계좌비번,8,종목,32,체결구분,1,체결조건,1,주문가격,15,주문수량,10,주문번호,10"]})
pass
def SetTrToDictionary_FX(self):
self.m_TrQueryListFX.update({u"01.FX자산내역조회" :[u"g11004.AQ0901%",u"조회유형,1,사용자ID,8,계좌번호,6,계좌비번,8"]})
self.m_TrQueryListFX.update({u"02.FX미체결내역조회" :[u"g11004.AQ0904%",u"조회유형,1,사용자ID,8,계좌번호,6,계좌비번,8"]})
self.m_TrQueryListFX.update({u"03.FX계좌정보Summary" :[u"g11004.AQ906%",u"조회유형,1,사용자ID,8,계좌번호,6,계좌비번,8"]})
self.m_TrQueryListFX.update({u"04.FX미청산포지션조회" :[u"g11004.AQ0908%",u"조회유형,1,사용자ID,8,계좌번호,6,계좌비번,8"]})
self.m_TrQueryListFX.update({u"05.FX청산포지션조회" :[u"g11004.AQ0910%",u"조회유형,1,사용자ID,8,계좌번호,6,계좌비번,8,groupnm,20,시작일자,8,종료일자,8"]})
self.m_FidQueryListFX.update({u"01.FX마스터" :[u"x00001",u"종목,10", u"000001002003004005006007008009010011012013014015016017018019020021022"]})
self.m_TrOrderListFX.update({u"01.FX신규주문" :[u"g12003.AO0501%",u"계좌번호,6,계좌비번,8,종목,32,매매구분,1,주문가격,20,주문수량,15,ordertr,10,체결구분,1,strtp,1,stopp,20,limitp,20,mrktrange,5,trailingstop,10,trdno,20,trdseq,5"]})
self.m_TrOrderListFX.update({u"02.FX정정주문" :[u"g12003.AO0502%",u"계좌번호,6,계좌비번,8,종목,32,주문번호,20,주문SEQ,15,주문가격,15,주문수량,10,ordertr,10,strtp,1,stopp,15,limitp,20,trailingstop,10"]})
self.m_TrOrderListFX.update({u"03.FX취소주문" :[u"g12003.AO0503%",u"계좌번호,6,계좌비번,8,종목,32,주문번호,20"]})
pass
    def SetRealToDictionary_FO(self):
        """Populate the domestic (FO) real-time service tables.

        Each entry maps a display name -> [real-type code, register key field]
        (the key field is the series code for quote feeds, the account number
        for order feeds).
        """
        # NOTE(review): some codes are bare literals ("52", "66", ...) while
        # others use CONST_* members — confirm the literals match the constants'
        # numbering scheme.
        self.m_RealSiseListFO.update({u"01.국내선물호가" :[self.CONST_FO_SISE_51,u"종목"]})
        self.m_RealSiseListFO.update({u"02.국내선물체결" :[self.CONST_FO_SISE_65,u"종목"]})
        self.m_RealSiseListFO.update({u"03.국내옵션호가" :["52",u"종목"]})
        self.m_RealSiseListFO.update({u"04.국내옵션체결" :["66",u"종목"]})
        self.m_RealSiseListFO.update({u"05.국내상품선물호가" :[self.CONST_FO_SISE_58,u"종목"]})
        self.m_RealSiseListFO.update({u"06.국내상품선물체결" :[self.CONST_FO_SISE_71,u"종목"]})
        self.m_RealSiseListFO.update({u"07.국내상품옵션호가" :[u"59",u"종목"]})
        self.m_RealSiseListFO.update({u"08.국내상품옵션체결" :[u"73",u"종목"]})
        self.m_RealSiseListFO.update({u"09.국내CME선물호가" :[u"75",u"종목"]})
        self.m_RealSiseListFO.update({u"10.국내CME선물체결" :[u"77",u"종목"]})
        self.m_RealSiseListFO.update({u"11.국내주식선물호가" :[u"56",u"종목"]})
        self.m_RealSiseListFO.update({u"12.국내주식선물체결" :[u"68",u"종목"]})
        # Order-side real-time feeds, keyed by account number.
        self.m_RealOrderListFO.update({u"01.국내주문실시간접수" :[self.CONST_FO_ORDER_181,u"계좌번호"]})
        self.m_RealOrderListFO.update({u"02.국내주문실시간미체결" :[self.CONST_FO_ORDER_182,u"계좌번호"]})
        self.m_RealOrderListFO.update({u"03.국내주문실시간미결제" :[self.CONST_FO_ORDER_183,u"계좌번호"]})
        self.m_RealOrderListFO.update({u"04.국내주문실시간잔고" :[self.CONST_FO_ORDER_184,u"계좌번호"]})
        self.m_RealOrderListFO.update({u"05.국내주문실시간체결" :[self.CONST_FO_ORDER_185,u"계좌번호"]})
        pass
    def SetRealToDictionary_FR(self):
        """Populate the overseas (FR) real-time service tables.

        Entries map display name -> [real-type code, register key field]:
        series code for quote feeds, account number for order feeds.
        """
        self.m_RealSiseListFR.update({u"01.해외선물호가" :[self.CONST_FR_SISE_76,u"종목"]})
        self.m_RealSiseListFR.update({u"02.해외선물체결" :[self.CONST_FR_SISE_82,u"종목"]})
        self.m_RealOrderListFR.update({u"01.해외주문실시간접수" :[self.CONST_FR_ORDER_196,u"계좌번호"]})
        self.m_RealOrderListFR.update({u"02.해외주문실시간미체결" :[self.CONST_FR_ORDER_186,u"계좌번호"]})
        self.m_RealOrderListFR.update({u"03.해외주문실시간미결제" :[self.CONST_FR_ORDER_187,u"계좌번호"]})
        self.m_RealOrderListFR.update({u"04.해외주문실시간잔고" :[self.CONST_FR_ORDER_188,u"계좌번호"]})
        self.m_RealOrderListFR.update({u"05.해외주문실시간체결" :[self.CONST_FR_ORDER_189,u"계좌번호"]})
        pass
    def SetRealToDictionary_FX(self):
        """Populate the FX real-time service tables.

        Entries map display name -> [real-type code, register key field].
        The order-side codes here are bare literals ("191"..."197") rather
        than CONST_* members.
        """
        self.m_RealSiseListFX.update({u"01.FX체결" :[self.CONST_FX_SISE_171,u"종목"]})
        self.m_RealOrderListFX.update({u"01.FX주문실시간접수" :[u"197",u"계좌번호"]})
        self.m_RealOrderListFX.update({u"02.FX주문실시간미체결" :[u"191",u"계좌번호"]})
        self.m_RealOrderListFX.update({u"03.FX주문실시간미청산" :[u"192",u"계좌번호"]})
        self.m_RealOrderListFX.update({u"04.FX주문실시간청산" :[u"193",u"계좌번호"]})
        self.m_RealOrderListFX.update({u"05.FX주문실시간자산" :[u"194",u"계좌번호"]})
        self.m_RealOrderListFX.update({u"06.FX주문실시간요약" :[u"195",u"계좌번호"]})
        pass
def Util_WriteLog(self, strMsg):
now = time.localtime(time.time())
strHead = time.strftime("[%H:%M:%S]", now)
self.multiline.insertPlainText( strHead + strMsg + "\n")
self.multiline.moveCursor(QTextCursor.End)
def Util_GetCurDate(self):
now = time.localtime(time.time())
strDate = time.strftime("%Y%m%d", now)
return strDate
def Util_FillStr(self, strSrc, nTotLen, chPad):
nPadLen = nTotLen - len(strSrc)
strPad = strSrc + chPad * nPadLen
return strPad
def OnConnect(self):
self.Util_WriteLog("Connect")
ret = self.HDF.dynamicCall("CommInit(1)")
    def OnDisConnect(self):
        """Log out of the session, then terminate the HD communication module."""
        self.OnLogout()
        self.HDF.dynamicCall("CommTerminate(1)")
def OnLogin(self):
self.Util_WriteLog("Login")
strUserID = self.Edit_UserID.text()
strUserPW = self.Edit_UserPW.text()
strCertPW = self.Edit_CertPW.text()
nRet = self.HD_Login(strUserID, strUserPW, strCertPW)
pass
def OnLogout(self):
self.Util_WriteLog("Logout")
strUserID = self.Edit_UserID.text()
self.HD_Logout(strUserID)
pass
def OnReqJmCodeFr(self):
self.Util_WriteLog("해외종목요청")
self.HD_CommReqMakeCod()
pass
def GetAccOrTrDic(self, strQryTp):
tmpList = []
tmpAccDic = {}
tmpTrDic = {}
tmpFidDic = {}
if ( self.BtnRadio1.isChecked() == True ): #국내
tmpAccDic = self.m_AccListFO
if ( self.BtnRadioQry1.isChecked() == True ): #TR조회
tmpTrDic = self.m_TrQueryListFO
elif ( self.BtnRadioQry2.isChecked() == True ): #FID조회
tmpTrDic = self.m_FidQueryListFO
elif ( self.BtnRadioQry3.isChecked() == True ): #주문
tmpTrDic = self.m_TrOrderListFO
elif ( self.BtnRadio2.isChecked() == True ): #해외
tmpAccDic = self.m_AccListFR
if ( self.BtnRadioQry1.isChecked() == True ): #TR조회
tmpTrDic = self.m_TrQueryListFR
elif ( self.BtnRadioQry2.isChecked() == True ): #FID조회
tmpTrDic = self.m_FidQueryListFR
elif ( self.BtnRadioQry3.isChecked() == True ): #주문
tmpTrDic = self.m_TrOrderListFR
elif ( self.BtnRadio3.isChecked() == True ): #FX
tmpAccDic = self.m_AccListFX
if ( self.BtnRadioQry1.isChecked() == True ): #TR조회
tmpTrDic = self.m_TrQueryListFX
elif ( self.BtnRadioQry2.isChecked() == True ): #FID조회
tmpTrDic = self.m_FidQueryListFX
elif ( self.BtnRadioQry3.isChecked() == True ): #주문
tmpTrDic = self.m_TrOrderListFX
if ( strQryTp == "ACC"):
return tmpAccDic
elif (strQryTp == "TR"):
return tmpTrDic
pass
pass
def GetRealDic(self):
tmpRealDic = {}
if ( self.BtnRadio1.isChecked() == True ): # 국내
if ( self.BtnRadioSiseReal.isChecked() == True ):
# 시세 로드
tmpRealDic = self.m_RealSiseListFO
else:
#주문 로드
tmpRealDic = self.m_RealOrderListFO
pass
elif ( self.BtnRadio2.isChecked() == True ):
if ( self.BtnRadioSiseReal.isChecked() == True ):
# 시세 로드
tmpRealDic = self.m_RealSiseListFR
else:
#주문 로드
tmpRealDic = self.m_RealOrderListFR
pass
elif ( self.BtnRadio3.isChecked() == True ):
if ( self.BtnRadioSiseReal.isChecked() == True ):
# 시세 로드
tmpRealDic = self.m_RealSiseListFX
else:
#주문 로드
tmpRealDic = self.m_RealOrderListFX
pass
return tmpRealDic
pass
def OnRadioAcctTpChange(self):
self.ComboAcc.clear()
tmpAccDic = self.GetAccOrTrDic("ACC")
tmpList = list(tmpAccDic.keys())
for i in range(len(tmpList)):
strAcctNo = tmpList[i]
self.ComboAcc.addItem(strAcctNo)
pass
self.OnRadioTrChange()
self.OnRadioRealChange()
pass
def OnRadioTrChange(self):
self.ComboTr.clear()
tmpTrDic = self.GetAccOrTrDic("TR")
tmpList = list(tmpTrDic.keys())
for i in range(len(tmpList)):
strServiceNm = tmpList[i]
self.ComboTr.addItem(strServiceNm)
self.ComboTr.model().sort(0)
bShow = False
if ( self.BtnRadioQry2.isChecked() == True ):
bShow = True
self.Edit_Input_FID.setVisible(bShow)
self.m_bUseComboTrChange = True
pass
    def OnComboAccChange(self, strSelData):
        """Show the account name for the newly selected account and rebuild the TR input."""
        # PyQt delivers the selection as a utf-8 byte string (Python 2);
        # decode it so it matches the unicode keys of the account dict.
        strInData = unicode(strSelData, 'utf-8')
        tmpAccDic = self.GetAccOrTrDic("ACC")
        strAcctNm = tmpAccDic.get(strInData)
        self.label_acctNm.setText(_fromUtf8(strAcctNm))
        # Re-derive the TR input string, since it embeds the account number.
        self.OnComboTrChange(self.ComboTr.currentText())
        pass
    def OnComboTrChange(self, strSelData):
        """Build the fixed-width input string for the selected TR service.

        Looks up [TR code, "field,len,field,len,..."] for the selection,
        then concatenates a padded default value for every known field name
        into Edit_Input (framed with "[" "]").  For FID queries the FID list
        (third list element) is shown in Edit_Input_FID.
        """
        strInData = unicode(strSelData, 'utf-8')
        tmpList = []
        tmpTrDic = {}
        tmpTrDic = self.GetAccOrTrDic("TR")
        tmpList = tmpTrDic.get(strInData)
        if ( tmpList is None ):
            return None
        strTrCode = tmpList[0]
        self.label_tr.setText(strTrCode)
        # data parsing: alternating field-name / field-length tokens
        strData = str(tmpList[1])
        commaArr = strData.split(",")
        strInput = ""
        self.Edit_Input.setText(strInput)
        for strItem in commaArr:
            if ( strItem == "계좌번호" ):
                strAcctNo = self.ComboAcc.currentText()
                strAcctNo = str(strAcctNo).strip()
                if ( self.BtnRadio1.isChecked() == True ): # domestic
                    strInput += self.Util_FillStr(strAcctNo, 11, ' ')
                else:
                    strInput += self.Util_FillStr(strAcctNo, 6, ' ')
            elif ( strItem == "계좌비번" ):
                strInput += self.Util_FillStr(self.Edit_AcctPW.text(), 8, ' ')
            elif ( strItem == "딜러번호" ):
                strInput += self.Util_FillStr("001", 3, ' ') # always "001"
            elif ( strItem == "조회일자" or strItem == "시작일자" or strItem == "종료일자"):
                strInput += self.Util_FillStr(self.Util_GetCurDate(), 8, ' ')
            elif ( strItem == "exchtp" ):
                strInput += self.Util_FillStr("1", 1, ' ') # 1: KRX
            elif ( strItem == "exchid" ):
                strInput += self.Util_FillStr("01", 2, ' ') # chars 2-3 of the series code, e.g. 101FC000 -> 01
            elif ( strItem == "fotp" ):
                strInput += self.Util_FillStr("F", 1, ' ') # F: futures, O: options
            elif ( strItem == "종목" ):
                if ( self.BtnRadio1.isChecked() == True ): # domestic
                    if ( self.BtnRadioQry2.isChecked() == True ):
                        strInput += self.Util_FillStr(self.CONST_FO_SERIES, 8, ' ') # FID query uses an 8-byte code
                    else:
                        strInput += self.Util_FillStr(self.CONST_FO_SERIES, 32, ' ')
                elif ( self.BtnRadio2.isChecked() == True ):
                    strInput += self.Util_FillStr(self.CONST_FR_SERIES, 32, ' ')
                elif ( self.BtnRadio3.isChecked() == True ):
                    strInput += self.Util_FillStr(self.CONST_FX_SERIES, 32, ' ')
            elif ( strItem == "매매구분" ):
                strInput += self.Util_FillStr("1", 1, ' ') # 1: buy, 2: sell
            elif ( strItem == "체결구분" ):
                strInput += self.Util_FillStr("1", 1, ' ') # 1: limit, 2: market
            elif ( strItem == "체결조건" ):
                if ( self.BtnRadio1.isChecked() == True ): # domestic
                    strInput += self.Util_FillStr("1", 1, ' ') # 1: FAS, 2: FOK, 3: FAK
                else:
                    strInput += self.Util_FillStr("1", 1, ' ') # 0: DAY, 1: GTC, 3: IOC FAK, 4: FOK
            elif ( strItem == "주문가격" ):
                if ( self.BtnRadio1.isChecked() == True ): # domestic
                    strInput += self.Util_FillStr("23055", 13, ' ') # order price, no decimal point
                else:
                    strInput += self.Util_FillStr("8808", 15, ' ') # order price, no decimal point
            elif ( strItem == "주문수량" ):
                strInput += self.Util_FillStr("1", 5, ' ') # order quantity
            elif ( strItem == "주문번호" ):
                if ( self.BtnRadio1.isChecked() == True ): # domestic
                    strInput += self.Util_FillStr("1111111", 7, ' ')
                else:
                    strInput += self.Util_FillStr("0000000000", 10, ' ')
            elif ( strItem == "사용자ID" ):
                strInput += self.Util_FillStr(self.Edit_UserID.text(), 8, ' ')
            elif ( strItem == "조회유형" ): # always 1
                strInput += self.Util_FillStr("1", 1, ' ')
            elif ( strItem == "groupnm" ):
                strInput += self.Util_FillStr(" ", 20, ' ')
            elif ( strItem == "통화코드" ):
                strInput += self.Util_FillStr("USD", 3, ' ') # USD, EUR, JPY, KRW
            elif ( strItem == "부서코드" ):
                strInput += self.Util_FillStr("001", 3, ' ')
            elif ( strItem == "적요구분" ):
                strInput += self.Util_FillStr("1", 1, ' ') # 0: all, 1: P/L, 2: interest, 3: fee, 4: deposit, 5: withdrawal, 6: other
            elif ( strItem == "조회모드" ):
                strInput += self.Util_FillStr("3", 1, ' ') # (1:LT 2:LE 3:EQ 4:GE 5:GT)
            elif ( strItem == "조회건수" ):
                strInput += self.Util_FillStr("0001", 4, ' ')
            elif ( strItem == "keyvalue"):
                strInput += self.Util_FillStr(" ", 18, ' ')
            elif ( strItem == "조회시간"):
                strInput += self.Util_FillStr("9999999999", 10, ' ')
            elif ( strItem == "다음구분"):
                strInput += self.Util_FillStr("0", 1, ' ') # 0: default
            elif ( strItem == "데이터구분"):
                strInput += self.Util_FillStr("1", 1, ' ') # 3: day, 4: week, 5: month, 2: minute, 6: tick
            elif ( strItem == "시간구분"):
                strInput += self.Util_FillStr("001", 3, ' ') # N minutes/ticks
            elif ( strItem == "데이터건수"):
                strInput += self.Util_FillStr("00010", 5, ' ') # number of rows requested
            elif ( strItem == "장구분"):
                strInput += self.Util_FillStr("1", 1, ' ') # 1: electronic session, 0: main session
            elif ( strItem == "허봉실봉구분"):
                strInput += self.Util_FillStr("0", 1, ' ') # 0: real candles only, 1: real + synthetic candles
            else:
                # Anything else should be a field-length token; non-numeric
                # tokens are silently ignored.
                try:
                    nFieldLen = int(strItem)
                    if ( type(nFieldLen) is int ): # it is a field length, skip
                        pass
                    else:
                        self.Util_WriteLog("UnKnown FieldNm : " + strItem)
                except ValueError:
                    pass
        self.Edit_Input.setText( "[" + strInput + "]")
        if ( self.BtnRadioQry2.isChecked() == True ):
            strFidInput = str(tmpList[2])
            self.Edit_Input_FID.setText("[" + strFidInput + "]")
        pass
def OnQuery(self):
strTr = self.label_tr.text()
strInput = self.Edit_Input.text()
strInput.replace("[", "")
strInput.replace("]", "")
strInputFid = self.Edit_Input_FID.text()
strInputFid.replace("[", "")
strInputFid.replace("]", "")
nRqID = 0
strMsg = ""
if ( self.BtnRadioQry3.isChecked() == True ):
nRqID = self.HD_CommJumunSvr(strTr, strInput)
strMsg = u"주문전송 UniqueID : " + str(nRqID)
elif ( self.BtnRadioQry2.isChecked() == True ):
nRqID = self.HD_CommFIDRqData(strTr, strInput, strInputFid, "")
strMsg = u"조회요청(FID) UniqueID : " + str(nRqID)
else:
nRqID = self.HD_CommRqData(strTr, strInput, len(strInput), u"")
strMsg = u"조회요청(TR) UniqueID : " + str(nRqID)
self.Util_WriteLog( strMsg )
pass
def OnRadioRealChange(self):
self.ComboReal.clear()
self.SetComboReal()
strSelData = self.ComboReal.currentText()
self.OnComboReal(strSelData)
pass
def SetComboReal(self):
tmpRealDic = {}
tmpRealDic = self.GetRealDic()
tmpList = []
tmpList = list(tmpRealDic.keys())
for i in range(len(tmpList)):
strRealServNo = tmpList[i]
self.ComboReal.addItem(strRealServNo)
self.ComboReal.model().sort(0)
pass
    def OnComboReal(self, strSelData):
        """Prepare the register key/value boxes for the selected real-time service.

        Looks up [real-type code, key field name] for the selection; the value
        box gets a fixed-width series code or account number.
        """
        strInData = unicode(strSelData, 'utf-8')
        tmpList = []
        tmpRealDic = {}
        tmpRealDic = self.GetRealDic()
        tmpList = tmpRealDic.get(strInData)
        strInput = ""
        try:
            strItem = tmpList[0]
            self.Edit_Input_Real_Key.setText(tmpList[0])
            strItem = tmpList[1]
            if ( strItem == "종목" ):
                if ( self.BtnRadio1.isChecked() == True ):
                    strInput = self.Util_FillStr(self.CONST_FO_SERIES, 32, ' ')
                elif ( self.BtnRadio2.isChecked() == True ):
                    strInput += self.Util_FillStr(self.CONST_FR_SERIES, 32, ' ')
                elif ( self.BtnRadio3.isChecked() == True ):
                    strInput += self.Util_FillStr(self.CONST_FX_SERIES, 10, ' ')
                pass
            elif (strItem == "계좌번호" ):
                strAcctNo = self.ComboAcc.currentText()
                strInput = self.Util_FillStr(strAcctNo, 11, ' ')
                pass
        except TypeError:
            # tmpList is None for an unknown selection; leave the value empty.
            pass
        self.Edit_Input_Real_Val.setText( "[" + strInput + "]" )
        pass
def OnRealRegister(self):
bReg = True
strMsg = ""
strValue = self.Edit_Input_Real_Val.text()
nKeyType = int(self.Edit_Input_Real_Key.text())
strValue.replace("[", "")
strValue.replace("]", "")
if ( self.BtnRadioRealRegister.isChecked() == True ):
bReg = True
strMsg = u"실시간 등록 요청 Value[%s] Key[%d]" %(strValue, nKeyType)
elif ( self.BtnRadioRealUnRegister.isChecked() == True ):
bReg = False
strMsg = u"실시간 해지 요청 Value[%s] Key[%d]" %(strValue, nKeyType)
self.Util_WriteLog( strMsg )
self.HD_RegReal(bReg, strValue, nKeyType)
pass
def OnDataRecv(self, sTrCode, nRqID):
strRecv = u"조회응답 Tr = [%s] UniqueID = [%d]" %(sTrCode, nRqID)
self.Util_WriteLog( strRecv )
i = 0
if ( sTrCode == "o44020"): #MRKT.cod
pass
elif ( sTrCode == "o44021"): #PMCode.cod
pass
elif ( sTrCode == "o44022" ): #JMCode.cod(해외)
nRptCnt = self.HD_CommGetRepeatCnt(sTrCode, -1, u"OutRec1")
self.Util_WriteLog( u"수신개수 : " + str(nRptCnt))
for i in nRptCnt:
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"종목코드")
self.Util_WriteLog(strData)
pass
elif (sTrCode == "g11002.DQ0104&" ): #국내미체결조회
self.procDQ0104(sTrCode)
pass
elif (sTrCode == "g11002.DQ0107&" ): #국내체결조회
self.procDQ0107(sTrCode)
pass
elif (sTrCode == "g11002.DQ0110&" ): #국내미체결조회
self.procDQ0110(sTrCode)
pass
elif (sTrCode == "g11002.DQ0242&" ): #국내고객예탁금조회
self.procDQ0242(sTrCode)
pass
elif sTrCode == "g11002.DQ0622&":#종목코드(국내)
nRptCnt = self.HD_CommGetRepeatCnt(sTrCode, -1, u"OutRec1")
for i in nRptCnt:
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"종목코드")
self.Util_WriteLog(strData)
pass
elif (sTrCode == "g11002.DQ0116&"):
pass
elif (sTrCode == "g11002.DQ0119&"): #야간CME체결내역리스트
pass
elif (sTrCode == "g11002.DQ0122&"): #야간CME미결제약정조회
pass
elif (sTrCode == "s20001" ): #국내선물옵션마스터
self.procs20001(sTrCode)
pass
elif (sTrCode == "s30001" ): #국내주식선물마스터
pass
elif (sTrCode == "s21001" ): #야간CME선물옵션마스터
pass
elif (sTrCode == "g12001.DO1601&" ): #국내신규주문
self.procDO1000(sTrCode)
pass
elif (sTrCode == "g12001.DO1901&" ): #국내정정주문
self.procDO1000(sTrCode)
pass
elif (sTrCode == "g12001.DO1701&" ): #국내취소주문
self.procDO1000(sTrCode)
pass
elif (sTrCode == "g12001.DO2201&" ): #CME국내신규주문
self.procDO2000(sTrCode)
pass
elif (sTrCode == "g12001.DO2101&" ): #CME국내정정주문
self.procDO2000(sTrCode)
pass
elif (sTrCode == "g12001.DO2001&" ): #CME국내취소주문
self.procDO2000(sTrCode)
pass
elif (sTrCode == "g11004.AQ0401%"): #해외미체결주문내역
self.procAQ0401(sTrCode)
pass
elif (sTrCode == "g11004.AQ0402%"): #해외미체결주문내역
self.procAQ0402(sTrCode)
pass
elif (sTrCode == "g11004.AQ0403%"):
self.procAQ0403(sTrCode)
pass
elif (sTrCode == "g11004.AQ0607%"):
self.procAQ0607(sTrCode)
pass
elif (sTrCode == "o51000" ): #해외체결정보
self.proco51000(sTrCode)
pass
elif (sTrCode == "o51010" ): #해외호가정보
self.proco51010(sTrCode)
pass
elif (sTrCode == "o51210" ): #해외종목정보
self.proco51210(sTrCode)
pass
elif (sTrCode == "g12003.AO0401" ): #해외신규주문
self.procAO0400(sTrCode)
pass
elif (sTrCode == "g12003.AO0402" ): #해외정정주문
self.procAO0400(sTrCode)
pass
elif (sTrCode == "g12003.AO0403" ): #해외취소주문
self.procAO0400(sTrCode)
pass
elif sTrCode == "g11004.AQ0101%": #ㅖ좌목록 수신
self.Util_WriteLog(u"Recv Account List")
self.procAQ0101(sTrCode)
pass
elif sTrCode == "g11004.AQ0450%": #해외미체결주문내역
self.procAQ0450(sTrCode)
pass
    def OnGetBroadData(self, sTrCode, nRealType):
        """Dispatch an incoming real-time broadcast to its per-type handler.

        nRealType is the numeric real-type code; it is stringified before
        being compared against the CONST_* code strings.
        """
        strRealType = unicode(str(nRealType), 'utf-8')
        self.Util_WriteLog (u"실시간 수신 [%s] [%s]" % (sTrCode, strRealType))
        if ( strRealType == self.CONST_FO_SISE_51 ):
            self.procReal51(sTrCode, nRealType)
            pass
        elif ( strRealType == self.CONST_FO_SISE_65 ):
            self.procReal65(sTrCode, nRealType)
            pass
        elif ( strRealType == self.CONST_FO_SISE_58 ):
            self.procReal58(sTrCode, nRealType)
            pass
        elif ( strRealType == self.CONST_FO_SISE_71 ):
            self.procReal71(sTrCode, nRealType)
            pass
        elif ( strRealType == self.CONST_FO_ORDER_181 ):
            self.procReal181(sTrCode, nRealType)
            pass
        elif ( strRealType == self.CONST_FO_ORDER_182 ):
            self.procReal182(sTrCode, nRealType)
            pass
        elif ( strRealType == self.CONST_FO_ORDER_183 ):
            self.procReal183(sTrCode, nRealType)
            pass
        elif ( strRealType == self.CONST_FO_ORDER_184 ):
            self.procReal184(sTrCode, nRealType)
            pass
        elif ( strRealType == self.CONST_FO_ORDER_185 ):
            self.procReal185(sTrCode, nRealType)
            pass
        elif ( strRealType == self.CONST_FR_SISE_76 ):
            self.procReal76(sTrCode, nRealType)
            pass
        elif ( strRealType == self.CONST_FR_SISE_82 ): # overseas trade tick: logged inline
            strCode = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"종목코드")
            strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"기준체결시간")
            strPric = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"체결가")
            self.Util_WriteLog("[" + strCode + "][" + strData + "][" + strPric + "]")
            pass
        elif (strRealType == self.CONST_FR_ORDER_196 ):
            self.procReal196(sTrCode, nRealType)
            pass
        elif (strRealType == self.CONST_FR_ORDER_186 ):
            self.procReal186(sTrCode, nRealType)
            pass
        elif (strRealType == self.CONST_FR_ORDER_187 ):
            self.procReal187(sTrCode, nRealType)
            pass
        elif (strRealType == self.CONST_FR_ORDER_188 ):
            self.procReal188(sTrCode, nRealType)
            pass
        elif (strRealType == self.CONST_FR_ORDER_189 ):
            self.procReal189(sTrCode, nRealType)
            pass
        elif (strRealType == self.CONST_FX_SISE_171):
            self.procReal171(sTrCode, nRealType)
            pass
        pass
def procReal171(self, sTrCode, nRealType):
strTime = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"호가시간 ")
strBid = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"매수호가 ")
strData = strTime + " " + strBid
self.Util_WriteLog(strData)
pass
def procReal189(self, sTrCode, nRealType):
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"주문번호 ")
self.Util_WriteLog(u"주문번호 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"계좌번호 ")
self.Util_WriteLog(u"계좌번호 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"계좌명 ")
self.Util_WriteLog(u"계좌명 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"종목 ")
self.Util_WriteLog(u"종목 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"매매구분 ")
self.Util_WriteLog(u"매매구분 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"주문가격 ")
self.Util_WriteLog(u"주문가격 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"주문수량 ")
self.Util_WriteLog(u"주문수량 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"체결가격 ")
self.Util_WriteLog(u"체결가격 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"체결수량 ")
self.Util_WriteLog(u"체결수량 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"STOP가격 ")
self.Util_WriteLog(u"STOP가격 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"약정금액 ")
self.Util_WriteLog(u"약정금액 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"주문유형 ")
self.Util_WriteLog(u"주문유형 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"수수료 ")
self.Util_WriteLog(u"수수료 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"주문일자 ")
self.Util_WriteLog(u"주문일자 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"주문시간 ")
self.Util_WriteLog(u"주문시간 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"주문자 ")
self.Util_WriteLog(u"주문자 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"체결일자 ")
self.Util_WriteLog(u"체결일자 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"체결시간 ")
self.Util_WriteLog(u"체결시간 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"거래소일자 ")
self.Util_WriteLog(u"거래소일자 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"거래소시간 ")
self.Util_WriteLog(u"거래소시간 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"통화코드 ")
self.Util_WriteLog(u"통화코드 : " + strData)
pass
def procReal188(self, sTrCode, nRealType):
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"계좌번호 ")
self.Util_WriteLog(u"계좌번호 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"계좌명 ")
self.Util_WriteLog(u"계좌명 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"주문가능금액")
self.Util_WriteLog(u"주문가능금액: " + strData)
pass
def procReal187(self, sTrCode, nRealType):
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"계좌번호 ")
self.Util_WriteLog(u"계좌번호 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"계좌명 ")
self.Util_WriteLog(u"계좌명 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"종목 ")
self.Util_WriteLog(u"종목 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"매매구분 ")
self.Util_WriteLog(u"매매구분 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"평균단가 ")
self.Util_WriteLog(u"평균단가 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"당일미결제수량 ")
self.Util_WriteLog(u"당일미결제수량 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"미결제약정금액 ")
self.Util_WriteLog(u"미결제약정금액 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"신규체결수량 ")
self.Util_WriteLog(u"신규체결수량 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"신규체결금액 ")
self.Util_WriteLog(u"신규체결금액 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"전환매체결수량 ")
self.Util_WriteLog(u"전환매체결수량 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"전환매체결금액 ")
self.Util_WriteLog(u"전환매체결금액 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"전환매Hold수량 ")
self.Util_WriteLog(u"전환매Hold수량 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"수동청산Hold수량 ")
self.Util_WriteLog(u"수동청산Hold수량 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"위탁증거금 ")
self.Util_WriteLog(u"위탁증거금 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"통화코드 ")
self.Util_WriteLog(u"통화코드 : " + strData)
pass
def procReal186(self, sTrCode, nRealType):
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"계좌번호 ")
self.Util_WriteLog(u"계좌번호 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"계좌명 ")
self.Util_WriteLog(u"계좌명 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"기준일자 ")
self.Util_WriteLog(u"기준일자 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"주문번호 ")
self.Util_WriteLog(u"주문번호 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"종목 ")
self.Util_WriteLog(u"종목 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"매매구분 ")
self.Util_WriteLog(u"매매구분 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"주문가격 ")
self.Util_WriteLog(u"주문가격 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"주문수량 ")
self.Util_WriteLog(u"주문수량 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"체결수량 ")
self.Util_WriteLog(u"체결수량 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"잔량 ")
self.Util_WriteLog(u"잔량 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"STOP 가격 ")
self.Util_WriteLog(u"STOP 가격 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"주문시간 ")
self.Util_WriteLog(u"주문시간 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"거래소시간 ")
self.Util_WriteLog(u"거래소시간 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"주문자 ")
self.Util_WriteLog(u"주문자 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"주문구분 ")
self.Util_WriteLog(u"주문구분 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"전략구분 ")
self.Util_WriteLog(u"전략구분 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"최초원주문번호 ")
self.Util_WriteLog(u"최초원주문번호 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"그룹주문번호 ")
self.Util_WriteLog(u"그룹주문번호 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"매수주문수량 ")
self.Util_WriteLog(u"매수주문수량 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"매도주문수량 ")
self.Util_WriteLog(u"매도주문수량 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"주문유형 ")
self.Util_WriteLog(u"주문유형 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"가격조건 ")
self.Util_WriteLog(u"가격조건 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"체결조건 ")
self.Util_WriteLog(u"체결조건 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"GTD일자 ")
self.Util_WriteLog(u"GTD일자 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"IOC최소체결수량 ")
self.Util_WriteLog(u"IOC최소체결수량 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"원주문번호 ")
self.Util_WriteLog(u"원주문번호 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"주문순서 ")
self.Util_WriteLog(u"주문순서 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"통화코드 ")
self.Util_WriteLog(u"통화코드 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"주문금액 ")
self.Util_WriteLog(u"주문금액 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"주문시작일자 ")
self.Util_WriteLog(u"주문시작일자 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"옵션행사예약여부 ")
self.Util_WriteLog(u"옵션행사예약여부 : " + strData)
pass
    def procReal196(self, sTrCode, nRealType):
        """Log the overseas real-time order-acceptance (196) record field by field."""
        strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"주문번호")
        self.Util_WriteLog(u"주문번호 : " + strData)
        strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"종목코드")
        self.Util_WriteLog(u"종목코드 : " + strData)
        strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"주문가격")
        self.Util_WriteLog(u"주문가격 : " + strData)
        strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"주문수량")
        self.Util_WriteLog(u"주문수량 : " + strData)
        strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"매매구분")
        self.Util_WriteLog(u"매매구분 : " + strData)
        strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"주문시간")
        self.Util_WriteLog(u"주문시간 : " + strData)
        # The next two labels spell out the enum meanings inline.
        strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"주문구분")
        self.Util_WriteLog(u"주문구분(1:신규, 2:정정, 3:취소) : " + strData)
        strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"처리구분")
        self.Util_WriteLog(u"처리구분(0:정상, 1:거부) : " + strData)
        pass
def procReal181(self, sTrCode, nRealType):
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"주문번호")
self.Util_WriteLog(u"주문번호 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"종목코드")
self.Util_WriteLog(u"종목코드 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"주문가격")
self.Util_WriteLog(u"주문가격 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"주문수량")
self.Util_WriteLog(u"주문수량 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"매매구분")
self.Util_WriteLog(u"매매구분 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"접수시간")
self.Util_WriteLog(u"접수시간 : " + strData)
pass
def procReal182(self, sTrCode, nRealType):
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"key")
self.Util_WriteLog(u"key : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"계좌번호")
self.Util_WriteLog(u"계좌번호 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"딜러번호")
self.Util_WriteLog(u"딜러번호 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"계좌명")
self.Util_WriteLog(u"계좌명 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"딜러명")
self.Util_WriteLog(u"딜러명 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"주문번호")
self.Util_WriteLog(u"주문번호 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"종목코드")
self.Util_WriteLog(u"종목코드 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"매매구분")
self.Util_WriteLog(u"매매구분 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"주문가격")
self.Util_WriteLog(u"주문가격 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"주문수량")
self.Util_WriteLog(u"주문수량 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"정정수량")
self.Util_WriteLog(u"정정수량 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"취소수량")
self.Util_WriteLog(u"취소수량 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"체결수량")
self.Util_WriteLog(u"체결수량 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"잔량")
self.Util_WriteLog(u"잔량 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"원주문번호")
self.Util_WriteLog(u"원주문번호 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"최초원주문번호")
self.Util_WriteLog(u"최초원주문번호 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"통신주문구분")
self.Util_WriteLog(u"통신주문구분 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"주문전략구분")
self.Util_WriteLog(u"주문전략구분 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"주문조작구분")
self.Util_WriteLog(u"주문조작구분 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"거래소접수시간")
self.Util_WriteLog(u"거래소접수시간 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"작업사원")
self.Util_WriteLog(u"작업사원 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"그룹주문번호")
self.Util_WriteLog(u"그룹주문번호 : " + strData)
pass
    def procReal183(self, sTrCode, nRealType):
        """Log the domestic real-time open-position (183) record field by field."""
        strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"key")
        self.Util_WriteLog(u"key : " + strData)
        strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"계좌번호")
        self.Util_WriteLog(u"계좌번호 : " + strData)
        strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"딜러번호")
        self.Util_WriteLog(u"딜러번호 : " + strData)
        strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"계좌명")
        self.Util_WriteLog(u"계좌명 : " + strData)
        strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"딜러명")
        self.Util_WriteLog(u"딜러명 : " + strData)
        strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"종목")
        self.Util_WriteLog(u"종목 : " + strData)
        strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"매매구분")
        self.Util_WriteLog(u"매매구분 : " + strData)
        strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"전일 미결제 수량")
        self.Util_WriteLog(u"전일 미결제 수량 : " + strData)
        strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"당일 미결제 수량")
        self.Util_WriteLog(u"당일 미결제 수량 : " + strData)
        strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"미체결수량")
        self.Util_WriteLog(u"미체결수량 : " + strData)
        strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"평균 단가")
        self.Util_WriteLog(u"평균 단가 : " + strData)
        strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"당일미결제약정금액")
        self.Util_WriteLog(u"당일미결제약정금액 : " + strData)
        strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"신규체결수량")
        self.Util_WriteLog(u"신규체결수량 : " + strData)
        strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"신규체결금액")
        self.Util_WriteLog(u"신규체결금액 : " + strData)
        strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"전환매 체결수량")
        self.Util_WriteLog(u"전환매 체결수량 : " + strData)
        strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"전환매 체결금액")
        self.Util_WriteLog(u"전환매 체결금액 : " + strData)
        strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"행사 신청수량")
        self.Util_WriteLog(u"행사 신청수량 : " + strData)
        strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"행사 배정수량")
        self.Util_WriteLog(u"행사 배정수량 : " + strData)
        strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"행사 거부수량")
        self.Util_WriteLog(u"행사 거부수량 : " + strData)
        # NOTE(review): field name is "만기수량" but the label says
        # "행사 만기수량" — confirm which name the record actually uses.
        strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"만기수량")
        self.Util_WriteLog(u"행사 만기수량 : " + strData)
        strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"장부단가")
        self.Util_WriteLog(u"장부단가 : " + strData)
        pass
def procReal184(self, sTrCode, nRealType):
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"계좌번호")
self.Util_WriteLog(u"계좌번호 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"딜러번호")
self.Util_WriteLog(u"딜러번호 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"계좌명")
self.Util_WriteLog(u"계좌명 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"딜러명")
self.Util_WriteLog(u"딜러명 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"주문가능금액")
self.Util_WriteLog(u"주문가능금액 : " + strData)
pass
def procReal185(self, sTrCode, nRealType):
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"key")
self.Util_WriteLog(u"key : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"계좌번호")
self.Util_WriteLog(u"계좌번호 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"딜러번호")
self.Util_WriteLog(u"딜러번호 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"계좌명")
self.Util_WriteLog(u"계좌명 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"딜러명")
self.Util_WriteLog(u"딜러명 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"주문번호")
self.Util_WriteLog(u"주문번호 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"종목코드")
self.Util_WriteLog(u"종목코드 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"매매구분")
self.Util_WriteLog(u"매매구분 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"체결가격")
self.Util_WriteLog(u"체결가격 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"체결수량")
self.Util_WriteLog(u"체결수량 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"체결시간")
self.Util_WriteLog(u"체결시간 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"전략구분")
self.Util_WriteLog(u"전략구분 : " + strData)
pass
def procReal76(self, sTrCode, nRealType):
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"종목코드")
self.Util_WriteLog(u"종목코드 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"호가시간")
self.Util_WriteLog(u"호가시간 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"매도호가1")
self.Util_WriteLog(u"매도호가1 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"매수호가1")
self.Util_WriteLog(u"매수호가1 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"매도호가수량1")
self.Util_WriteLog(u"매도호가수량1 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"매수호가수량1")
self.Util_WriteLog(u"매수호가수량1 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"매도호가건수1")
self.Util_WriteLog(u"매도호가건수1 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"매수호가건수1")
self.Util_WriteLog(u"매수호가건수1 : " + strData)
pass
def procReal71(self, sTrCode, nRealType):
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"종목코드")
self.Util_WriteLog(u"종목코드 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"체결시간")
self.Util_WriteLog(u"체결시간 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"change")
self.Util_WriteLog(u"전일대비 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"현재가")
self.Util_WriteLog(u"현재가 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"cvolume")
self.Util_WriteLog(u"체결량 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"누적거래량")
self.Util_WriteLog(u"누적거래량 : " + strData)
pass
def procReal58(self, sTrCode, nRealType):
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"kfutcode")
self.Util_WriteLog(u"종목코드 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"hotime")
self.Util_WriteLog(u"호가시간 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"offerho1")
self.Util_WriteLog(u"매도호가1 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"bidho1")
self.Util_WriteLog(u"매수호가1 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"offerrem1")
self.Util_WriteLog(u"매도호가수량1 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"bidrem1")
self.Util_WriteLog(u"매수호가수량1 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"offercnt1")
self.Util_WriteLog(u"매도호가건수1 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"bidcnt1")
self.Util_WriteLog(u"매수호가건수1 : " + strData)
pass
def procReal51(self, sTrCode, nRealType):
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"종목코드")
self.Util_WriteLog(u"종목코드 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"호가시간")
self.Util_WriteLog(u"호가시간 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"매도호가1")
self.Util_WriteLog(u"매도호가1 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"매수호가1")
self.Util_WriteLog(u"매수호가1 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"매도호가수량1")
self.Util_WriteLog(u"매도호가수량1 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"매수호가수량1")
self.Util_WriteLog(u"매수호가수량1 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"매도호가건수1")
self.Util_WriteLog(u"매도호가건수1 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"매수호가건수1")
self.Util_WriteLog(u"매수호가건수1 : " + strData)
pass
def procReal65(self, sTrCode, nRealType):
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"종목코드")
self.Util_WriteLog(u"종목코드 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"체결시간")
self.Util_WriteLog(u"체결시간 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"전일대비")
self.Util_WriteLog(u"전일대비 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"현재가")
self.Util_WriteLog(u"현재가 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"체결량")
self.Util_WriteLog(u"체결량 : " + strData)
strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"누적거래량")
self.Util_WriteLog(u"누적거래량 : " + strData)
pass
def procDO1000(self, sTrCode):
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"접수구분")
self.Util_WriteLog( u"접수구분 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"처리구분")
self.Util_WriteLog( u"처리구분 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"계좌번호")
self.Util_WriteLog( u"계좌번호 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"주문번호")
self.Util_WriteLog( u"주문번호 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"종목코드")
self.Util_WriteLog( u"종목코드 : " + strData)
pass
def procDO2000(self, sTrCode):
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"접수구분")
self.Util_WriteLog( u"접수구분 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"처리구분")
self.Util_WriteLog( u"처리구분 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"계좌번호")
self.Util_WriteLog( u"계좌번호 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"주문번호")
self.Util_WriteLog( u"주문번호 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"종목코드")
self.Util_WriteLog( u"종목코드 : " + strData)
pass
def procAO0400(self, sTrCode):
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"접수구분")
self.Util_WriteLog( u"접수구분 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"주문번호")
self.Util_WriteLog( u"주문번호 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"처리코드")
self.Util_WriteLog( u"처리코드 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"처리메시지")
self.Util_WriteLog( u"처리메시지 : " + strData)
pass
def procAQ0401(self, sTrCode):
nRptCnt = self.HD_CommGetRepeatCnt(sTrCode, -1, u"OutRec1")
self.Util_WriteLog( u"AQ0401 반복회수 : " + str(nRptCnt) )
for i in nRptCnt:
self.Util_WriteLog( u"===============================================" )
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"주문번호 ")
self.Util_WriteLog( u"주문번호 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"계좌번호 ")
self.Util_WriteLog( u"계좌번호 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"계좌명 ")
self.Util_WriteLog( u"계좌명 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"종목코드 ")
self.Util_WriteLog( u"종목코드 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"매매구분 ")
self.Util_WriteLog( u"매매구분 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"주문가격 ")
self.Util_WriteLog( u"주문가격 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"주문수량 ")
self.Util_WriteLog( u"주문수량 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"체결수량 ")
self.Util_WriteLog( u"체결수량 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"잔량 ")
self.Util_WriteLog( u"잔량 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"주문유형 ")
self.Util_WriteLog( u"주문유형 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"STOP 가격 ")
self.Util_WriteLog( u"STOP 가격 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"주문시간 ")
self.Util_WriteLog( u"주문시간 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"주문자 ")
self.Util_WriteLog( u"주문자 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"통신구분 ")
self.Util_WriteLog( u"통신구분 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"전략구분 ")
self.Util_WriteLog( u"전략구분 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"최초원주문번호 ")
self.Util_WriteLog( u"최초원주문번호 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"주문일자 ")
self.Util_WriteLog( u"주문일자 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"그룹 주문 번호 ")
self.Util_WriteLog( u"그룹 주문 번호 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"가격조건 ")
self.Util_WriteLog( u"가격조건 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"체결조건 ")
self.Util_WriteLog( u"체결조건 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"주문종료일자 ")
self.Util_WriteLog( u"주문종료일자 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"IOC 최소체결수량 ")
self.Util_WriteLog( u"IOC 최소체결수량 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"통화코드 ")
self.Util_WriteLog( u"통화코드 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"주문금액 ")
self.Util_WriteLog( u"주문금액 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"주문시작일자 ")
self.Util_WriteLog( u"주문시작일자 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"옵션행사예약여부 ")
self.Util_WriteLog( u"옵션행사예약여부 : " + strData)
pass
def procAQ0402(self, sTrCode):
nRptCnt = self.HD_CommGetRepeatCnt(sTrCode, -1, u"OutRec1")
self.Util_WriteLog( u"AQ0402 반복회수 : " + str(nRptCnt) )
for i in nRptCnt:
self.Util_WriteLog( u"===============================================" )
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"주문번호 ")
self.Util_WriteLog( u"주문번호 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"계좌번호 ")
self.Util_WriteLog( u"계좌번호 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"계좌명 ")
self.Util_WriteLog( u"계좌명 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"종목 ")
self.Util_WriteLog( u"종목 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"매매구분 ")
self.Util_WriteLog( u"매매구분 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"주문가격 ")
self.Util_WriteLog( u"주문가격 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"주문수량 ")
self.Util_WriteLog( u"주문수량 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"체결가격 ")
self.Util_WriteLog( u"체결가격 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"체결수량 ")
self.Util_WriteLog( u"체결수량 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"STOP가격 ")
self.Util_WriteLog( u"STOP가격 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"약정금액 ")
self.Util_WriteLog( u"약정금액 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"주문유형 ")
self.Util_WriteLog( u"주문유형 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"수수료 ")
self.Util_WriteLog( u"수수료 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"주문일자 ")
self.Util_WriteLog( u"주문일자 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"주문시간 ")
self.Util_WriteLog( u"주문시간 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"주문자 ")
self.Util_WriteLog( u"주문자 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"체결일자 ")
self.Util_WriteLog( u"체결일자 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"체결시간 ")
self.Util_WriteLog( u"체결시간 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"거래소일자 ")
self.Util_WriteLog( u"거래소일자 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"거래소시간 ")
self.Util_WriteLog( u"거래소시간 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"통화코드 ")
self.Util_WriteLog( u"통화코드 : " + strData)
pass
def procAQ0403(self, sTrCode):
nRptCnt = self.HD_CommGetRepeatCnt(sTrCode, -1, u"OutRec1")
print(nRptCnt)
self.Util_WriteLog( u"AQ0403 반복회수 : " + str(nRptCnt) )
for i in nRptCnt:
self.Util_WriteLog( u"===============================================" )
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"계좌번호 ")
self.Util_WriteLog( u"계좌번호 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"계좌명 ")
self.Util_WriteLog( u"계좌명 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"종목 ")
self.Util_WriteLog( u"종목 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"매매구분 ")
self.Util_WriteLog( u"매매구분 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"당일순 미결제수량 ")
self.Util_WriteLog( u"당일순 미결제수량 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"평균단가(소수점반영) ")
self.Util_WriteLog( u"평균단가(소수점반영) : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"평균단가 ")
self.Util_WriteLog( u"평균단가 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"현재가 ")
self.Util_WriteLog( u"현재가 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"평가손익 ")
self.Util_WriteLog( u"평가손익 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"위탁증거금 ")
self.Util_WriteLog( u"위탁증거금 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"신규제한일 ")
self.Util_WriteLog( u"신규제한일 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"최종거래일 ")
self.Util_WriteLog( u"최종거래일 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"통화코드 ")
self.Util_WriteLog( u"통화코드 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"옵션시장가치 ")
self.Util_WriteLog( u"옵션시장가치 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"옵션행사예약여부 ")
self.Util_WriteLog( u"옵션행사예약여부 : " + strData)
pass
def procAQ0607(self, sTrCode):
nRptCnt = self.HD_CommGetRepeatCnt(sTrCode, -1, u"OutRec1")
self.Util_WriteLog( u"AQ0607 반복회수 : " + str(nRptCnt) )
for i in nRptCnt:
self.Util_WriteLog( u"===============================================" )
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"계좌번호 ")
self.Util_WriteLog( u"계좌번호 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"계좌명 ")
self.Util_WriteLog( u"계좌명 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"통화코드 ")
self.Util_WriteLog( u"통화코드 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"예탁금총액 ")
self.Util_WriteLog( u"예탁금총액 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"예탁금잔액 ")
self.Util_WriteLog( u"예탁금잔액 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"평가금액 ")
self.Util_WriteLog( u"평가금액 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"미수금액 ")
self.Util_WriteLog( u"미수금액 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"결제금액부족 ")
self.Util_WriteLog( u"결제금액부족 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"미결제약정증거금 ")
self.Util_WriteLog( u"미결제약정증거금 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"인출가능금 ")
self.Util_WriteLog( u"인출가능금 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"주문가능금 ")
self.Util_WriteLog( u"주문가능금 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"유지증거금 ")
self.Util_WriteLog( u"유지증거금 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"주문증거금 ")
self.Util_WriteLog( u"주문증거금 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"위탁증거금 ")
self.Util_WriteLog( u"위탁증거금 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"거래수수료 ")
self.Util_WriteLog( u"거래수수료 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"청산손익 ")
self.Util_WriteLog( u"청산손익 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"평가손익 ")
self.Util_WriteLog( u"평가손익 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"미발생 ")
self.Util_WriteLog( u"미발생 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"추가증거금 ")
self.Util_WriteLog( u"추가증거금 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"총계정자산가치 ")
self.Util_WriteLog( u"총계정자산가치 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"외환고시환율 ")
self.Util_WriteLog( u"외환고시환율 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"옵션매매대금 ")
self.Util_WriteLog( u"옵션매매대금 : " + strData)
pass
def proco51000(self, sTrCode):
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"종목코드 ")
self.Util_WriteLog( u"종목코드 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"한글종목명 ")
self.Util_WriteLog( u"한글종목명 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"현재가 ")
self.Util_WriteLog( u"현재가 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"전일대비구분 ")
self.Util_WriteLog( u"전일대비구분 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"전일대비 ")
self.Util_WriteLog( u"전일대비 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"전일대비등락율 ")
self.Util_WriteLog( u"전일대비등락율 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"직전대비구분 ")
self.Util_WriteLog( u"직전대비구분 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"시가 ")
self.Util_WriteLog( u"시가 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"고가 ")
self.Util_WriteLog( u"고가 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"저가 ")
self.Util_WriteLog( u"저가 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"LT고가 ")
self.Util_WriteLog( u"LT고가 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"LT고가일 ")
self.Util_WriteLog( u"LT고가일 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"LT저가 ")
self.Util_WriteLog( u"LT저가 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"LT저가일 ")
self.Util_WriteLog( u"LT저가일 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"영업일 ")
self.Util_WriteLog( u"영업일 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"상승거래량 ")
self.Util_WriteLog( u"상승거래량 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"하락거래량 ")
self.Util_WriteLog( u"하락거래량 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"누적거래량 ")
self.Util_WriteLog( u"누적거래량 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"만기일 ")
self.Util_WriteLog( u"만기일 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"정산가 ")
self.Util_WriteLog( u"정산가 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"정산일 ")
self.Util_WriteLog( u"정산일 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"잔존일수 ")
self.Util_WriteLog( u"잔존일수 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"매도호가 ")
self.Util_WriteLog( u"매도호가 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"매수호가 ")
self.Util_WriteLog( u"매수호가 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"기준가 ")
self.Util_WriteLog( u"기준가 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"전일거래량 ")
self.Util_WriteLog( u"전일거래량 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"전일거래량대비율 ")
self.Util_WriteLog( u"전일거래량대비율 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"체결시간 ")
self.Util_WriteLog( u"체결시간 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"국내시간 ")
self.Util_WriteLog( u"국내시간 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"거래소구분 ")
self.Util_WriteLog( u"거래소구분 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"ETH시작 ")
self.Util_WriteLog( u"ETH시작 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"ETH종료 ")
self.Util_WriteLog( u"ETH종료 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"영문종목명 ")
self.Util_WriteLog( u"영문종목명 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"신규거래제한일 ")
self.Util_WriteLog( u"신규거래제한일 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"최종거래일 ")
self.Util_WriteLog( u"최종거래일 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"호가방식 ")
self.Util_WriteLog( u"호가방식 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"주문제한폭 ")
self.Util_WriteLog( u"주문제한폭 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"실물인수도시작일자")
self.Util_WriteLog( u"실물인수도시작일자: " + strData)
pass
def proco51010(self, sTrCode):
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"종목코드")
self.Util_WriteLog( u"종목코드 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"호가시간")
self.Util_WriteLog( u"호가시간 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"매도호가1")
self.Util_WriteLog( u"매도호가1 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"매수호가1")
self.Util_WriteLog( u"매수호가1 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"매도호가잔량1")
self.Util_WriteLog( u"매도호가잔량1 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"매수호가잔량1")
self.Util_WriteLog( u"매수호가잔량1 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"매도호가건수1")
self.Util_WriteLog( u"매도호가건수1 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"매수호가건수1")
self.Util_WriteLog( u"매수호가건수1 : " + strData)
pass
def proco51210(self, sTrCode):
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"종목코드")
self.Util_WriteLog( u"종목코드 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"최종거래일")
self.Util_WriteLog( u"최종거래일 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"거래소")
self.Util_WriteLog( u"거래소 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"상장일")
self.Util_WriteLog( u"상장일 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"만기일")
self.Util_WriteLog( u"만기일 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"삭제일")
self.Util_WriteLog( u"삭제일 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"정산가")
self.Util_WriteLog( u"정산가 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"정산일")
self.Util_WriteLog( u"정산일 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"가격표시")
self.Util_WriteLog( u"가격표시 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"장시작시간(CME)")
self.Util_WriteLog( u"장시작시간(CME) : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"장종료시간(CME)")
self.Util_WriteLog( u"장종료시간(CME) : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"장시작시간(한국)")
self.Util_WriteLog( u"장시작시간(한국) : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"장종료시간(한국)")
self.Util_WriteLog( u"장종료시간(한국) : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"거래통화")
self.Util_WriteLog( u"거래통화 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"상품구분")
self.Util_WriteLog( u"상품구분 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"매매여부")
self.Util_WriteLog( u"매매여부 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"잔존일수")
self.Util_WriteLog( u"잔존일수 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"ticksize")
self.Util_WriteLog( u"ticksize : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"tickvalue")
self.Util_WriteLog( u"tickvalue : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"최종결제일")
self.Util_WriteLog( u"최종결제일 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"증거금")
self.Util_WriteLog( u"증거금 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"계약단위")
self.Util_WriteLog( u"계약단위 : " + strData)
pass
def procs20001(self, sTrCode):
i = 0
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"단축코드")
self.Util_WriteLog(strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"호가수신시간")
self.Util_WriteLog(strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"현재가")
self.Util_WriteLog(strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"누적거래량")
self.Util_WriteLog(strData)
pass
    def procAQ0101(self, sTrCode):
        """Parse the account list returned by HD_CommGetAccInfo and
        sort each account into the domestic (FO), overseas (FR) or FX
        dictionaries, then refresh the account combo.

        Layout of the returned string: a 5-digit account count followed
        by fixed-width records of 11-byte account number + 30-byte
        account name + 1-byte account type ("9" domestic, "1" overseas,
        "2" FX).

        NOTE(review): this body uses ``unicode(...)`` and byte-string
        slicing, i.e. it is Python-2-only as written.
        """
        strData = self.HD_CommGetAccInfo()
        # First 5 characters are the account count.
        nAcctCnt = int(strData[0:5])
        nLenAcctNo = 11   # fixed width of the account number
        nLenAcctNm = 30   # fixed width of the account name
        nLenAcctGb = 1    # fixed width of the account-type flag
        # Re-encode the payload to cp949 bytes so the fixed BYTE widths
        # line up with multi-byte Korean account names.
        strAcctInfo = str(strData[5:]).encode('cp949')
        nLenAccInfo = nLenAcctNo + nLenAcctNm + nLenAcctGb
        #strSelAccGb = "1" #1: overseas, 2: FX, 9: domestic
        #if ( self.BtnRadio1.isChecked() == True ):
        #    strSelAccGb = "9" #domestic
        #elif ( self.BtnRadio2.isChecked() == True ):
        #    strSelAccGb = "1" #overseas
        #elif ( self.BtnRadio3.isChecked() == True ):
        #    strSelAccGb = "2" #FX
        for i in range(0, nAcctCnt):
            # Byte offset of this fixed-width record.
            nStPos = (i*nLenAccInfo)
            strAcctNo = strAcctInfo[nStPos :nStPos+nLenAcctNo]
            strAcctNm = strAcctInfo[nStPos+(nLenAcctNo) :nStPos+nLenAcctNo+nLenAcctNm]
            strAcctGb = strAcctInfo[nStPos+(nLenAcctNo+nLenAcctNm):nStPos+nLenAcctNo+nLenAcctNm+nLenAcctGb]
            # Name is cp949-encoded Korean text; the account number is
            # ASCII digits (presumably — decoded as utf-8 below, which
            # only works while it stays ASCII).
            strAcctNm = unicode(strAcctNm, 'cp949')
            strAcctNo = unicode(strAcctNo.strip(), 'utf-8')
            strAcctNm = str(strAcctNm.strip())
            tmpDic = {strAcctNo:strAcctNm}
            # Route the account into the matching type dictionary.
            if ( strAcctGb == "9" ):
                self.m_AccListFO.update(tmpDic)
            elif(strAcctGb == "1" ):
                self.m_AccListFR.update(tmpDic)
            elif(strAcctGb == "2" ):
                self.m_AccListFX.update(tmpDic)
            self.Util_WriteLog(strAcctNo + "," + strAcctNm + "," + strAcctGb)
        # Repopulate the UI account list for the selected account type.
        self.OnRadioAcctTpChange()
        pass
def procDQ0242(self, sTrCode):
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"계좌번호")
self.Util_WriteLog( u"계좌번호 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"예탁금액-총액")
self.Util_WriteLog( u"예탁금액-총액 :" + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"예탁금액-현금")
self.Util_WriteLog( u"예탁금액-현금 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"예탁금액-대용")
self.Util_WriteLog( u"예탁금액-대용 :" + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"예탁외화")
self.Util_WriteLog( u"예탁외화 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"당일손익")
self.Util_WriteLog( u"당일손익 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"위탁수수료")
self.Util_WriteLog( u"위탁수수료 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"당일순손익")
self.Util_WriteLog( u"당일순손익 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"평가예탁총액")
self.Util_WriteLog( u"평가예탁총액 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"순자산-총평가액")
self.Util_WriteLog( u"순자산-총평가액 : " + strData)
pass
def procAQ0450(self, sTrCode):
nRptCnt = self.HD_CommGetRepeatCnt(sTrCode, -1, u"OutRec1")
self.Util_WriteLog( u"AQ0450 조회개수 : " + str(nRptCnt) )
for i in nRptCnt:
self.Util_WriteLog( u"===============================================" )
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"주문번호")
self.Util_WriteLog(u"주문번호 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"계좌번호")
self.Util_WriteLog(u"계좌번호 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"계좌명")
self.Util_WriteLog(u"계좌명 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"종목코드")
self.Util_WriteLog(u"종목코드 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"매매구분")
self.Util_WriteLog(u"매매구분 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"주문가격")
self.Util_WriteLog(u"주문가격 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"주문수량")
self.Util_WriteLog(u"주문수량 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"체결수량")
self.Util_WriteLog(u"체결수량 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"잔량")
self.Util_WriteLog(u"잔량 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"주문유형")
self.Util_WriteLog(u"주문유형 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"STOP 가격")
self.Util_WriteLog(u"STOP 가격 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"주문시간")
self.Util_WriteLog(u"주문시간 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"주문자 ")
self.Util_WriteLog(u"주문자 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"통신구분")
self.Util_WriteLog(u"통신구분 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"전략구분")
self.Util_WriteLog(u"전략구분 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"최초원주문번호")
self.Util_WriteLog(u"최초원주문번호 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"주문일자")
self.Util_WriteLog(u"주문일자 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"그룹주문번호")
self.Util_WriteLog(u"그룹주문번호 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"가격조건")
self.Util_WriteLog(u"가격조건 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"체결조건")
self.Util_WriteLog(u"체결조건 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"GTD일자")
self.Util_WriteLog(u"GTD일자 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"IOC최소체결수량")
self.Util_WriteLog(u"IOC최소체결수량 : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"원주문번호")
self.Util_WriteLog(u"원주문번호 : " + strData)
pass
def procDQ0104(self, sTrCode):
    """Log every row of the DQ0104 reply (order modify/cancel details,
    judging by the field names -- TODO confirm against the API spec).

    BUGFIX: the original looped `for i in nRptCnt:`, iterating over the
    integer repeat count itself, which raises TypeError; use range().
    The per-field calls are also deduplicated into a field-name table --
    the logged text is unchanged.
    """
    nRptCnt = self.HD_CommGetRepeatCnt(sTrCode, -1, u"OutRec1")
    self.Util_WriteLog(u"DQ0104 조회개수 : " + str(nRptCnt))
    # OutRec1 field names, logged in the original order.
    fields = (u"계좌번호", u"계좌명", u"주문번호", u"종목코드", u"매매구분",
              u"주문가격", u"주문수량", u"정정수량", u"취소수량", u"체결수량",
              u"잔량", u"원주문번호", u"최초원주문번호", u"통신주문구분",
              u"주문전략구분", u"주문조작구분", u"거래소접수시간", u"작업사원",
              u"그룹주문번호", u"딜러번호", u"딜러명")
    for i in range(nRptCnt):
        self.Util_WriteLog(u"===============================================")
        for field in fields:
            strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, field)
            self.Util_WriteLog(field + u" : " + strData)
def procDQ0107(self, sTrCode):
    """Log every row of the DQ0107 reply (fill/execution details, judging
    by the field names -- TODO confirm against the API spec).

    BUGFIX: the original looped `for i in nRptCnt:`, iterating over the
    integer repeat count itself, which raises TypeError; use range().
    The repeated get/log pairs are folded into a field-name table -- the
    logged text is unchanged (including the odd u"딜러좌명" field name).
    """
    nRptCnt = self.HD_CommGetRepeatCnt(sTrCode, -1, u"OutRec1")
    self.Util_WriteLog(u"DQ0107 조회개수 : " + str(nRptCnt))
    fields = (u"계좌번호", u"계좌명", u"주문번호", u"종목코드", u"매매구분",
              u"체결가격", u"체결수량", u"체결시간", u"전략구분", u"통신구분",
              u"딜러번호", u"딜러좌명")
    for i in range(nRptCnt):
        self.Util_WriteLog(u"===============================================")
        for field in fields:
            strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, field)
            self.Util_WriteLog(field + u" : " + strData)
def procDQ0110(self, sTrCode):
    """Log every row of the DQ0110 reply (open-position / balance details,
    judging by the field names -- TODO confirm against the API spec).

    BUGFIX: the original looped `for i in nRptCnt:`, iterating over the
    integer repeat count itself, which raises TypeError; use range().
    The repeated get/log pairs are folded into a field-name table -- the
    logged text is unchanged (note u"미체결 수량" keeps its embedded space).
    """
    nRptCnt = self.HD_CommGetRepeatCnt(sTrCode, -1, u"OutRec1")
    self.Util_WriteLog(u"DQ0110 조회개수 : " + str(nRptCnt))
    fields = (u"계좌번호", u"계좌명", u"종목", u"매매구분",
              u"전일미결제수량", u"당일미결제수량", u"미체결 수량",
              u"평균단가", u"장부단가", u"당일미결제약정금액",
              u"당일체결수량", u"당일체결금액", u"행사신청수량",
              u"행사배정수량", u"행사거부수량", u"만기수량",
              u"딜러번호", u"딜러명")
    for i in range(nRptCnt):
        self.Util_WriteLog(u"===============================================")
        for field in fields:
            strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, field)
            self.Util_WriteLog(field + u" : " + strData)
def OnGetMsgWithRqId(self, nRqID, strErrCode, strErrMsg):
    """Event handler: log a server message tied to request id nRqID.

    Removed the dead local `strRecv` -- it was built but never used, and
    its format string mislabeled the error code as a TR name.
    """
    strRecvMsg = "메시지수신 UniqueID = [%s] ErrorCode=[%s] ErrorMessage=[%s]" % (str(nRqID), unicode(strErrCode), unicode(strErrMsg))
    self.Util_WriteLog(strRecvMsg)
if __name__ == "__main__":
    import sys

    # Stand up the Qt application, attach the generated UI plus the
    # HDF OCX control to a main window, then enter the Qt event loop.
    application = QtGui.QApplication(sys.argv)
    main_window = QtGui.QMainWindow()
    window_ui = Ui_MainWindow()
    window_ui.setupUi(main_window)
    window_ui.SetupHDFOcx(main_window)
    main_window.show()
    sys.exit(application.exec_())
|
[
"jay.bwkim@gmail.com"
] |
jay.bwkim@gmail.com
|
c02698bcbb5677d5aa1cdf687d66869a34eea59c
|
ca7aa979e7059467e158830b76673f5b77a0f5a3
|
/Python_codes/p02742/s024664971.py
|
37251941a04a71608f69d756b2f8eb6bf24e8a52
|
[] |
no_license
|
Aasthaengg/IBMdataset
|
7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901
|
f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8
|
refs/heads/main
| 2023-04-22T10:22:44.763102
| 2021-05-13T17:27:22
| 2021-05-13T17:27:22
| 367,112,348
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 259
|
py
|
# Read board dimensions H x W and print how many cells can be covered
# (AtCoder-style one-shot solution; degenerate 1-wide boards yield 1).
H, W = map(int, input().split())
if 1 in (H, W):
    result = 1
else:
    # Two cells per full 2x2 tile, plus the half-covered odd edges.
    result = 2 * (H // 2) * (W // 2)
    if H % 2 == 1:
        result += W // 2
    if W % 2 == 1:
        result += H // 2
    if H % 2 == 1 and W % 2 == 1:
        result += 1
print(result)
|
[
"66529651+Aastha2104@users.noreply.github.com"
] |
66529651+Aastha2104@users.noreply.github.com
|
170427ab7a1e4482bd8912d41bdaa5ebbaf1c595
|
fe969d059327d767c9eb442a441395fd5e389d6a
|
/OriginalCode-v1/create_graphs.py
|
09812b2133780e5e29a5211411aabf3c7a09eb36
|
[
"MIT"
] |
permissive
|
Wayne-Bai/Graph
|
ba514418261189a89801ff10839fbfb651d98dc7
|
4b563c824d946471393a1c404810d6f39a49f7fb
|
refs/heads/master
| 2022-12-18T20:32:06.797842
| 2020-09-16T14:12:50
| 2020-09-16T14:12:50
| 277,702,901
| 3
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,766
|
py
|
import networkx as nx
import numpy as np
from utils import *
from data import *
from data_process import Graph_load_batch as ast_graph_load_batch
def create(args):
    """Build and return the list of training graphs selected by args.graph_type.

    Side effects on `args`: sets args.max_prev_node for every dataset, and
    for the AST-style datasets also fills max_node_feature_num,
    edge_feature_output_dim and node_feature_input_dim when unset.
    """
    ### load datasets
    graphs=[]
    # synthetic graphs
    if args.graph_type=='ladder':
        graphs = []
        for i in range(100, 201):
            graphs.append(nx.ladder_graph(i))
        args.max_prev_node = 10
    elif args.graph_type=='ladder_small':
        graphs = []
        for i in range(2, 11):
            graphs.append(nx.ladder_graph(i))
        args.max_prev_node = 10
    elif args.graph_type=='tree':
        graphs = []
        for i in range(2,5):
            for j in range(3,5):
                graphs.append(nx.balanced_tree(i,j))
        args.max_prev_node = 256
    elif args.graph_type=='caveman':
        # graphs = []
        # for i in range(5,10):
        #     for j in range(5,25):
        #         for k in range(5):
        #             graphs.append(nx.relaxed_caveman_graph(i, j, p=0.1))
        graphs = []
        # caveman_special presumably comes from utils (star import) -- TODO confirm
        for i in range(2, 3):
            for j in range(30, 81):
                for k in range(10):
                    graphs.append(caveman_special(i,j, p_edge=0.3))
        args.max_prev_node = 100
    elif args.graph_type=='caveman_small':
        # graphs = []
        # for i in range(2,5):
        #     for j in range(2,6):
        #         for k in range(10):
        #             graphs.append(nx.relaxed_caveman_graph(i, j, p=0.1))
        graphs = []
        for i in range(2, 3):
            for j in range(6, 11):
                for k in range(20):
                    graphs.append(caveman_special(i, j, p_edge=0.8)) # default 0.8
        args.max_prev_node = 20
    elif args.graph_type=='caveman_small_single':
        # graphs = []
        # for i in range(2,5):
        #     for j in range(2,6):
        #         for k in range(10):
        #             graphs.append(nx.relaxed_caveman_graph(i, j, p=0.1))
        graphs = []
        for i in range(2, 3):
            for j in range(8, 9):
                for k in range(100):
                    graphs.append(caveman_special(i, j, p_edge=0.5))
        args.max_prev_node = 20
    elif args.graph_type.startswith('community'):
        # Trailing character of the type name encodes the community count,
        # e.g. 'community4' -> 4 communities.
        num_communities = int(args.graph_type[-1])
        print('Creating dataset with ', num_communities, ' communities')
        c_sizes = np.random.choice([12, 13, 14, 15, 16, 17], num_communities)
        #c_sizes = [15] * num_communities
        for k in range(3000):
            graphs.append(n_community(c_sizes, p_inter=0.01))
        args.max_prev_node = 80
    elif args.graph_type=='grid':
        graphs = []
        for i in range(10,20):
            for j in range(10,20):
                graphs.append(nx.grid_2d_graph(i,j))
        args.max_prev_node = 40
    elif args.graph_type=='grid_small':
        graphs = []
        for i in range(2,5):
            for j in range(2,6):
                graphs.append(nx.grid_2d_graph(i,j))
        args.max_prev_node = 15
    elif args.graph_type=='barabasi':
        graphs = []
        for i in range(100,200):
            for j in range(4,5):
                for k in range(5):
                    graphs.append(nx.barabasi_albert_graph(i,j))
        args.max_prev_node = 130
    elif args.graph_type=='barabasi_small':
        graphs = []
        for i in range(4,21):
            for j in range(3,4):
                for k in range(10):
                    graphs.append(nx.barabasi_albert_graph(i,j))
        args.max_prev_node = 20
    elif args.graph_type=='grid_big':
        graphs = []
        for i in range(36, 46):
            for j in range(36, 46):
                graphs.append(nx.grid_2d_graph(i, j))
        args.max_prev_node = 90
    elif 'barabasi_noise' in args.graph_type:
        graphs = []
        for i in range(100,101):
            for j in range(4,5):
                for k in range(500):
                    graphs.append(nx.barabasi_albert_graph(i,j))
        # Perturb edges with probability args.noise/10 (perturb_new from utils).
        graphs = perturb_new(graphs,p=args.noise/10.0)
        args.max_prev_node = 99
    # real graphs
    elif args.graph_type == 'enzymes':
        graphs= Graph_load_batch(min_num_nodes=10, name='ENZYMES')
        args.max_prev_node = 25
    elif args.graph_type == 'enzymes_small':
        graphs_raw = Graph_load_batch(min_num_nodes=10, name='ENZYMES')
        graphs = []
        for G in graphs_raw:
            if G.number_of_nodes()<=20:
                graphs.append(G)
        args.max_prev_node = 15
    elif args.graph_type == 'protein':
        graphs = Graph_load_batch(min_num_nodes=20, name='PROTEINS_full')
        args.max_prev_node = 80
    elif args.graph_type == 'DD':
        graphs = Graph_load_batch(min_num_nodes=100, max_num_nodes=500, name='DD',node_attributes=False,graph_labels=True)
        args.max_prev_node = 230
    elif args.graph_type == 'citeseer':
        # Take 3-hop ego networks of every node in the giant component.
        _, _, G = Graph_load(dataset='citeseer')
        G = max(nx.connected_component_subgraphs(G), key=len)
        G = nx.convert_node_labels_to_integers(G)
        graphs = []
        for i in range(G.number_of_nodes()):
            G_ego = nx.ego_graph(G, i, radius=3)
            if G_ego.number_of_nodes() >= 50 and (G_ego.number_of_nodes() <= 400):
                graphs.append(G_ego)
        args.max_prev_node = 250
    elif args.graph_type == 'citeseer_small':
        _, _, G = Graph_load(dataset='citeseer')
        G = max(nx.connected_component_subgraphs(G), key=len)
        G = nx.convert_node_labels_to_integers(G)
        graphs = []
        for i in range(G.number_of_nodes()):
            G_ego = nx.ego_graph(G, i, radius=1)
            if (G_ego.number_of_nodes() >= 4) and (G_ego.number_of_nodes() <= 20):
                graphs.append(G_ego)
        # shuffle presumably from utils/random star import -- TODO confirm
        shuffle(graphs)
        graphs = graphs[0:200]
        args.max_prev_node = 15
    elif args.graph_type == 'AST' or args.graph_type == '200Graphs':
        graphs = ast_graph_load_batch(min_num_nodes=10, name=args.graph_type)
        # update edge_feature_output_dim
        if not args.max_node_feature_num:
            # print(type(graphs[1].nodes._nodes), graphs[1].nodes._nodes.keys())
            # HACK: reaches into networkx internals (_atlas) to count the
            # attribute keys of node 1 of the second graph.
            args.max_node_feature_num = len(list(graphs[1].nodes._nodes._atlas[1].keys())) # now equals to 28
        args.max_prev_node = 120
        # TODO: args.max_edge_feature_num update
        if not args.edge_feature_output_dim:
            args.edge_feature_output_dim = args.max_edge_feature_num + 2 #int(args.max_prev_node * args.max_edge_feature_num)
            # 2 indicates two directions of edges
        if not args.node_feature_input_dim:
            args.node_feature_input_dim = args.max_node_feature_num + args.max_prev_node + args.edge_feature_output_dim
            # args.node_feature_input_dim = args.max_prev_node
    return graphs
|
[
"bwh.buaa@gmail.com"
] |
bwh.buaa@gmail.com
|
668cd341318eeaefb3cfccd3c9694710dc5d5f46
|
6dec3256279f73c563b116a8fa44900c4b51a4ce
|
/scrapy_demo/scrapy_demo/pipelines.py
|
736cf257e01c3b8c355daf21b7e4d34a1cf2bf32
|
[] |
no_license
|
gannonk08/scrapy-demo
|
6802cfbe109ff428f58c5e9957ac109bfed282a0
|
86939c56b83c9142ac129f02769770128d1f6fc6
|
refs/heads/master
| 2020-07-05T22:50:47.026269
| 2016-11-17T21:09:01
| 2016-11-17T21:09:01
| 73,976,758
| 1
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,052
|
py
|
# -*- coding: utf-8 -*-
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html
import psycopg2
import logging
from spiders.items import awayTeamRushItem
from scrapy.conf import settings
from scrapy.exceptions import DropItem
class ScrapyDemoPipeline(object):
def __init__(self):
self.connection = psycopg2.connect(host='localhost', database='scraping_demo', user='Gannon')
self.cursor = self.connection.cursor()
def process_item(self, item, spider):
try:
if type(item) is awayTeamRushItem:
table = """awayteamrush"""
self.cursor.execute("""INSERT INTO """ + table + """ (rusher, car, yds, avg, td, longest) VALUES(%s, %s, %s, %s, %s, %s)""", (item.get('rusher'), item.get('car'), item.get('yds'), item.get('avg'), item.get('td'), item.get('longest')))
self.connection.commit()
self.cursor.fetchall()
except psycopg2.DatabaseError, e:
print "Error: %s" % e
return item
|
[
"gannonk08@gmail.com"
] |
gannonk08@gmail.com
|
8f296f11eca9ea2bac005150d59334d961aca574
|
2e423a357321278e08fb009812f5fd2f794f66e5
|
/users/migrations/0001_initial.py
|
44c4896eefc330924229af13f6fc98432e429d7d
|
[] |
no_license
|
rafnixg/platzigram
|
2a0f5d4c5994be31401408fd3176f57beb1da98e
|
e51a9d92c14c23d0de24cdda78ce9683955c43e3
|
refs/heads/master
| 2022-04-27T12:04:21.549548
| 2020-01-17T21:26:18
| 2020-01-17T21:26:18
| 206,177,266
| 0
| 0
| null | 2022-04-22T22:25:27
| 2019-09-03T21:41:51
|
Python
|
UTF-8
|
Python
| false
| false
| 1,116
|
py
|
# Generated by Django 2.2.5 on 2019-09-04 17:42
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Initial migration: creates the Profile model with a one-to-one link
    to the project's user model (cascade-deleted with the user).
    """

    initial = True

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='Profile',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('website', models.URLField(blank=True)),
                ('biography', models.TextField(blank=True)),
                ('phone_number', models.CharField(blank=True, max_length=20)),
                ('picture', models.ImageField(blank=True, null=True, upload_to='users/pictures')),
                # created/modified are set automatically on insert/update.
                ('created', models.DateTimeField(auto_now_add=True)),
                ('modified', models.DateTimeField(auto_now=True)),
                ('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
    ]
|
[
"rafnixg@gmail.com"
] |
rafnixg@gmail.com
|
10a0f0560d4f34d9937c65d9b6b1f0b3ba125dcb
|
4d8a66ebd98d18e407c20c93d4268b85f5548ed4
|
/public/http_request.py
|
9c15a12525a537248fd19e00abbaff92638d1ed6
|
[] |
no_license
|
2295256562/API_Project
|
57cc40a9c79e855aa30a25db820bbffb0add6410
|
66b72690d765ed96a9d8ae72debeaba4fe7a5073
|
refs/heads/master
| 2022-12-14T10:20:32.753852
| 2019-10-10T13:40:22
| 2019-10-10T13:40:22
| 213,394,060
| 0
| 0
| null | 2022-12-08T06:42:03
| 2019-10-07T13:42:25
|
Python
|
UTF-8
|
Python
| false
| false
| 1,913
|
py
|
import copy
import requests
from config.globals import headers
from config.globals import login_url
from functools import lru_cache
class HttpRequest:
    """Thin helper around `requests` that attaches a login token header.

    BUGFIX: `http_request` previously required a 4th positional argument
    (`header`) that it never used, so the 3-argument call in __main__
    raised TypeError. It now defaults to None (backward compatible).
    """

    def __init__(self):
        # Header template; 'token' is filled in per call by headers().
        self.headers_teml = {
            'content-type': 'application/json',
            'token': None
        }

    @lru_cache()
    def _login(self):
        """Return the auth token (stubbed; the real POST is commented out).

        NOTE(review): lru_cache on an instance method keys on `self` and
        keeps every instance alive for the cache's lifetime (ruff B019).
        Kept as-is to preserve behavior; consider a per-instance cache.

        :return: 返回token
        """
        data = {
            "username": "",
            "password": ""
        }
        # url = login_url
        # r = requests.post(url=url, json=data, headers=headers)
        x = "ldddwlfwfwelof"
        return x

    def headers(self):
        """Return a fresh header dict with the login token filled in."""
        headers = copy.deepcopy(self.headers_teml)
        headers.update({'token': self._login()})
        return headers

    def http_request(self, url, data, http_method, header=None):
        """
        http 请求基础类
        :param url: 请求的url
        :param data: 请求数据 (a Python-literal string, eval'd below) or None
        :param http_method: 请求方式 GET、POST
        :param header: accepted for compatibility but currently unused
        :return: res
        :raises NameError: for any method other than GET/POST
        """
        # headers = self.headers()
        try:
            if http_method.upper() == 'GET':
                if data is not None:
                    # SECURITY: eval() on `data` executes arbitrary code --
                    # flagged for review; ast.literal_eval or json.loads
                    # would be safer if callers allow it.
                    res = requests.get(url, eval(data))
                else:
                    res = requests.get(url)
            elif http_method.upper() == 'POST':
                if data is not None:
                    res = requests.post(url, eval(data))
                else:
                    res = requests.post(url)
            else:
                raise NameError("你输入的请求方式不对, 请你输入GET或POST")
        except Exception as e:
            raise e
        return res
if __name__ == '__main__':
    # BUGFIX: http_request() takes four arguments (url, data, method, header);
    # the original 3-argument call raised TypeError. Pass header explicitly.
    C = HttpRequest().http_request('http://127.0.0.1:8000/api/reg', '{"username":"123425653","password":"1111"}',
                                   'POST', None)
    print(C.headers)
|
[
"xinman.kuang@daddylab.com"
] |
xinman.kuang@daddylab.com
|
5fb4b852b0812c437735609954ef8693db355baf
|
75802efe7ac5e39d12c90b1ab91028f7a83f3623
|
/ex3_2_nonlinear_model.py
|
5527d8f1c41efad01b46792a6808fa3434ad63ef
|
[] |
no_license
|
AOE-khkhan/PytorchZeroToAll
|
fbd5ecba967a4d7316667c028af37340518c451d
|
f8d9a392e44197de6c593dfd4b32cb192d3a6ba9
|
refs/heads/master
| 2020-09-02T12:21:32.800832
| 2018-05-09T12:38:52
| 2018-05-09T12:38:52
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,068
|
py
|
# Manual gradient-descent fit of a quadratic model y = w2*x^2 + w1*x + b.
# The targets fit y = 3x^2 + 2x + 1 exactly (6, 17, 34).
x_data = [1.0, 2.0, 3.0]
y_data = [6.0, 17.0, 34.0]

w2, w1, b = 1.0, 1.0, 0.0  # a random guess: random value
lr = 1e-2  # a learning rate
num_epochs = 100  # number of epochs


# our model forward pass
def forward(x):
    """Evaluate the quadratic model at x using the current globals."""
    return x * x * w2 + x * w1 + b


# Loss function
def loss(x, y):
    """Squared error between the model prediction at x and the target y."""
    y_pred = forward(x)
    return (y_pred - y) * (y_pred - y)


# compute gradient
def gradient(x, y):
    """Analytic gradients of the squared error w.r.t. (w2, w1, b).

    NOTE(review): db omits the factor of 2 (dL/db = 2*(pred - y)), which
    effectively halves b's learning rate. Kept as-is to preserve the
    original training trajectory -- confirm whether this was intended.
    """
    dw2 = 2 * x ** 2 * (forward(x) - y)
    dw1 = 2 * x * (forward(x) - y)
    db = 1 * (forward(x) - y)
    return dw2, dw1, db


# Before training
print('predict (before training):', 4, forward(4))

# Training loop: SGD, one sample at a time.
for epoch in range(num_epochs):
    l_ = None
    for x_val, y_val in zip(x_data, y_data):
        dw2, dw1, db = gradient(x_val, y_val)
        w2 += -lr * dw2
        w1 += -lr * dw1
        b += -lr * db
        l_ = loss(x_val, y_val)
    # BUGFIX: typo "probress" -> "progress" in the log message.
    print("progress: {}, w2 = {}, w1 = {}, b = {}, loss = {}".format(epoch, w2, w1, b, l_))

# After training
print("predict (after training)", "4 hours:", forward(4))
|
[
"sbkim0407@gmail.com"
] |
sbkim0407@gmail.com
|
1537c16957f6dab218fa76a979c37a5ba45f9468
|
0b4b1dd3e5652572571128750f31d088b28b19ad
|
/blog/views.py
|
926c879a7599aea1421bbf3987c9a362189fe6b7
|
[] |
no_license
|
cozynn/djangoProject
|
66d74d70efb8e0efeb4a2a21146e4a4473ab50e0
|
1bf95b7b906ce030616e994c091c9693e34c30ab
|
refs/heads/master
| 2023-02-02T19:07:37.276396
| 2020-12-20T19:23:02
| 2020-12-20T19:23:02
| 323,114,124
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,615
|
py
|
from django.shortcuts import render, get_object_or_404, redirect
from django.utils import timezone
from .models import Post
from .forms import PostForm
# Create your views here.
def post_list(request):
    """Render all published posts, newest first."""
    published = Post.objects.filter(
        published_date__lte=timezone.now()
    ).order_by('-published_date')
    return render(request, 'blog/post_list.html', {'posts': published})
def post_detail(request, pk):
    """Render a single post; 404 when no post has this pk."""
    found = get_object_or_404(Post, pk=pk)
    return render(request, 'blog/post_detail.html', {'post': found})
def post_new(request):
    """Show the post form; on valid POST, publish as the current user."""
    if request.method != "POST":
        # Initial GET: render an empty form.
        return render(request, 'blog/post_edit.html', {'form': PostForm()})
    form = PostForm(request.POST)
    if form.is_valid():
        new_post = form.save(commit=False)
        new_post.author = request.user
        new_post.published_date = timezone.now()
        new_post.save()
        return redirect('blog:post_detail', pk=new_post.pk)
    # Invalid POST: re-render with the bound form so errors show.
    return render(request, 'blog/post_edit.html', {'form': form})
def post_edit(request, pk):
    """Edit an existing post; on valid POST, republish as the current user."""
    existing = get_object_or_404(Post, pk=pk)
    if request.method != "POST":
        # Initial GET: render the form pre-filled from the instance.
        return render(request, 'blog/post_edit.html',
                      {'form': PostForm(instance=existing)})
    form = PostForm(request.POST, instance=existing)
    if form.is_valid():
        edited = form.save(commit=False)
        edited.author = request.user
        edited.published_date = timezone.now()
        edited.save()
        return redirect('blog:post_detail', pk=edited.pk)
    # Invalid POST: re-render with the bound form so errors show.
    return render(request, 'blog/post_edit.html', {'form': form})
def post_delete(request, pk):
    """Delete the post and return to the list.

    Changes: removed the dead `post.author = request.user` assignment (the
    object was deleted immediately, so it was never saved), and switched to
    get_object_or_404 for consistency with the other views (missing pk now
    yields 404 instead of an unhandled DoesNotExist).

    NOTE(review): there is no ownership/permission check -- any request can
    delete any post; confirm whether only the author should be allowed.
    """
    post = get_object_or_404(Post, pk=pk)
    post.delete()
    return redirect('blog:post_list')
|
[
"cozynn@naver.com"
] |
cozynn@naver.com
|
7587bb7173fe75660b164c360fe327e5f35df3c5
|
6e8b2c9cd80af70fc070085a07d8f34216d16ec1
|
/Task 25/main.py
|
7e8a4623cd9610f3db37fea47aca067d85871129
|
[] |
no_license
|
tregubchenko-dima/modern_tech_programming
|
65957a8da63903e17f8421c84379d3312ece3521
|
1aa824f9f5ef32772cfc61fa4d53ab1f898594fb
|
refs/heads/master
| 2023-04-09T04:54:23.166673
| 2021-04-27T18:51:28
| 2021-04-27T18:51:28
| 352,748,720
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,567
|
py
|
from random import randint
from math import sqrt
def BozoSort(values, asc=True):
    """Bozo sort: randomly swap two elements until the list is ordered.

    Mutates `values` in place and returns it. A list of lists is flattened
    (row by row) before sorting. `asc` selects ascending vs descending
    order. Terminates with probability 1 via recursion.
    """
    if isinstance(values[0], list):
        # Flatten one level: concatenate the rows in order.
        values = [item for row in values for item in row]
    # Two independent random positions (may coincide), swapped in place.
    pos_a = randint(0, len(values) - 1)
    pos_b = randint(0, len(values) - 1)
    values[pos_a], values[pos_b] = values[pos_b], values[pos_a]
    pairs = list(zip(values, values[1:]))
    if asc:
        ordered = all(left <= right for left, right in pairs)
    else:
        ordered = all(left >= right for left, right in pairs)
    if ordered:
        return values
    return BozoSort(values, asc)
# Driver: read n (4..100) and n integers, then demonstrate BozoSort on the
# full list, a sqrt(n)-sized list of row slices, and the first 3 elements,
# each in ascending and descending order. Prompts/messages are Russian.
try:
    n = int(input('Введите количество чисел n: '))
    if 3 < n <= 100:
        values = list(map(int, input(f'Введите числа: ').split(' ')))
        if n != len(values):
            # Count mismatch between n and the numbers actually entered.
            print('Неверное количество чисел')
            exit(1)
        print(*BozoSort(values))
        print(*BozoSort(values, False))
        # Build a size x size matrix of consecutive slices (size = floor(sqrt(n)));
        # BozoSort flattens it back before sorting.
        size = int(sqrt(n))
        arr = []
        for i in range(0, size):
            arr.append(values[i*size:(i*size)+size])
        print(*BozoSort(arr))
        print(*BozoSort(arr, False))
        # Slices copy, so sorting the first three does not disturb `values`.
        print(*BozoSort(values[0:3]))
        print(*BozoSort(values[0:3], False))
    else:
        # n out of the accepted range.
        print('Неверное количество чисел')
        exit(1)
except ValueError:
    # Non-integer input anywhere above lands here.
    print("Вы ввели недопустимое значение")
    exit(1)
|
[
"tregubchenko2@yandex.ru"
] |
tregubchenko2@yandex.ru
|
7436c86cf9c2ad83dacb6291b7f64493ca90f126
|
56e17984c230be6ee841264d65d7ba6ce949ceb1
|
/DateAndTime/Calendar_Module.py
|
a2a0fd44eca638970bd921c1fc7b96f6358a26ef
|
[] |
no_license
|
Shamsullo/HackerRank_Python-
|
203b4277d7eb1de2236ff983415344a1409277d7
|
1f231f04cdd1cb0cd4e6a65318113ed15f1e47bc
|
refs/heads/master
| 2021-05-15T05:45:58.885667
| 2018-01-11T18:36:28
| 2018-01-11T18:36:28
| 115,595,144
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 262
|
py
|
import calendar, datetime

# HackerRank-style "Day of the Week": read "MM DD YYYY" and print the
# weekday name in upper case.
input_date = input()
month, day, year = map(int, input_date.split())
day_of_the_week = datetime.date(year, month, day)
# BUGFIX: the original used the Python 2 print statement, which is a
# SyntaxError under Python 3 (and under Python 2 the bare input() above
# would eval the line and crash) -- hence the author's "isn't giving an
# expected result" note. print() as a function fixes it for Python 3.
print(calendar.day_name[day_of_the_week.weekday()].upper())
|
[
"noreply@github.com"
] |
noreply@github.com
|
9378b601770bd4c71b6a616ad9a7a895ad48a7b2
|
6b2a8dd202fdce77c971c412717e305e1caaac51
|
/solutions_5658571765186560_1/Python/StefanPochmann/D.py
|
feca86f2ed13ba681c7c3230c60ce03f3e2c21f7
|
[] |
no_license
|
alexandraback/datacollection
|
0bc67a9ace00abbc843f4912562f3a064992e0e9
|
076a7bc7693f3abf07bfdbdac838cb4ef65ccfcf
|
refs/heads/master
| 2021-01-24T18:27:24.417992
| 2017-05-23T09:23:38
| 2017-05-23T09:23:38
| 84,313,442
| 2
| 4
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 500
|
py
|
#f = open('D.in')
#def input():
#    return next(f)

# Appears to be Google Code Jam 2015 Qualification problem D ("Ominous
# Omino"): for each case, decide whether Gabriel can always tile an R x C
# board with X-ominoes (GABRIEL) or Richard can pick an untileable piece
# (RICHARD) -- TODO confirm against the official problem statement.
T = int(input())
for x in range(1, T + 1):
    X, R, C = map(int, input().split())
    A = R * C                 # total board area
    s, S = sorted((R, C))     # s = shorter side (S, the longer, is unused)
    # Gabriel needs the area to be divisible by X, plus a case analysis on
    # the omino size vs the board's shorter side (and area for X == 5).
    gabriel = A % X == 0 and \
              (X == 1 or
               X == 2 or
               X == 3 and s >= 2 or
               X == 4 and s >= 3 or
               X == 5 and s >= 3 and A > 15 or
               X == 6 and s >= 4)
    print('Case #{}: {}'.format(x, 'GABRIEL' if gabriel else 'RICHARD'))
|
[
"eewestman@gmail.com"
] |
eewestman@gmail.com
|
1ee56e00fc1f6518207dde8d7e2c4ad70939ccb7
|
62b90959763f40954a7c6270bfb0529b536b2888
|
/user/forms.py
|
e3f2e1e1a1c2c677d176cbff33084fa0620bcb3a
|
[
"MIT"
] |
permissive
|
thiagosouzalink/blogphoto_Django
|
68698c4fc684f0ba1d9dde795a07f72df32ead38
|
7d09f44b196897c4d31fff2eff8d2a164e44db27
|
refs/heads/master
| 2023-02-20T20:32:00.527084
| 2021-01-25T15:16:07
| 2021-01-25T15:16:07
| 332,782,817
| 3
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,226
|
py
|
from django import forms
from django.contrib.auth.forms import UserCreationForm, UserChangeForm
from .models import CustomUser
class UserForm(forms.ModelForm):
    """Registration form for CustomUser with password confirmation.

    Field declaration order matters: it is the rendering order. Labels,
    error messages and help texts are user-facing Portuguese strings.
    """

    username = forms.CharField(
        label='Usuário',
        error_messages= {
            'invalid': 'Nome de usuário inválido, informe apenas letras, números ou @, ., +, -, _',
            'max_length': 'Você excedeu o limite de caracteres.',
            'unique': 'Nome de usuário já existe.'
        },
        help_text= "Requeridos 150 caracteres ou menos. Letras, dígitos e @ /. / + / - / _ apenas",
        widget=forms.TextInput(attrs={'placeholder':'Username'})
    )
    email = forms.EmailField(
        label='E-mail',
        error_messages={'invalid': 'E-mail inválido.'},
        help_text='user@dominio.com',
        widget=forms.TextInput(attrs={'placeholder':'E-mail'})
    )
    first_name = forms.CharField(
        label='Nome',
        error_messages={'max_length': 'Nome não pode ter mais de 30 caracteres'},
        widget=forms.TextInput(attrs={'placeholder':'Nome'})
    )
    last_name = forms.CharField(
        label='Sobrenome',
        error_messages={'max_length': 'Sobrenome não pode ter mais de 150 caracteres'},
        widget=forms.TextInput(attrs={'placeholder':'Sobrenome'})
    )
    telefone = forms.CharField(
        label='Telefone',
        help_text='(xx) xxxxx-xxxx',
        widget=forms.TextInput(attrs={'placeholder':'Telefone...'})
    )
    # Passwords are declared on the form only; the model stores the hash.
    password = forms.CharField(
        label='Senha',
        help_text="Digite uma senha segura",
        widget=forms.PasswordInput(attrs={'placeholder':'Senha'})
    )
    password2 = forms.CharField(
        label='Confirmar senha',
        widget=forms.PasswordInput(attrs={'placeholder':'Repetir senha'})
    )

    class Meta:
        model = CustomUser
        fields = (
            'username',
            'email',
            'first_name',
            'last_name',
            'telefone'
        )

    def clean_password2(self):
        """Validate that both password fields match.

        NOTE(review): if 'password' itself failed validation it is absent
        from cleaned_data, and this raises KeyError instead of a form
        error -- confirm and guard with .get() if that can happen.
        """
        passwords = self.cleaned_data
        if passwords['password2'] != passwords['password']:
            raise forms.ValidationError("Senhas diferentes")
        return passwords['password2']

    def save(self, commit=True):
        """Create the user via the manager so the password gets hashed.

        NOTE(review): `commit` is accepted but ignored -- create_user
        always persists immediately.
        """
        user = CustomUser.objects.create_user(
            username=self.cleaned_data['username'],
            email=self.cleaned_data['email'],
            password=self.cleaned_data['password'],
            first_name=self.cleaned_data['first_name'],
            last_name=self.cleaned_data['last_name'],
            telefone=self.cleaned_data['telefone']
        )
        return user
class UserProfileForm(forms.ModelForm):
    """Form to update a user's profile data, including social links.

    BUGFIX: Meta had a duplicated assignment typo (`fields = fields = (`);
    the redundant repetition is removed (the resulting value is unchanged).
    """

    # Social links are optional; placeholders show the expected URL shape.
    facebook = forms.CharField(widget=forms.TextInput(attrs={'placeholder': 'https://www.facebook.com/seu_username'}), required=False)
    instagram = forms.CharField(widget=forms.TextInput(attrs={'placeholder': 'https://www.instagram.com/seu_username'}), required=False)
    twitter = forms.CharField(widget=forms.TextInput(attrs={'placeholder': 'https://www.twitter.com/seu_username'}), required=False)

    class Meta:
        model = CustomUser
        fields = (
            'username',
            'email',
            'first_name',
            'last_name',
            'telefone',
            'facebook',
            'instagram',
            'twitter',
            'bio'
        )
class CustomUserCreateForm(UserCreationForm):
    """Admin form to create a CustomUser.

    NOTE(review): Meta.fields omits 'username' yet save() reads
    cleaned_data["username"], and Meta.labels relabels 'username' --
    confirm the base UserCreationForm still contributes that field here.
    """

    class Meta:
        model = CustomUser
        fields = ('first_name', 'last_name', 'telefone')
        labels = {'username': 'Username/E-mail'}

    def save(self, commit=True):
        """Save with the password hashed via set_password."""
        user = super().save(commit=False)
        user.set_password(self.cleaned_data["password1"])
        user.username = self.cleaned_data["username"]
        if commit:
            user.save()
        return user
class CustomUserChangeForm(UserChangeForm):
    """Admin form to update an existing CustomUser's basic fields."""

    class Meta:
        model = CustomUser
        fields = ('email', 'first_name', 'last_name', 'telefone')
|
[
"thiagolsmail@gmail.com"
] |
thiagolsmail@gmail.com
|
ab881c94078041feb7fe0fefd3fb0913233feb4b
|
e715be7aef31a307d2cf09d8a4ecf46ea662826f
|
/device_simulator/src/orchestator.py
|
e88831369f171c3e6acd4859ce8da628125314b0
|
[] |
no_license
|
GabrielMartinMoran/TFI_UNTREF
|
0dcfd0d5b4d69c282ce732a21039c4a69a6530af
|
e4abc9bc93b840627a008e3af5f4d86b7cd30732
|
refs/heads/main
| 2023-06-23T11:06:35.138785
| 2021-07-14T13:21:14
| 2021-07-14T13:21:14
| 358,573,316
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,489
|
py
|
import time
from datetime import datetime
from src.models.energy_sensor import EnergySensor
from src.models.console_display import ConsoleDisplay
from src.models.measure import Measure
from src.repositories.user_repository import UserRepository
from src.repositories.device_repository import DeviceRepository
import config
class Orchestator:
    """Coordinates the simulated device: samples the sensor, uploads each
    measure to the server, and refreshes the console display in a loop.
    """

    def __init__(self, device, ref_voltage, ref_current, user_secret):
        # Construction order preserved: repositories are created before the
        # (potentially remote) user-data fetch.
        self.sensor = EnergySensor(ref_voltage, ref_current)
        self.display = ConsoleDisplay()
        self.device = device
        self.user_repository = UserRepository(user_secret)
        self.device_repository = DeviceRepository(user_secret)
        self.user = self.user_repository.get_user_data()
        self.message = ''

    def loop(self):
        """Run forever: sample, upload (best effort), display, sleep."""
        while True:
            sample = Measure(
                self.sensor.get_voltage(),
                self.sensor.get_current(),
                self.__get_timestamp(),
            )
            try:
                self.device_repository.add_measure(self.device.ble_id, sample)
            except Exception as e:
                # Upload failure is reported on the display, not raised.
                self.message = f'Error: {e}'
            else:
                self.message = 'Muestra enviada al servidor'
            self.device.set_last_measure(sample)
            self.display.set_ui(self.device, self.user, self.message)
            self.display.draw()
            time.sleep(config.TIME_BETWEEN_MEASUREMENTS)

    def __get_timestamp(self):
        """Current wall-clock time as an integer Unix timestamp."""
        now = datetime.now()
        return int(now.timestamp())
|
[
"moran.gabriel.95@gmail.com"
] |
moran.gabriel.95@gmail.com
|
5b6ae4546dda852369334665c79612273e580227
|
0eaf0d3f0e96a839f2ef37b92d4db5eddf4b5e02
|
/past3/e.py
|
6a5af7eb3744903d23f04c0de4ad30e373c33a27
|
[] |
no_license
|
silphire/atcoder
|
b7b02798a87048757745d99e8564397d1ca20169
|
f214ef92f13bc5d6b290746d5a94e2faad20d8b0
|
refs/heads/master
| 2023-09-03T17:56:30.885166
| 2023-09-02T14:16:24
| 2023-09-02T14:16:24
| 245,110,029
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 433
|
py
|
# Read an undirected graph (n vertices, m edges) and initial vertex colors,
# then answer q queries. Each query first prints the current color of its
# vertex; type 1 then paints all neighbours with that color, type 2 repaints
# the vertex itself with a given color.
n, m, q = map(int, input().split())
adjacency = [set() for _ in range(n)]
for _ in range(m):
    a, b = (int(tok) - 1 for tok in input().split())
    adjacency[a].add(b)
    adjacency[b].add(a)
colors = list(map(int, input().split()))
for _ in range(q):
    query = list(map(int, input().split()))
    vertex = query[1] - 1
    print(colors[vertex])
    if query[0] == 1:
        spread = colors[vertex]
        for neighbour in adjacency[vertex]:
            colors[neighbour] = spread
    else:
        colors[vertex] = query[2]
|
[
"silphire@gmail.com"
] |
silphire@gmail.com
|
339b6652e6902305375b21f8ec23ad0e0f230c76
|
6ddd0cfdbaca412ee2b3a7a01e7fcaad63550ac2
|
/Python GUI/GUI window with various label,buttons,colors,font.py
|
b0274813b6a312766b548cf1260d2688c7d120c0
|
[] |
no_license
|
pranshu798/Python-programs
|
b302be9c9fd3aaf66824857bdb8bb2d9b8a9b5de
|
fb6e712594c72d8ea0be1026e6fb26c7fdd639ba
|
refs/heads/master
| 2022-12-08T15:52:21.625098
| 2020-09-14T11:57:26
| 2020-09-14T11:57:26
| 281,318,764
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 721
|
py
|
# Python program to demonstrate complete details of a GUI window
from tkinter import *
gui = Tk()
def hello():
    # Read the entry's current text and append it as a new yellow label.
    # NOTE: Label(...).pack() returns None, so guil3 is always None.
    c = a.get()
    guil3 = Label(text=c, fg='red', bg='yellow', font=10).pack()
def delete():
    # NOTE(review): despite the name, nothing is deleted -- this only adds
    # a label reading "Delete"; confirm intended behavior.
    guil4 = Label(text='Delete', fg='red', bg='yellow', font=10).pack()
# StringVar bound to the entry widget below.
a = StringVar()
gui.title("Aliyas Shaik Button Program")
gui.geometry("500x500+100+100")  # 500x500 window at screen offset (100, 100)
guil1 = Label(text='Label One', fg='red', bg='yellow', font=10).pack()
button1 = Button(text='Enter', fg='red', bg='yellow', command=hello, font=10).pack()
button2 = Button(text='Delete', fg='red', bg='yellow', command=delete, font=10).pack()
# Place method places label @ specified place within the window
text = Entry(textvariable=a).pack()
gui.mainloop()
|
[
"pranshuverma798@gmail.com"
] |
pranshuverma798@gmail.com
|
3c3d8847ece82de5f4ddb2fa122ea976e7da211e
|
2ee3a2b8971118b1a1e8c101382702d698021ad5
|
/weather/models.py
|
8372f28956277f086c9e5f53ff17faa6a968168c
|
[] |
no_license
|
manikshahkataria/weather
|
29a34264fd281cf26758be06d19dd19bbd226cfc
|
1bb5160caab2dc287118ab7ed4a25cf575453ee4
|
refs/heads/master
| 2022-12-11T07:50:28.988645
| 2019-01-19T10:22:10
| 2019-01-19T10:22:10
| 163,946,933
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 226
|
py
|
from django.db import models
class City(models.Model):
    """A city whose weather can be queried."""

    # City name as entered by the user (max 25 characters).
    name= models.CharField(max_length=25)

    def __str__(self):
        # Human-readable representation used in the admin and shell.
        return self.name

    class Meta:
        # Default plural would be "citys"; override it for the admin UI.
        verbose_name_plural='cities'
# Create your models here.
|
[
"manikshah1998@gmail.com"
] |
manikshah1998@gmail.com
|
2b152bec1bfa703b5df15c67f5fc0e3aa9ab815e
|
432a58b3bad9eb008ea332c06f22700172c660ac
|
/admin/client.py
|
19d4b3ccd622ebea00d2084316019e164af6a53a
|
[
"Apache-2.0"
] |
permissive
|
achanda/flocker
|
7b5c5264b52489e9da774ff011699c6b62a4bddd
|
ac822c3d6687ea63cad2aea81334a86100bfda0e
|
refs/heads/master
| 2020-12-11T01:40:20.625304
| 2015-08-01T05:47:10
| 2015-08-01T05:47:10
| 37,820,406
| 0
| 0
| null | 2015-06-21T18:24:07
| 2015-06-21T18:24:06
| null |
UTF-8
|
Python
| false
| false
| 9,859
|
py
|
# Copyright Hybrid Logic Ltd. See LICENSE file for details.
"""
Run the acceptance tests.
"""
import sys
import yaml
from zope.interface import Interface, implementer
from characteristic import attributes
from eliot import add_destination
from twisted.internet.error import ProcessTerminated
from twisted.python.usage import Options, UsageError
from twisted.python.filepath import FilePath
from twisted.internet.defer import inlineCallbacks, returnValue
from flocker.common.version import make_rpm_version
from flocker.provision import PackageSource, CLOUD_PROVIDERS
import flocker
from flocker.provision._ssh import (
run_remotely)
from flocker.provision._install import (
task_client_installation_test,
install_cli,
)
from effect.twisted import perform
from flocker.provision._ssh._conch import make_dispatcher
from .runner import run
def remove_known_host(reactor, hostname):
    """
    Remove all keys belonging to hostname from a known_hosts file.

    :param reactor: Reactor to use.
    :param bytes hostname: Remove all keys belonging to this hostname from
        known_hosts.
    :return: Deferred firing when ``ssh-keygen -R`` has finished.
    """
    # ssh-keygen -R rewrites ~/.ssh/known_hosts in place, dropping any
    # stale host key so a freshly provisioned node can be trusted anew.
    return run(reactor, ['ssh-keygen', '-R', hostname])
def run_client_tests(reactor, node):
    """
    Run the client acceptance tests.

    :param INode node: The node to run client acceptance tests against.
    :return int: The exit-code of trial.
    """
    def check_result(f):
        # A ProcessTerminated failure with a real exit code means the remote
        # test command ran but failed: translate it to a plain int. Anything
        # else (connection errors, signals) propagates as a failure.
        f.trap(ProcessTerminated)
        if f.value.exitCode is not None:
            return f.value.exitCode
        else:
            return f
    # Run the installation-test commands over SSH; success maps to exit 0.
    return perform(make_dispatcher(reactor), run_remotely(
        username=node.get_default_username(),
        address=node.address,
        commands=task_client_installation_test()
    )).addCallbacks(
        callback=lambda _: 0,
        errback=check_result,
    )
class INodeRunner(Interface):
    """
    Interface for starting and stopping nodes for acceptance testing.
    """
    # zope.interface methods are declared without ``self``.

    def start_nodes(reactor):
        """
        Start nodes for running acceptance tests.

        :param reactor: Reactor to use.
        :return Deferred: Deferred which fires with a list of nodes to run
            tests against.
        """

    def stop_nodes(reactor):
        """
        Stop the nodes started by `start_nodes`.

        :param reactor: Reactor to use.
        :return Deferred: Deferred which fires when the nodes have been
            stopped.
        """
# Attributes common to every runner implementation; fed to the
# ``characteristic.attributes`` class decorator below.
RUNNER_ATTRIBUTES = [
    'distribution', 'top_level', 'config', 'package_source'
]
@implementer(INodeRunner)
@attributes(RUNNER_ATTRIBUTES + [
    'provisioner',
], apply_immutable=True)
class LibcloudRunner(object):
    """
    Start and stop cloud nodes for acceptance testing.

    :ivar LibcloudProvioner provisioner: The provisioner to use to create the
        nodes.
    :ivar VolumeBackend volume_backend: The volume backend the nodes are
        configured with.
    """

    def __init__(self):
        # ``characteristic.attributes`` has already injected distribution,
        # top_level, config, package_source and provisioner at this point.
        self.nodes = []
        self.metadata = self.config.get('metadata', {})
        try:
            creator = self.metadata['creator']
        except KeyError:
            raise UsageError("Must specify creator metadata.")
        # Creator is embedded in cloud node names, so restrict the charset.
        if not creator.isalnum():
            raise UsageError(
                "Creator must be alphanumeric. Found {!r}".format(creator)
            )
        self.creator = creator

    @inlineCallbacks
    def start_nodes(self, reactor, node_count):
        """
        Start cloud nodes for client tests.

        :param reactor: Reactor to use.
        :param int node_count: Number of nodes to provision.
        :return list: List of addresses of nodes to connect to, for client
            tests.
        """
        metadata = {
            'purpose': 'client-testing',
            'distribution': self.distribution,
        }
        metadata.update(self.metadata)
        for index in range(node_count):
            name = "client-test-%s-%d" % (self.creator, index)
            try:
                print "Creating node %d: %s" % (index, name)
                node = self.provisioner.create_node(
                    name=name,
                    distribution=self.distribution,
                    metadata=metadata,
                )
            except:
                # The provider call may have partially succeeded; warn so a
                # human can clean up any orphaned instance.
                print "Error creating node %d: %s" % (index, name)
                print "It may have leaked into the cloud."
                raise
            # Drop any stale SSH host key before the first connection.
            yield remove_known_host(reactor, node.address)
            self.nodes.append(node)
            del node
        returnValue(self.nodes)

    def stop_nodes(self, reactor):
        """
        Deprovision the nodes provisioned by ``start_nodes``.
        """
        for node in self.nodes:
            try:
                print "Destroying %s" % (node.name,)
                node.destroy()
            except Exception as e:
                # Best-effort teardown: report and keep destroying the rest.
                print "Failed to destroy %s: %s" % (node.name, e)
# Operating systems the client tests support, and the cloud providers the
# provisioning layer knows how to drive.
DISTRIBUTIONS = ('centos-7', 'ubuntu-14.04', 'ubuntu-15.04')
PROVIDERS = tuple(sorted(CLOUD_PROVIDERS.keys()))
class RunOptions(Options):
    """Command-line options for the client acceptance-test runner."""

    description = "Run the client tests."

    optParameters = [
        ['distribution', None, None,
         'The target distribution. '
         'One of {}.'.format(', '.join(DISTRIBUTIONS))],
        ['provider', None, 'rackspace',
         'The target provider to test against. '
         'One of {}.'.format(', '.join(PROVIDERS))],
        ['config-file', None, None,
         'Configuration for providers.'],
        ['branch', None, None, 'Branch to grab packages from'],
        # Bug fix: this entry appeared twice in the original list, which
        # made the option show up twice in --help output.
        ['flocker-version', None, flocker.__version__,
         'Version of flocker to install'],
        ['build-server', None, 'http://build.clusterhq.com/',
         'Base URL of build server for package downloads'],
    ]

    optFlags = [
        ["keep", "k", "Keep VMs around, if the tests fail."],
    ]

    synopsis = ('Usage: run-client-tests --distribution <distribution> '
                '[--provider <provider>]')

    def __init__(self, top_level):
        """
        :param FilePath top_level: The top-level of the flocker repository.
        """
        Options.__init__(self)
        self.top_level = top_level

    def postOptions(self):
        """Validate parsed options and construct ``self.runner``.

        :raises UsageError: on a missing/unsupported distribution or
            provider, or when provider credentials are absent.
        """
        if self['distribution'] is None:
            raise UsageError("Distribution required.")

        # Provider credentials and metadata come from an optional YAML file.
        if self['config-file'] is not None:
            config_file = FilePath(self['config-file'])
            self['config'] = yaml.safe_load(config_file.getContent())
        else:
            self['config'] = {}

        if self['flocker-version']:
            # Derive the OS package version string, dropping any ".dirty"
            # suffix produced by builds from a modified working tree.
            rpm_version = make_rpm_version(self['flocker-version'])
            os_version = "%s-%s" % (rpm_version.version, rpm_version.release)
            if os_version.endswith('.dirty'):
                os_version = os_version[:-len('.dirty')]
        else:
            os_version = None

        package_source = PackageSource(
            version=self['flocker-version'],
            os_version=os_version,
            branch=self['branch'],
            build_server=self['build-server'],
        )

        if self['distribution'] not in DISTRIBUTIONS:
            raise UsageError(
                "Distribution %r not supported. Available distributions: %s"
                % (self['distribution'], ', '.join(DISTRIBUTIONS)))

        if self['provider'] not in PROVIDERS:
            raise UsageError(
                "Provider %r not supported. Available providers: %s"
                % (self['provider'], ', '.join(PROVIDERS)))

        if self['provider'] in CLOUD_PROVIDERS:
            # Configuration must include credentials etc for cloud providers.
            try:
                provider_config = self['config'][self['provider']]
            except KeyError:
                raise UsageError(
                    "Configuration file must include a "
                    "{!r} config stanza.".format(self['provider'])
                )

            provisioner = CLOUD_PROVIDERS[self['provider']](**provider_config)

            self.runner = LibcloudRunner(
                config=self['config'],
                top_level=self.top_level,
                distribution=self['distribution'],
                package_source=package_source,
                provisioner=provisioner,
            )
from .acceptance import eliot_output
@inlineCallbacks
def main(reactor, args, base_path, top_level):
    """
    :param reactor: Reactor to use.
    :param list args: The arguments passed to the script.
    :param FilePath base_path: The executable being run.
    :param FilePath top_level: The top-level of the flocker repository.
    """
    options = RunOptions(top_level=top_level)
    add_destination(eliot_output)
    try:
        options.parseOptions(args)
    except UsageError as e:
        sys.stderr.write("%s: %s\n" % (base_path.basename(), e))
        raise SystemExit(1)
    runner = options.runner
    # Mirror eliot log messages into a per-invocation log file; stop the
    # writer cleanly on reactor shutdown.
    from flocker.common.script import eliot_logging_service
    log_file = open("%s.log" % base_path.basename(), "a")
    log_writer = eliot_logging_service(
        log_file=log_file,
        reactor=reactor,
        capture_stdout=False)
    log_writer.startService()
    reactor.addSystemEventTrigger(
        'before', 'shutdown', log_writer.stopService)
    try:
        nodes = yield runner.start_nodes(reactor, node_count=1)
        yield perform(
            make_dispatcher(reactor),
            install_cli(runner.package_source, nodes[0]))
        result = yield run_client_tests(reactor=reactor, node=nodes[0])
    except:
        # Any failure counts as exit status 1; re-raise so the traceback
        # still reaches the caller.
        result = 1
        raise
    finally:
        # Unless the tests failed, and the user asked to keep the nodes, we
        # delete them.
        if not (result != 0 and options['keep']):
            runner.stop_nodes(reactor)
        elif options['keep']:
            print "--keep specified, not destroying nodes."
    raise SystemExit(result)
|
[
"jon.giddy@clusterhq.com"
] |
jon.giddy@clusterhq.com
|
3ffd2a81defe2dd17e7c4f0ee0d2e75c6f233e90
|
b481964a107b7a1afd5997e2736235ffb2c17138
|
/hud.py
|
86a51accc3edf2f32b32261581f9c30e58e9030f
|
[
"MIT"
] |
permissive
|
marax27/pyNoid
|
06503b576e97e839bcfa8d132bf8855769db7777
|
c988db7ef6750352ecb6e3749f73302da4b71488
|
refs/heads/master
| 2021-09-04T11:19:22.625982
| 2018-01-18T07:03:43
| 2018-01-18T07:08:23
| 111,729,907
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,505
|
py
|
#!/usr/bin/python3
from constants import Constants
from vec2 import vec2
import sdl2.sdlttf
import sdl2.ext
import sdl2
import misc
class UIElement:
    """Base class for HUD widgets: a screen position plus a pixel size."""

    def __init__(self):
        self.position = vec2(0,0)  # top-left corner, window coordinates
        self.size = vec2(0,0)      # (width, height) in pixels

    def centerHorizontally(self):
        # Integer division keeps the coordinate on a whole pixel.
        self.position.x = (Constants.WINDOW_SIZE.x - self.size[0])//2

    def centerVertically(self):
        self.position.y = (Constants.WINDOW_SIZE.y - self.size[1])//2

    def center(self):
        """Center the element both horizontally and vertically."""
        self.centerHorizontally()
        self.centerVertically()
class Text(UIElement):
    """Single piece of text."""

    # Class-level font manager shared by all Text instances; loaded once
    # at import time.
    font_manager = sdl2.ext.FontManager('resources/vga_437.ttf', size=24)

    def __init__(self, text, renderer, size=None, color=None):
        # :param text: string to render
        # :param renderer: wrapper exposing a ``.renderer`` SDL handle
        # :param size: optional font-size override
        # :param color: optional font color
        self.position = vec2(0,0)
        self.texture = None  # SDL texture; (re)created by load()
        self.load(text, renderer, size, color)

    def render(self, renderer, pos=None):
        """Render the text, using the renderer."""
        # Use the stored position unless an explicit one is supplied.
        r = sdl2.SDL_Rect(self.position[0] if not pos else pos[0],
                          self.position[1] if not pos else pos[1],
                          self.size[0],
                          self.size[1])
        sdl2.SDL_RenderCopy(renderer.renderer, self.texture, None, r)

    def load(self, text, renderer, size=None, color=None):
        """Update a Text object."""
        # Free the previous texture to avoid leaking GPU memory on reload.
        if self.texture:
            sdl2.SDL_DestroyTexture(self.texture)  # That does the trick.
        surf = Text.font_manager.render(text, size=size, color=color)
        sprite = sdl2.ext.SoftwareSprite(surf, True)
        self.size = (surf.w, surf.h)
        self.texture = sdl2.SDL_CreateTextureFromSurface(renderer.renderer, sprite.surface)
class Button(UIElement):
    """Clickable widget that swaps its visual state on hover/press."""

    IDLE, HOVER, PRESSED = 0x1001, 0x1002, 0x1003

    @staticmethod
    def buildClickableText(message, renderer, idle_color, pressed_color, hover_color, size, pos=None):
        """Generates a text label that will change color according to whether it's pressed or not."""
        idle = Text(message, renderer, size, idle_color)
        pressed = Text(message, renderer, size, pressed_color)
        hover = Text(message, renderer, size, hover_color)
        return Button(idle, pressed, hover, pos)

    def __init__(self, idle_state, pressed_state, hover_state=None, pos=None):
        # Reuse the idle visual when no dedicated hover visual is given.
        hover_visual = idle_state if hover_state is None else hover_state
        self.states = {
            self.IDLE: idle_state,
            self.HOVER: hover_visual,
            self.PRESSED: pressed_state,
        }
        self.state = self.IDLE
        self.position = pos if pos else vec2(0, 0)
        # Size restriction: all three visuals must share one footprint,
        # otherwise hit-testing would depend on the current state.
        idle_size = self.states[self.IDLE].size
        if idle_size != self.states[self.HOVER].size or idle_size != self.states[self.PRESSED].size:
            raise ValueError()
        self.size = idle_size

    def render(self, renderer):
        """Draw the visual matching the current state at this button's position."""
        self.states[self.state].render(renderer, self.position)

    def handleEvent(self, event):
        """Update the button state from the mouse position and click events."""
        mx, my = misc.getMousePos()
        inside_x = self.position[0] <= mx < self.position[0] + self.size[0]
        inside_y = self.position[1] <= my < self.position[1] + self.size[1]
        if not (inside_x and inside_y):
            self.state = self.IDLE
        elif event.type == sdl2.SDL_MOUSEBUTTONDOWN:
            self.state = self.PRESSED
        else:
            self.state = self.HOVER

    def isPressed(self):
        """True while the button is in the pressed state."""
        return self.state == self.PRESSED
class VerticalContainer:
    """Stacks UI elements vertically, each centered horizontally.

    :param elements: list of widgets (each exposing ``position``, ``size``
        and ``centerHorizontally()``), stacked top to bottom.
    :param y_pos: y coordinate of the first element.
    """

    def __init__(self, elements=None, y_pos=0):
        # Bug fix: the original used a mutable default (``elements=[]``),
        # which is shared by every instance constructed without arguments.
        self.elem = [] if elements is None else elements
        self.y_pos = y_pos
        self.adjust()

    def adjust(self):
        """Re-stack elements from ``y_pos`` downward and center them."""
        if self.elem:
            self.elem[0].position.y = self.y_pos
        # Each element starts where the previous one ends.
        for prev, cur in zip(self.elem, self.elem[1:]):
            cur.position.y = prev.position.y + prev.size[1]
        for element in self.elem:
            element.centerHorizontally()

    def render(self, renderer):
        """Draw every contained element."""
        for element in self.elem:
            element.render(renderer)
|
[
"kacpert314@gmail.com"
] |
kacpert314@gmail.com
|
5fc93f5180bbbf9d6e8482073bcb89bf2d923892
|
2c68f9156087d6d338373f9737fee1a014e4546b
|
/src/connectedk8s/azext_connectedk8s/vendored_sdks/models/authentication_details_value.py
|
982b4554803e85c978165d7b651f09cd77ff0c69
|
[
"LicenseRef-scancode-generic-cla",
"MIT"
] |
permissive
|
anpaz/azure-cli-extensions
|
8b0d4071c49840da9883f13cb0fd1f4515246ee0
|
847fd487fe61e83f2a4163a9393edc9555267bc2
|
refs/heads/master
| 2023-04-23T17:22:53.427404
| 2021-01-29T17:48:28
| 2021-01-29T18:01:33
| 257,394,204
| 2
| 0
|
MIT
| 2021-01-28T10:31:07
| 2020-04-20T20:19:43
|
Python
|
UTF-8
|
Python
| false
| false
| 890
|
py
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class AuthenticationDetailsValue(Model):
    """Authentication token value.

    AutoRest-generated model -- do not hand-edit; changes are lost on
    regeneration (see the file header).

    :param token: Authentication token.
    :type token: str
    """

    # msrest (de)serialization map: wire key -> attribute type.
    _attribute_map = {
        'token': {'key': 'token', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(AuthenticationDetailsValue, self).__init__(**kwargs)
        self.token = kwargs.get('token', None)
|
[
"noreply@github.com"
] |
noreply@github.com
|
b1f2496ebe49df46a46292c6d998c4fbb52a383b
|
a10b0c634e83a652b02db4e6a24ba7d0429dfa05
|
/main.py
|
1caa556c793abd75cfe2dcbcde0ba8e6167be17a
|
[] |
no_license
|
Diptasri/Intro-to-Ai-ML-5600
|
9f822327d5825e0e9c4cf7b446b61eaa9ee39a55
|
05efaa4f973f8c55bb12cb0b65407c3f9ce9bb50
|
refs/heads/main
| 2023-07-27T03:47:36.049328
| 2021-09-12T03:33:41
| 2021-09-12T03:33:41
| 399,465,959
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,388
|
py
|
import matplotlib.pyplot as plt
# inputs
a = float (input ("Enter a value: "))
b = float (input ("Enter b value: "))
# Endpoints: A = (a+b, a-b) and B = (a-b, a+b).
ax = a + b
ay = a - b
bx = a - b
by = a + b
# Internal division section formula
# P divides AB internally in the ratio a:b (expressions pre-simplified).
Px = ((a * a) + (b * b)) / (a + b)
Py = ((a * a) + (2 * a * b) - (b * b)) / (a + b)
# External division section formula
# Q divides AB externally in the ratio a:b.
# NOTE(review): raises ZeroDivisionError when a == b; inputs must differ.
Qx = ((a * a) - (2 * a * b) - (b * b)) / (a - b)
Qy = ((a * a) + (b * b)) / (a - b)
# Figure 1: A, B and the internal division point P.
plt.figure(1)
plt.scatter([ax, bx, Px], [ay, by, Py], color= 'r')
plt.text(ax, ay + 0.5, '({},{})'.format(ax, ay))
plt.text(bx, by + 0.5, '({},{})'.format(bx, by))
plt.text(Px, Py + 0.5, '({},{})'.format(Px, Py))
plt.plot([ax, bx, Px], [ay, by, Py])
plt.title("Internal Division Section")
plt.xlabel("X- Axis")
plt.ylabel("Y- Axis")
plt.grid(True)
plt.show()
# Figure 2: A, B and the external division point Q.
plt.figure(2)
plt.scatter([ax, bx, Qx], [ay, by, Qy], color= 'r')
plt.text(ax, ay + 0.5, '({},{})'.format(ax, ay))
plt.text(bx, by + 0.5, '({},{})'.format(bx, by))
plt.text(Qx, Qy + 0.5, '({},{})'.format(Qx, Qy))
plt.plot([ax, bx, Qx], [ay, by, Qy])
plt.title("External Division Section")
plt.xlabel("X- Axis")
plt.ylabel("Y- Axis")
plt.grid(True)
plt.show()
# Final coordinates
internal_coordinates = (Px, Py)
external_coordinates = (Qx, Qy)
# Printing outputs
print ("(Px, Py) = " + str (internal_coordinates))
print ("(Qx, Qy) = " + str (external_coordinates))
|
[
"noreply@github.com"
] |
noreply@github.com
|
35b33536fa724c6e8c12b3a831280b5ef91b1f3e
|
336f6cbc2fa3821ad07c702c136bd55034cd2780
|
/api/views.py
|
5f4dc1ce2afdd034d87e15aa2e72ff373803d22c
|
[] |
no_license
|
wucg-python/dry1
|
2521bccfa23c84d4d93e28c9a4a19c4eb0c6b6ec
|
e68ebc5e6f65ee3ca5850223a61574b40837feba
|
refs/heads/master
| 2023-01-05T19:28:02.699639
| 2020-11-02T02:16:08
| 2020-11-02T02:16:08
| 309,234,902
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 7,390
|
py
|
from django.shortcuts import render
# Create your views here.
from rest_framework import mixins
from rest_framework.generics import GenericAPIView
from rest_framework.response import Response
from rest_framework.views import APIView
from rest_framework import generics
from api.models import Book, User
from api.serializers import BookModelSerializer, UserModelSerializer
from rest_framework import viewsets
class BookAPIView(APIView):
    """CRUD endpoints for Book with soft delete (``is_delete`` flag).

    Fixes over the original:
      * ``patch`` looked objects up with ``Book.objects.get(id)`` -- a
        positional argument raises TypeError, so the bare ``except`` skipped
        every book; now uses ``get(id=id)``.
      * the bulk ``patch`` response serialized a list of instances without
        ``many=True``.
    Debug ``print`` calls and commented-out code were removed.
    """

    def get(self, request, *args, **kwargs):
        """Return one book (by URL id) or all non-deleted books."""
        book_id = kwargs.get('id')
        if book_id:
            book = Book.objects.filter(id=book_id, is_delete=False)
            serialize = BookModelSerializer(book, many=True).data
            return Response({
                'status': 200,
                "message": "查询一个书籍",
                "result": serialize,
            })
        else:
            books = Book.objects.filter(is_delete=False)
            serialize = BookModelSerializer(books, many=True).data
            return Response({
                'status': 200,
                "message": "查询所有书籍",
                "result": serialize,
            })

    def post(self, request, *args, **kwargs):
        """Create one book (dict body) or several (list body)."""
        request_data = request.data
        if isinstance(request_data, dict):
            many = False
        elif isinstance(request_data, list):
            many = True
        else:
            return Response({
                "status": 400,
                "message": "添加失败"
            })
        serialize = BookModelSerializer(data=request_data, many=many)
        serialize.is_valid(raise_exception=True)
        book = serialize.save()
        return Response({
            'status': 200,
            "message": "添加书籍",
            "result": BookModelSerializer(book, many=many).data,
        })

    def delete(self, request, *args, **kwargs):
        """Soft-delete one book (URL id) or several (list of ids in body)."""
        id = kwargs.get('id')
        if id:
            # Single delete via the URL id.
            ids = [id]
        else:
            # Bulk delete: the body is a list of ids.
            ids = request.data
        response = Book.objects.filter(id__in=ids, is_delete=False).update(is_delete=True)
        if response:
            return Response({
                "status": 200,
                "message": "删除成功",
            })
        else:
            return Response({
                "status": 400,
                "message": "删除失败或已被删除",
            })

    def put(self, request, *args, **kwargs):
        """Update a single book identified by the URL id."""
        # Values to apply.
        request_data = request.data
        # Object being modified.
        book_id = kwargs.get('id')
        try:
            book_obj = Book.objects.get(id=book_id)
        except Exception:
            return Response({
                "status": 400,
                "message": "对象不存在"
            })
        serializer = BookModelSerializer(data=request_data, instance=book_obj, partial=True)
        serializer.is_valid(raise_exception=True)
        book = serializer.save()
        return Response({
            "status": 200,
            "message": "修改成功",
            "result": BookModelSerializer(book).data
        })

    def patch(self, request, *args, **kwargs):
        """Partially update one book (URL id + dict body) or many
        (list body where each item carries its own ``id``)."""
        book_id = kwargs.get('id')
        request_data = request.data
        if book_id and isinstance(request_data, dict):
            # Single update: normalize to the bulk shape.
            ids = [book_id]
            request_data = [request_data]
        elif not book_id and isinstance(request_data, list):
            # Bulk update: every item must supply its own id.
            ids = []
            for item in request_data:
                id = item.pop('id', None)
                if id:
                    ids.append(id)
                else:
                    return Response({
                        'status': 400,
                        "message": "id不存在"
                    })
        else:
            return Response({
                "status": 400,
                "message": "格式错误"
            })
        books_obj = []
        new_data = []
        for index, id in enumerate(ids):
            try:
                # Bug fix: was ``Book.objects.get(id)`` (positional ->
                # TypeError), so every lookup failed and was skipped.
                book_obj = Book.objects.get(id=id)
                books_obj.append(book_obj)
                new_data.append(request_data[index])
            except Exception:
                # Unknown ids are skipped rather than failing the batch.
                continue
        serializer = BookModelSerializer(data=new_data, instance=books_obj, partial=True, many=True)
        serializer.is_valid(raise_exception=True)
        datas = serializer.save()
        return Response({
            "status": 200,
            "message": "成功",
            # Bug fix: serializing a list requires many=True.
            "result": BookModelSerializer(datas, many=True).data
        })
class BookGenericAPIView(GenericAPIView,
                         mixins.ListModelMixin,
                         mixins.RetrieveModelMixin,
                         mixins.DestroyModelMixin,
                         mixins.CreateModelMixin,
                         mixins.UpdateModelMixin,
                         ):
    """Book CRUD assembled from DRF generic-view mixins.

    NOTE(review): ``put`` delegates to ``partial_update`` (PATCH
    semantics); confirm clients do not expect full-replace PUT.
    """

    # Soft-deleted rows are hidden from every action.
    queryset = Book.objects.filter(is_delete=False)
    serializer_class = BookModelSerializer
    lookup_field = "id"

    def get(self,request,*args,**kwargs):
        # Detail view when the URL carries an id, list view otherwise.
        if "id" in kwargs:
            return self.retrieve(request,*args,**kwargs)
        return self.list(request,*args,**kwargs)

    def post(self,request,*args,**kwargs):
        return self.create(request,*args,**kwargs)

    def delete(self,request,*args,**kwargs):
        return self.destroy(request,*args,**kwargs)

    def put(self,request,*args,**kwargs):
        return self.partial_update(request,*args,**kwargs)
class BookGenerics(generics.ListAPIView,
                   generics.ListCreateAPIView):
    """List/create endpoint built purely from DRF generics.

    NOTE(review): unlike the other views, this queryset does not filter on
    ``is_delete`` -- soft-deleted books will appear here; confirm intended.
    """
    queryset = Book.objects.filter()
    serializer_class = BookModelSerializer
    lookup_field = "id"
class UserAPIView(viewsets.GenericViewSet,
                  mixins.CreateModelMixin):
    """User registration and login endpoints.

    SECURITY NOTE(review): ``login`` filters on the raw password value,
    which implies passwords are stored in plain text. This should use
    Django's hashed-password machinery (``check_password``) -- flagged,
    not changed here.
    """
    queryset = User.objects.all()
    serializer_class = UserModelSerializer
    lookup_field = "id"

    def register(self,request,*args,**kwargs):
        # Plain create; routing maps this action to the register URL.
        return self.create(request,*args,**kwargs)

    def login(self,request,*args,**kwargs):
        request_data = request.data
        print(request_data)
        # Plain-text credential match (see SECURITY NOTE above).
        user= User.objects.filter(username=request_data.get('username'),password=request_data.get('password'))
        if user:
            return Response({
                "status":200,
                "message":"登陆成功"
            })
        return Response({
            "status":400,
            "message":"登陆失败"
        })
|
[
"1490168051@qq.com"
] |
1490168051@qq.com
|
e18bf50cd5e5c0cee6e3670840430470da5195de
|
f1e4a8ab1ce478b3a95e8e8a74faa16409ac86e2
|
/lambdas/InitializeDBCustomLambda/index.py
|
7d26789efb9fa031b09f6bbee5909f5d6e904771
|
[] |
no_license
|
Ryanjlowe/pinpoint-engagement-scoring
|
6b761fd7ada905db251d42ec1d4b47c0e27ad810
|
a75f29f687c799ed6e0125c3c0044cc39c9dd2d5
|
refs/heads/master
| 2020-09-30T05:02:39.204421
| 2019-12-13T17:41:44
| 2019-12-13T17:41:44
| 227,209,468
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,868
|
py
|
import json
import boto3
import logging
import os
from botocore.vendored import requests
dynamodb = boto3.resource('dynamodb')
PINPOINT_PROJECT_ID = os.environ.get('PINPOINT_PROJECT_ID')
scoring_definition_table = dynamodb.Table(os.environ.get('SCORING_DEFINITION_TABLE'))
def lambda_handler(event, context):
    """CloudFormation custom-resource handler that seeds the scoring table.

    Inserts the default engagement-score offsets for each Pinpoint event
    type, then reports SUCCESS/FAILED back to CloudFormation via ``send``.
    """
    global log_level
    log_level = str(os.environ.get('LOG_LEVEL')).upper()
    if log_level not in ('DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'):
        log_level = 'ERROR'
    logging.getLogger().setLevel(log_level)
    logging.info(event)

    # (event type, score offset) defaults written to the scoring table.
    default_scores = (
        ('_email.click', 50),
        ('_email.open', 10),
        ('_email.delivered', 2),
        ('_email.hardbounce', -1000),
        ('_email.complaint', -1000),
        ('_email.unsubscribe', -500),
        ('_SMS.SUCCESS', 2),
        ('_SMS.OPTOUT', -500),
        ('_campaign.send', 2),
        ('_campaign.opened_notification', 50),
        ('_campaign.received_foreground', 2),
        ('_campaign.received_background', 2),
        ('_session.start', 2),
        ('_userauth.sign_up', 50),
        ('_monetization.purchase', 100),
    )
    try:
        for event_type, score_offset in default_scores:
            populate_score_definition(event_type, score_offset)
    except Exception as e:
        logging.error('Received Error while populating default values: %s', e)
        send(event, context, 'FAILED', {})
    else:
        send(event, context, 'SUCCESS', {})
def populate_score_definition(event_type, score_offset):
    """Insert (or overwrite) one default scoring row in DynamoDB.

    :param event_type: Pinpoint event type, e.g. ``_email.open``.
    :param score_offset: signed score delta applied when the event occurs.
    :raises: re-raises any DynamoDB error after logging it.
    """
    try:
        scoring_definition_table.put_item(
            Item={
                'EventType': event_type,
                'PinpointProjectId': PINPOINT_PROJECT_ID,
                'ScoreOffset':score_offset
            }
        )
    except Exception as e:
        logging.error('Received Error while populate_score_definition: %s', e)
        raise e
######
# Following taken from: https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/cfn-lambda-function-code-cfnresponsemodule.html#w2ab1c20c25c14b9c15
######
# Copyright 2016 Amazon Web Services, Inc. or its affiliates. All Rights Reserved.
# This file is licensed to you under the AWS Customer Agreement (the "License").
# You may not use this file except in compliance with the License.
# A copy of the License is located at http://aws.amazon.com/agreement/ .
# This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, express or implied.
# See the License for the specific language governing permissions and limitations under the License.
def send(event, context, responseStatus, responseData, physicalResourceId=None, noEcho=False):
    # Post the custom-resource result back to CloudFormation's pre-signed
    # S3 URL. This is the verbatim AWS cfn-response sample (see header
    # above); kept unmodified apart from comments.
    responseUrl = event['ResponseURL']
    print(responseUrl)
    responseBody = {}
    responseBody['Status'] = responseStatus
    responseBody['Reason'] = 'See the details in CloudWatch Log Stream: ' + context.log_stream_name
    # Default the physical id to the log stream so stack updates do not
    # force a resource replacement.
    responseBody['PhysicalResourceId'] = physicalResourceId or context.log_stream_name
    responseBody['StackId'] = event['StackId']
    responseBody['RequestId'] = event['RequestId']
    responseBody['LogicalResourceId'] = event['LogicalResourceId']
    responseBody['NoEcho'] = noEcho
    responseBody['Data'] = responseData
    json_responseBody = json.dumps(responseBody)
    print("Response body:\n" + json_responseBody)
    # The empty content-type is required by the pre-signed S3 URL.
    headers = {
        'content-type' : '',
        'content-length' : str(len(json_responseBody))
    }
    try:
        response = requests.put(responseUrl,
                                data=json_responseBody,
                                headers=headers)
        print("Status code: " + response.reason)
    except Exception as e:
        # A failure here leaves CloudFormation waiting until it times out.
        print("send(..) failed executing requests.put(..): " + str(e))
|
[
"rjlowe@amazon.com"
] |
rjlowe@amazon.com
|
4106443cb73c63c91456d1feb0e571e206168629
|
0ad1a9530f0765b07d568095cb1534babc4432f5
|
/utils/db.py
|
0391f7d7acc6be817a2340825b6016c54fba89ab
|
[
"MIT"
] |
permissive
|
bryceweiner/Infiniti
|
18acc471b5882bd676e61b840bff793c22d6c272
|
5ea829dfa26c0948970329651d3cacff7788d116
|
refs/heads/master
| 2022-12-15T06:10:10.981048
| 2018-09-25T20:03:02
| 2018-09-25T20:03:02
| 147,550,307
| 1
| 2
|
MIT
| 2022-12-07T23:52:23
| 2018-09-05T16:47:43
|
Python
|
UTF-8
|
Python
| false
| false
| 2,781
|
py
|
# Standard library
import ast
import itertools
import time
from decimal import Decimal

# Third-party
import rocksdb

# Project parameters (wildcard kept for backward compatibility)
from infiniti.params import *
MAX_RETRY_CREATE_DB = 100
def writebatch():
    """Return a fresh rocksdb.WriteBatch for atomic multi-put writes."""
    return rocksdb.WriteBatch()
def open_db(filename, logger=None, read_only=False):
    """Open (creating if necessary) a RocksDB database, retrying on failure.

    :param filename: database path; an empty string falls back to "wallet_test"
    :param logger: unused; kept for interface compatibility
    :param read_only: open the database read-only
    :return: a ``rocksdb.DB`` handle
    :raises: the last rocksdb error after MAX_RETRY_CREATE_DB failed attempts
    """
    db_path = "wallet_test" if filename == "" else filename
    handle = None
    last_err = None
    attempts = 0
    while handle is None and attempts < MAX_RETRY_CREATE_DB:
        opts = rocksdb.Options()
        opts.create_if_missing = True
        opts.max_open_files = -1
        opts.write_buffer_size = 67108864
        opts.max_write_buffer_number = 3
        opts.target_file_size_base = 67108864
        opts.table_factory = rocksdb.BlockBasedTableFactory(
            filter_policy=rocksdb.BloomFilterPolicy(10),
            block_cache=rocksdb.LRUCache(2 * (1024 ** 3)),
            block_cache_compressed=rocksdb.LRUCache(500 * (1024 ** 2)))
        try:
            handle = rocksdb.DB(db_path, opts, read_only)
        except Exception as err:
            last_err = err
            time.sleep(.1)
        attempts += 1
    # NOTE(review): preserved from the original -- a success on the very
    # last attempt still raises, because the counter reaches the maximum.
    if attempts == MAX_RETRY_CREATE_DB:
        raise last_err
    return handle
def uuid_exists(object_db,uuid):
    """Report on the presence of keys prefixed by *uuid* in *object_db*.

    NOTE(review): despite the name, this returns True when NO keys with
    the ``uuid`` prefix are found (``len(items) == 0``) -- i.e. it reads
    as "uuid is free", not "uuid exists". Confirm against callers before
    relying on the name.

    NOTE(review): ``itertools`` is referenced but the visible imports are
    only ``rocksdb, time, ast`` -- presumably supplied by
    ``from infiniti.params import *``; verify.
    """
    try:
        _db = open_db(join_path(DATA_PATH,object_db))
        it = _db.iteritems()
        it.seek(uuid)
        # Collect every (key, value) pair whose key keeps the uuid prefix.
        items = dict(itertools.takewhile(lambda item: item[0].startswith(uuid), it))
        return len(items) == 0
    except Exception as err:
        raise err
def get_infiniti_object(object_db, uuid):
    """
    Load all fields of an Infiniti object keyed by its UUID.

    Keys are stored as ``"<uuid>.<field>"``; this walks the key range
    starting with *uuid* and rebuilds a ``{field: value}`` dict.

    :param object_db: database directory name under DATA_PATH
    :param uuid: unique object identifier used as the key prefix
    :return: dict mapping field name to stored value
    """
    try:
        _db = open_db(join_path(DATA_PATH, object_db))
        it = _db.iteritems()
        it.seek(uuid)
        result = {}
        # Bug fixes vs. the original:
        #  * iterate the (key, value) pairs directly -- the original wrapped
        #    them in dict() and then tried to unpack the *keys* as pairs;
        #  * store via item assignment -- the original assigned a dict to
        #    the method attribute ``result.update`` instead of calling it.
        for key, value in itertools.takewhile(
                lambda item: item[0].startswith(uuid), it):
            _uuid, _field = key.split('.')
            result[_field] = value
        return result
    except Exception as err:
        raise err
def put_infiniti_object(object_db, obj):
    """
    Persist an Infiniti object's storage fields as one batched write.

    Every attribute named with a single leading underscore is written
    under the key ``"<obj.uuid>.<attr>"``.

    :param object_db: database directory name under DATA_PATH
    :param obj: object exposing ``uuid`` plus ``_field`` attributes
    :return: True on success
    """
    try:
        _db = open_db(join_path(DATA_PATH, object_db))
        wb = writebatch()
        for attr in dir(obj):
            # "_name" but not "__name": storage fields only.
            if attr.startswith('_') and not attr.startswith('__'):
                wb.put("{0}.{1}".format(obj.uuid, attr), getattr(obj, attr))
        # Bug fix: the original wrote through an undefined name ``db``
        # (NameError at runtime); the handle opened above is ``_db``.
        _db.write(wb)
        return True
    except Exception as err:
        raise err
def utxo_by_address(address,network,block_height):
    """Collect unspent outputs credited to *address*.

    Keys are stored as ``"<height>.<tx_hash>"``; values as
    ``"<address>|<amount>"``.

    :param address: address string to match
    :param network: network subdirectory under DATA_PATH
    :param block_height: current chain height, used for confirmations
    :return: list of {'amount', 'confirmations', 'tx_hash'} dicts, or
        None when the address has no UTXOs.
    """
    db = open_db(join_path(join_path(DATA_PATH,network),'utxo'))
    it = db.iteritems()
    it.seek_to_first()
    total = 0  # NOTE(review): never used in this function.
    utxo = []
    for k,v in it:
        addr,amt = v.split('|')
        height,tx_hash = k.split('.')
        if address == addr:
            utxo.append({
                # NOTE(review): Decimal is not in the visible imports --
                # presumably from ``infiniti.params``; verify.
                'amount':Decimal(amt),
                'confirmations':int(int(block_height)-int(height)),
                'tx_hash':tx_hash
            })
    if len(utxo) > 0:
        return utxo
    else:
        return None
def balance_by_address(address, network):
    """Sum the unspent amounts credited to *address* on *network*.

    Values in the utxo store are ``"<address>|<amount>"`` strings; the
    matching amounts are accumulated as Decimals. Returns 0 when the
    address has no UTXOs.
    """
    db = open_db(join_path(join_path(DATA_PATH, network), 'utxo'))
    it = db.iteritems()
    it.seek_to_first()
    total = 0
    for _key, value in it:
        addr, amt = value.split('|')
        if addr == address:
            total += Decimal(amt)
    return total
|
[
"bryce@altsystem.io"
] |
bryce@altsystem.io
|
e9dfc09020ce8682f64b107a1c8ad0d41d060345
|
b1b520c9bae5b241405dbc1a6b25ef928877c317
|
/defangIpAddress.py
|
7bed80a95e1468a02ca4701cc4900f4fa9552b8a
|
[] |
no_license
|
TBobcat/Leetcode
|
0ee5d06be3f7f10077961a8e3aedcd28fe4cb85a
|
3b6e3d38ac2d5158329a9444ad7e7e7ee9d07176
|
refs/heads/main
| 2023-06-04T10:40:00.275728
| 2021-06-16T23:05:35
| 2021-06-16T23:05:35
| 375,470,361
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 660
|
py
|
def defangIPaddr(address):
    """Return *address* with every ``.`` replaced by ``[.]``.

    :type address: str
    :rtype: str
    """
    # str.replace performs the whole defang in one C-level pass; the
    # original rebuilt the string through a character list and join.
    return address.replace(".", "[.]")
if __name__ == '__main__':
    # Demo: defang two sample addresses when run as a script.
    address1 = "1.1.1.1"
    address2 = "255.100.50.0"
    for sample in (address1, address2):
        print(defangIPaddr(sample))
|
[
"xiangyuantech@gmail.com"
] |
xiangyuantech@gmail.com
|
8602e4c68c80a37d2046a827fca331f3acf97906
|
c1cad053e9fbfe536722c13338fff63e471ff252
|
/Next.tech/Analyzing-Text-Data/solution/chunking.py
|
58c9c3ededb97de3f1ce991ac04f48ebe62345aa
|
[] |
no_license
|
adataholic/Datascience
|
9bedfda47c3b84e72e9833b293ce2f602ca2ec9f
|
766a34c480eec61afdd0f485a4e77428cf3eba95
|
refs/heads/master
| 2020-12-27T01:33:55.458858
| 2020-01-06T23:48:28
| 2020-01-06T23:48:28
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 803
|
py
|
import nltk
nltk.download('brown')
import numpy as np
from nltk.corpus import brown
# Split a text into chunks
def splitter(data, num_words):
    """Split *data* into chunks of *num_words* space-separated words each.

    The final chunk carries the remainder and may be shorter. Fix: the
    original unconditionally appended the leftover buffer, so when the
    word count divided evenly it emitted a spurious trailing '' chunk.
    """
    words = data.split(' ')
    output = []
    cur_count = 0
    cur_words = []
    for word in words:
        cur_words.append(word)
        cur_count += 1
        if cur_count == num_words:
            output.append(' '.join(cur_words))
            cur_words = []
            cur_count = 0
    if cur_words:  # only append a remainder that actually has words
        output.append(' '.join(cur_words))
    return output
if __name__=='__main__':
    # Build one long string from the first 10k Brown-corpus tokens, then
    # split it into fixed-size word chunks. (Python 2 script: print statement.)
    # Read the data from the Brown corpus
    data = ' '.join(brown.words()[:10000])
    # Number of words in each chunk
    num_words = 1700
    chunks = []  # NOTE(review): chunks/counter are never used afterwards
    counter = 0
    text_chunks = splitter(data, num_words)
    print "Number of text chunks =", len(text_chunks)
|
[
"akshaymm13b034@gmail.com"
] |
akshaymm13b034@gmail.com
|
21d962029b74b4eafe0c5b512082596bdf3800f2
|
95e7cf518b8d71270a7de6e7c7254861010f5035
|
/garage/tf/algos/batch_polopt.py
|
4245380d53581a9a7d6e72637049760557283eaf
|
[
"MIT"
] |
permissive
|
reslthrowaway/garage
|
aaeadf7e918d80d467b2fcce61c50e8404480f83
|
e921119434d205b6f644f139f6075516fb9ece74
|
refs/heads/master
| 2020-03-28T08:32:58.835060
| 2018-09-08T21:55:41
| 2018-09-08T21:55:41
| 147,972,769
| 2
| 2
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,354
|
py
|
import time
import tensorflow as tf
from garage.algos import RLAlgorithm
import garage.misc.logger as logger
from garage.tf.plotter import Plotter
from garage.tf.samplers import BatchSampler
from garage.tf.samplers import VectorizedSampler
class BatchPolopt(RLAlgorithm):
    """
    Base class for batch sampling-based policy optimization methods.
    This includes various policy gradient methods like vpg, npg, ppo, trpo,
    etc.

    Subclasses must implement init_opt(), get_itr_snapshot() and
    optimize_policy(); this class owns the sample/process/optimize loop.
    """

    def __init__(self,
                 env,
                 policy,
                 baseline,
                 scope=None,
                 n_itr=500,
                 start_itr=0,
                 batch_size=5000,
                 max_path_length=500,
                 discount=0.99,
                 gae_lambda=1,
                 plot=False,
                 pause_for_plot=False,
                 center_adv=True,
                 positive_adv=False,
                 store_paths=False,
                 whole_paths=True,
                 fixed_horizon=False,
                 sampler_cls=None,
                 sampler_args=None,
                 force_batch_sampler=False,
                 **kwargs):
        """
        :param env: Environment
        :param policy: Policy
        :type policy: Policy
        :param baseline: Baseline
        :param scope: Scope for identifying the algorithm. Must be specified if
         running multiple algorithms
         simultaneously, each using different environments and policies
        :param n_itr: Number of iterations.
        :param start_itr: Starting iteration.
        :param batch_size: Number of samples per iteration.
        :param max_path_length: Maximum length of a single rollout.
        :param discount: Discount.
        :param gae_lambda: Lambda used for generalized advantage estimation.
        :param plot: Plot evaluation run after each iteration.
        :param pause_for_plot: Whether to pause before contiuing when plotting.
        :param center_adv: Whether to rescale the advantages so that they have
         mean 0 and standard deviation 1.
        :param positive_adv: Whether to shift the advantages so that they are
         always positive. When used in conjunction with center_adv the
         advantages will be standardized before shifting.
        :param store_paths: Whether to save all paths data to the snapshot.
        :return:
        """
        self.env = env
        self.policy = policy
        self.baseline = baseline
        self.scope = scope
        self.n_itr = n_itr
        self.start_itr = start_itr
        self.batch_size = batch_size
        self.max_path_length = max_path_length
        self.discount = discount
        self.gae_lambda = gae_lambda
        self.plot = plot
        self.pause_for_plot = pause_for_plot
        self.center_adv = center_adv
        self.positive_adv = positive_adv
        self.store_paths = store_paths
        self.whole_paths = whole_paths
        self.fixed_horizon = fixed_horizon
        # Vectorized policies default to the vectorized sampler unless the
        # caller forces the plain batch sampler.
        if sampler_cls is None:
            if self.policy.vectorized and not force_batch_sampler:
                sampler_cls = VectorizedSampler
            else:
                sampler_cls = BatchSampler
        if sampler_args is None:
            sampler_args = dict()
        self.sampler = sampler_cls(self, **sampler_args)
        self.init_opt()

    def start_worker(self, sess):
        """Start sampler workers (and the plotter when plotting is enabled)."""
        self.sampler.start_worker()
        if self.plot:
            self.plotter = Plotter(self.env, self.policy, sess)
            self.plotter.start()

    def shutdown_worker(self):
        """Shut down sampler workers (and the plotter, if it was started)."""
        self.sampler.shutdown_worker()
        if self.plot:
            self.plotter.shutdown()

    def obtain_samples(self, itr):
        """Collect a batch of rollouts for iteration *itr* via the sampler."""
        return self.sampler.obtain_samples(itr)

    def process_samples(self, itr, paths):
        """Post-process raw rollouts into training data via the sampler."""
        return self.sampler.process_samples(itr, paths)

    def train(self, sess=None):
        """Run the full training loop and return the last average return.

        Creates (and later closes) a TF session only when the caller did
        not supply one.
        """
        created_session = True if (sess is None) else False
        if sess is None:
            sess = tf.Session()
            sess.__enter__()

        sess.run(tf.global_variables_initializer())
        self.start_worker(sess)
        start_time = time.time()
        last_average_return = None
        for itr in range(self.start_itr, self.n_itr):
            itr_start_time = time.time()
            with logger.prefix('itr #%d | ' % itr):
                logger.log("Obtaining samples...")
                paths = self.obtain_samples(itr)
                logger.log("Processing samples...")
                samples_data = self.process_samples(itr, paths)
                last_average_return = samples_data["average_return"]
                logger.log("Logging diagnostics...")
                self.log_diagnostics(paths)
                logger.log("Optimizing policy...")
                self.optimize_policy(itr, samples_data)
                logger.log("Saving snapshot...")
                params = self.get_itr_snapshot(itr, samples_data)
                if self.store_paths:
                    params["paths"] = samples_data["paths"]
                logger.save_itr_params(itr, params)
                logger.log("Saved")
                logger.record_tabular('Time', time.time() - start_time)
                logger.record_tabular('ItrTime', time.time() - itr_start_time)
                logger.dump_tabular(with_prefix=False)
                if self.plot:
                    self.plotter.update_plot(self.policy, self.max_path_length)
                    if self.pause_for_plot:
                        input("Plotting evaluation run: Press Enter to "
                              "continue...")

        self.shutdown_worker()
        if created_session:
            sess.close()
        return last_average_return

    def log_diagnostics(self, paths):
        """Delegate per-iteration diagnostics to policy and baseline."""
        self.policy.log_diagnostics(paths)
        self.baseline.log_diagnostics(paths)

    def init_opt(self):
        """
        Initialize the optimization procedure. If using tensorflow, this may
        include declaring all the variables and compiling functions
        """
        raise NotImplementedError

    def get_itr_snapshot(self, itr, samples_data):
        """
        Returns all the data that should be saved in the snapshot for this
        iteration.
        """
        raise NotImplementedError

    def optimize_policy(self, itr, samples_data):
        """Update the policy from one iteration's processed samples."""
        raise NotImplementedError
|
[
"reslthrowaway@yandex.com"
] |
reslthrowaway@yandex.com
|
6919b3a14f65b0c108aa619c12b7e531c0c04e51
|
6328387281d1b2b0bec13d51916d916fea1e7351
|
/myvenv/bin/easy_install-3.7
|
185061d76e1848a152ec8b720265f7b58b47dc6b
|
[] |
no_license
|
MedApplive/my-first-blog
|
81e40789efe28cf99c7a342f7b8c48e879e70b4c
|
0268eb6a64aa2733552b285accceba8cb82cd64d
|
refs/heads/master
| 2020-05-01T11:58:28.305631
| 2020-01-20T17:04:57
| 2020-01-20T17:04:57
| 177,456,209
| 0
| 0
| null | 2019-09-30T14:45:40
| 2019-03-24T18:57:54
|
Python
|
UTF-8
|
Python
| false
| false
| 272
|
7
|
#!/home/maurice/Documents/djangogirls/myvenv/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from setuptools.command.easy_install import main
if __name__ == '__main__':
    # Installer wrappers invoke this as "easy_install-script.py"/".exe";
    # strip that suffix so setuptools sees the canonical program name.
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
|
[
"msourir@gmail.com"
] |
msourir@gmail.com
|
cdbf1c4051359a9d4660b03166c1b23f3c05428f
|
bb097a986d37c5b6d49bf6d1f9df17960e32e36e
|
/final_project/poi_id.py
|
9f2d37fc735529d2c4baaf05ebe14afbc6c019da
|
[] |
no_license
|
noslav/enron_analysis
|
84e32fa2060c35d18e921cdb6647fd04019e7f57
|
9c98dfe8dcf71728cf8a8d75ab4b0c8468bd2d1e
|
refs/heads/master
| 2021-03-22T04:42:56.034087
| 2017-12-07T15:15:55
| 2017-12-07T15:15:55
| 96,874,843
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 20,872
|
py
|
#!/usr/bin/python
import sys
import pickle
sys.path.append("../tools/")
from feature_format import featureFormat, targetFeatureSplit
from tester import dump_classifier_and_data
import pandas
import numpy as np
from sklearn.feature_selection import SelectKBest
from sklearn.feature_selection import chi2, f_classif, mutual_info_classif
from sklearn.feature_selection import RFE
from sklearn.linear_model import LogisticRegression
from sklearn.decomposition import PCA
from sklearn.ensemble import ExtraTreesClassifier
from sklearn.model_selection import StratifiedShuffleSplit
import matplotlib.pyplot as plt
from sklearn.svm import SVC
from sklearn.model_selection import StratifiedKFold
from sklearn.feature_selection import RFECV
import matplotlib.pyplot
from sklearn.metrics import accuracy_score
from sklearn.metrics import precision_score
from sklearn.model_selection import cross_val_score
#from sklearn.datasets import make_classification
#=================================================================================#
### Task 1: Select what features you'll use.
### features_list is a list of strings, each of which is a feature name.
### The first feature must be "poi".
### for feature selection we run the selectKbest on the data set.
# Raw project dataset: {person_name: {feature_name: value}}; missing values
# appear as the string 'NaN'.
enron_data = pickle.load(open("../final_project/final_project_dataset.pkl", "r"))
#names of the columns in the 'enron_pickle_features.csv' data set.
names = ["salary" , "to_messages" , "deferral_payments" , "total_payments" ,
         "exercised_stock_options", "bonus" , "restricted_stock" , "shared_receipt_with_poi" ,
         "restricted_stock_deferred", "total_stock_value", "expenses" , "loan_advances" ,
         "from_messages" , "other" , "from_this_person_to_poi",
         "director_fees" , "deferred_income" , "long_term_incentive",
         "from_poi_to_this_person"]
#getting the data and then reading them into dataframes for manipulation and feature
#selection
def makeDataFramesFeature():
    """Load the exported feature/label CSVs and return (X, Y) numpy arrays.

    X holds the 19 feature columns; Y is the flattened POI label vector.
    """
    feature_frame = pandas.read_csv('enron_pickle_features.csv')
    poi_frame = pandas.read_csv('enron_poi.csv')
    # Drop the index column pandas wrote when the CSVs were exported.
    del feature_frame['Unnamed: 0']
    del poi_frame['Unnamed: 0']
    X = feature_frame.values[:, 0:19]
    Y = poi_frame.values.ravel()
    return X, Y
#X, Y = makeDataFramesFeature()
def featureSelectKbest(X, Y):
    """Score features with SelectKBest/chi2 and print the 4 best columns.

    chi2 rejects negative inputs, so this cannot run on the raw Enron
    financial features (several are negative).
    """
    selector = SelectKBest(score_func=chi2, k=4)
    fitted = selector.fit(X, Y)
    np.set_printoptions(precision=3)
    print(fitted.scores_)
    reduced = fitted.transform(X)
    print(reduced[0:6, :])
def featureRFE(X,Y): #recursive feature elimination using a random forest classifier
    """Rank features via RFE over a 200-tree random forest; print the
    4 selected features and a name->selected mapping. (Python 2: the
    print(...) % ... lines rely on the py2 print statement.)"""
    from sklearn.ensemble import RandomForestClassifier
    model = RandomForestClassifier(n_estimators=200)
    rfe = RFE(model, 4)
    fit = rfe.fit(X, Y)
    print("Num Features: %d") % fit.n_features_
    print("Selected Features: %s") % fit.support_
    print("Feature Ranking: %s") % fit.ranking_
    #print names
    # Pair each column name with its boolean "selected" flag, in order.
    i = 0
    namesdict = {}
    for values in names:
        namesdict[values] = fit.support_[i]
        i +=1
    print namesdict
#==============================================================================
# featureRFEout = { 'salary': False, 'to_messages': False, \
# 'deferral_payments': False, 'total_payments': False, 'loan_advances': False, \
# 'bonus': True, 'restricted_stock_deferred': False, 'total_stock_value': True, \
# 'shared_receipt_with_poi': False, 'long_term_incentive': False, \
# 'exercised_stock_options': False, 'from_messages': False, 'other': True, \
# 'from_poi_to_this_person': False, 'from_this_person_to_poi': False, \
# 'deferred_income': False, 'expenses': True, 'restricted_stock': False, \
# 'director_fees': False }
#==============================================================================
#Recursive feature elimination using random forests suggest I should use
#Bonus, total_stock_value, expenses and other (we can ignore this for now)
def featureRFECross(X,Y): #recursive feature elimination: a nice method to automatically
    #select good features using cross validation for classification
    """Cross-validated RFE (RFECV) over an RBF SVC; prints the optimal
    feature count, plots CV score vs. feature count, then prints a
    name->selected mapping. (Python 2 print statement at the end.)"""
    svc = SVC(kernel="rbf") #taking too much time.
    #model = LogisticRegression()
    # The "accuracy" scoring is proportional to the number of correct
    # classifications
    rfecv = RFECV(estimator=svc, step=1, cv=StratifiedKFold(2),
                  scoring='accuracy')
    rfecv.fit(X, Y)
    print("Optimal number of features : %d" % rfecv.n_features_)
    # Plot number of features VS. cross-validation scores
    plt.figure()
    plt.xlabel("Number of features selected")
    plt.ylabel("Cross validation score (nb of correct classifications)")
    plt.plot(range(1, len(rfecv.grid_scores_) + 1), rfecv.grid_scores_)
    plt.show()
    # Pair each column name with its boolean "selected" flag, in order.
    i = 0
    namesdict = {}
    for values in names:
        namesdict[values] = rfecv.support_[i]
        i +=1
    print namesdict
def featurePCA(X,Y): #also tried this but I could not figure out how to name the features
    """Fit a 4-component PCA on X and print explained variance and the
    component vectors. Y is unused. (Python 2 print(...) % ... idiom.)"""
    # for feature extraction
    pca = PCA(n_components=4)
    fit = pca.fit(X)
    # summarize components
    print("Explained Variance: %s") % fit.explained_variance_ratio_
    print(fit.components_)
def featureETC(X,Y): #extra method learnt to select features
    # feature extraction using bagged trees
    """Fit an ExtraTreesClassifier and print a name->importance mapping.
    (Python 2 print statement at the end.)"""
    namesdict = {}
    model = ExtraTreesClassifier()
    model.fit(X, Y)
    #print(model.feature_importances_)
    #print names
    # Pair each column name with its importance score, in order.
    i = 0
    for values in names:
        namesdict[values] = model.feature_importances_[i]
        i +=1
    print namesdict
#==============================================================================
# featureETCoutput = {'salary': 0.041632257710185139, 'to_messages': 0.039859107742246484,\
# 'deferral_payments': 0.024074150367185947, \
# 'total_payments': 0.064555518959854619, \
# 'loan_advances': 0.0093116015112734048, \
# 'bonus': 0.073052292449863596, \
# 'restricted_stock_deferred': 0.0042245370370370353,\
# 'total_stock_value': 0.09369028839725424, \
# 'shared_receipt_with_poi': 0.071123487226284629, \
# 'long_term_incentive': 0.04046480320342271, \
# 'exercised_stock_options': 0.1129150917901071, \
# 'from_messages': 0.024095798427395669, \
# 'other': 0.054538316103403574, \
# 'from_poi_to_this_person': 0.036628178381556319, \
# 'from_this_person_to_poi': 0.052257500165677206, \
# 'deferred_income': 0.10772890822303453, \
# 'expenses': 0.10845624961908869,\
# 'restricted_stock': 0.038109225603169372, \
# 'director_fees': 0.0032826870819597218}
#==============================================================================
#the results from feature RFE selection recommended using , exercised stock value, total_stock_value, expenses
#===================================================================================#
# Features chosen from the RFE/ETC selection runs above; 'poi' must come first
# for featureFormat/targetFeatureSplit.
features_list = ['poi','salary', 'total_stock_value', 'expenses', 'bonus']
# You will need to use more features + your feature
### Load the dictionary containing the dataset
with open("final_project_dataset.pkl", "r") as data_file:
    data_dict = pickle.load(data_file)
#===================================================================================#
##plotting for outlier removal
### Task 2: Remove outliers #done in .rmd file
data1 = featureFormat(data_dict, features_list)
def plotSalaryStock(data):
    """Scatter salary (col 1) vs. total_stock_value (col 2) for each row."""
    for point in data :
        salary = point[1]
        total_stock_value = point[2]
        matplotlib.pyplot.scatter(salary, total_stock_value)
    matplotlib.pyplot.xlabel("salary")
    matplotlib.pyplot.ylabel("total_stock_value")
    matplotlib.pyplot.show()
def plotSalaryExpenses(data):
    """Scatter salary (col 1) vs. expenses (col 3) for each row."""
    for point in data :
        salary = point[1]
        expenses = point[3]
        matplotlib.pyplot.scatter(salary, expenses)
    matplotlib.pyplot.xlabel("salary")
    matplotlib.pyplot.ylabel("expenses")
    matplotlib.pyplot.show()
def plotStockExpenses(data):
    """Scatter total_stock_value (col 2) vs. expenses (col 3) for each row."""
    for point in data :
        total_stock_value = point[2]
        expenses = point[3]
        matplotlib.pyplot.scatter(total_stock_value, expenses)
    matplotlib.pyplot.xlabel("total_stock_value")
    matplotlib.pyplot.ylabel("expenses")
    matplotlib.pyplot.show()
def plotSalaryBonus(data):
    """Scatter salary (col 1) vs. bonus (col 4) for each row."""
    for point in data :
        salary = point[1]
        bonus = point[4]
        matplotlib.pyplot.scatter(salary, bonus)
    matplotlib.pyplot.xlabel("salary")
    matplotlib.pyplot.ylabel("bonus")
    matplotlib.pyplot.show()
# Visual pass over the pre-cleaning data to spot outliers.
plotSalaryStock(data1)
plotSalaryExpenses(data1)
plotStockExpenses(data1)
plotSalaryBonus(data1)
#outliers were noticed in data in salary, bonus and total stock value
#===================================================================================#
#outlier removal functions
def salaryOut():
    """Blank out salary values > 2.5e7 or < 0 in data_dict (set to 'NaN')
    and print which keys were changed. Also visits the two known junk
    records by name. (Python 2 print statement.)"""
    outlierlist = []
    for keys in data_dict :
        if type(data_dict[keys]["salary"]) == int or keys == "TRAVEL AGENCY IN THE PARK" or keys == "LOCKHART EUGENE E":
            if data_dict[keys]["salary"] > 2.5e7:
                data_dict[keys]["salary"] = "NaN"
                outlierlist.append(keys)
            elif data_dict[keys]["salary"] < 0:
                data_dict[keys]["salary"] = "NaN"
                outlierlist.append(keys)
            else:
                pass
        else:
            pass
    print "Salary outliers removed :", outlierlist, "\n"
def totalStockValueOut():
    """Blank out total_stock_value > 4.0e8 or < 0 in data_dict (set to
    'NaN') and print which keys were changed. (Python 2 print statement.)"""
    outlierlist = []
    for keys in data_dict :
        if type(data_dict[keys]["total_stock_value"]) == int or keys == "TRAVEL AGENCY IN THE PARK" or keys == "LOCKHART EUGENE E" :
            if data_dict[keys]["total_stock_value"] > 4.0e8:
                data_dict[keys]["total_stock_value"] = "NaN"
                outlierlist.append(keys)
            elif data_dict[keys]["total_stock_value"] < 0:
                data_dict[keys]["total_stock_value"] = "NaN"
                outlierlist.append(keys)
            else:
                pass
        else:
            pass
    print "Total stock value outliers removed :",outlierlist, "\n"
def bonusOut():
    """Blank out bonus > 0.8e8 or < 0 in data_dict (set to 'NaN') and
    print which keys were changed. (Python 2 print statement.)"""
    outlierlist = []
    for keys in data_dict :
        if type(data_dict[keys]["bonus"]) == int or keys == "TRAVEL AGENCY IN THE PARK" or keys == "LOCKHART EUGENE E":
            if data_dict[keys]["bonus"] > 0.8e8:
                data_dict[keys]["bonus"] = "NaN"
                outlierlist.append(keys)
            elif data_dict[keys]["bonus"]< 0:
                data_dict[keys]["bonus"] = "NaN"
                outlierlist.append(keys)
            else:
                pass
        else:
            pass
    print "Bonus outliers removed :" , outlierlist, "\n"
def expenseOut():
    """Blank out expenses > 5.0e6 or < 0 in data_dict (set to 'NaN') and
    print which keys were changed. (Python 2 print statement.)"""
    outlierlist = []
    for keys in data_dict :
        if type(data_dict[keys]["expenses"]) == int or keys == "TRAVEL AGENCY IN THE PARK" or keys == "LOCKHART EUGENE E":
            if data_dict[keys]["expenses"] > 5.0e6:
                data_dict[keys]["expenses"] = "NaN"
                outlierlist.append(keys)
            elif data_dict[keys]["expenses"]< 0:
                data_dict[keys]["expenses"] = "NaN"
                outlierlist.append(keys)
            else:
                pass
        else:
            pass
    print "Expenses outliers removed ", outlierlist, "\n"
#removing salary outliers
#removing total_stock_value outliers
#removing bonus outliers
#removing expense outliers
# Mutates data_dict in place; run before re-extracting the feature matrix.
salaryOut()
totalStockValueOut()
bonusOut()
expenseOut()
#===================================================================================#
#replotting removed outliers
print "Take a look at outlier removed graphs :"
#reassigning data for looking at plots from the modified dictionaries
data2 = featureFormat(data_dict, features_list)
#plotting to see the effect of having removed outliers
plotSalaryStock(data2)
plotSalaryExpenses(data2)
plotStockExpenses(data2)
plotSalaryBonus(data2)
### Task 3: Create new feature(s)
#===================================================================================#
#making the new feature
#Total number of "to emails" to this person - shared_receipt_with_poi - sent_by_poi_to_this_person.
#The lower this number greater the relative importance since it means that most \
#of the communication to this person involved conversation with a poi or in the \
#same conversation with a poi. The number of emails from other people to this \
#person was low.
def newFeature():
    """Add a 'relative_importance' feature to every record in data_dict.

    relative_importance = to_messages - (shared_receipt_with_poi +
    from_poi_to_this_person). The lower it is, the larger the share of a
    person's inbound mail that involved a POI.

    Fix: the original compared only from_poi_to_this_person against 'NaN'
    and relied on truthiness for the other two fields. The dataset encodes
    missing values as the (truthy) string 'NaN', so a record with a
    partially-missing email triple could reach the arithmetic and raise a
    TypeError. All three fields are now validated explicitly.
    """
    for keys in data_dict:
        to = data_dict[keys].get("to_messages")
        shared = data_dict[keys].get("shared_receipt_with_poi")
        from_poi = data_dict[keys].get("from_poi_to_this_person")
        # All three must be real numbers (missing => None or the string 'NaN').
        if all(v not in (None, 'NaN') for v in (to, shared, from_poi)):
            data_dict[keys]["relative_importance"] = to - (shared + from_poi)
        else:
            data_dict[keys]["relative_importance"] = "NaN"

newFeature()
def extractRelativeImportance():
    """Return (values, keys) for records whose relative_importance is set.

    Skips records where the feature is the missing-value marker 'NaN'.
    """
    importance_values = []
    matching_keys = []
    for key in data_dict:
        if data_dict[key]["relative_importance"] != 'NaN':
            importance_values.append(data_dict[key].get("relative_importance"))
            matching_keys.append(key)
    return importance_values, matching_keys
### Store to my_dataset for easy export below.
#enron_dataframe.insert(
my_dataset = data_dict
#including the new features in the feature list
features_list2 = ['poi','salary', 'total_stock_value', 'expenses', 'bonus', 'relative_importance']
### Extract features and labels from dataset for local testing
data3 = featureFormat(my_dataset, features_list2, sort_keys = True)
labels, features = targetFeatureSplit(data3)
## done for finding out number of features to use.
# Full 19-feature list, used only to size the exhaustive feature matrix below.
features_list4 = ["poi", "salary" , "to_messages" , "deferral_payments" , "total_payments" ,
         "exercised_stock_options", "bonus" , "restricted_stock" , "shared_receipt_with_poi" ,
         "restricted_stock_deferred", "total_stock_value", "expenses" , "loan_advances" ,
         "from_messages" , "other" , "from_this_person_to_poi",
         "director_fees" , "deferred_income" , "long_term_incentive",
         "from_poi_to_this_person"]
data4 = featureFormat(my_dataset, features_list4, sort_keys = True)
labels4, features4 = targetFeatureSplit(data4)
labels4 = np.array(labels4)
features4 = np.array(features4)
#===================================================================================#
#trying the classifiers
### Task 4: Try a varity of classifiers
### Please name your classifier clf for easy export below.
### Note that if you want to do PCA or other multi-stage operations,
### you'll need to use Pipelines. For more info:
### http://scikit-learn.org/stable/modules/pipeline.html
# Provided to give you a starting point. Try a variety of classifiers.
def classifyNB():
    """Fit GaussianNB on the module-level train split, print accuracy and
    precision on the test split, return (clf, accuracy, precision).
    (Python 2 print statement.)"""
    from sklearn.naive_bayes import GaussianNB
    clf = GaussianNB()
    clf.fit(features_train, labels_train)
    pred = clf.predict(features_test)
    acc = accuracy_score(labels_test, pred)
    pres = precision_score(labels_test, pred)
    print acc , "NB Accuracy" , pres, "NB precision","\n"
    return clf , acc, pres
def classifyDT():
    """Fit a DecisionTreeClassifier (fixed random_state=8) on the
    module-level train split, print accuracy/precision on the test split,
    return (clf, accuracy, precision). (Python 2 print statement.)"""
    from sklearn import tree
    clf = tree.DecisionTreeClassifier(criterion='gini', splitter='best', \
                            max_depth=None, min_samples_split=2, \
                            min_samples_leaf=1, min_weight_fraction_leaf=0.0,\
                            max_features=None, random_state=8, \
                            max_leaf_nodes=None, min_impurity_split=1e-07,\
                            class_weight=None, presort=False)
    clf.fit(features_train, labels_train)
    pred = clf.predict(features_test)
    acc = accuracy_score(labels_test, pred)
    pres = precision_score(labels_test, pred)
    print acc , "DT Accuracy" , pres, "DT precision", "\n"
    return clf , acc, pres
def classifySVM():
    """Fit a polynomial-kernel SVC (C=0.9) on the module-level train split,
    print accuracy/precision on the test split, return
    (clf, accuracy, precision). (Python 2 print statement.)"""
    from sklearn.svm import SVC
    clf = SVC(C=0.9, cache_size=200, class_weight=None, coef0=0.0,
        decision_function_shape=None, degree=3, gamma='auto', kernel='poly')
    clf.fit(features_train, labels_train)
    pred = clf.predict(features_test)
    acc = accuracy_score(labels_test, pred)
    pres = precision_score(labels_test, pred)
    print acc , "SVM Accuracy" , pres, "SVM precision" , "\n"
    return clf , acc, pres
def classifyRF():
    """Fit a 200-tree RandomForestClassifier on the module-level train
    split, print accuracy/precision on the test split, return
    (clf, accuracy, precision). (Python 2 print statement.)"""
    from sklearn.ensemble import RandomForestClassifier
    clf = RandomForestClassifier(n_estimators=200)
    clf.fit(features_train, labels_train)
    pred = clf.predict(features_test)
    acc = accuracy_score(labels_test, pred)
    pres = precision_score(labels_test, pred)
    print acc , "RF Accuracy" , pres, "RF precision" , "\n"
    return clf , acc, pres
def classifyAB():
    """Fit a 200-estimator AdaBoost (SAMME, lr=0.5) on the module-level
    train split, print accuracy/precision and a cross-val score on the
    test split, return (clf, accuracy, precision).
    NOTE(review): cross_val_score is computed against the classifier's own
    predictions, not the true labels — that score is optimistic.
    (Python 2 print statement.)"""
    from sklearn.ensemble import AdaBoostClassifier
    clf = AdaBoostClassifier(base_estimator=None, n_estimators=200, \
                     learning_rate=0.5, algorithm='SAMME', random_state=None)
    clf.fit(features_train, labels_train)
    pred = clf.predict(features_test)
    acc = accuracy_score(labels_test, pred)
    pres = precision_score(labels_test, pred)
    print acc , "AB Accuracy" , pres, "AB precision"
    scores = cross_val_score(clf,features_test,pred )
    print scores.mean() , "AB Cross val score" , "\n"
    return clf , acc, pres
#give the number of splits on the validation step
n_splits = 5
SSS = StratifiedShuffleSplit(n_splits, test_size=0.5, random_state=0)
SSS.get_n_splits(features, labels)
print SSS ,"\n"
print "performing a stratified shuffle split for the validation process \
to ensure that an equal ratio of POIs to non-POIs are present in the training and \
test set", "\n"
# Accumulators: one accuracy/precision list per classifier across all splits.
accuracyNB = []
precisionNB = []
accuracyRF = []
precisionRF = []
accuracyAB = []
precisionAB = []
accuracyDT = []
precisionDT = []
# Each split rebinds the module-level train/test arrays that the
# classify* functions read implicitly.
for train_index, test_index in SSS.split(features, labels):
    print "=================================================================="
    print "TRAIN:", train_index, "\n" "TEST:", test_index, "\n"
    features = np.array(features)
    labels = np.array(labels)
    features_train, features_test = features[train_index], features[test_index]
    labels_train, labels_test = labels[train_index], labels[test_index]
    clf1, acc1, prec1 = classifyNB()
    clf2, acc2, prec2 = classifyRF()
    clf3, acc3, prec3 = classifyAB()
    clf4, acc4, prec4 = classifyDT()
    accuracyNB.append(acc1)
    precisionNB.append(prec1)
    accuracyRF.append(acc2)
    precisionRF.append(prec2)
    accuracyAB.append(acc3)
    precisionAB.append(prec3)
    accuracyDT.append(acc4)
    precisionDT.append(prec4)
print "=================================================================="
# Mean accuracy/precision per classifier over the n_splits folds.
print "average accuracy NB : " , float(sum(accuracyNB))/n_splits
print "precision accuracy NB : " , float(sum(precisionNB))/n_splits
print "average accuracy RF : " , float(sum(accuracyRF))/n_splits
print "precision accuracy RF : " , float(sum(precisionRF))/n_splits
print "average accuracy AB : " , float(sum(accuracyAB))/n_splits
print "precision accuracy AB : " , float(sum(precisionAB))/n_splits
print "average accuracy DT : " , float(sum(accuracyDT))/n_splits
print "precision accuracy DT : " , float(sum(precisionDT))/n_splits
#===================================================================================#
#exporting features
my_dataset = my_dataset
features_list = features_list
# The decision tree (trained on the last split) is the exported classifier.
clf, acc, prec = classifyDT()
#clf = classifyAB()
#clf = classifySVM()
#clf = classifyDT()
dump_classifier_and_data(clf, my_dataset, features_list)
#==============================================================================
#==============================================================================
# GaussianNB(priors=None)
# Accuracy: 0.85629 Precision: 0.49612 Recall: 0.38400 F1: 0.43292 F2: 0.40218
# Total predictions: 14000 True positives: 768 False positives: 780 False negatives: 1232 True negatives: 11220
#
#==============================================================================
#==============================================================================
|
[
"pranay.valson@gmail.com"
] |
pranay.valson@gmail.com
|
6645d5a6dce03632ee27b25baedaec0596e6733e
|
4eb32b229dffcfc19cc28189e12602e63a3799cc
|
/tests/conftest.py
|
b4628e9431f31f66807cbb01daa476159a7e75b7
|
[
"MIT"
] |
permissive
|
karlneco/kanji-test-maker
|
4fc09e31a80fdb47e176ba7f69daaa36790dc412
|
6d3c855718971cb0061b4c238ebb8329328018bf
|
refs/heads/master
| 2023-03-31T00:45:59.830161
| 2023-03-27T04:29:10
| 2023-03-27T04:29:10
| 227,032,872
| 2
| 0
|
MIT
| 2023-02-15T23:08:52
| 2019-12-10T05:00:46
|
Python
|
UTF-8
|
Python
| false
| false
| 3,456
|
py
|
import pytest
from flask_login import login_user, login_manager, LoginManager
from hktm import create_app, db
from hktm.models import User, Lesson, LessonMaterial, MaterialType
@pytest.fixture()
def app():
    """Fresh Flask application configured from test.cfg."""
    app = create_app('test.cfg')
    return app
@pytest.fixture()
def auth_user(app):
    """Make every request appear authenticated: the login manager's
    request loader always returns the first User in the database."""
    @app.login_manager.request_loader
    def load_user_from_request(request):
        return User.query.first()
@pytest.fixture(scope='module')
def test_client():
    """Module-scoped Flask test client with a pushed app context.

    The context is popped again when the module's tests finish.
    """
    app = create_app('test.cfg')
    client = app.test_client()
    ctx = app.app_context()
    ctx.push()
    yield client
    ctx.pop()
@pytest.fixture()
def init_database():
    """Create all tables, yield the db handle, drop everything after."""
    db.create_all()
    db.session.commit()
    yield db
    db.drop_all()
@pytest.fixture()
def add_data(init_database):
    """Seed the (freshly created) database with the standard test data:
    six material types, five users (grades encoded as comma-separated
    strings; 'A...' marks the admin), five lessons, and one lesson
    material attached to lesson 1-1."""
    material_type1 = MaterialType('KJTS','Kanji Test','Instruction for Kanji Test')
    material_type2 = MaterialType('TRCP','Tracing','Instruction for Tracing')
    material_type3 = MaterialType('KJWR','Kanji Writing','Instruction for Kanji Writing')
    material_type4 = MaterialType('KJRD','Kanji Reading','Instruction for Kanji Reading')
    material_type5 = MaterialType('NWRD','New Reading','Instruction for New Reading')
    material_type6 = MaterialType('KT36','Kanji Test 3-6','Instruction for Kanji Test grades 3-6')

    user1 = User('user1@gmail.com','password')
    user1.grades = '1'
    user2 = User('user26@gmail.com','password')
    user2.grades = '2,6'
    user3 = User('userempty@gmail.com','password')
    user3.grades = '3'
    user4 = User('usernoteset@gmail.com','password')
    userAdmin = User('admin@hoshuko.com','password')
    userAdmin.grades = 'A123456789'

    lesson11 = Lesson('Grade 1 - Lesson 1','1')
    lesson12 = Lesson('Grade 1 - Lesson 2','1')
    lesson21 = Lesson('Grade 2 - Lesson 1','2')
    lesson22 = Lesson('Grade 2 - Lesson 2','2')
    lesson61 = Lesson('Grade 6 - Lesson 1','6')

    db.session.add_all([material_type1,material_type2,material_type3,material_type4,material_type5,material_type6])
    db.session.add_all([user1, user2, user3, user4, userAdmin])
    db.session.add_all([lesson11, lesson12, lesson21, lesson22, lesson61])
    db.session.commit()

    # Needs the first commit so lesson11.id is populated.
    lesson_mat_11_1 = LessonMaterial('Lesson 1 Material 1','something',lesson11.id,'KJTS')
    db.session.add(lesson_mat_11_1)
    db.session.commit()
@pytest.fixture()
def existing_user():
    """Persist one grade-1 user (testuser@gmail.com) into the database."""
    user = User('testuser@gmail.com','password')
    user.grades = '1'
    db.session.add(user)
    db.session.commit()
@pytest.fixture
def authenticated_request(test_client):
    """Persist a grade-1 user for request tests.

    NOTE(review): despite the name, this fixture only creates the user; the
    actual login_user call is commented out below.
    """
    user = User('testuser@gmail.com','password')
    user.grades = '1'
    db.session.add(user)
    db.session.commit()
    # with flask_app.test_request_context():
    #     yield login_user(User('testuser@gmail.com','password'))
######################################## fix for Live_server fixture and windows
# @pytest.fixture(scope="session")
# def flask_port():
# ## Ask OS for a free port.
# #
# with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
# s.bind(("", 0))
# addr = s.getsockname()
# port = addr[1]
# return port
#
# @pytest.fixture(scope="session", autouse=True)
# def live_server(flask_port):
# env = os.environ.copy()
# env["FLASK_APP"] = "main.py"
# server = subprocess.Popen(['flask', 'run', '--port', str(flask_port)], env=env)
# try:
# yield server
# finally:
# server.terminate()
|
[
"karl1112@gmail.com"
] |
karl1112@gmail.com
|
8402be75cce1ddbd62ff54e6ca1af746d547ba7e
|
ca7aa979e7059467e158830b76673f5b77a0f5a3
|
/Python_codes/p04044/s043863854.py
|
77675a21e84977960d992326c6742602cc68d034
|
[] |
no_license
|
Aasthaengg/IBMdataset
|
7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901
|
f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8
|
refs/heads/main
| 2023-04-22T10:22:44.763102
| 2021-05-13T17:27:22
| 2021-05-13T17:27:22
| 367,112,348
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 172
|
py
|
# Read N and L (L is unused by this solution), collect the N words,
# then print them concatenated in sorted (lexicographic) order.
N, L = map(int, input().split())
collected = [input() for _ in range(N)]
print(''.join(sorted(collected)))
|
[
"66529651+Aastha2104@users.noreply.github.com"
] |
66529651+Aastha2104@users.noreply.github.com
|
37dafa17ed9dc319a258358248dd28b2bbf33390
|
ee8c4c954b7c1711899b6d2527bdb12b5c79c9be
|
/assessment2/amazon/run/core/controllers/old.py
|
b834b8f7e61106d74a5d8d14bccffde5155b3848
|
[] |
no_license
|
sqlconsult/byte
|
02ac9899aebea4475614969b594bfe2992ffe29a
|
548f6cb5038e927b54adca29caf02c981fdcecfc
|
refs/heads/master
| 2021-01-25T14:45:42.120220
| 2018-08-11T23:45:31
| 2018-08-11T23:45:31
| 117,135,069
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 360
|
py
|
#!/usr/bin/env python3
from flask import Blueprint, Flask, render_template, request, url_for
# Blueprint for legacy routes, mounted under /old. The sample route below
# is intentionally left disabled.
controller = Blueprint('old', __name__, url_prefix='/old')

# @controller.route('/<string:title>', methods=['GET'])
# def lookup(title):
#     if title == 'Republic':                        # TODO 2
#         return render_template('republic.html')    # TODO 2
#     else:
#         pass
|
[
"sqlconsult@hotmail.com"
] |
sqlconsult@hotmail.com
|
d1fdbec314276a6ab8c6ad6bf9fb6fc536447263
|
09756a11e9c1e7c771a1ee7afaafff2bee28c9aa
|
/pyReadFileEx.py
|
30682bc1a8cadb34471a27fbe7b7c67e3103ea17
|
[] |
no_license
|
mgupte7/python-examples1
|
74c1038ce0973ea6c668adec064c64ad59341073
|
cfa903f3a6e021a408013f2fd45d3cb281a094ab
|
refs/heads/main
| 2023-08-27T10:06:24.681075
| 2021-11-11T02:56:28
| 2021-11-11T02:56:28
| 405,139,727
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 949
|
py
|
# ------------------------------------------------
# ----------------Python Read Files---------------
# ------------------------------------------------
# ex 1 - The open() function returns a file object, which has a read() method for reading the content of the file:
# NOTE(review): examples 1-6 re-open the file without closing the previous
# handle; only example 7 closes it. Fine for a tutorial, leaky in real code.
# ex 1 - The open() function returns a file object, which has a read() method for reading the content of the file:
f = open("demofile.txt", "r")
print(f.read())
# ex 2 - Open a file on a different location:
# NOTE(review): "\w" in the path is an unescaped backslash sequence; it
# happens to survive because \w is not a recognized escape.
f = open("D:\\myfiles\welcome.txt", "r")
print(f.read())
# ex 3 - Return the 5 first characters of the file:
f = open("demofile.txt", "r")
print(f.read(5))
# ex 4 - Read one line of the file:
f = open("demofile.txt", "r")
print(f.readline())
# ex 5 - Read two lines of the file:
f = open("demofile.txt", "r")
print(f.readline())
print(f.readline())
# ex 6 - Loop through the file line by line:
f = open("demofile.txt", "r")
for x in f:
  print(x)
# ex 7 - Close the file when you are finish with it:
f = open("demofile.txt", "r")
print(f.readline())
f.close()
# ex 8
# ex 9
# ex 10
|
[
"noreply@github.com"
] |
noreply@github.com
|
db6db0e486babf550b51b49de237a138ddc8b6ff
|
960a8f1bec84563680271d10e2b9dfd296599d86
|
/python/kyu_6/replace_with_alphabet_position.py
|
d24aef52d6acd7655c9d3a2f654f25666d09e95a
|
[] |
no_license
|
Sqvall/codewars
|
42bcbfad99cd6c34fd3ec5fd903010d255e5d8e9
|
4f102d89ff86cd544eed362a232cbc1f4afea77c
|
refs/heads/master
| 2021-11-18T22:35:38.717604
| 2021-09-27T10:54:10
| 2021-09-27T10:54:10
| 204,555,403
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 606
|
py
|
""" https://www.codewars.com/kata/546f922b54af40e1e90001da """
from string import ascii_lowercase
def alphabet_position(text):
    """Return the 1-based alphabet positions of every letter in *text*.

    Non-letter characters are skipped; positions are space-separated.
    Example: 'ab c' -> '1 2 3'.
    """
    # Build the letter->position map once with enumerate instead of a manual
    # counter loop; str(int) replaces the needless repr(int).
    positions = {letter: index for index, letter in enumerate(ascii_lowercase, start=1)}
    return ' '.join(str(positions[ch]) for ch in text.lower() if ch in positions)


print(alphabet_position("The sunset sets at twelve o' clock."))
# '20 8 5 19 21 14 19 5 20 19 5 20 19 1 20 20 23 5 12 22 5 15 3 12 15 3 11'
|
[
"masster1987@inbox.ru"
] |
masster1987@inbox.ru
|
7c07de17053aa19697fa3b8ec39bfe5a624ff542
|
feca84ca57eee5045a1235006e472b4766ca3b06
|
/flaskapi/graphapp01.py
|
083c083606a6b6a7422cad37d931d98e190b6d4e
|
[] |
no_license
|
Chasbott182/mycode
|
1b89fa5d1d94a4ff39c8ce98b046189a2d9df10f
|
5afcd3cc32feee16e0fecc85a6165aaa01f60774
|
refs/heads/main
| 2023-07-14T17:06:34.618830
| 2021-08-13T20:46:59
| 2021-08-13T20:46:59
| 392,015,327
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,815
|
py
|
#!/usr/bin/python3
import numpy as np # number operations
import yaml # pyyaml for yaml
import re # regex
import paramiko # ssh into servers
from flask import Flask, render_template
import matplotlib.pyplot as plt
def sshlogin(ip, un, passw):
    """SSH into *ip* and return its uptime in whole days (as a float).

    Reads the first field of /proc/uptime (seconds since boot), writes the raw
    value to a local file named 'sshresult', prints and returns the day count.

    The original leaked the SSH session if connect()/exec_command() raised;
    try/finally guarantees close().
    """
    sshsession = paramiko.SSHClient()
    sshsession.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    try:
        sshsession.connect(hostname=ip, username=un, password=passw)
        ssh_stdin, ssh_stdout, ssh_stderr = sshsession.exec_command("cat /proc/uptime")
        sshresult = ssh_stdout.read().decode('utf-8').split()[0]
    finally:
        sshsession.close()
    with open("sshresult", "w") as myfile:
        myfile.write(sshresult)
    days = (int(float(sshresult)) / 86400)  # convert uptime in sec to days
    print(days)
    return days
app = Flask(__name__)


@app.route("/graphin")
def graphin():
    """Render a bar chart of each server's uptime in days at /graphin.

    Loads SSH credentials from a YAML file, queries every host via sshlogin(),
    saves the chart to static/status.png and renders graph.html.
    """
    # Raw string keeps the Windows-path backslashes literal.
    with open(r"C:\mycode\sshpass.yml") as sshpass:  # creds for our servers
        # safe_load: yaml.load() without an explicit Loader is unsafe on
        # untrusted input and raises TypeError on PyYAML >= 6.
        creds = yaml.safe_load(sshpass)
    svruptime = []
    xtick = []
    for cred in creds:
        xtick.append(cred['ip'])
        resp = sshlogin(cred['ip'], cred['un'], cred['passw'])
        svruptime.append(resp)
    xtick = tuple(xtick)  # create a tuple
    svruptime = tuple(svruptime)

    # graphin
    N = len(svruptime)  # total number of bars (was hard-coded to 2, which broke for != 2 servers)
    ind = np.arange(N)  # the x locations for the groups
    width = 0.35  # the width of the bars: can also be len(x) sequence

    p1 = plt.bar(ind, svruptime, width)
    plt.ylabel('Uptime in Days')
    plt.title('Uptime of Servers in Days')
    plt.xticks(ind, xtick)
    plt.yticks(np.arange(0, 20, 1))  # prob want to turn this into a log scale
    plt.savefig('static/status.png')  # might want to save this with timestamp for history purposes
    return render_template("graph.html")


if __name__ == "__main__":
    app.run(host="0.0.0.0", port=3000)
|
[
"chasbott182@gmail.com"
] |
chasbott182@gmail.com
|
3e1077c0c7f104ca8e5cff93ad223d48c668bb26
|
d04975c798ad43c8fadc8cf7b0b3742bbbdbc632
|
/dashboard/consumption/queries.py
|
402f2756edeaf320d860670f4f342dbda480aacc
|
[] |
no_license
|
yuki0417/smap-coding-challenge
|
331e99a980adcf561338ec539631c78e33b251d0
|
6041d895f6088d8d19c12cd0ec7a44c3bb15d04c
|
refs/heads/master
| 2023-03-13T14:48:44.443640
| 2021-03-02T13:07:59
| 2021-03-02T13:07:59
| 339,998,433
| 0
| 0
| null | 2021-03-02T13:08:00
| 2021-02-18T09:28:08
|
Python
|
UTF-8
|
Python
| false
| false
| 2,451
|
py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db.models import Avg, Sum
from .models import Consumption, User
class ConsumptionQueryset():
    """Query helpers for consumption data.

    NOTE(review): the methods take no ``self`` and are called on the class
    itself; marking them ``@staticmethod`` would make that intent explicit.
    """
    def get_consumption_avg_and_sum():
        """get average and sum of consumption

        Aggregate the average and total consumption per datetime, over all
        users, ordered chronologically.

        Returns:
            QuerySet (dict): one dict per datetime with Avg/Sum annotations

        Raises:
            Consumption.DoesNotExist: if no Consumption rows exist
        """
        # values('datetime') groups the annotations by timestamp.
        consum_data = Consumption.objects.values('datetime').annotate(
            Avg('consumption'), Sum('consumption')).order_by('datetime')
        if consum_data.exists():
            return consum_data
        else:
            raise Consumption.DoesNotExist

    def get_each_user_average_consumption():
        """get user's average consumption

        Aggregate the average consumption per user over all time.

        Returns:
            QuerySet (dict): one dict per user_id with an Avg annotation

        Raises:
            Consumption.DoesNotExist: if no Consumption rows exist
        """
        consum_data = Consumption.objects.values('user_id').annotate(
            Avg('consumption'))
        if consum_data.exists():
            return consum_data
        else:
            raise Consumption.DoesNotExist

    def get_user_consumption_order_by_date(user_id):
        """get user's consumption data order by datetime

        Fetch the specified user's consumption rows in chronological order.

        Args:
            user_id (int): user id

        Returns:
            QuerySet (Consumption): the user's consumption rows

        Raises:
            User.DoesNotExist: if the user id is unknown
            Consumption.DoesNotExist: if the user has no consumption rows
        """
        user = User.objects.get(id=user_id)
        consum_data = Consumption.objects.filter(
            user_id=user).order_by('datetime')
        if consum_data.exists():
            return consum_data
        else:
            raise Consumption.DoesNotExist
class UserQueryset():
    """Query helpers for user data.

    NOTE(review): the method takes no ``self`` and is called on the class.
    """

    def get_all_user():
        """Return every User row.

        Returns:
            QuerySet (User): all user data

        Raises:
            User.DoesNotExist: when the table is empty
        """
        users = User.objects.all()
        if not users.exists():
            raise User.DoesNotExist
        return users
|
[
"nicorasuster@gmail.com"
] |
nicorasuster@gmail.com
|
16ca7bb88839c3a6b72181762a2ffaa3aac44251
|
f4062989a2b3cab117ac18a1e5408b521f69bf76
|
/users/validators.py
|
d7e65c36f869f9ac0cf31fc4c5ebfb0141d88c2e
|
[] |
no_license
|
saeedhassan/CHQ_Backend
|
4e07bd5d95251e6bcd472d320e92f092d64bf071
|
3798a2b8c4e5bfbb4f4826ee22d9574316a18a90
|
refs/heads/main
| 2023-02-26T02:36:59.478206
| 2021-01-25T17:07:53
| 2021-01-25T17:07:53
| 467,846,247
| 1
| 0
| null | 2022-03-09T08:49:02
| 2022-03-09T08:49:02
| null |
UTF-8
|
Python
| false
| false
| 1,732
|
py
|
"""
Custom model validators
"""
import re
import django
import jsonschema
from django.core.exceptions import ValidationError
from django.core.validators import BaseValidator
from django.utils.translation import gettext_lazy as _
from users.CHQ_Scoring.github_score import CHQScore
from django.conf import settings
from users.utils import get_github_username
from users import news
class JSONSchemaValidator(BaseValidator):
    """validate json schemas against templates"""

    # BaseValidator calls compare(value, limit_value); here limit_value is the
    # JSON schema the field value must satisfy.
    # NOTE(review): the parameter name 'input' shadows the builtin.
    def compare(self, input, schema):
        try:
            jsonschema.validate(input, schema)
        except jsonschema.exceptions.ValidationError:
            # Re-raise as a Django ValidationError so forms/models handle it.
            raise django.core.exceptions.ValidationError(
                '%(value)s failed JSON schema check', params={'value': input})
def validate_no_news_source(value):
    """Reject *value* unless it is one of the supported news sources."""
    if value in news.NEWS_SITES:
        return
    raise ValidationError(
        _('%(value)s is not an available news source'),
        params={'value': value},
    )
def validate_github_url(value):
    """Validate that *value* ends with a GitHub profile path.

    Accepts strings ending in ``github.com/<username>`` (alphanumeric runs
    separated by single hyphens, optional trailing slash).

    Raises:
        ValidationError: if the string is not a valid GitHub profile URL.
    """
    # Escaped the dot: the previous pattern's bare '.' matched ANY character,
    # so e.g. 'githubXcom/user' validated.
    pattern = r'github\.com\/[a-zA-Z0-9]+(?:-[a-zA-Z0-9]+)*\/?$'
    if re.search(pattern, value) is None:
        raise ValidationError(
            _('%(value)s is not a valid github profile.'),
            params={'value': value},
        )
def validate_github_user(value):
    """Ensure the GitHub username behind *value* actually exists.

    Consumes one GitHub API call via the CHQ scoring client.

    Raises:
        ValidationError: when the username does not exist on GitHub.
    """
    user_name = get_github_username(value)
    chq_score = CHQScore(settings.GITHUB_TOKEN)
    # Idiomatic truth test instead of comparing '== False'.
    if not chq_score.check_user_exists(user_name):
        raise ValidationError(_('github username %(value)s doesnt exist'),
                              params={'value': user_name},)
|
[
"x.suwaidi@gmail.com"
] |
x.suwaidi@gmail.com
|
b003fe59cd7510a33a775dff69cf99d6c0c439fb
|
c21b64617d440a2b70b75ecdb756607044b1b797
|
/todo/views.py
|
c135c8223640593189972880eb382f5f623da91c
|
[
"Apache-2.0"
] |
permissive
|
foxy4096/DjangoToDo
|
cfe8d6151ba2f32f922777f7d4646dd11d6b36cb
|
f19d1a6d0d953354245cb4e3dedd2ad8710b4ec0
|
refs/heads/main
| 2023-09-02T03:34:33.144510
| 2021-11-12T10:21:17
| 2021-11-12T10:21:17
| 427,317,178
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 850
|
py
|
from django.views.generic import *
from django.shortcuts import redirect
from .models import ToDo
class ToDoListView(ListView):
    """Show every ToDo item; the template receives them as ``todos``."""

    context_object_name = "todos"
    model = ToDo
class ToDoCreateView(CreateView):
    """Create a new ToDo item, then redirect to the list page."""

    model = ToDo
    success_url = '/'
    fields = '__all__'

    # No form_valid override needed: CreateView's default form_valid already
    # calls form.save(); the previous override saved the form twice per POST.
class ToDoUpdateView(UpdateView):
    """Edit an existing ToDo item, then redirect to the list page."""

    model = ToDo
    success_url = '/'
    fields = '__all__'

    # No form_valid override needed: UpdateView's default form_valid already
    # calls form.save(); the previous override saved the form twice per POST.
class ToDoDeleteView(DeleteView):
    """Confirm and delete a ToDo item, then redirect to the list page."""

    model = ToDo
    success_url = '/'

    def get_context_data(self, **kwargs):
        """Expose the object as ``todo`` without re-querying the database.

        DeleteView has already loaded the row as ``self.object``; the previous
        implementation fetched it a second time with ToDo.objects.get(pk=...).
        """
        context = super().get_context_data(**kwargs)
        context['todo'] = self.object
        return context
|
[
"54215788+foxy4096@users.noreply.github.com"
] |
54215788+foxy4096@users.noreply.github.com
|
34ffe8b08928d8766903d48e45e665ebdbb0834e
|
8f90482e83eaac3af8b8acf019a232ec92fc878d
|
/assignment1/q2_neural.py
|
5c6f31e1da37706d0dfe27371bfba3de63ff1230
|
[] |
no_license
|
aoussou/cs224n
|
72413ec35ad0c760127703da2ff0d6753d3a3559
|
1a27d8d359c920013424a5e3376fa734c20ac10c
|
refs/heads/master
| 2021-10-23T16:08:50.336982
| 2019-03-18T17:31:39
| 2019-03-18T17:31:39
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,627
|
py
|
#!/usr/bin/env python
import numpy as np
import random
from q1_softmax import softmax
from q2_sigmoid import sigmoid, sigmoid_grad
from q2_gradcheck import gradcheck_naive
def forward_backward_prop(data, labels, params, dimensions):
    """
    Forward and backward propagation for a two-layer sigmoidal network

    Compute the forward propagation and for the cross entropy cost,
    and backward propagation for the gradients for all parameters.

    Arguments:
    data -- M x Dx matrix, where each row is a training example.
    labels -- M x Dy matrix, where each row is a one-hot vector.
    params -- Model parameters, these are unpacked for you.
    dimensions -- A tuple of input dimension, number of hidden units
                  and output dimension

    Returns:
    (cost, grad) -- scalar cross-entropy cost and a flat vector of
                    gradients laid out as (W1, b1, W2, b2), matching params.
    """

    ### Unpack network parameters (do not modify)
    ofs = 0
    Dx, H, Dy = (dimensions[0], dimensions[1], dimensions[2])

    W1 = np.reshape(params[ofs:ofs+ Dx * H], (Dx, H))
    ofs += Dx * H
    b1 = np.reshape(params[ofs:ofs + H], (1, H))
    ofs += H
    W2 = np.reshape(params[ofs:ofs + H * Dy], (H, Dy))
    ofs += H * Dy
    b2 = np.reshape(params[ofs:ofs + Dy], (1, Dy))

    # Forward pass: input -> sigmoid hidden layer -> softmax output.
    z1 = np.dot(data,W1) + b1
    h = sigmoid(z1)
    z2 = np.dot(h,W2) + b2
    yhat = softmax(z2)

    # Cross-entropy cost summed over all examples (labels are one-hot).
    cost = -np.sum(labels*np.log(yhat))

    # Backward pass.
    # d1: dCost/dz2 — the softmax + cross-entropy shortcut (yhat - y).
    d1 = yhat - labels
    # d2: dCost/dh, chained back through W2.
    d2 = d1.dot(W2.T)
    # d3: dCost/dz1, chained through the sigmoid (sigmoid_grad takes h, the
    # *activation*, not z1).
    d3 = d2*sigmoid_grad(h)

    gradW2 = np.dot(h.T,d1)
    gradb2 = np.sum(d1,axis = 0)
    gradW1 = np.dot(data.T,d3)
    gradb1 = np.sum(d3,axis = 0)

    ### Stack gradients (do not modify)
    grad = np.concatenate((gradW1.flatten(), gradb1.flatten(),
        gradW2.flatten(), gradb2.flatten()))

    return cost, grad
def sanity_check():
    """
    Set up fake data and parameters for the neural network, and test using
    gradcheck.
    """
    # print(...) and range(...) keep this runnable under both Python 2 and 3;
    # the original used the Python-2-only print statement and xrange.
    print("Running sanity check...")

    N = 20
    dimensions = [10, 5, 10]
    data = np.random.randn(N, dimensions[0])   # each row will be a datum
    labels = np.zeros((N, dimensions[2]))
    # Give every example a random one-hot label.
    for i in range(N):
        labels[i, random.randint(0, dimensions[2] - 1)] = 1

    # One flat parameter vector sized for (W1, b1, W2, b2).
    params = np.random.randn((dimensions[0] + 1) * dimensions[1] + (
        dimensions[1] + 1) * dimensions[2], )
    gradcheck_naive(lambda params:
        forward_backward_prop(data, labels, params, dimensions), params)
def your_sanity_checks():
    """
    Use this space add any additional sanity checks by running:
        python q2_neural.py
    This function will not be called by the autograder, nor will
    your additional tests be graded.
    """
    pass  # intentionally empty placeholder
if __name__ == "__main__":
    # When executed as a script, run the gradient check plus any user checks.
    sanity_check()
    your_sanity_checks()
|
[
"noreply@github.com"
] |
noreply@github.com
|
b5f8da9a720fc87c6d511ba662b7005be382c8eb
|
da93b0746d5b12899c17db53839cd3055d6e8267
|
/bin/dcn2.py
|
111d73937abee3c2b859718d0ddab6fd1c745ffd
|
[
"Apache-2.0"
] |
permissive
|
mattmoehr/rtdl
|
01228261739dcc3cf3fb0e47f3a9d987c8e322eb
|
44cdf56fd958bc60609ae595911d272afa998d67
|
refs/heads/main
| 2023-08-08T10:05:01.482980
| 2021-09-02T14:33:13
| 2021-09-02T14:33:13
| 408,602,108
| 0
| 0
|
Apache-2.0
| 2021-09-20T21:14:04
| 2021-09-20T21:14:03
| null |
UTF-8
|
Python
| false
| false
| 8,536
|
py
|
# %%
import math
import typing as ty
from pathlib import Path
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
import zero
import lib
# %%
class CrossLayer(nn.Module):
    """One DCN-v2 cross layer: dropout(x0 * W(x)) + x.

    Element-wise feature crossing against the base input x0, with a residual
    connection back to x.
    """

    def __init__(self, d, dropout):
        super().__init__()
        self.linear = nn.Linear(d, d)
        self.dropout = nn.Dropout(dropout)

    def forward(self, x0, x):
        crossed = x0 * self.linear(x)
        return self.dropout(crossed) + x
class DCNv2(nn.Module):
    """Deep & Cross Network v2 for tabular data.

    Numeric features (plus embedded categorical features, if any) pass through
    a shared first linear layer, then through a stack of cross layers and an
    MLP — either sequentially ("stacked") or in parallel with concatenated
    outputs.
    """

    def __init__(
        self,
        *,
        d_in: int,
        d: int,
        n_hidden_layers: int,
        n_cross_layers: int,
        hidden_dropout: float,
        cross_dropout: float,
        d_out: int,
        stacked: bool,
        categories: ty.Optional[ty.List[int]],
        d_embedding: int,
    ) -> None:
        super().__init__()

        if categories is not None:
            # One d_embedding-wide embedding per categorical feature.
            d_in += len(categories) * d_embedding
            # Offsets shift each feature's raw codes into its own slice of a
            # single shared embedding table.
            category_offsets = torch.tensor([0] + categories[:-1]).cumsum(0)
            self.register_buffer('category_offsets', category_offsets)
            self.category_embeddings = nn.Embedding(sum(categories), d_embedding)
            nn.init.kaiming_uniform_(self.category_embeddings.weight, a=math.sqrt(5))
            print(f'{self.category_embeddings.weight.shape=}')

        self.first_linear = nn.Linear(d_in, d)
        # Parallel mode concatenates cross and deep branches, doubling width.
        self.last_linear = nn.Linear(d if stacked else 2 * d, d_out)
        # Flatten [Linear, ReLU, Dropout] triples into one layer list.
        deep_layers = sum(
            [
                [nn.Linear(d, d), nn.ReLU(True), nn.Dropout(hidden_dropout)]
                for _ in range(n_hidden_layers)
            ],
            [],
        )
        cross_layers = [CrossLayer(d, cross_dropout) for _ in range(n_cross_layers)]

        self.deep_layers = nn.Sequential(*deep_layers)
        self.cross_layers = nn.ModuleList(cross_layers)
        self.stacked = stacked

    def forward(self, x_num, x_cat):
        if x_cat is not None:
            # Embed categorical codes and flatten them next to the numerics.
            x_cat = self.category_embeddings(x_cat + self.category_offsets[None])
            x = torch.cat([x_num, x_cat.view(x_cat.size(0), -1)], dim=-1)
        else:
            x = x_num
        x = self.first_linear(x)

        # Each cross layer mixes the running value with the base input x.
        x_cross = x
        for cross_layer in self.cross_layers:
            x_cross = cross_layer(x, x_cross)

        if self.stacked:
            # Stacked: cross output feeds the deep MLP.
            return self.last_linear(self.deep_layers(x_cross)).squeeze(1)
        else:
            # Parallel: cross and deep branches run side by side.
            return self.last_linear(
                torch.cat([x_cross, self.deep_layers(x)], dim=1)
            ).squeeze(1)
# %%
# ---- Experiment setup: config, dataset, model, optimizer. ----
args, output = lib.load_config()

# %%
zero.set_randomness(args['seed'])
dataset_dir = lib.get_path(args['data']['path'])
stats: ty.Dict[str, ty.Any] = {
    'dataset': dataset_dir.name,
    'algorithm': Path(__file__).stem,
    **lib.load_json(output / 'stats.json'),
}
timer = zero.Timer()
timer.run()

# Build features (X) and targets (Y) with the configured preprocessing.
D = lib.Dataset.from_dir(dataset_dir)
X = D.build_X(
    normalization=args['data'].get('normalization'),
    num_nan_policy='mean',
    cat_nan_policy='new',
    cat_policy=args['data'].get('cat_policy', 'counter'),
    cat_min_frequency=args['data'].get('cat_min_frequency', 0.0),
    seed=args['seed'],
)
# Normalize X to a (numeric, categorical) pair; categorical may be absent.
if not isinstance(X, tuple):
    X = (X, None)
zero.set_randomness(args['seed'])
Y, y_info = D.build_y(args['data'].get('y_policy'))
lib.dump_pickle(y_info, output / 'y_info.pickle')
X = tuple(None if x is None else lib.to_tensors(x) for x in X)
Y = lib.to_tensors(Y)
device = lib.get_device()
# Move tensors to the training device; keep a CPU copy of Y for metrics.
if device.type != 'cpu':
    X = tuple(None if x is None else {k: v.to(device) for k, v in x.items()} for x in X)
    Y_device = {k: v.to(device) for k, v in Y.items()}
else:
    Y_device = Y
X_num, X_cat = X
if not D.is_multiclass:
    # Regression / binary targets must be float for the loss functions below.
    Y_device = {k: v.float() for k, v in Y_device.items()}

train_size = len(X_num[lib.TRAIN])
batch_size = args['training']['batch_size']
epoch_size = stats['epoch_size'] = math.ceil(train_size / batch_size)

# Pick the loss by task type: BCE (binary), CE (multiclass), MSE (regression).
loss_fn = (
    F.binary_cross_entropy_with_logits
    if D.is_binclass
    else F.cross_entropy
    if D.is_multiclass
    else F.mse_loss
)
args['model'].setdefault('d_embedding', None)
model = DCNv2(
    d_in=X_num[lib.TRAIN].shape[1],
    d_out=D.info['n_classes'] if D.is_multiclass else 1,
    categories=lib.get_categories(X_cat),
    **args['model'],
).to(device)
stats['n_parameters'] = lib.get_n_parameters(model)
optimizer = lib.make_optimizer(
    args['training']['optimizer'],
    model.parameters(),
    args['training']['lr'],
    args['training']['weight_decay'],
)

# Batch stream, early-stopping tracker, and per-part loss/metric history.
stream = zero.Stream(lib.IndexLoader(train_size, batch_size, True, device))
progress = zero.ProgressTracker(args['training']['patience'])
training_log = {lib.TRAIN: [], lib.VAL: [], lib.TEST: []}
timer = zero.Timer()
checkpoint_path = output / 'checkpoint.pt'
def print_epoch_info():
    """Print the epoch header: epoch number, elapsed time, and key run stats.

    Reads the module-level stream, timer, output, optimizer, batch_size and
    stats defined in the setup section above.
    """
    print(f'\n>>> Epoch {stream.epoch} | {lib.format_seconds(timer())} | {output}')
    print(
        ' | '.join(
            f'{k} = {v}'
            for k, v in {
                'lr': lib.get_lr(optimizer),
                'batch_size': batch_size,
                'epoch_size': stats['epoch_size'],
                'n_parameters': stats['n_parameters'],
            }.items()
        )
    )
@torch.no_grad()
def evaluate(parts):
    """Run inference on the given dataset parts and compute their metrics.

    Returns (metrics, predictions): per-part metric dicts and per-part numpy
    arrays of model outputs (logits). Uses module-level model/X/Y/device.
    """
    model.eval()
    metrics = {}
    predictions = {}
    for part in parts:
        # Predict in eval-sized batches, concatenate, and move to CPU numpy.
        predictions[part] = (
            torch.cat(
                [
                    model(X_num[part][idx], None if X_cat is None else X_cat[part][idx])
                    for idx in lib.IndexLoader(
                        len(X_num[part]),
                        args['training']['eval_batch_size'],
                        False,
                        device,
                    )
                ]
            )
            .cpu()
            .numpy()
        )
        try:
            metrics[part] = lib.calculate_metrics(
                D.info['task_type'],
                Y[part].numpy(),  # type: ignore[code]
                predictions[part],  # type: ignore[code]
                'logits',
                y_info,
            )
        except ValueError as err:
            # Only the known multiclass-roc_auc failure is tolerated; any
            # other ValueError should surface via the assertion.
            assert (
                'Target scores need to be probabilities for multiclass roc_auc'
                in str(err)
            )
            metrics[part] = {'score': -999999999.0}
    for part, part_metrics in metrics.items():
        print(f'[{part:<5}]', lib.make_summary(part_metrics))
    return metrics, predictions
def save_checkpoint(final):
    """Persist model/optimizer/stream state plus run bookkeeping to disk.

    *final* is forwarded to lib.dump_stats to mark the run as finished.
    """
    torch.save(
        {
            'model': model.state_dict(),
            'optimizer': optimizer.state_dict(),
            'stream': stream.state_dict(),
            'random_state': zero.get_random_state(),
            # Also snapshot selected module-level objects by name.
            **{
                x: globals()[x]
                for x in [
                    'progress',
                    'stats',
                    'timer',
                    'training_log',
                ]
            },
        },
        checkpoint_path,
    )
    lib.dump_stats(stats, output, final)
    lib.backup_output(output)
# %%
# ---- Training loop with early stopping, then final evaluation. ----
timer.run()
for epoch in stream.epochs(args['training']['n_epochs']):
    print_epoch_info()

    model.train()
    epoch_losses = []
    for batch_idx in epoch:
        optimizer.zero_grad()
        loss = loss_fn(
            model(
                X_num[lib.TRAIN][batch_idx],
                None if X_cat is None else X_cat[lib.TRAIN][batch_idx],
            ),
            Y_device[lib.TRAIN][batch_idx],
        )
        if loss.isnan():
            # Diverged: abandon the batch loop (outer check aborts training).
            print('Loss is nan!')
            break
        loss.backward()
        optimizer.step()
        epoch_losses.append(loss.detach())
    if loss.isnan():
        break
    epoch_losses = torch.stack(epoch_losses).tolist()
    training_log[lib.TRAIN].extend(epoch_losses)
    print(f'[{lib.TRAIN}] loss = {round(sum(epoch_losses) / len(epoch_losses), 3)}')

    # Evaluate all parts; early-stop on the validation score.
    metrics, predictions = evaluate(lib.PARTS)
    for k, v in metrics.items():
        training_log[k].append(v)
    progress.update(metrics[lib.VAL]['score'])
    if progress.success:
        print('New best epoch!')
        stats['best_epoch'] = stream.epoch
        stats['metrics'] = metrics
        save_checkpoint(False)
        for k, v in predictions.items():
            np.save(output / f'p_{k}.npy', v)
    elif progress.fail:
        break

# %%
# Reload the best checkpoint and run the final evaluation.
print('\nRunning the final evaluation...')
model.load_state_dict(torch.load(checkpoint_path)['model'])
stats['metrics'], predictions = evaluate(lib.PARTS)
for k, v in predictions.items():
    np.save(output / f'p_{k}.npy', v)
stats['time'] = lib.format_seconds(timer())
save_checkpoint(True)
print('Done!')
|
[
"strausmg@gmail.com"
] |
strausmg@gmail.com
|
64f1c7bd8f0f8bab932d8e95efb828f317b84145
|
50008b3b7fb7e14f793e92f5b27bf302112a3cb4
|
/recipes/Python/438806_catenateFilesFactory/recipe-438806.py
|
59a8c77281e6b148e80c4e006fc5987455451ecf
|
[
"Python-2.0",
"MIT"
] |
permissive
|
betty29/code-1
|
db56807e19ac9cfe711b41d475a322c168cfdca6
|
d097ca0ad6a6aee2180d32dce6a3322621f655fd
|
refs/heads/master
| 2023-03-14T08:15:47.492844
| 2021-02-24T15:39:59
| 2021-02-24T15:39:59
| 341,878,663
| 0
| 0
|
MIT
| 2021-02-24T15:40:00
| 2021-02-24T11:31:15
|
Python
|
UTF-8
|
Python
| false
| false
| 2,449
|
py
|
import os
def catenateFilesFactory(isTextFiles=True, isClearTgt=True, isCreateTgt=True):
    """return a catenateFiles function parameterized by the factory arguments.

    isTextFiles: Catenate text files. If the last line of a non-empty file
                 is not terminated by an EOL, append an EOL to it.
    isClearTgt   If the target file already exists, clear its original
                 contents before appending the source files.
    isCreateTgt  If the target file does not already exist, and this
                 parameter is True, create the target file; otherwise raise
                 an IOError.
    """
    # Files are handled in binary mode, so the EOL marker must be bytes;
    # the original compared/wrote a str eol against bytes data, which is
    # always-unequal (and a write error) on Python 3.
    eol = os.linesep.encode("ascii")
    lenEol = len(eol)

    def catenateFiles(tgtFile, *srcFiles):
        isTgtAppendEol = False
        if os.path.isfile(tgtFile):
            if isClearTgt:
                # Truncate the existing target.
                with open(tgtFile, 'wb'):
                    pass
            elif isTextFiles:
                with open(tgtFile, 'rb') as tgt:
                    data = tgt.read()
                # A short or unterminated tail needs an EOL before appending.
                if data and data[-lenEol:] != eol:
                    isTgtAppendEol = True
        elif not isCreateTgt:
            # raise EXC, "msg" is Python-2-only syntax; call form works on 2 and 3.
            raise IOError("catenateFiles target file '%s' not found" % tgtFile)

        with open(tgtFile, 'ab') as tgt:
            if isTgtAppendEol:
                tgt.write(eol)
            for srcFile in srcFiles:
                with open(srcFile, 'rb') as src:
                    data = src.read()
                tgt.write(data)
                if isTextFiles and data and data[-lenEol:] != eol:
                    tgt.write(eol)
        return

    # Support reflection and doc string.
    catenateFiles.isTextFiles = isTextFiles
    catenateFiles.isClearTgt = isClearTgt
    catenateFiles.isCreateTgt = isCreateTgt
    if isTextFiles:
        docFileType = "text"
    else:
        docFileType = "binary"
    if isCreateTgt:
        docCreate = "Create tgtFile if it does not already exist."
    else:
        docCreate = "Require that tgtFile already exists."
    if isClearTgt:
        docClear = "replace"
    else:
        docClear = "append to"
    catenateFiles.__doc__ = """Catenate %s srcFiles to %s the tgtFile.
    %s
    All of the srcFiles must exist; otherwise raise an IOError.
    """ % (docFileType, docClear, docCreate)
    return catenateFiles
|
[
"betty@qburst.com"
] |
betty@qburst.com
|
a799262d26ff055133010df02e61e893ddf72751
|
97ebc5e257d68eb9f46551d1897f3901e4faca41
|
/poketypes/basic.py
|
3037cd29cb0fec3108555337432f07f2ec491987
|
[
"BSD-2-Clause"
] |
permissive
|
GrenderG/PokeDuino
|
16f6ab982c447d15ff5ed9ec55a9fd35551d3fcd
|
5b2d7f8fa64edb52d68b647e723f513405b391d4
|
refs/heads/master
| 2021-06-01T02:33:46.419676
| 2016-06-29T09:57:47
| 2016-06-29T09:57:47
| 285,349,854
| 1
| 0
|
NOASSERTION
| 2020-08-05T16:44:29
| 2020-08-05T16:44:28
| null |
UTF-8
|
Python
| false
| false
| 1,941
|
py
|
import ctypes
from . import encoding
class PokeMetaStructure(type(ctypes.BigEndianStructure)):
    """Metaclass that turns each (member, adapter_type) pair listed in a
    class's _adapters_ into a property wrapping the raw ctypes field."""

    def __new__(metacls, name, bases, dct):
        cls = super().__new__(metacls, name, bases, dct)
        # Generate one adapter property per declared member.
        for member, adapter_type in cls._adapters_:
            cls.buildProperty(member, adapter_type)
        return cls

    def buildProperty(cls, member, adapter_type):
        # Getter wraps the raw field value in the adapter type.
        def get(self):
            return adapter_type(getattr(self, member))

        # Setter accepts either an adapter instance (stores its .value) or a
        # raw value (stored as-is).
        def set(self, value):
            if isinstance(value, adapter_type):
                setattr(self, member, value.value)
            else:
                setattr(self, member, value)

        # '_foo' -> property 'foo'; public members get a '<name>_adapter' alias.
        if member.startswith("_"):
            property_name = member[1:]
        else:
            property_name = member + "_adapter"

        setattr(cls, property_name,
                property(fget=get, fset=set, doc="%s adapter to member %s" % (adapter_type.__name__, member)))
class PokeStructure(ctypes.BigEndianStructure, metaclass=PokeMetaStructure):
    """Base class for packed big-endian structures with adapter properties
    (generated by PokeMetaStructure from _adapters_)."""

    _pack_ = 1        # byte-aligned, no padding between fields
    _adapters_ = []   # subclasses list (member, adapter_type) pairs here

    @classmethod
    def fromBytes(cls, data):
        # Copy the raw bytes into a fresh structure instance.
        return cls.from_buffer_copy(data)

    def bytes(self):
        # Serialize the structure back to its raw byte representation.
        return ctypes.string_at(ctypes.byref(self), ctypes.sizeof(self))

    def bytecount(self):
        # Size of the packed structure in bytes.
        return ctypes.sizeof(self)
def Pokearray(length):
    """Return a ctypes uint8-array type of *length* bytes with byte helpers.

    A custom base class cannot be used for a ctypes.Structure field, so the
    fromBytes/bytes helpers are patched onto the plain array type instead.
    """
    @classmethod
    def from_raw(cls, data):
        return cls(*data)

    def to_raw(self):
        return bytes(iter(self))

    arr_type = ctypes.c_uint8 * length
    arr_type.fromBytes = from_raw
    arr_type.bytes = to_raw
    return arr_type
def Pokestring(length):
    """Return a Pokearray type of *length* extended with string conversion
    using the game's character encoding (terminated by encoding.ENDCHAR)."""
    @classmethod
    def fromString(cls, data, fillUp=False):
        raw = encoding.encode(data) + encoding.ENDCHAR
        if fillUp:
            # Pad with terminator bytes up to the fixed field length.
            raw = raw.ljust(length, encoding.ENDCHAR)
        return cls(*raw[:length])

    def toString(self):
        # Decode only up to the first terminator.
        raw = self.bytes().partition(encoding.ENDCHAR)[0]
        return encoding.decode(raw)

    str_type = Pokearray(length)
    str_type.fromString = fromString
    str_type.toString = toString
    return str_type
def PaddingBytes(length):
    """Structure-field tuple for *length* reserved filler bytes."""
    field_name = "padding"
    return (field_name, Pokearray(length))
|
[
"sven.koehler@student.hpi.uni-potsdam.de"
] |
sven.koehler@student.hpi.uni-potsdam.de
|
0384ff350e0c11acd5694d601dd75ef9f8c1794b
|
0e0254ead600d156e96b6a3a814806156f0d807d
|
/users/urls.py
|
a54f169d91f50903e4223765ed8dae5f79e9e341
|
[] |
no_license
|
catding/trainserver
|
1ade4b9b53995ba93d290f19d74c7981ecc1a9bf
|
3d0dbcf92642fe42293736f8bdf812d43e0cb67b
|
refs/heads/master
| 2022-04-08T05:03:33.773819
| 2020-03-07T13:13:08
| 2020-03-07T13:13:08
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,611
|
py
|
from django.urls import include, path, re_path
from rest_framework import routers
from .api import (
LoginView, LogoutView, AccountDetailView, PasswordChangeView,
PasswordResetView, PasswordResetConfirmView, ExcelfileUploadView, UserAvatarView, UserView
)
# REST router: generates the standard CRUD routes for the user resource.
router = routers.SimpleRouter(trailing_slash=False)
router.register(r'user', UserView)

urlpatterns = [
    # URLs that do not require a session or valid token
    path('auth/password/reset', PasswordResetView.as_view(),
         name='user_password_reset'),
    path('auth/password/reset/confirm', PasswordResetConfirmView.as_view(),
         name='user_password_reset_confirm'),
    path('auth/password/change', PasswordChangeView.as_view(),
         name='user_password_change'),
    path('auth/login', LoginView.as_view(), name='user_login'),

    # URLs that require a user to be logged in with a valid session / token.
    path('auth/logout', LogoutView.as_view(), name='auth_logout'),
    path('account/info', AccountDetailView.as_view(), name='account_details'),
    path('account/avatar', UserAvatarView.as_view(), name='account_avatar'),
    # Bulk user import from an uploaded Excel file.
    # path('user/list/trainmanager', UserListView.as_view(), name='user_list'),
    path('user/upload', ExcelfileUploadView.as_view(), name='user_upload'),
    # path('user/list/trainmanager', UserListView.as_view({'get': 'list'}), name='user_trainmanagerlist'),
    # path('user/list', UserListView.as_view({'get': 'list', 'patch': 'bulkdelete'}), name='user_list'),
    # path('user/list/<str:roles>', UserListView.as_view({'get': 'list'}), name='user_listrole'),
]

# Append the router-generated /user routes.
urlpatterns += router.urls
|
[
"36583983@qq.com"
] |
36583983@qq.com
|
b4c799535b5e0995f3c1d6c81b0b0eaede036e40
|
19200bc6b36e33f9dec1394b90ff41272601b16d
|
/gistApi/asgi.py
|
d22173ac28e314752aaea0d52dc74a9b1c28f963
|
[] |
no_license
|
AnoshaRehan/github-gist-api-django
|
103c8f0656899cd6cf068ea7945f29a51eb90e56
|
4c0a97f7eebca276de050ae071aabe40331806e1
|
refs/heads/main
| 2023-03-02T09:48:38.647915
| 2021-02-14T08:19:04
| 2021-02-14T08:19:04
| 338,624,345
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 391
|
py
|
"""
ASGI config for gistApi project.
It exposes the ASGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/howto/deployment/asgi/
"""
import os

from django.core.asgi import get_asgi_application

# Settings module must be set before building the ASGI callable.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'gistApi.settings')

application = get_asgi_application()
|
[
"anosha.rehan@lovefordata.com"
] |
anosha.rehan@lovefordata.com
|
5f2b378d006e7aa2e46251661e0d4e03d3b9810f
|
d452e34253561a47b974e260dabd8dcda6e750a2
|
/supervised_learning/0x0B-face_verification/5-main.py
|
0859b3e7ecf4dc518afbab30ba555f77a521f265
|
[] |
no_license
|
JohnCook17/holbertonschool-machine_learning
|
57fcb5b9d351826c3e3d5478b3b4fbe16cdfac9f
|
4200798bdbbe828db94e5585b62a595e3a96c3e6
|
refs/heads/master
| 2021-07-07T10:16:21.583107
| 2021-04-11T20:38:33
| 2021-04-11T20:38:33
| 255,424,823
| 3
| 2
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 554
|
py
|
#!/usr/bin/env python3
# Demo: align a face image to fixed anchor points and display the result.
from align import FaceAlign
import matplotlib.image as mpimg
import matplotlib.pyplot as plt
from matplotlib.patches import Circle
import numpy as np

fa = FaceAlign('models/landmarks.dat')
test_img = mpimg.imread('HBTN/KirenSrinivasan.jpg')
# Normalized (x, y) anchor positions; scaled by the 96-px output size below.
anchors = np.array([[0.194157, 0.16926692], [0.7888591, 0.15817115], [0.4949509, 0.5144414]], dtype=np.float32)
# [36, 45, 33] — presumably the landmark indices mapped to the anchors;
# TODO(review): confirm against FaceAlign.align's signature.
aligned = fa.align(test_img, np.array([36, 45, 33]), anchors, 96)
plt.imshow(aligned)
ax = plt.gca()
# Mark each anchor on the 96x96 aligned image.
for anchor in anchors:
    ax.add_patch(Circle(anchor * 96, 1))
plt.show()
|
[
"jcook0017@gmail.com"
] |
jcook0017@gmail.com
|
323ae2986aeb577ac7207ed8bc111206556ec27d
|
fbf7929ede740a416362e40b3b0d44d2b823c14e
|
/distinct_occure_helpers.py
|
f1b794b0ee5a00fe9dcf5314be1f2033a3856710
|
[
"MIT"
] |
permissive
|
martynaut/mirnaome_somatic_mutations
|
f588545c57871c0125656445cc66198f04c98895
|
b7e332d56ee17c0b54969db8e515001bf23300f8
|
refs/heads/master
| 2020-04-24T22:33:20.446928
| 2019-08-08T20:14:56
| 2019-08-08T20:14:56
| 172,315,673
| 1
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,968
|
py
|
import numpy as np
def set_balance(row, ratio):
    """Classify which miRNA arm dominates expression for *row*.

    Returns '3p' or '5p' when one arm's read ratio reaches *ratio* (or the
    other arm has no reads), 'unknown' when both read counts are NaN, and
    'both' otherwise.
    """
    if row['ratio 3/(3+5)'] >= ratio:
        return '3p'
    if row['ratio 5/(3+5)'] >= ratio:
        return '5p'
    missing_3p = np.isnan(row['reads_3p'])
    missing_5p = np.isnan(row['reads_5p'])
    if missing_3p and missing_5p:
        return 'unknown'
    if missing_3p:
        return '5p'
    if missing_5p:
        return '3p'
    return 'both'
def find_in_mirna(row, df_loc):
    """Locate row['pos'] relative to the matching miRNA interval in *df_loc*.

    Returns [offset_from_start, offset_from_end] (1-based, strand-aware), or
    [nan, nan] when no interval on the same chrom/orientation contains pos.

    NOTE(review): temp[2]/temp[3] index df_loc columns positionally —
    presumably 'start' and 'stop'; confirm against the caller's column order.
    """
    # Compute the filter once; the original evaluated the same four-condition
    # mask twice (once for the shape test, once for the values).
    matches = df_loc[
        (df_loc['chrom'] == row['chrom']) &
        (df_loc['start'] <= row['pos']) &
        (df_loc['orientation'] == row['orient_loc']) &
        (df_loc['stop'] >= row['pos'])]
    if matches.shape[0] != 0:
        temp = matches.values[0]
        if row['orient_loc'] == '+':
            start = row['pos'] - temp[2] + 1
            stop = row['pos'] - temp[3] - 1
        else:
            # Minus strand: mirror the offsets.
            start = -(row['pos'] - temp[3] - 1)
            stop = -(row['pos'] - temp[2] + 1)
        localiz = [start, stop]
    else:
        localiz = [np.nan,
                   np.nan]
    return localiz
def find_arm(row):
    """Return which arm ('5p' or '3p') the mature miRNA lies on.

    Compares the distance from the precursor's ends, flipping the answer on
    the minus strand. Returns None for an unrecognized strand value.
    """
    strand = row['-/+']
    if strand not in ('+', '-'):
        return None
    closer_to_start = row['start'] - row['start_pre'] < row['stop_pre'] - row['stop']
    if strand == '+':
        return '5p' if closer_to_start else '3p'
    return '3p' if closer_to_start else '5p'
def from_start(row, column_start, column_stop):
    """1-based distance of row['pos'] from the feature's 5' end (strand-aware)."""
    if row['orient_loc'] == '+':
        return row['pos'] - row[column_start] + 1
    return row[column_stop] - row['pos'] + 1
def from_end(row, column_stop, column_start):
    """Signed distance of row['pos'] past the feature's 3' end (strand-aware)."""
    if row['orient_loc'] == '+':
        return row['pos'] - row[column_stop] - 1
    return row[column_start] - row['pos'] - 1
def find_localization(row, df_loc):
    """Find the df_loc entry containing row['pos'], matching by name, chrom,
    interval, and (when known) strand.

    Returns the first matching row's raw values, or six NaNs when nothing
    matches. NOTE(review): callers unpack the result positionally, so the
    df_loc column order matters.
    """
    # fix values that were not in reference
    # Hard-coded strand corrections for three miRNAs missing from the
    # reference annotation.
    if row['name'].lower() == 'hsa-mir-4477b' and \
            row['start'] == 63819560 and \
            row['stop'] == 63819669:
        row['Strand'] = '+'
    elif row['name'].lower() == 'hsa-mir-6723':
        row['Strand'] = '-'
    elif row['name'].lower() == 'hsa-mir-3656':
        row['Strand'] = '+'
    # Unknown strand (non-string, e.g. NaN): match without the strand filter.
    if (type(row['Strand']) != str and
            df_loc[(df_loc['name'].str.contains(row['name'].lower())) &
                   (df_loc['chrom'] == row['chrom']) &
                   (df_loc['start'] <= row['pos']) &
                   (df_loc['stop'] >= row['pos'])].shape[0] != 0):
        localiz = df_loc[(df_loc['name'].str.contains(row['name'].lower())) &
                         (df_loc['chrom'] == row['chrom']) &
                         (df_loc['start'] <= row['pos']) &
                         (df_loc['stop'] >= row['pos'])].values[0]
    # Known strand: require the orientation to match as well.
    elif df_loc[(df_loc['name'].str.contains(row['name'].lower())) &
                (df_loc['chrom'] == row['chrom']) &
                (df_loc['start'] <= row['pos']) &
                (df_loc['stop'] >= row['pos']) &
                (df_loc['orientation'] == row['Strand'])].shape[0] != 0:
        localiz = df_loc[(df_loc['name'].str.contains(row['name'].lower())) &
                         (df_loc['chrom'] == row['chrom']) &
                         (df_loc['start'] <= row['pos']) &
                         (df_loc['stop'] >= row['pos']) &
                         (df_loc['orientation'] == row['Strand'])].values[0]
    else:
        localiz = [np.nan,
                   np.nan,
                   np.nan,
                   np.nan,
                   np.nan,
                   np.nan]
    return localiz
def if_complex(row, complex_df):
    """Return 1 if any *complex_df* entry matching the row's coordinates,
    gene and seq_type is flagged complex (complex == 1), else 0."""
    # Compute the five-condition filter once; the original evaluated the
    # identical mask twice (shape test, then values).
    matches = complex_df[(complex_df['chrom'] == row['chrom']) &
                         (complex_df['start'] == row['start']) &
                         (complex_df['stop'] == row['stop']) &
                         (complex_df['gene'] == row['gene']) &
                         (complex_df['seq_type'] == row['seq_type'])]
    if matches.shape[0] != 0:
        if 1 in matches['complex'].unique():
            return 1
        return 0
    return 0
def concat_ints(col):
    """Join a Series' values into a colon-separated, double-quoted string."""
    return '"' + ':'.join(str(v) for v in col.values) + '"'
def concat_alg(col):
    """Join a Series' unique values, string-sorted and colon-separated,
    wrapped in double quotes."""
    unique_items = sorted({str(v) for v in col.values})
    return '"' + ':'.join(unique_items) + '"'
def type_of_mutation(row):
    """Classify a variant as 'del', 'subst' or 'ins' from ref/alt lengths."""
    ref, alt = row['ref'], row['alt']
    if len(ref) > len(alt):
        return 'del'
    if len(ref) == len(alt):
        return 'subst'
    if ',' in alt:
        # Multi-allelic alt longer than ref is still treated as substitution.
        return 'subst'
    return 'ins'
def take_from_coord(coordinates, column_name, row):
    """Look up *column_name* for the interval strictly containing row's
    position on the same chromosome (first match wins)."""
    pos = int(row['pos'])
    hits = coordinates[(coordinates['chr'] == row['chrom']) &
                       (coordinates['start'] < pos) &
                       (coordinates['stop'] > pos)]
    return hits[column_name].values[0]
def seq_type(value, list_df):
    """Classify *value*: ids containing 'hsa-' are miRNA; known exome genes are in *list_df*."""
    if 'hsa-' in value:
        return 'mirna'
    return 'cancer_exome' if value in list_df else 'not_defined'
def subst_type(row):
    """Label a substitution as 'transition' or 'transversion' ('n.a.' for non-substitutions)."""
    if row['mutation_type'] != 'subst':
        return 'n.a.'
    purines = ('A', 'G')
    pyrimidines = ('C', 'T')
    ref, alt = row['ref'], row['alt']
    # A transition stays within the same base family (purine<->purine or
    # pyrimidine<->pyrimidine); everything else is a transversion.
    same_family = ((ref in purines and alt in purines) or
                   (ref in pyrimidines and alt in pyrimidines))
    return 'transition' if same_family else 'transversion'
|
[
"martyna.urbanek@gmail.com"
] |
martyna.urbanek@gmail.com
|
d5b8181aea069440370a3630b8e9b4e47a43870f
|
72c9e235b19b80d9e332c1f19e1c4e1e28ff2cac
|
/craw/spider/LJ_parser.py
|
c2d8b50614943d46663ac37ac69ad4d2e39a7455
|
[] |
no_license
|
VinceLim68/python-craw
|
56a1299b4e3ac55a3690946a6f4ff8c2c1ef5b04
|
37d1570ee10d080e55b5b8cf885b7e7a3b00c81d
|
refs/heads/master
| 2021-01-20T06:12:10.753799
| 2017-10-28T13:56:30
| 2017-10-28T13:56:30
| 89,851,102
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,521
|
py
|
#coding:utf-8
from html_parser import HtmlParser
import mytools
import urllib2
from bs4 import BeautifulSoup
import re
class LjParser(HtmlParser):
    """Parser for Lianjia (lianjia.com) second-hand housing listing pages (Python 2)."""
    # Rewritten on 2016.11.17; the previous version raised parsing errors.
    def _get_new_datas(self,soup):
        """Extract one dict per listing (title, url, price, community, position...) from *soup*."""
        page_datas = []
        titles = soup.select("div.title > a")
        houseinfo = soup.select("div.houseInfo")
        positionInfo = soup.select("div.positionInfo")
        totalprices = soup.select("div.totalPrice")
        for title,info,position,totalPrice in zip(titles,houseinfo,positionInfo,totalprices):
            # Some keys of each_data need initial default values.
            each_data = {'builded_year':0,'spatial_arrangement':'','floor_index':0,'total_floor':0}
            each_data['title'] = title.get_text()
            each_data['details_url'] = title.get('href')
            # Price text looks like "123.4万" (万 = 10,000 CNY); round to a whole number of 万.
            each_data['total_price'] = int(round(float(re.search('(\d+.?\d+)万'.decode('utf8'),totalPrice.get_text()).groups(0)[0]),0))
            info_item = (info.get_text().split('|'))
            each_data['community_name'] = info_item[0].strip() # the first item is always the community name
            for i in range(1,len(info_item)):
                d1 = {}
                d1 = self.parse_item(info_item[i].strip())
                # Accumulate multiple 'advantage' values as a comma-separated string.
                if d1.has_key('advantage') and each_data.has_key('advantage'):
                    d1['advantage'] = each_data['advantage'] + ',' + d1['advantage']
                each_data = dict(each_data, **d1)
            position = position.get_text().replace('\t','').replace('\n','').split()
            each_data['block'] = position[-1]
            if ')' not in position[0]: # villas on Lianjia use the form '4层2008年建'; insert ')' so it splits like the others
                position[0] = position[0].replace('层', '层)')
            for item in position[0].split(')'): # Lianjia changed its page format on 2017.4.1
                d1 = {}
                # d1 = self.parse_item(position[i].strip())
                d1 = self.parse_item(item.strip()) # Lianjia changed its page format on 2017.4.1
                each_data = dict(each_data, **d1)
            each_data['price'] = float(each_data['total_price']*10000/each_data['area'])
            each_data['from'] = "lianjia"
            each_data = self.pipe(each_data)
            # Keep only records that contain the mandatory fields; otherwise
            # let mytools decide whether the invalid record is still worth keeping.
            if each_data.has_key('total_floor') and each_data.has_key('total_price') and each_data.has_key('area') and each_data.has_key('community_name'):
                page_datas.append(each_data)
            else:
                if mytools.ShowInvalideData(each_data):page_datas.append(each_data)
        return page_datas
    def _get_new_urls(self , soup):
        """Build the set of all result-page URLs from the pagination widget in *soup*."""
        new_urls = set()
        # links = soup.select("div.page-box")
        links = soup.select("div.house-lst-page-box") # changed 2016.11.11 after the site markup was updated
        if len(links) == 0 :
            print "Only 1 page!!"
        else:
            # 'page-data' holds a JSON-like dict such as {"totalPage":N,...}.
            t_page = eval(links[0].get('page-data'))['totalPage']
            url = links[0].get('page-url')
            for i in range(1,t_page+1):
                new_urls.add("http://xm.lianjia.com" + url.replace("{page}",str(i)))
        return new_urls
    def _ischeck(self,soup):
        """Return True when *soup* is Lianjia's anti-scraping verification (captcha) page."""
        # Determine whether this page is the traffic-verification page.
        ischeck = soup.select("title")
        if len(ischeck) > 0: # if no <title> is found, assume it is not the verification page
            iscode = ischeck[0].get_text().strip() == "验证异常流量-链家网"
        else:
            iscode = False
        return iscode
|
[
"1500725439@qq.com"
] |
1500725439@qq.com
|
ce9f9a67e6de02ba78da7819553047f0a148bb68
|
9bcb054fa53e1ff749f2ba06c7477b60c0a1dccf
|
/internet connection check.py
|
025e54ba14c6fdfcd2dcc55e5a6b72e06dbf6956
|
[] |
no_license
|
thatscoding/Hacktoberfest-2023
|
e67594420d39340095dde77e02bcf20353a5f645
|
40cfc950487e040b192a304a9d387dda2988845b
|
refs/heads/main
| 2023-08-28T07:28:41.110272
| 2021-10-30T11:48:00
| 2021-10-30T11:48:00
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 596
|
py
|
import requests
from requests.exceptions import ConnectionError
def internet_connection_test():
    """Probe https://www.google.com/ to determine internet connectivity.

    Returns:
        bool: True when the GET request succeeds, False on connection
        failure or any other error. Progress is reported via print().
    """
    url = 'https://www.google.com/'
    print(f'Attempting to connect to {url} to determine internet connection status.')
    try:
        # The response content is irrelevant; reaching the host is enough.
        # (The original read resp.text / resp.status_code and discarded them.)
        requests.get(url, timeout=10)
    except ConnectionError:
        print(f'Failed to connect to {url}.')
        return False
    except Exception:
        # Keep the original best-effort behaviour: any other failure means "offline",
        # but never use a bare `except:` (it would also swallow KeyboardInterrupt).
        print(f'Failed with unparsed reason.')
        return False
    print(f'Connection to {url} was successful.')
    return True
internet_connection_test()
|
[
"noreply@github.com"
] |
noreply@github.com
|
7508ed13cb989f8e06150d4a366684e8cb626f4c
|
890c8b8e90e516a5a3880eca9b2d217662fe7d84
|
/armulator/armv6/opcodes/abstract_opcodes/usad8.py
|
6568222b03d6464463dd16b171bf86a89484d155
|
[
"MIT"
] |
permissive
|
doronz88/armulator
|
b864135996f876c7857b79a314d4aa06cc19c549
|
0294feac2785c8947e5943ac0c34f941ee4b5fff
|
refs/heads/master
| 2022-11-05T08:14:42.405335
| 2020-06-18T23:53:17
| 2020-06-18T23:53:17
| 273,363,061
| 2
| 0
| null | 2020-06-18T23:51:03
| 2020-06-18T23:51:02
| null |
UTF-8
|
Python
| false
| false
| 1,008
|
py
|
from armulator.armv6.opcodes.abstract_opcode import AbstractOpcode
from bitstring import BitArray
class Usad8(AbstractOpcode):
    """USAD8: unsigned sum of absolute differences of the four bytes of Rn and Rm."""

    def __init__(self, m, d, n):
        super(Usad8, self).__init__()
        self.m = m
        self.d = d
        self.n = n

    def execute(self, processor):
        """If the condition passes, write the byte-lane SAD of Rn and Rm into Rd."""
        if processor.condition_passed():
            total = 0
            # Same lane order as the hand-unrolled original: lowest byte
            # (bits [24:32] of the BitArray) first.
            for lo, hi in ((24, 32), (16, 24), (8, 16), (0, 8)):
                total += abs(processor.registers.get(self.n)[lo:hi].uint -
                             processor.registers.get(self.m)[lo:hi].uint)
            processor.registers.set(self.d, BitArray(uint=total, length=32))
|
[
"matan1008@gmail.com"
] |
matan1008@gmail.com
|
05a92429c41bbe7dfce417ff86c07367317dc447
|
973c2a5fd8c37497e91487a1cbc34a489a0d0108
|
/bin/home.py
|
b6cc7a8204c700706c6dd305f0fba2869e72ce26
|
[] |
no_license
|
BBATools/PWCode
|
24d3b730cf06b2ee2aa5edb59ea806c2a62b1ea3
|
6d2696962d65a799359395ee5528766cec5e0d13
|
refs/heads/master
| 2021-02-15T04:34:38.025770
| 2020-08-18T11:45:33
| 2020-08-18T11:45:33
| 244,863,145
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 25,970
|
py
|
# MIT License
# Original work Copyright (c) 2018 François Girault
# Modified work Copyright 2020 Morten Eek
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# from console import ConsoleUi, Processing
from common.xml_settings import XMLSettings
import inspect
import commands
import os
import webbrowser
import pickle
import shutil
import tkinter as tk
from tkinter import ttk
# from tkinter import filedialog
from settings import COLORS
from gui.dialog import multi_open
import pathlib
class HomeTab(ttk.Frame):
    """Start-page ("Home") tab: entry point for creating data projects and opening files.

    The right-hand side is rebuilt in place for each workflow (Export Data /
    Convert Files); module-level globals `subsystem_frames` and `msg_label`
    are shared with the Project/SubSystem/LinksFrame widgets below.
    """
    def __init__(self, parent, app):
        super().__init__(parent, style="Home.TFrame", padding=[56, 12, 8, 8])
        self.heading = ttk.Label(self, text=app.settings.name, style="Heading.TLabel")
        self.heading.pack(side=tk.TOP, anchor=tk.W)
        # Module-wide list of SubSystem frames currently shown on the right-hand side.
        global subsystem_frames
        subsystem_frames = []
        self.system_dir = None
        self.project_dir_created = False
        frame = ttk.Frame(self, style="Home.TFrame")
        frame.pack(fill=tk.BOTH, expand=1, pady=12)
        frame.pack_propagate(False)
        self.left_frame = ttk.Frame(frame, style="Home.TFrame")
        self.left_frame.pack(side=tk.LEFT, fill=tk.BOTH, expand=1)
        self.right_frame = ttk.Frame(frame, style="Home.TFrame")
        self.right_frame.pack(side=tk.RIGHT, fill=tk.BOTH, expand=1, padx=(0, 56))
        self.show_start(app)
        self.show_help(app)
        # self.folder_list = LinksFrame(self)
        # self.folder_list.pack(side=tk.TOP, anchor=tk.N, padx=(8, 0), pady=3, fill=tk.X)
    def open_home_url(self):
        """Open the PWCode GitHub page in the default web browser."""
        webbrowser.open('https://github.com/BBATools/PWCode', new=2)
    def show_help(self, app):
        """Reset the right-hand side to the default help view and clear any project state."""
        self.subheading = ttk.Label(self, text=app.settings.desc, style="SubHeading.TLabel")
        self.subheading.pack(side=tk.TOP, anchor=tk.W, after=self.heading)
        self.description = ttk.Label(self, text=app.settings.long_desc, style="Text.TLabel")
        self.description.pack(side=tk.TOP, anchor=tk.W, after=self.subheading)
        for widget in self.right_frame.winfo_children():
            widget.destroy()
        subsystem_frames.clear()
        self.project_dir_created = False
        LinksFrame(
            self.right_frame,
            "Help",
            (
                ("GitHub repository", self.open_home_url),
            ),
        ).pack(side=tk.TOP, anchor=tk.W, pady=12)
    def show_start(self, app):
        """Populate the left-hand side with the Start actions and the recent-files list."""
        LinksFrame(
            self.left_frame,
            "Start",
            (
                ("Export Data", lambda: self.export_data_project(app)),
                ("Convert Files", lambda: self.convert_files_project(app)), # TODO: Add a check that we are on PWLinux before showing this
                ("New File", app.command_callable("new_file")),
                ("Open File ...", app.command_callable("open_file")),
                ("Open Folder ...", app.command_callable("open_folder")),
            ),
        ).pack(side=tk.TOP, anchor=tk.W, pady=12)
        # NOTE(review): pack() returns None, so recent_links_frame ends up None — confirm intended.
        self.recent_links_frame = RecentLinksFrame(self.left_frame, app).pack(side=tk.TOP, anchor=tk.W, pady=12)
        # self.left_frame.pack(side=tk.LEFT, fill=tk.BOTH, expand=1)
    def system_entry_check(self, app): # TODO: Merge with run_plugin? With a run argument? Also duplicated code in the plugin main itself
        """Validate the system name and create the project dir; return 'ok' or None on failure."""
        system_name = self.project_frame.name_entry.get()
        if not system_name:
            msg = 'Missing system name'
            msg_label.config(text=msg)
            return
        else:
            msg_label.config(text='')
        self.system_dir = app.data_dir + system_name + '_' # --> projects/[system_]
        system_dir = self.system_dir
        archive = system_dir[:-1] + '/' + system_name + '.tar'
        # TODO: More checks? Check against the config xml file too, or only that?
        # TODO: Reuse the directory if it exists and is empty, or only has certain content types?
        if os.path.isfile(archive):
            msg = "'" + archive + "' already exists"
            msg_label.config(text=msg)
            return
        ok = self.create_project_dir(system_dir, system_name)
        if not ok:
            return
        return 'ok'
    def create_project_dir(self, path, project_name):
        """Create *path* (once) plus its .pwcode subfolder; lock the name entry. Returns 'ok' or None."""
        if not self.project_dir_created:
            try:
                os.mkdir(path)
                self.project_dir_created = True
            except OSError:
                msg = "Can't create destination directory '%s'" % (path)
                msg_label.config(text=msg)
                return
        pathlib.Path(path + '/.pwcode').mkdir(parents=True, exist_ok=True)
        self.project_frame.configure(text=' ' + project_name + ' ')
        self.project_frame.name_entry.configure(state=tk.DISABLED)
        return 'ok'
    def reset_rhs(self, header):
        """Clear the right-hand side and rebuild its header row; rebinds the global msg_label."""
        global msg_label
        self.project_dir_created = False
        self.subheading.pack_forget()
        self.description.pack_forget()
        for widget in self.right_frame.winfo_children():
            widget.destroy()
        frame = ttk.Frame(self.right_frame, style="SubHeading.TLabel")
        frame.pack(side=tk.TOP, anchor=tk.W, pady=12, fill=tk.X)
        header_label = ttk.Label(frame, text=header, style="SubHeading.TLabel")
        header_label.pack(side=tk.LEFT, anchor=tk.N, pady=4, padx=1, fill="both", expand="yes")
        msg_label = ttk.Label(frame, text="", style="Links.TButton")
        msg_label.pack(side=tk.LEFT, anchor=tk.E, pady=4, padx=(0, 12))
    def config_init(self, def_name):
        """Create a fresh XMLSettings for *def_name* under the tmp config dir; return (config, config_dir)."""
        config_dir = os.environ["pwcode_config_dir"] # Get PWCode config path
        config_path = config_dir + '/tmp/' + def_name + '.xml'
        if os.path.isfile(config_path):
            os.remove(config_path)
        return XMLSettings(config_path), config_dir
    def run_plugin(self, app, project_name, config_dir, def_name):
        """Copy the plugin's files into the project's .pwcode dir, open its main script and run it."""
        base_path = app.data_dir + project_name
        if def_name == 'export_data':
            base_path = app.data_dir + project_name + '_'
        for filename in os.listdir(config_dir + def_name):
            new_path = base_path + '/.pwcode/' + filename
            if filename == 'main.py':
                # main.py is renamed to <project>_<plugin>.py and remembered as the entry script.
                new_path = base_path + '/.pwcode/' + project_name + '_' + def_name + '.py'
                path = new_path
            shutil.copy(config_dir + def_name + '/' + filename, new_path)
        app.model.open_file(path)
        tab_id = app.editor_frame.path2id[path]
        file_obj = app.editor_frame.id2path[tab_id]
        text_editor = app.editor_frame.notebook.nametowidget(tab_id)
        self.show_help(app)
        text_editor.run_file(file_obj, False)
    def export_data(self, app):
        """Validate the export configuration and, if valid, launch the export_data plugin."""
        def_name = inspect.currentframe().f_code.co_name
        config_dir = self.export_check(app)
        if config_dir:
            project_name = self.project_frame.name_entry.get()
            self.run_plugin(app, project_name, config_dir, def_name)
            # TODO: Must read from the xml in tmp first and then copy the xml to the project folder. Still correct?
    def convert_files(self, app):
        """Validate the convert-files form, write its config and launch the convert_files plugin."""
        def_name = inspect.currentframe().f_code.co_name
        config, config_dir = self.config_init(def_name)
        if not hasattr(self.project_frame, 'folders_frame'):
            msg_label.config(text='No folders added')
            return
        project_name = self.project_frame.name_entry.get()
        if not project_name:
            msg_label.config(text='Missing project name')
            return
        ok = self.create_project_dir(app.data_dir + project_name, project_name)
        if ok:
            msg_label.config(text='')
        else:
            return
        config.put('name', self.project_frame.name_entry.get())
        config.put('options/merge', self.project_frame.merge_option.get())
        i = 1
        for frame, path in self.project_frame.folders_frame.folders.items():
            # frame.remove_button.configure(state=tk.DISABLED)
            config.put('folders/folder' + str(i), path)
            i += 1
        # self.project_frame.merge_option_frame.configure(state=tk.DISABLED)
        # self.project_frame.name_frame.folder_button.configure(state=tk.DISABLED)
        config.save()
        self.run_plugin(app, project_name, config_dir, def_name)
    def convert_files_project(self, app):
        """Build the 'Convert Files' form (project name, Add Folder/Run buttons, merge option)."""
        self.reset_rhs("Convert Files")
        self.project_frame = Project(self.right_frame, app, self, "Project Name:", text=" New Data Project ", relief=tk.GROOVE)
        self.project_frame.pack(side=tk.TOP, anchor=tk.W, fill="both", expand=1, pady=12)
        name_frame = self.project_frame.name_frame
        name_frame.folder_button = ttk.Button(name_frame, text='Add Folder', style="Entry.TButton", command=lambda: self.project_frame.choose_folder(app))
        name_frame.folder_button.pack(side=tk.RIGHT, anchor=tk.N, pady=3, padx=(0, 12))
        run_button = ttk.Button(name_frame, text='Run', style="Run.TButton", command=lambda: self.convert_files(app))
        run_button.pack(side=tk.RIGHT, anchor=tk.N, pady=3, padx=(0, 12))
        options_frame = ttk.Frame(self.project_frame, style="SubHeading.TLabel")
        options_frame.pack(side=tk.TOP, anchor=tk.W, fill=tk.X)
        # options_label = ttk.Label(options_frame, text="Options:", width=16)
        # options_label.pack(side=tk.LEFT, anchor=tk.N, padx=(8, 0), pady=3)
        merge_label = ttk.Label(options_frame, text="Merge Subfolders:")
        merge_label.pack(side=tk.LEFT, anchor=tk.N, pady=3, padx=(8, 0))
        options = ['', 'False', 'True']
        self.project_frame.merge_option = tk.StringVar()
        self.project_frame.merge_option.set(options[1])
        merge_option = ttk.OptionMenu(options_frame, self.project_frame.merge_option, *options)
        merge_option.pack(side=tk.LEFT, anchor=tk.N, pady=3, padx=(0, 55))
        merge_option.configure(width=4)
        # self.project_frame.merge_option = var
        self.project_frame.merge_option_frame = merge_option
    def export_data_project(self, app):
        """Build the 'Export Data' form (system name, Add Subsystem/Run buttons, DB options)."""
        self.reset_rhs("Export Data")
        self.project_frame = Project(self.right_frame, app, self, "System Name:", text=" New Data Project ", relief=tk.GROOVE)
        self.project_frame.pack(side=tk.TOP, anchor=tk.W, fill="both", expand=1, pady=12)
        name_frame = self.project_frame.name_frame
        subsystem_button = ttk.Button(name_frame, text='Add Subsystem', style="Entry.TButton", command=lambda: self.subsystem_entry(app))
        subsystem_button.pack(side=tk.RIGHT, anchor=tk.N, pady=3, padx=(0, 12))
        # TODO: Create def export_data(self, app):
        run_button = ttk.Button(name_frame, text='Run', style="Run.TButton", command=lambda: self.export_data(app))
        run_button.pack(side=tk.RIGHT, anchor=tk.N, pady=3, padx=(0, 12))
        options_frame = ttk.Frame(self.project_frame, style="SubHeading.TLabel")
        options_frame.pack(side=tk.TOP, anchor=tk.W, fill=tk.X, pady=(0, 20))
        options_label = ttk.Label(options_frame, text="Database Options:", width=16)
        options_label.pack(side=tk.LEFT, anchor=tk.N, padx=(8, 0), pady=3)
        # # TODO: Move this line up to the system level
        # # TODO: Add a check against duplicate folders --> in the choose_folder code?
        memory_label = ttk.Label(options_frame, text="Allocated memory:")
        memory_label.pack(side=tk.LEFT, anchor=tk.N, pady=3)
        options = ['', '3 Gb', '4 Gb', '5 Gb', '6 Gb', '7 Gb', '8 Gb']
        self.project_frame.memory_option = tk.StringVar()
        self.project_frame.memory_option.set(options[2])
        memory_option = ttk.OptionMenu(options_frame, self.project_frame.memory_option, *options)
        memory_option.pack(side=tk.LEFT, anchor=tk.N, pady=3, padx=(0, 55))
        memory_option.configure(width=4)
        ddl_label = ttk.Label(options_frame, text="DDL Generation:")
        ddl_label.pack(side=tk.LEFT, anchor=tk.N, pady=3)
        options = ['', 'Native', 'SQL Workbench']
        self.project_frame.ddl_option = tk.StringVar()
        self.project_frame.ddl_option.set(options[1])
        ddl_option = ttk.OptionMenu(options_frame, self.project_frame.ddl_option, *options)
        ddl_option.pack(side=tk.LEFT, anchor=tk.N, pady=3)
        ddl_option.configure(width=12)
    def subsystem_entry(self, app):
        """Validate current input, then append a new SubSystem frame to the right-hand side."""
        ok = None
        if len(subsystem_frames) == 0:
            ok = self.system_entry_check(app)
        else:
            ok = self.export_check(app) # TODO: Is 'ok' correct here?
        if ok:
            if len(subsystem_frames) == 0:
                self.project_frame.pack_forget()
                self.project_frame.pack(side=tk.TOP, anchor=tk.W, fill="both", expand=0, pady=(0, 12))
            subsystem_frame = SubSystem(self.right_frame, app, self, text=" New Subsystem ", relief=tk.GROOVE)
            subsystem_frame.pack(side=tk.TOP, anchor=tk.W, fill="both", expand=1, pady=12)
            subsystem_frames.append(subsystem_frame)
    def export_check(self, app):
        """Validate all subsystem entries and persist them to the pwcode config.

        Returns the config dir on success, None (with msg_label set) on validation failure.
        """
        # TODO: Check the connection, or that the name can be used as a directory name, if this is a db subsystem and not just files
        config, config_dir = self.config_init('pwcode')
        config.put('name', self.project_frame.name_entry.get())
        config.put('options/memory', self.project_frame.memory_option.get())
        config.put('options/ddl', self.project_frame.ddl_option.get())
        i = 0
        subsystem_names = []
        for subsystem in subsystem_frames:
            subsystem_name = None
            folder_paths = []
            for frame, path in subsystem.folders_frame.folders.items():
                folder_paths.append(path)
            db_name = subsystem.db_name_entry.get().lower()
            db_schema = subsystem.db_schema_entry.get().lower()
            msg = None
            # Folder-only subsystems (no db credentials) get synthetic names files0, files1, ...
            if (len(db_name) == 0 or len(db_schema) == 0):
                if folder_paths:
                    subsystem_name = 'files' + str(i)
                    i += 1
                else:
                    msg = 'Missing subsystem name'
            elif subsystem_name in subsystem_names:
                msg = 'Duplicate subsystem name'
            else:
                subsystem_name = db_name + '_' + db_schema
            if msg:
                msg_label.config(text=msg)
                # WAIT: Delete the system folder if empty here? Also on cancel?
                return
            msg_label.config(text='')
            subsystem_names.append(subsystem_name)
            subsystem.configure(text=' ' + subsystem_name + ' ')
            config.put('subsystems/' + subsystem_name + '/db_name', db_name)
            config.put('subsystems/' + subsystem_name + '/schema_name', db_schema)
            j = 0
            for path in folder_paths:
                config.put('subsystems/' + subsystem_name + '/folders/folder' + str(j), path)
                j += 1
        config.save()
        return config_dir
class Project(ttk.LabelFrame):
    """Labelled frame holding the common project inputs: name entry, Discard button, options."""
    def __init__(self, parent, app, grandparent, entry_text, *args, **kwargs):
        super().__init__(parent, *args, **kwargs, style="Links.TFrame")
        # grandparent is the HomeTab; Discard returns to its help view.
        self.grandparent = grandparent
        # Option widgets/variables; populated later by the HomeTab project builders.
        self.merge_option = None
        self.merge_option_frame = None
        self.memory_option = None
        self.ddl_option = None
        self.name_frame = ttk.Frame(self, style="SubHeading.TLabel")
        self.name_frame.pack(side=tk.TOP, anchor=tk.W, fill=tk.X)
        self.name_label = ttk.Label(self.name_frame, text=entry_text, width=16)
        self.name_label.pack(side=tk.LEFT, anchor=tk.N, padx=(8, 0), pady=(4, 3))
        self.name_entry = make_entry(self.name_frame, app, 56)
        self.name_entry.pack(side=tk.LEFT, anchor=tk.N, pady=(4, 3))
        self.name_entry.focus()
        self.cancel_button = ttk.Button(self.name_frame, text='Discard', style="Links.TButton", command=lambda: self.grandparent.show_help(app))
        self.cancel_button.pack(side=tk.RIGHT, anchor=tk.N, pady=3, padx=(0, 12))
    def choose_folder(self, app):
        """Prompt for a directory and append it to this project's folder list (created lazily)."""
        if not hasattr(self, 'folders_frame'):
            self.folders_frame = LinksFrame(self)
            self.folders_frame.pack(side=tk.TOP, anchor=tk.N, padx=(8, 0), pady=3, fill=tk.X)
        path = multi_open(app.data_dir, mode='dir')
        self.folders_frame.add_folder(path, lambda p=path: app.command_callable("open_folder")(p), 70)
class SubSystem(ttk.LabelFrame):
    """Form for one export subsystem: DB credentials, JDBC url, table filters and folders."""
    def __init__(self, parent, app, grandparent, *args, **kwargs):
        super().__init__(parent, *args, **kwargs, style="Links.TFrame")
        # grandparent is the HomeTab; needed to re-pack its project frame on removal.
        self.grandparent = grandparent
        self.frame1 = ttk.Frame(self, style="SubHeading.TLabel")
        self.frame1.pack(side=tk.TOP, anchor=tk.W, fill=tk.X)
        self.db_name_label = ttk.Label(self.frame1, text="DB Name:", width=8)
        self.db_name_label.pack(side=tk.LEFT, anchor=tk.N, padx=(8, 0), pady=(4, 3))
        self.db_name_entry = make_entry(self.frame1, app, 25)
        self.db_name_entry.pack(side=tk.LEFT, anchor=tk.N, pady=(4, 3))
        self.db_name_entry.focus()
        self.db_schema_label = ttk.Label(self.frame1, text="Schema Name:", width=12)
        self.db_schema_label.pack(side=tk.LEFT, anchor=tk.N, padx=(12, 0), pady=(4, 3))
        self.db_schema_entry = make_entry(self.frame1, app, 25)
        self.db_schema_entry.pack(side=tk.LEFT, anchor=tk.N, pady=(4, 3))
        self.cancel_button = ttk.Button(self.frame1, text='Discard', style="Links.TButton", command=lambda: self.subsystem_remove())
        self.cancel_button.pack(side=tk.RIGHT, anchor=tk.N, pady=3, padx=(0, 12))
        self.folder_button = ttk.Button(self.frame1, text='Add Folder', style="Entry.TButton", command=lambda: self.choose_folder(app))
        self.folder_button.pack(side=tk.RIGHT, anchor=tk.N, pady=3, padx=(0, 12))
        self.frame2 = ttk.Frame(self, style="SubHeading.TLabel")
        self.frame2.pack(side=tk.TOP, anchor=tk.W, fill=tk.X)
        self.jdbc_url_label = ttk.Label(self.frame2, text="JDBC Url:", width=8)
        self.jdbc_url_label.pack(side=tk.LEFT, anchor=tk.N, padx=(8, 0), pady=3)
        self.jdbc_url_entry = make_entry(self.frame2, app, 64)
        self.jdbc_url_entry.pack(side=tk.LEFT, anchor=tk.N, pady=3)
        self.frame3 = ttk.Frame(self, style="SubHeading.TLabel")
        self.frame3.pack(side=tk.TOP, anchor=tk.W, fill=tk.X)
        self.db_user_label = ttk.Label(self.frame3, text="DB User:", width=8)
        self.db_user_label.pack(side=tk.LEFT, anchor=tk.N, padx=(8, 0), pady=3)
        self.db_user_entry = make_entry(self.frame3, app, 25)
        self.db_user_entry.pack(side=tk.LEFT, anchor=tk.N, pady=3)
        self.db_pwd_label = ttk.Label(self.frame3, text="DB Password:", width=12)
        self.db_pwd_label.pack(side=tk.LEFT, anchor=tk.N, padx=(12, 0), pady=3)
        self.db_pwd_entry = make_entry(self.frame3, app, 25)
        self.db_pwd_entry.pack(side=tk.LEFT, anchor=tk.N, pady=3)
        self.frame5 = ttk.Frame(self, style="SubHeading.TLabel")
        self.frame5.pack(side=tk.TOP, anchor=tk.W, fill=tk.X)
        # Dropdown toggling between exclude/include table filter; the entry holds the list.
        options = ['', 'Exclude Tables (comma separated)', 'Include Tables (comma separated)']
        self.var = tk.StringVar()
        self.var.set(' '.join(options[1].split(' ')[:2]) + ':')
        self.var.trace("w", self.get_option)
        self.tables_option = ttk.OptionMenu(self.frame5, self.var, *options)
        self.tables_option.pack(side=tk.LEFT, anchor=tk.N, pady=3, padx=(8, 0))
        self.tables_option.configure(width=12)
        self.tables_entry = make_entry(self.frame5, app, 57)
        self.tables_entry.pack(side=tk.LEFT, anchor=tk.N, pady=3)
        self.frame6 = ttk.Frame(self, style="SubHeading.TLabel")
        self.frame6.pack(side=tk.TOP, anchor=tk.W, fill=tk.X)
        self.overwrite_label = ttk.Label(self.frame6, text="Overwrite Tables:", width=15)
        self.overwrite_label.pack(side=tk.LEFT, anchor=tk.N, padx=(8, 0), pady=3)
        self.overwrite_entry = make_entry(self.frame6, app, 57)
        self.overwrite_entry.pack(side=tk.LEFT, anchor=tk.N, pady=(3, 6))
        self.folders_frame = LinksFrame(self)
        self.folders_frame.pack(side=tk.TOP, anchor=tk.N, padx=(8, 0), pady=3, fill=tk.X)
    def choose_folder(self, app):
        """Prompt for a directory and append it to this subsystem's folder list."""
        path = multi_open(app.data_dir, mode='dir')
        self.folders_frame.add_folder(path, lambda p=path: app.command_callable("open_folder")(p), 70)
    def get_option(self, *args):
        """Shorten the selected option to e.g. 'Exclude Tables:' whenever the dropdown changes."""
        value = ' '.join(self.var.get().split(' ')[:2]) + ':'
        self.var.set(value)
        self.tables_option.configure(state=tk.NORMAL) # Just for refreshing widget
    def subsystem_remove(self):
        """Remove this subsystem from the shared list; restore project layout if it was the last."""
        subsystem_frames.remove(self)
        self.destroy()
        if len(subsystem_frames) == 0:
            self.grandparent.project_frame.pack_forget()
            self.grandparent.project_frame.pack(side=tk.TOP, anchor=tk.W, fill="both", expand=1)
class LinksFrame(ttk.Frame):
    """A container of link buttons and labels that packs vertically.

    Also used as a removable folder list: add_folder()/remove_folder()
    report validation errors through the module-global msg_label.
    """
    def __init__(self, parent, title=None, links=None):
        super().__init__(parent, style="Links.TFrame")
        # Maps each folder row frame to its filesystem path.
        self.folders = {}
        if title:
            self.title = ttk.Label(self, text=title, style="SubHeading.TLabel")
            self.title.pack(side=tk.TOP, anchor=tk.W, pady=4, padx=1)
        if links:
            for label, action in links:
                if action:
                    self.add_link(label, action)
                else:
                    self.add_label(label)
    def add_link(self, label, action):
        """Append a clickable link-style button running *action*."""
        ttk.Button(self, text=label, style="Links.TButton", command=action).pack(side=tk.TOP, anchor=tk.W)
    def add_folder(self, path, action, width):
        """Append a folder row (open button + 'x' remove button) after validating *path*."""
        if not path:
            msg_label.config(text='Not a valid path.')
            return
        if path in self.folders.values():
            msg_label.config(text='Duplicate folder')
            return
        label = 'Folder: ' + path
        folder_frame = ttk.Frame(self, style="SubHeading.TLabel")
        folder_frame.pack(side=tk.TOP, anchor=tk.W, fill=tk.X)
        self.folders[folder_frame] = path
        folder_frame.folder = ttk.Button(folder_frame, text=label, style="SideBar.TButton", command=action, width=width)
        folder_frame.folder.pack(side=tk.LEFT, anchor=tk.N, pady=(1, 0))
        folder_frame.remove_button = ttk.Button(folder_frame, text=' x', style="SideBar.TButton", command=lambda: self.remove_folder(folder_frame))
        folder_frame.remove_button.pack(side=tk.LEFT, anchor=tk.N, pady=(1, 0))
        msg_label.config(text='')
    def remove_folder(self, folder_frame):
        """Forget a folder row and drop it from the folders mapping."""
        del self.folders[folder_frame]
        folder_frame.pack_forget()
    def add_label(self, text):
        """Append a plain (non-clickable) label."""
        ttk.Label(self, text=text, style="Links.TLabel").pack(side=tk.TOP, anchor=tk.W)
class RecentLinksFrame(LinksFrame):
    """Frame listing the most recently opened files tracked by the app model."""
    def __init__(self, parent, app):
        super().__init__(parent, "Open Recent")
        self.app = app
        # Observe the model so on_file_open/on_file_closed refresh this list.
        app.model.add_observer(self)
        if os.path.exists(self.app.tmp_dir + "/recent_files.p"):
            # NOTE(review): pickle.load on a locally written cache; file handle is never closed explicitly.
            self.app.recent_links = pickle.load(open(self.app.tmp_dir + "/recent_files.p", "rb"))
            self.update_recent_links(None)
    def update_recent_links(self, new_file_obj):
        """Rebuild the link list; *new_file_obj* (if given) is moved to the most-recent position."""
        if new_file_obj:
            if new_file_obj.path in self.app.recent_links.keys():
                del self.app.recent_links[new_file_obj.path]
            self.app.recent_links.update({new_file_obj.path: new_file_obj})
        for widget in self.winfo_children():
            if isinstance(widget, ttk.Button):
                widget.destroy()
        for path, file_obj in reversed(self.app.recent_links.items()):
            if os.path.isfile(file_obj.path):
                # Drop empty auto-created Untitled-* scratch files entirely.
                if 'PWCode/bin/tmp/Untitled-' in file_obj.path:
                    if os.path.getsize(file_obj.path) == 0:
                        os.remove(file_obj.path)
                        continue
                # Files that are currently open are not listed as "recent".
                if file_obj in self.app.model.openfiles:
                    continue
                self.add_link(file_obj.basename, lambda p=path: self.app.command_callable("open_file")(p))
    def on_file_closed(self, file_obj):
        """Model callback: a file was closed, so it becomes the newest recent entry."""
        self.update_recent_links(file_obj)
    def on_file_open(self, file_obj):
        """Model callback: a file was opened; refresh to hide it from the recent list."""
        self.update_recent_links(None)
def make_entry(parent, app, width):
    """Create a flat, borderless tk.Entry styled to match the PWCode sidebar colors."""
    entry_style = {
        'font': app.font,
        'bg': COLORS.sidebar_bg,
        'disabledbackground': COLORS.sidebar_bg,
        'fg': COLORS.fg,
        'disabledforeground': COLORS.sidebar_fg,
        'bd': 0,
        'insertbackground': COLORS.link,
        'insertofftime': 0,
        'width': width,
        'highlightthickness': 0,
    }
    return tk.Entry(parent, **entry_style)
|
[
"mortenee@gmail.com"
] |
mortenee@gmail.com
|
c15c181958bc2aea8abc6b30ac520658a40dd56e
|
5a903f3d295b9942224e5f01bce388a25a788f35
|
/Q_learning_Games_v2/Q_Learning_Games_v2_/Sarsa_vs_Q_Learning/Taxi_Game/Taxi_Analysis/print_taxi_comparison_plots.py
|
dc4dd5c1b825083bece4a64ebfcd723565d9adaa
|
[
"MIT"
] |
permissive
|
GiacomoFerro/Bachelor-Thesis
|
0382e6b36c5b890e72028a97e59e80b5563d0f0f
|
a9ac91a208dfc175084cf22673f88add6ec15281
|
refs/heads/master
| 2020-04-06T18:00:16.103447
| 2019-10-09T15:11:29
| 2019-10-09T15:11:29
| 157,681,361
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,723
|
py
|
#libreria per generare grafici
import matplotlib.pyplot as plt
import matplotlib.patches as mpatches
#lib to remove files
import os
print("Make the comparison Plots")
plt.rc('xtick', labelsize=8)
plt.rc('ytick', labelsize=8)
plt.figure(figsize=(10, 5))


def _read_rewards(path):
    """Return the reward values stored one-per-line in *path* as floats."""
    with open(path, "r") as f:
        return [float(line) for line in f if line.strip() != ""]


# Read each trace exactly once. (The original read the Q-learning file three
# times — once to count lines and once per fill loop — and kept the values
# as raw strings, which matplotlib treats as categorical labels.)
newRewards = _read_rewards("rewards_taxi_qlearning.txt")
newRewardsSarsa = _read_rewards("rewards_taxi_sarsa.txt")

# One x value per recorded trial instead of a hard-coded range(0, 1000),
# which broke whenever the number of rewards differed from 1000.
eps = range(len(newRewards))
plt.plot(eps, newRewards, 'r', eps, newRewardsSarsa, 'b')
plt.title("Rewards collected over the time for Taxi game")
plt.xlabel("Trials")
plt.ylabel("Rewards")
plt.grid()  # draw the grid
qlearningLegend = mpatches.Patch(color='red', label='Q-learning')
SarsaLegend = mpatches.Patch(color='blue', label='Sarsa')
plt.legend(handles=[qlearningLegend, SarsaLegend])
# Save BEFORE show(): show() tears the figure down, so saving afterwards
# (as the original did) produced an empty image.
plt.savefig('taxi_sarsa_vs_Q_learning.png')
plt.show()  # display the plot interactively
# Remove the input files via the same relative paths they were read from,
# instead of the original machine-specific absolute paths.
os.remove("rewards_taxi_sarsa.txt")
os.remove("rewards_taxi_qlearning.txt")
|
[
"noreply@github.com"
] |
noreply@github.com
|
844abc176cf2c1ec5d48f1e98a0747b94eccebec
|
004b7726cfbedd6ecb6a1dec455211f1b1635919
|
/OCR_Test/gendata.py
|
8b5a276f17c703b35952b97c6ac1e980d94f708b
|
[] |
no_license
|
bluedott/p3_fuckup
|
e1cafabeda43b2d190836bd994c4af5d5b8c2379
|
6ff119cded8c30ef3acfc02c5ecefaa4d9178520
|
refs/heads/main
| 2023-01-22T00:16:01.252835
| 2020-12-07T03:00:45
| 2020-12-07T03:00:45
| 319,187,963
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,790
|
py
|
# GenData.py
import sys
import numpy as np
import cv2
import os
# module level variables ##########################################################################
MIN_CONTOUR_AREA = 100  # contours with a smaller area are ignored as noise
RESIZED_IMAGE_WIDTH = 20  # width each detected character image is resized to
RESIZED_IMAGE_HEIGHT = 30  # height each detected character image is resized to
###################################################################################################
def main():
    """Interactively build KNN training data from an image of characters.

    Detects character contours in "training_chars_small_test.png", shows each
    one, and asks the user to press the matching key (0-9 / A-Z; Esc aborts).
    Writes the collected key codes to "classifications2.txt" and the flattened
    20x30 character images to "flattened_images2.txt".
    """
    imgTrainingNumbers = cv2.imread("training_chars_small_test.png")  # read in training image

    if imgTrainingNumbers is None:  # bail out if the image could not be read
        print ("error: image not read from file \n\n")
        os.system("pause")  # keep the console open (Windows) so the user sees the error
        return

    imgGray = cv2.cvtColor(imgTrainingNumbers, cv2.COLOR_BGR2GRAY)  # get grayscale image
    imgBlurred = cv2.GaussianBlur(imgGray, (5, 5), 0)               # blur to suppress noise

    # Adaptive threshold: foreground (ink) becomes white, background black.
    imgThresh = cv2.adaptiveThreshold(imgBlurred,
                                      255,                              # value given to pixels passing the threshold
                                      cv2.ADAPTIVE_THRESH_GAUSSIAN_C,   # gaussian-weighted neighborhood mean
                                      cv2.THRESH_BINARY_INV,            # invert so foreground is white
                                      11,                               # neighborhood size per pixel
                                      2)                                # constant subtracted from the weighted mean

    cv2.imshow("imgThresh", imgThresh)  # show threshold image for reference

    imgThreshCopy = imgThresh.copy()  # findContours modifies its input, so work on a copy

    # cv2.findContours returns (image, contours, hierarchy) in OpenCV 3 but
    # (contours, hierarchy) in OpenCV 4; indexing [-2] picks the contour list
    # either way, so this works on both versions.
    npaContours = cv2.findContours(imgThreshCopy,
                                   cv2.RETR_EXTERNAL,        # outermost contours only
                                   cv2.CHAIN_APPROX_SIMPLE)[-2]

    # Zero rows to start; one column per pixel of a resized character image.
    npaFlattenedImages = np.empty((0, RESIZED_IMAGE_WIDTH * RESIZED_IMAGE_HEIGHT))

    intClassifications = []  # key codes the user typed, one per accepted contour

    # Key codes accepted as classifications: digits 0-9 and uppercase A-Z.
    intValidChars = [ord(c) for c in "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ"]

    for npaContour in npaContours:
        if cv2.contourArea(npaContour) <= MIN_CONTOUR_AREA:
            continue  # ignore specks too small to be a character

        [intX, intY, intW, intH] = cv2.boundingRect(npaContour)

        # Highlight the contour currently being classified on the original image.
        cv2.rectangle(imgTrainingNumbers,
                      (intX, intY),                 # upper-left corner
                      (intX + intW, intY + intH),   # lower-right corner
                      (0, 0, 255),                  # red
                      2)                            # thickness

        imgROI = imgThresh[intY:intY + intH, intX:intX + intW]  # crop char out of threshold image
        imgROIResized = cv2.resize(imgROI, (RESIZED_IMAGE_WIDTH, RESIZED_IMAGE_HEIGHT))  # uniform size for KNN

        cv2.imshow("imgROI", imgROI)                           # cropped char for reference
        cv2.imshow("imgROIResized", imgROIResized)             # resized char for reference
        cv2.imshow("training_numbers.png", imgTrainingNumbers) # training image with rectangles drawn

        intChar = cv2.waitKey(0)  # wait for the user's classification key press

        if intChar == 27:     # Esc aborts the whole program
            sys.exit()
        elif intChar in intValidChars:
            intClassifications.append(intChar)
            # Flatten to a single row so it can be stacked and written with savetxt.
            npaFlattenedImage = imgROIResized.reshape((1, RESIZED_IMAGE_WIDTH * RESIZED_IMAGE_HEIGHT))
            npaFlattenedImages = np.append(npaFlattenedImages, npaFlattenedImage, 0)

    # savetxt wants floats; reshape the classifications into a single column.
    fltClassifications = np.array(intClassifications, np.float32)
    npaClassifications = fltClassifications.reshape((fltClassifications.size, 1))

    print ("\n\ntraining complete !!\n")

    np.savetxt("classifications2.txt", npaClassifications)   # write classifications to file
    np.savetxt("flattened_images2.txt", npaFlattenedImages)  # write flattened images to file

    cv2.destroyAllWindows()  # remove windows from memory
    return
###################################################################################################
if __name__ == "__main__":  # run only when executed as a script, not on import
    main()
# end if
|
[
"34089891+bluedott@users.noreply.github.com"
] |
34089891+bluedott@users.noreply.github.com
|
664fd5e22572825ff02d5842fce584a415d850a8
|
62bdde43ce88507530610a2b77d2ce0859eebc8b
|
/SWEA/queue/L5105-미로의_거리/L5105-미로의_거리-jiwoong.py
|
9ca4aecca30d1d8a77e4b2053a655343efd4a697
|
[] |
no_license
|
j2woong1/algo-itzy
|
6810f688654105cf4aefda3b0876f714ca8cbd08
|
7cf6cd8383dd8e9ca63f605609aab003790e1565
|
refs/heads/master
| 2023-08-21T12:51:54.874102
| 2021-10-03T04:35:21
| 2021-10-03T04:35:21
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,008
|
py
|
def bfs(x, y, z, visited):
    """Breadth-first search over the module-level N x N grid ``arr``.

    Starts at column ``x``, row ``y`` with initial distance ``z`` and returns
    the number of cells strictly between the start and the goal cell (value 3),
    or 0 if the goal is unreachable.  ``visited`` is an N x N 0/1 matrix that
    is mutated in place.  Cells with value 0 are passable; 1 is a wall.
    """
    from collections import deque  # O(1) popleft, unlike list.pop(0)

    # Neighbour offsets: up, right, down, left.
    dx = [0, 1, 0, -1]
    dy = [-1, 0, 1, 0]

    q = deque([(x, y, z)])
    visited[y][x] = 1  # mark on enqueue so a cell can never be queued twice
    while q:
        curr_x, curr_y, curr_z = q.popleft()
        for i in range(4):
            new_x = curr_x + dx[i]
            new_y = curr_y + dy[i]
            # Skip neighbours that fall outside the grid.
            if 0 <= new_y < N and 0 <= new_x < N:
                if arr[new_y][new_x] == 3:
                    # Goal reached: curr_z cells lie between start and goal.
                    return curr_z
                if not arr[new_y][new_x] and not visited[new_y][new_x]:
                    visited[new_y][new_x] = 1
                    q.append((new_x, new_y, curr_z + 1))
    return 0  # goal unreachable
# Driver: read T test cases; each is an N x N maze of digits (one row of
# digits per line).  Run bfs() from every start cell (value 2) and print
# "#tc <distance>" for each.  N and arr stay global because bfs() reads them.
T = int(input())
for tc in range(1, T + 1):
    N = int(input())
    arr = [list(map(int, input())) for _ in range(N)]
    visited = [[0] * N for _ in range(N)]
    for row in range(N):
        for col in range(N):
            if arr[row][col] == 2:
                print(f'#{tc} {bfs(col, row, 0, visited)}')
|
[
"j2woong1@gmail.com"
] |
j2woong1@gmail.com
|
f2f5ef38ae9682ff7b25a938c4bedefddb4f355e
|
51a38a3b7d90bae3b8d137decb681eea5264c1d6
|
/i_scene_cp77_gltf/material_types/glass.py
|
188e0a71a10560152bd25f176c3e8fc445ba17c1
|
[] |
no_license
|
Walrus159/cp77research
|
8ece0de4fec7ab9a61c43dbafc38350ee1f6d0ef
|
4612c86245f874dec3fbf5c2deff9cbf998d23ce
|
refs/heads/main
| 2023-07-19T12:32:59.968590
| 2021-08-31T23:56:42
| 2021-08-31T23:56:42
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,285
|
py
|
import bpy
import os
from ..main.common import imageFromPath
class Glass:
    """Builds a Blender glass shader node tree from a CP77 material record."""

    def __init__(self, BasePath, image_format):
        # Root folder that the texture paths in the material record are
        # relative to, and the image format handed to imageFromPath.
        self.BasePath = BasePath
        self.image_format = image_format

    def create(self, Glass, Mat):
        """Populate Mat's node tree with tint, IOR, roughness and normal nodes."""
        tree = Mat.node_tree
        bsdf = tree.nodes['Principled BSDF']

        # Fully transmissive surface -- this is glass.
        bsdf.inputs['Transmission'].default_value = 1

        # Tint colour node, converted from 0-255 RGBA to 0.0-1.0 floats.
        tint = tree.nodes.new("ShaderNodeRGB")
        tint.location = (-400, 200)
        tint.hide = True
        tint.label = "TintColor"
        rgba = Glass["TintColor"]
        tint.outputs[0].default_value = (float(rgba["Red"]) / 255,
                                         float(rgba["Green"]) / 255,
                                         float(rgba["Blue"]) / 255,
                                         float(rgba["Alpha"]) / 255)
        tree.links.new(tint.outputs[0], bsdf.inputs['Base Color'])

        # Index of refraction, taken straight from the material record.
        ior = tree.nodes.new("ShaderNodeValue")
        ior.location = (-400, -150)
        ior.hide = True
        ior.label = "IOR"
        ior.outputs[0].default_value = float(Glass["IOR"])
        tree.links.new(ior.outputs[0], bsdf.inputs['IOR'])

        # Roughness texture feeds the BSDF directly.
        rough_img = imageFromPath(self.BasePath + Glass["Roughness"], self.image_format, True)
        rough_tex = tree.nodes.new("ShaderNodeTexImage")
        rough_tex.location = (-800, 50)
        rough_tex.image = rough_img
        rough_tex.label = "Roughness"
        tree.links.new(rough_tex.outputs[0], bsdf.inputs['Roughness'])

        # Normal map chain: image -> RGB curve -> normal-map node -> BSDF.
        normal_img = imageFromPath(self.BasePath + Glass["Normal"], self.image_format, True)
        normal_tex = tree.nodes.new("ShaderNodeTexImage")
        normal_tex.location = (-800, -300)
        normal_tex.image = normal_img
        normal_tex.label = "Normal"

        # Pin curve channel 2 to a constant 1 over its whole range --
        # presumably flattening the normal map's blue channel; confirm in Blender.
        curve = tree.nodes.new("ShaderNodeRGBCurve")
        curve.location = (-500, -300)
        curve.hide = True
        curve.mapping.curves[2].points[0].location = (0, 1)
        curve.mapping.curves[2].points[1].location = (1, 1)

        normal_map = tree.nodes.new("ShaderNodeNormalMap")
        normal_map.location = (-200, -300)
        normal_map.hide = True

        tree.links.new(normal_tex.outputs[0], curve.inputs[1])
        tree.links.new(curve.outputs[0], normal_map.inputs[1])
        tree.links.new(normal_map.outputs[0], bsdf.inputs['Normal'])
|
[
"65016231+ja-to@users.noreply.github.com"
] |
65016231+ja-to@users.noreply.github.com
|
cfbc73e87a1f617dd5d05185217fead9b365b1cd
|
8c0e6de3a106ef148dd2994973f3f5cb807c70eb
|
/General Programing/Documentation Hub/imageviewer.py
|
ac39a35f9c94c5dffe56d0b6d169a8e29ad2f0fd
|
[] |
no_license
|
TKprotich/Practices-and-Experimenting
|
8f8928cc43e6cb9996b064d66dc6783fc8edf362
|
211e11f3f52b36dd6dc944d3b503c81d412acb4b
|
refs/heads/main
| 2023-01-11T00:12:14.404189
| 2020-11-15T13:41:24
| 2020-11-15T13:41:24
| 304,259,482
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 180
|
py
|
import pyglet
# Minimal pyglet image viewer: open a window and blit one image into it.
window = pyglet.window.Window()
image = pyglet.resource.image('workout.JPG')


@window.event
def on_draw():
    """Redraw handler: clear the window, then draw the image at the origin."""
    window.clear()
    image.blit(0, 0)


# Enter pyglet's event loop (blocks until the window is closed).
pyglet.app.run()
|
[
"marchemjor@gmail.com"
] |
marchemjor@gmail.com
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.