Dataset schema (⌀ marks nullable columns):

| column | dtype | values |
|---|---|---|
| hexsha | string | length 40 |
| size | int64 | 3 – 1.03M |
| ext | string | 10 classes |
| lang | string | 1 class |
| max_stars_repo_path | string | length 3–972 |
| max_stars_repo_name | string | length 6–130 |
| max_stars_repo_head_hexsha | string | length 40–78 |
| max_stars_repo_licenses | list | length 1–10 |
| max_stars_count | int64 | 1 – 191k ⌀ |
| max_stars_repo_stars_event_min_datetime | string | length 24 ⌀ |
| max_stars_repo_stars_event_max_datetime | string | length 24 ⌀ |
| max_issues_repo_path | string | length 3–972 |
| max_issues_repo_name | string | length 6–130 |
| max_issues_repo_head_hexsha | string | length 40–78 |
| max_issues_repo_licenses | list | length 1–10 |
| max_issues_count | int64 | 1 – 116k ⌀ |
| max_issues_repo_issues_event_min_datetime | string | length 24 ⌀ |
| max_issues_repo_issues_event_max_datetime | string | length 24 ⌀ |
| max_forks_repo_path | string | length 3–972 |
| max_forks_repo_name | string | length 6–130 |
| max_forks_repo_head_hexsha | string | length 40–78 |
| max_forks_repo_licenses | list | length 1–10 |
| max_forks_count | int64 | 1 – 105k ⌀ |
| max_forks_repo_forks_event_min_datetime | string | length 24 ⌀ |
| max_forks_repo_forks_event_max_datetime | string | length 24 ⌀ |
| content | string | length 3 – 1.03M |
| avg_line_length | float64 | 1.13 – 941k |
| max_line_length | int64 | 2 – 941k |
| alphanum_fraction | float64 | 0 – 1 |
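Each record below follows this schema, with the file's source stored in the content column. As a rough, hypothetical sketch of filtering such records (the file name stack_sample.jsonl and the use of pandas are assumptions, not part of this dump):

import pandas as pd

# Hypothetical: load a JSON-lines export of records that follow the schema above.
df = pd.read_json("stack_sample.jsonl", lines=True)

# Keep small Python files whose content is mostly alphanumeric.
subset = df[(df["ext"] == "py") & (df["size"] < 20_000) & (df["alphanum_fraction"] > 0.5)]

# max_stars_count is nullable (⌀), so push missing values to the end before ranking.
top = subset.sort_values("max_stars_count", ascending=False, na_position="last").head(5)
print(top[["max_stars_repo_name", "max_stars_repo_path", "max_stars_count"]])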
hexsha: fa1db442c9e53ef996398c6e62a52fb34b4bd8ef | size: 2,350 | ext: py | lang: Python
max_stars:  path=tests/data/events.py | repo=stackriot-labs/gitsome | head=d7c57abc7cb66e9c910a844f15d4536866da3310 | licenses=["Apache-2.0"] | count=7,986 | events 2015-11-07T11:59:21.000Z – 2022-03-27T17:20:49.000Z
max_issues: path=tests/data/events.py | repo=themaximum88/gitsome | head=d7c57abc7cb66e9c910a844f15d4536866da3310 | licenses=["Apache-2.0"] | count=161 | events 2016-05-09T09:53:48.000Z – 2022-02-22T04:18:59.000Z
max_forks:  path=tests/data/events.py | repo=themaximum88/gitsome | head=d7c57abc7cb66e9c910a844f15d4536866da3310 | licenses=["Apache-2.0"] | count=539 | events 2016-04-05T05:39:58.000Z – 2022-03-23T20:47:52.000Z
content:
# -*- coding: utf-8 -*-
# Copyright 2015 Donne Martin. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
formatted_events = u'\x1b[35m 1. \x1b[0mdonnemartin \x1b[0m\x1b[32mcommented on commit \x1b[0m\x1b[36mAAA23e2\x1b[0m\x1b[32m at \x1b[0m\x1b[36muser1/repo1\x1b[0m\x1b[33m (just now)\x1b[0m\n \x1b[36m\x1b[0mfoo\x1b[0m\n\x1b[35m 2. \x1b[0mdonnemartin \x1b[0m\x1b[32mcreated\x1b[0m\x1b[32m branch\x1b[0m\x1b[36m master\x1b[0m\x1b[32m at \x1b[0m\x1b[36muser1/repo1\x1b[0m\x1b[33m (just now)\x1b[0m\n \x1b[36m\x1b[0m\x1b[0m\n\x1b[35m 3. \x1b[0mdonnemartin \x1b[0m\x1b[32mfollowed \x1b[0m\x1b[36muser1/repo1\x1b[0m\x1b[33m (just now)\x1b[0m\n\x1b[35m 4. \x1b[0mdonnemartin \x1b[0m\x1b[32mforked\x1b[0m\x1b[36m user1/repo1\x1b[0m\x1b[33m (just now)\x1b[0m\n\x1b[35m 5. \x1b[0mdonnemartin \x1b[0m\x1b[32mcommented on \x1b[0m\x1b[36muser1/repo1#1\x1b[0m\x1b[33m (just now)\x1b[0m\n \x1b[36m\x1b[0mfoo\x1b[0m\n \x1b[36m\x1b[0mfoo\x1b[0m\n\x1b[35m 6. \x1b[0mdonnemartin \x1b[0m\x1b[32mclosed issue \x1b[0m\x1b[36muser1/repo1#1\x1b[0m\x1b[33m (just now)\x1b[0m\n \x1b[36m\x1b[0mfoo\x1b[0m\n\x1b[35m 7. \x1b[0mdonnemartin \x1b[0m\x1b[32mclosed pull request \x1b[0m\x1b[36muser1/repo1#1\x1b[0m\x1b[33m (just now)\x1b[0m\n \x1b[36m\x1b[0mfoo\x1b[0m\n\x1b[35m 8. \x1b[0mdonnemartin \x1b[0m\x1b[32mpushed to\x1b[0m\x1b[36m master\x1b[0m\x1b[32m at \x1b[0m\x1b[36muser1/repo1\x1b[0m\x1b[33m (just now)\x1b[0m\n \x1b[36m5ee4d1b: \x1b[0m\x1b[0mFix GitHubCli class docstring\x1b[0m\n \x1b[36mfc2309b: \x1b[0m\x1b[0mUpdate gh configure docstring\x1b[0m\n \x1b[36mdde19b7: \x1b[0m\x1b[0mUpdate gh create-comment docstring\x1b[0m\n\x1b[35m 9. \x1b[0mdonnemartin \x1b[0m\x1b[32mreleased \x1b[0m\x1b[36m0.5.0 \x1b[0m\x1b[32mat \x1b[0m\x1b[36muser1/repo1\x1b[0m\x1b[33m (just now)\x1b[0m\n\x1b[0m'
avg_line_length: 138.235294 | max_line_length: 1,770 | alphanum_fraction: 0.714043

hexsha: 93f551e5469b1f02fbf0ae7bd5d72a1f50913ecb | size: 6,617 | ext: py | lang: Python
max_stars:  path=landlab/components/stream_power/examples/plot_concavities_forAGU.py | repo=awickert/landlab | head=496de56717a5877db96f354a1b1285bfabe8b56f | licenses=["MIT"] | count=1 | events 2015-08-17T19:29:50.000Z – 2015-08-17T19:29:50.000Z
max_issues: path=landlab/components/stream_power/examples/plot_concavities_forAGU.py | repo=awickert/landlab | head=496de56717a5877db96f354a1b1285bfabe8b56f | licenses=["MIT"] | count=1 | events 2018-04-07T08:24:56.000Z – 2018-04-07T13:52:03.000Z
max_forks:  path=landlab/components/stream_power/examples/plot_concavities_forAGU.py | repo=awickert/landlab | head=496de56717a5877db96f354a1b1285bfabe8b56f | licenses=["MIT"] | count=2 | events 2017-07-03T20:21:13.000Z – 2018-09-06T23:58:19.000Z
content:
from pylab import figure, plot, xlabel, ylabel, title, loglog, show, gca, xlim, ylim, legend
import numpy as np
yunnan_propx = np.loadtxt('yunnan_proplength.txt')
yunnan_theta = np.loadtxt('yunnan_theta.txt')
fagaras_propx = np.loadtxt('fagaras_proplength.txt')
fagaras_theta = np.loadtxt('fagaras_theta.txt')
ladakh_propx = np.loadtxt('ladakh_proplength.txt')
ladakh_theta = np.loadtxt('ladakh_theta.txt')
#this data is u=0.0001->0.0005, no threshold, explicit, small random K var from 1e-6
pureDL_nothresh_propx = np.loadtxt('pureDLnothresh_proplength.txt')
pureDL_nothresh_theta = np.loadtxt('pureDLnothresh_theta.txt')
seddepNMG_propx = np.loadtxt('seddepNMG_proplength.txt')
seddepNMG_theta = np.loadtxt('seddepNMG_theta.txt')
stormsDL_thresh_propx = np.loadtxt('stormsDLthresh1e-1_proplength.txt')
stormsDL_thresh_theta = np.loadtxt('stormsDLthresh1e-1_theta.txt')
stormsDL_nothresh_propx = np.loadtxt('stormsDLnothresh_proplength.txt')
stormsDL_nothresh_theta = np.loadtxt('stormsDLnothresh_theta.txt')
bevel1seddep_propx = np.loadtxt('seddepNMG0.0001bevel_proplength.txt')
bevel1seddep_theta = np.loadtxt('seddepNMG0.0001bevel_theta.txt')
bevel2seddep_propx = np.loadtxt('seddepNMG0.0006bevel_proplength.txt') #no perturbation... we're not crossing the hump
bevel2seddep_theta = np.loadtxt('seddepNMG0.0006bevel_theta.txt')
aparabolicNMG2x10_propx = np.loadtxt('aparabolicNMG20001x10_proplength.txt')
aparabolicNMG2x10_theta = np.loadtxt('aparabolicNMG20001x10_theta.txt')
aparabolicNMG2x5_propx = np.loadtxt('aparabolicNMG20001x5_proplength.txt')
aparabolicNMG2x5_theta = np.loadtxt('aparabolicNMG20001x5_theta.txt')
aparabolicNMG4x100_propx = np.loadtxt('aparabolicNMG4000001x100_proplength.txt')
aparabolicNMG4x100_theta = np.loadtxt('aparabolicNMG4000001x100_theta.txt')
aparabolicNMG4x10_propx = np.loadtxt('aparabolicNMG4000001x10_proplength.txt')
aparabolicNMG4x10_theta = np.loadtxt('aparabolicNMG4000001x10_theta.txt')
aparabolicNMG5x10_propx = np.loadtxt('aparabolicNMG500001x10_proplength.txt')
aparabolicNMG5x10_theta = np.loadtxt('aparabolicNMG500001x10_theta.txt')
figure('concavities_just_one_sde')
plot(seddepNMG_propx, seddepNMG_theta, 'rx-', label='sediment flux dependent')
gca().set_yscale('log')
xlim([0,1])
y_scale = gca().get_ylim()
plot(np.array([0.,1.]), 0.5*np.ones(2), 'k--')
figure('concavities_just_one_DL')
plot(pureDL_nothresh_propx, pureDL_nothresh_theta, 'b.-', label='pure detachment limited')
gca().set_yscale('log')
xlim([0,1])
ylim(y_scale)
plot(np.array([0.,1.]), 0.5*np.ones(2), 'k--')
legend(loc=2)
figure('concavities_one_sde_one_DL')
plot(pureDL_nothresh_propx, pureDL_nothresh_theta, 'b.-', label='pure detachment limited')
plot(seddepNMG_propx, seddepNMG_theta, 'rx-', label='sediment flux dependent')
gca().set_yscale('log')
xlim([0,1])
plot(np.array([0.,1.]), 0.5*np.ones(2), 'k--')
legend(loc=2)
figure('all_models')
plot(np.array([0.,1.]), 0.5*np.ones(2), 'k--')
plot(pureDL_nothresh_propx, pureDL_nothresh_theta, 'b.-', label='pureDL')
plot(stormsDL_nothresh_propx, stormsDL_nothresh_theta, 'b+-', label='pureDL_storms')
plot(stormsDL_thresh_propx, stormsDL_thresh_theta, 'b*-', label='pureDL_storms_thresh')
plot(seddepNMG_propx, seddepNMG_theta, 'rx-', label='U=0.0001,K=5*10^-5,5*dU')
plot(aparabolicNMG2x10_propx[1:], aparabolicNMG2x10_theta[1:], 'r+-', label='10*U,2*K,10*dU')
plot(aparabolicNMG2x5_propx, aparabolicNMG2x5_theta, 'rv-', label='10*U,2*K,5*dU')
plot(aparabolicNMG4x100_propx, aparabolicNMG4x100_theta, 'r<-', label='0.1*U,100*dU')
plot(aparabolicNMG4x10_propx, aparabolicNMG4x10_theta, 'r>-', label='0.1*U,10*dU')
plot(aparabolicNMG5x10_propx, aparabolicNMG5x10_theta, 'r^-', label='0.2*K,10*dU')
plot(bevel1seddep_propx, bevel1seddep_theta, 'rp-', label='bevel_lowangle')
plot(bevel2seddep_propx[1:], bevel2seddep_theta[1:], 'rh-', label='bevel_highangle') #first val is a bad pick
gca().set_yscale('log')
y_scale_all = gca().get_ylim()
xlim([0,1])
legend(loc=2)
figure('all_DL')
plot(np.array([0.,1.]), 0.5*np.ones(2), 'k--')
plot(pureDL_nothresh_propx, pureDL_nothresh_theta, 'b.-', label='pureDL')
plot(stormsDL_nothresh_propx, stormsDL_nothresh_theta, 'b+-', label='pureDL_storms')
plot(stormsDL_thresh_propx, stormsDL_thresh_theta, 'b*-', label='pureDL_storms_thresh')
gca().set_yscale('log')
xlim([0,1])
ylim(y_scale_all)
legend(loc=2)
figure('all_model_just_Ladakh')
plot(np.array([0.,1.]), 0.5*np.ones(2), 'k--')
plot(pureDL_nothresh_propx, pureDL_nothresh_theta, ':', color='0.6', label='pure detachment limited')
plot(stormsDL_nothresh_propx, stormsDL_nothresh_theta, ':', color='0.6')
plot(stormsDL_thresh_propx, stormsDL_thresh_theta, ':', color='0.6')
plot(seddepNMG_propx, seddepNMG_theta, '-', color='0.3', label='sed flux dependent')
plot(aparabolicNMG2x10_propx[1:], aparabolicNMG2x10_theta[1:], '-', color='0.3')
plot(aparabolicNMG2x5_propx, aparabolicNMG2x5_theta, '-', color='0.3')
plot(aparabolicNMG4x100_propx, aparabolicNMG4x100_theta, '-', color='0.3')
plot(aparabolicNMG4x10_propx, aparabolicNMG4x10_theta, '-', color='0.3')
plot(aparabolicNMG5x10_propx, aparabolicNMG5x10_theta, '-', color='0.3')
plot(bevel1seddep_propx, bevel1seddep_theta, '-', color='0.3')
plot(bevel2seddep_propx[1:], bevel2seddep_theta[1:], '-', color='0.3') #first val is a bad pick
plot(ladakh_propx, ladakh_theta, 'bo', label='Ladakh field data')
gca().set_yscale('log')
xlim([0,1])
ylim(y_scale_all)
legend(loc=2)
figure('all_model_all_data')
plot(np.array([0.,1.]), 0.5*np.ones(2), 'k--')
plot(pureDL_nothresh_propx, pureDL_nothresh_theta, ':', color='0.6', label='pure detachment limited')
plot(stormsDL_nothresh_propx, stormsDL_nothresh_theta, ':', color='0.6')
plot(stormsDL_thresh_propx, stormsDL_thresh_theta, ':', color='0.6')
plot(seddepNMG_propx, seddepNMG_theta, '-', color='0.3', label='sed flux dependent')
plot(aparabolicNMG2x10_propx[1:], aparabolicNMG2x10_theta[1:], '-', color='0.3')
plot(aparabolicNMG2x5_propx, aparabolicNMG2x5_theta, '-', color='0.3')
plot(aparabolicNMG4x100_propx, aparabolicNMG4x100_theta, '-', color='0.3')
plot(aparabolicNMG4x10_propx, aparabolicNMG4x10_theta, '-', color='0.3')
plot(aparabolicNMG5x10_propx, aparabolicNMG5x10_theta, '-', color='0.3')
plot(bevel1seddep_propx, bevel1seddep_theta, '-', color='0.3')
plot(bevel2seddep_propx[1:], bevel2seddep_theta[1:], '-', color='0.3') #first val is a bad pick
plot(yunnan_propx, yunnan_theta, 'sr', label='Red River field data')
plot(ladakh_propx, ladakh_theta, 'bo', label='Ladakh field data')
plot(fagaras_propx, fagaras_theta, 'vg', label='Fagaras field data')
gca().set_yscale('log')
xlim([0,1])
ylim(y_scale_all)
legend(loc=2)
avg_line_length: 53.796748 | max_line_length: 118 | alphanum_fraction: 0.767115

hexsha: f883ede988d8c7ff74636ec5037471134c42eda6 | size: 6,946 | ext: py | lang: Python
max_stars:  path=lib/yaml/reader.py | repo=pflarr/pyyaml | head=239110c403da1068498591e0c3bbff12cfc121ae | licenses=["MIT"] | count=2 | events 2018-04-27T22:12:50.000Z – 2020-11-27T23:32:06.000Z
max_issues: path=lib/yaml/reader.py | repo=pflarr/pyyaml | head=239110c403da1068498591e0c3bbff12cfc121ae | licenses=["MIT"] | count=null | events null – null
max_forks:  path=lib/yaml/reader.py | repo=pflarr/pyyaml | head=239110c403da1068498591e0c3bbff12cfc121ae | licenses=["MIT"] | count=2 | events 2020-01-29T20:36:20.000Z – 2021-03-08T02:05:35.000Z
content:
# This module contains abstractions for the input stream. You don't have to
# look further; there is no pretty code here.
#
# We define two classes here.
#
# Mark(source, line, column)
# It's just a record and its only use is producing nice error messages.
# Parser does not use it for any other purposes.
#
# Reader(source, data)
# Reader determines the encoding of `data` and converts it to unicode.
# Reader provides the following methods and attributes:
# reader.peek(length=1) - return the next `length` characters
# reader.forward(length=1) - move the current position forward by `length` characters.
# reader.index - the number of the current character.
# reader.line, reader.column - the line and the column of the current character.
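#
# A minimal usage sketch (an illustrative comment added here, not part of the
# original module; it assumes the module is importable as yaml.reader):
#
#   from yaml.reader import Reader
#   r = Reader("key: value\n")        # a str input is used directly
#   while r.peek() != '\0':           # '\0' marks the end of the buffer
#       ch = r.peek()                 # look at the current character
#       r.forward()                   # advance by one character
#   mark = r.get_mark()               # line/column of the current position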
from __future__ import unicode_literals, division, print_function
__all__ = ['Reader', 'ReaderError']
from .error import YAMLError, Mark
import codecs, re, sys
class ReaderError(YAMLError):
def __init__(self, name, position, character, encoding, reason):
self.name = name
self.character = character
self.position = position
self.encoding = encoding
self.reason = reason
def __str__(self):
if isinstance(self.character, bytes):
return "'%s' codec can't decode byte #x%02x: %s\n" \
" in \"%s\", position %d" \
% (self.encoding, ord(self.character), self.reason,
self.name, self.position)
else:
return "unacceptable character #x%04x: %s\n" \
" in \"%s\", position %d" \
% (self.character, self.reason,
self.name, self.position)
class Reader(object):
# Reader:
# - determines the data encoding and converts it to a unicode string,
# - checks if characters are in allowed range,
# - adds '\0' to the end.
# Reader accepts
# - a `bytes` object,
# - a `str` object,
# - a file-like object with its `read` method returning `str`,
# - a file-like object with its `read` method returning `unicode`.
# Yeah, it's ugly and slow.
def __init__(self, stream):
self.name = None
self.stream = None
self.stream_pointer = 0
self.eof = True
self.buffer = ''
self.pointer = 0
self.raw_buffer = None
self.raw_decode = None
self.encoding = None
self.index = 0
self.line = 0
self.column = 0
if isinstance(stream, str):
self.name = "<unicode string>"
self.check_printable(stream)
self.buffer = stream+'\0'
elif isinstance(stream, bytes):
self.name = "<byte string>"
self.raw_buffer = stream
self.determine_encoding()
else:
self.stream = stream
self.name = getattr(stream, 'name', "<file>")
self.eof = False
self.raw_buffer = None
self.determine_encoding()
def peek(self, index=0):
try:
return self.buffer[self.pointer+index]
except IndexError:
self.update(index+1)
return self.buffer[self.pointer+index]
def prefix(self, length=1):
if self.pointer+length >= len(self.buffer):
self.update(length)
return self.buffer[self.pointer:self.pointer+length]
def forward(self, length=1):
if self.pointer+length+1 >= len(self.buffer):
self.update(length+1)
while length:
ch = self.buffer[self.pointer]
self.pointer += 1
self.index += 1
if ch in '\n\x85\u2028\u2029' \
or (ch == '\r' and self.buffer[self.pointer] != '\n'):
self.line += 1
self.column = 0
elif ch != '\uFEFF':
self.column += 1
length -= 1
def get_mark(self):
if self.stream is None:
return Mark(self.name, self.index, self.line, self.column,
self.buffer, self.pointer)
else:
return Mark(self.name, self.index, self.line, self.column,
None, None)
def determine_encoding(self):
while not self.eof and (self.raw_buffer is None or len(self.raw_buffer) < 2):
self.update_raw()
if isinstance(self.raw_buffer, bytes):
if self.raw_buffer.startswith(codecs.BOM_UTF16_LE):
self.raw_decode = codecs.utf_16_le_decode
self.encoding = 'utf-16-le'
elif self.raw_buffer.startswith(codecs.BOM_UTF16_BE):
self.raw_decode = codecs.utf_16_be_decode
self.encoding = 'utf-16-be'
else:
self.raw_decode = codecs.utf_8_decode
self.encoding = 'utf-8'
self.update(1)
NON_PRINTABLE = re.compile('[^\x09\x0A\x0D\x20-\x7E\x85\xA0-\uD7FF\uE000-\uFFFD\U00010000-\U0010ffff]')
def check_printable(self, data):
match = self.NON_PRINTABLE.search(data)
if match:
character = match.group()
position = self.index+(len(self.buffer)-self.pointer)+match.start()
raise ReaderError(self.name, position, ord(character),
'unicode', "special characters are not allowed")
def update(self, length):
if self.raw_buffer is None:
return
self.buffer = self.buffer[self.pointer:]
self.pointer = 0
while len(self.buffer) < length:
if not self.eof:
self.update_raw()
if self.raw_decode is not None:
try:
data, converted = self.raw_decode(self.raw_buffer,
'strict', self.eof)
except UnicodeDecodeError as exc:
character = self.raw_buffer[exc.start]
if self.stream is not None:
position = self.stream_pointer-len(self.raw_buffer)+exc.start
else:
position = exc.start
raise ReaderError(self.name, position, character,
exc.encoding, exc.reason)
else:
data = self.raw_buffer
converted = len(data)
self.check_printable(data)
self.buffer += data
self.raw_buffer = self.raw_buffer[converted:]
if self.eof:
self.buffer += '\0'
self.raw_buffer = None
break
def update_raw(self, size=4096):
data = self.stream.read(size)
if self.raw_buffer is None:
self.raw_buffer = data
else:
self.raw_buffer += data
self.stream_pointer += len(data)
if not data:
self.eof = True
#try:
# import psyco
# psyco.bind(Reader)
#except ImportError:
# pass
avg_line_length: 35.804124 | max_line_length: 107 | alphanum_fraction: 0.560035

hexsha: 08bf20f42b00ff927888ab8a1ab64f9b0a98eaba | size: 1,586 | ext: py | lang: Python
max_stars:  path=python/testData/inspections/PyArgumentListInspection/badarglist.py | repo=Sajadrahimi/intellij-community | head=ab9ff612dde3ee94ecae33cbc0ea639fa51550d4 | licenses=["Apache-2.0"] | count=null | events null – null
max_issues: path=python/testData/inspections/PyArgumentListInspection/badarglist.py | repo=Sajadrahimi/intellij-community | head=ab9ff612dde3ee94ecae33cbc0ea639fa51550d4 | licenses=["Apache-2.0"] | count=null | events null – null
max_forks:  path=python/testData/inspections/PyArgumentListInspection/badarglist.py | repo=Sajadrahimi/intellij-community | head=ab9ff612dde3ee94ecae33cbc0ea639fa51550d4 | licenses=["Apache-2.0"] | count=1 | events 2022-01-02T19:58:08.000Z – 2022-01-02T19:58:08.000Z
content:
# bad argument list samples
class A:
def foo(self, x, y):
pass
# no self, but so what
def bar(one, two):
pass
a = A()
a.foo(1,2)
a.bar(<warning descr="Parameter 'two' unfilled">)</warning>;
def f1():
pass
f1()
f1<warning descr="Unexpected argument(s)">(<warning descr="Unexpected argument">1</warning>)</warning>
f1<warning descr="Unexpected argument(s)">(<warning descr="Unexpected argument">a = 1</warning>)</warning>
def f2(a):
pass
f2(<warning descr="Parameter 'a' unfilled">)</warning> # ok, fail
f2(1) # ok, pass
f2<warning descr="Unexpected argument(s)">(1, <warning descr="Unexpected argument">2</warning>)</warning> # ok, fail
f2(a = 1) # ok, pass
f2(<warning descr="Unexpected argument">b = 1</warning><warning descr="Parameter 'a' unfilled">)</warning> # ok, fail
f2<warning descr="Unexpected argument(s)">(a = 1, <warning descr="Unexpected argument">b = 2</warning>)</warning> # ok, fail
def f3(a, b):
pass
f3(1, 2)
f3<warning descr="Unexpected argument(s)">(1, 2, <warning descr="Unexpected argument">3</warning>)</warning>
f3(b=2, a=1)
f3<warning descr="Unexpected argument(s)">(b=1, <error descr="Keyword argument repeated">b=2</error>, a=1)</warning>
f3(1, b=2)
f3(a=1, <error descr="Positional argument after keyword argument">2</error><warning descr="Parameter 'b' unfilled">)</warning>
def f4(a, *b):
pass
f4(1)
f4(1, 2)
f4(1, 2, 3)
f4(1, *(2, 3))
f4(*(1,2,3))
f4(a=1, <error descr="Positional argument after keyword argument">2</error>, <error descr="Positional argument after keyword argument">3</error>)
avg_line_length: 28.321429 | max_line_length: 145 | alphanum_fraction: 0.664565

hexsha: 9d7bb559e11f6498887ac670402f66515d1c2a29 | size: 11,578 | ext: py | lang: Python
max_stars:  path=code/python/StocksAPIforDigitalPortals/v2/fds/sdk/StocksAPIforDigitalPortals/model/inline_response2005_data_reported_key_figures_first_fiscal_year_ratios_enterprise_value_ebitda.py | repo=factset/enterprise-sdk | head=3fd4d1360756c515c9737a0c9a992c7451d7de7e | licenses=["Apache-2.0"] | count=6 | events 2022-02-07T16:34:18.000Z – 2022-03-30T08:04:57.000Z
max_issues: path=code/python/StocksAPIforDigitalPortals/v2/fds/sdk/StocksAPIforDigitalPortals/model/inline_response2005_data_reported_key_figures_first_fiscal_year_ratios_enterprise_value_ebitda.py | repo=factset/enterprise-sdk | head=3fd4d1360756c515c9737a0c9a992c7451d7de7e | licenses=["Apache-2.0"] | count=2 | events 2022-02-07T05:25:57.000Z – 2022-03-07T14:18:04.000Z
max_forks:  path=code/python/StocksAPIforDigitalPortals/v2/fds/sdk/StocksAPIforDigitalPortals/model/inline_response2005_data_reported_key_figures_first_fiscal_year_ratios_enterprise_value_ebitda.py | repo=factset/enterprise-sdk | head=3fd4d1360756c515c9737a0c9a992c7451d7de7e | licenses=["Apache-2.0"] | count=null | events null – null
content:
"""
Prime Developer Trial
No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501
The version of the OpenAPI document: v1
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
import sys # noqa: F401
from fds.sdk.StocksAPIforDigitalPortals.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
OpenApiModel
)
from fds.sdk.StocksAPIforDigitalPortals.exceptions import ApiAttributeError
class InlineResponse2005DataReportedKeyFiguresFirstFiscalYearRatiosEnterpriseValueEbitda(ModelNormal):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
Attributes:
allowed_values (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
with a capitalized key describing the allowed value and an allowed
value. These dicts store the allowed enum values.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
discriminator_value_class_map (dict): A dict to go from the discriminator
variable value to the discriminator class name.
validations (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
that stores validations for max_length, min_length, max_items,
min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
inclusive_minimum, and regex.
additional_properties_type (tuple): A tuple of classes accepted
as additional properties values.
"""
allowed_values = {
}
validations = {
}
@cached_property
def additional_properties_type():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
"""
return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501
_nullable = False
@cached_property
def openapi_types():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
Returns
openapi_types (dict): The key is attribute name
and the value is attribute type.
"""
return {
'minimum': (float,), # noqa: E501
'maximum': (float,), # noqa: E501
}
@cached_property
def discriminator():
return None
attribute_map = {
'minimum': 'minimum', # noqa: E501
'maximum': 'maximum', # noqa: E501
}
read_only_vars = {
}
_composed_schemas = {}
@classmethod
@convert_js_args_to_python_args
def _from_openapi_data(cls, *args, **kwargs): # noqa: E501
"""InlineResponse2005DataReportedKeyFiguresFirstFiscalYearRatiosEnterpriseValueEbitda - a model defined in OpenAPI
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
composed schema that is
is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
minimum (float): Minimum value.. [optional] # noqa: E501
maximum (float): Maximum value.. [optional] # noqa: E501
"""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
self = super(OpenApiModel, cls).__new__(cls)
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)
return self
required_properties = set([
'_data_store',
'_check_type',
'_spec_property_naming',
'_path_to_item',
'_configuration',
'_visited_composed_classes',
])
@convert_js_args_to_python_args
def __init__(self, *args, **kwargs): # noqa: E501
"""InlineResponse2005DataReportedKeyFiguresFirstFiscalYearRatiosEnterpriseValueEbitda - a model defined in OpenAPI
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
composed schema that is
is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
minimum (float): Minimum value.. [optional] # noqa: E501
maximum (float): Maximum value.. [optional] # noqa: E501
"""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)
if var_name in self.read_only_vars:
raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate "
f"class with read only attributes.")
avg_line_length: 44.530769 | max_line_length: 124 | alphanum_fraction: 0.578943

hexsha: 76bc248ecd134f17b32444962d08929ebb3edca2 | size: 1,107 | ext: py | lang: Python
max_stars:  path=Machine Learning Algorithms/Section 2 - Regression/2. Multiple Linear Regression/multiple_linear_regression.py | repo=Hrishi97/All-About-Machine-Learning | head=e141951bc629cbd8c3068cc9d284cb40478d2d75 | licenses=["Unlicense"] | count=389 | events 2021-06-13T13:57:13.000Z – 2022-03-30T07:49:47.000Z
max_issues: path=multiple_linear_regression.py | repo=rishikonapure/-100daysofmlcode | head=50ea710eb197287ae1e538c8528c88c4503d9f9a | licenses=["MIT"] | count=23 | events 2020-07-21T04:54:58.000Z – 2022-03-08T23:30:06.000Z
max_forks:  path=multiple_linear_regression.py | repo=rishikonapure/-100daysofmlcode | head=50ea710eb197287ae1e538c8528c88c4503d9f9a | licenses=["MIT"] | count=109 | events 2021-06-13T14:26:21.000Z – 2022-03-29T11:55:27.000Z
content:
# Multiple Linear Regression
# Importing the libraries
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
# Importing the dataset
dataset = pd.read_csv('50_Startups.csv')
X = dataset.iloc[:, :-1].values
y = dataset.iloc[:, -1].values
print(X)
# Encoding categorical data
from sklearn.compose import ColumnTransformer
from sklearn.preprocessing import OneHotEncoder
ct = ColumnTransformer(transformers=[('encoder', OneHotEncoder(), [3])], remainder='passthrough')
X = np.array(ct.fit_transform(X))
print(X)
# Splitting the dataset into the Training set and Test set
from sklearn.model_selection import train_test_split
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size = 0.2, random_state = 0)
# Training the Multiple Linear Regression model on the Training set
from sklearn.linear_model import LinearRegression
regressor = LinearRegression()
regressor.fit(X_train, y_train)
# Predicting the Test set results
y_pred = regressor.predict(X_test)
np.set_printoptions(precision=2)
print(np.concatenate((y_pred.reshape(len(y_pred),1), y_test.reshape(len(y_test),1)),1))
avg_line_length: 33.545455 | max_line_length: 97 | alphanum_fraction: 0.785005

hexsha: 1141c9a9df8b6d197d8d175dc525309ee4a1e479 | size: 14,846 | ext: py | lang: Python
max_stars:  path=analyze/analyzeMinutes.py | repo=Aquaware/MarketAlertWithXM | head=6cfbc26f7b32880ff9a6911599b4a9614345e505 | licenses=["MIT"] | count=null | events null – null
max_issues: path=analyze/analyzeMinutes.py | repo=Aquaware/MarketAlertWithXM | head=6cfbc26f7b32880ff9a6911599b4a9614345e505 | licenses=["MIT"] | count=null | events null – null
max_forks:  path=analyze/analyzeMinutes.py | repo=Aquaware/MarketAlertWithXM | head=6cfbc26f7b32880ff9a6911599b4a9614345e505 | licenses=["MIT"] | count=null | events null – null
content:
# -*- coding: utf-8 -*-
import os
import sys
sys.path.append(os.path.join(os.path.dirname(__file__), '../common'))
import pandas as pd
import numpy as np
from datetime import date, datetime, timedelta
from Timeframe import Timeframe
from CandlePlot import CandlePlot, BandPlot, makeFig
import matplotlib.pyplot as plt
import matplotlib.gridspec as gridspec
import calendar
from ta.trend import SMAIndicator
import TradeSimulation
TIME = 'time'
OPEN = 'open'
HIGH = 'high'
LOW = 'low'
CLOSE = 'close'
DIF = 'close-open'
SIGMA = 'sigma'
DMA_SLOW = 'dma_slow'
DMA_FAST = 'dma_fast'
BID = 'bid'
ASK = 'ask'
MID = 'mid'
SPREAD = 'spread'
THRESHOLD = 'threshold'
DELAY = 'delay'
LOSSCUT = 'loscut'
def weekday(year, month, day):
d = date(year, month, day)
day_index = d.weekday()
return calendar.day_name[day_index][:3]
def dirPath(root, stock, year):
path = root + stock + '/' + str(year).zfill(4) + '/'
return path
def filename(stock, year, month, day):
path = stock + '_Tick_' + str(year).zfill(4) + '-' + str(month).zfill(2) + '-' + str(day).zfill(2) + '.csv'
return path
def timeFilter(tohlc_dic, year, month, day, hourmins):
time = tohlc_dic[TIME]
try:
t1 = datetime(year, month, day, hourmins[0][0], hourmins[0][1])
t2 = datetime(year, month, day, hourmins[1][0], hourmins[1][1])
if t1 > t2:
t2 += timedelta(days=1)
if t1 > time[-1] or t2 < time[0]:
return (0, None)
except:
return (0, None)
begin = None
stop = None
for (i, t) in enumerate(time):
if begin is None:
if t >= t1:
begin = i
else:
if t >= t2:
stop = i
break
if stop is None:
stop = len(time)
if begin is None or stop is None:
return (0, None)
dic = {}
dic[TIME] = time[begin:stop]
dic[OPEN] = tohlc_dic[OPEN][begin: stop]
dic[HIGH] = tohlc_dic[HIGH][begin: stop]
dic[LOW] = tohlc_dic[LOW][begin: stop]
dic[CLOSE] = tohlc_dic[CLOSE][begin: stop]
return (stop - begin, dic)
def candle(prices):
o = prices[0]
c = prices[-1]
h = np.max(prices)
l = np.min(prices)
return [o, h, l, c]
def tohlc2ohlc(tohlc):
time = []
ohlc = []
for value in tohlc:
time.append(value[0])
ohlc.append(value[1:])
return (time, ohlc)
def separate(tohlc, timeframe):
time = None
data = []
open = None
high = None
low = None
close = None
for t, o, h, l, c in tohlc:
tt = timeframe.roundTime(t)
if time is None:
time = tt
open = o
high = h
low = l
close = c
else:
if tt > time:
data.append([time, open, high, low, close])
time = tt
open = o
high = h
low = l
close = c
else:
if h > high:
high = h
if l < low:
low = l
close = c
data.append([time, open, high, low, close])
time = []
open = []
high = []
low = []
close = []
dif = []
for t, o, h, l, c in data:
if t is None or c is None or o is None:
continue
time.append(t)
open.append(o)
high.append(h)
low.append(l)
close.append(c)
dif.append(c - o)
dic = {}
dic[TIME] = time
dic[OPEN] = open
dic[HIGH] = high
dic[LOW] = low
dic[CLOSE] = close
dic[DIF] = dif
return dic
def SMA(array, window):
ind = SMAIndicator(close=pd.Series(array), window=window)
close = ind.sma_indicator()
return close.values.tolist()
def drawGraph(market, title, timeframe, tohlc, display_time_range, trades):
fig = plt.figure(figsize=(14, 6))
    gs = gridspec.GridSpec(6, 1)  # rows, columns
ax1 = plt.subplot(gs[0:5, 0])
ax2 = plt.subplot(gs[5:6, 0])
time = tohlc[TIME]
open = tohlc[OPEN]
high = tohlc[HIGH]
low = tohlc[LOW]
close = tohlc[CLOSE]
ohlc = []
for o, h, l, c in zip(open, high, low, close):
ohlc.append([o, h, l, c])
if display_time_range is None:
t_range = (time[0], time[-1])
else:
tt = time[0]
t0 = datetime(tt.year, tt.month, tt.day, display_time_range[0][0], display_time_range[0][1])
t1 = datetime(tt.year, tt.month, tt.day, display_time_range[1][0], display_time_range[1][1])
if t1 < t0:
t1 += timedelta(days=1)
t_range = [t0, t1]
graph1 = CandlePlot(fig, ax1, title)
graph1.xlimit(t_range)
graph1.drawCandle(time, ohlc, timerange=t_range)
windows =[3, 5, 7, 10, 20]
colors = ['salmon', 'red', 'gray', 'yellowgreen', 'green']
mas = {}
for w, color in zip(windows, colors):
ma = SMA(close, w)
mas['MA' + str(w)] = ma
if color is not None:
graph1.drawLine(time, ma, color=color, label='MA' + str(w))
for trade in trades:
[status, topen, open_price, tclose, close_price, profit1, tpeak, peak_price, profit2] = trade
if status > 0:
color = 'green'
marker = '^'
else:
color = 'red'
marker = 'v'
graph1.drawMarker(topen, open_price, marker, color, markersize=10)
graph1.drawMarker(tpeak, peak_price, 'x', color, markersize=10)
flag = []
for o, c in zip(open, close):
try:
v = (c - o) / o * 100.0
except:
v = 0
flag.append(v)
graph2 = BandPlot(fig, ax2, 'Flag')
graph2.xlimit(t_range)
graph2.drawLine(time, flag, timerange=t_range)
#ax1.legend()
#ax2.legend()
def priceRange(ohlc):
p = []
for o, h, l, c in ohlc:
p.append(c)
return (max(p), min(p))
def drawByDay(market, tf, ticks, year, month):
for day in range(1, 32):
count, data = timeFilter(ticks, year, month, day, [[21, 30], [4, 30]])
#ticks = fromDb(market, year, month, day, [22, 23], None)
if count > 100:
(tohlc, spreads) = ticks2TOHLC(tf, data)
time, ohlc = tohlc2ohlc(tohlc)
price_range = priceRange(ohlc)
title = market + ' (' + tf.symbol + ') ' + str(year) + '-' + str(month) + '-' + str(day) + ' (' + weekday(year, month, day)[:3]+ ') Price Range: ' + str(price_range[0] - price_range[1])
drawGraph(title, tf, time, ohlc)
def showByDay(market, year, month, day, timeframe, display_time_range, tohlc_dic):
title = market + ' ' + str(year) + '-' + str(month) + '-' + str(day) + ' (' + weekday(year, month, day)[:3] + ') '
count, dic = timeFilter(tohlc_dic, year, month, day, [[8, 0], [7, 0]])
if count > 100:
drawGraph(market, title, timeframe, dic, display_time_range)
def importClickSec(dir_path, market, year, month):
ym = str(year) + str(month).zfill(2)
dir_path = dir_path + '/' + market + '/' + market + '_' + ym + '/' + ym + '/'
tohlc = []
for day in range(1, 32):
file_name = market + '_' + ym + str(day).zfill(2) + '.csv'
path = os.path.join(dir_path, file_name)
try:
df0 = pd.read_csv(path, encoding='sjis')
df = df0[['日時', '始値(BID)', '高値(BID)', '安値(BID)', '終値(BID)']]
values= df.values.tolist()
except:
continue
for value in values:
[tint, o, h, l, c] = value
t = str(tint)
time = datetime(int(t[0:4]), int(t[4:6]), int(t[6:8]), int(t[8:10]), int(t[10:12]))
tohlc.append([time, float(value[1]), float(value[2]), float(value[3]), float(value[4])])
return tohlc
def dayRange(year, month):
if month == 12:
tend = datetime(year + 1, 1, 1)
else:
tend = datetime(year, month + 1, 1)
days = []
for day in range(1, 32):
try:
t = datetime(year, month, day)
except:
break
if t < tend:
days.append(day)
else:
break
return days
def show(market, timeframe, year, month):
display_time_range = [[8, 0], [7, 0]]
tohlc = importClickSec("../click_sec_data", market, year, month)
tohlc_dic = separate(tohlc, timeframe)
for day in dayRange(year, month):
showByDay(market, year, month, day, timeframe, display_time_range, tohlc_dic)
def filterUpper(array, threshold):
out = []
for v in array:
if v > threshold:
out.append(v)
return out
def filterLower(array, threshold):
out = []
for v in array:
if v < threshold:
out.append(v)
return out
def rangeHistogram(market, timeframe):
for year in [2019, 2020, 2021]:
ranges = []
for month in range(1, 13):
tohlc = importClickSec("../click_sec_data", market, year, month)
tohlc_dic = separate(tohlc, timeframe)
for (o, c) in zip(tohlc_dic[OPEN], tohlc_dic[CLOSE]):
ranges.append((c - o) / o * 100)
higher = filterUpper(ranges, 0.3)
lower = filterLower(ranges, -0.3)
vmin = np.min(ranges)
vmax = np.max(ranges)
fig, axes= plt.subplots(1,2)
axes[1].hist(higher, bins=10)
#axes[1].set_title(market + "-" + timeframe.symbol + " " + str(year) + " Min: " + str(vmin) + " Max: " + str(vmax))
axes[0].hist(lower, bins=10)
axes[0].set_title(market + "-" + timeframe.symbol + " " + str(year) + " Min: " + str(vmin)[:7] + " Max: " + str(vmax)[:7])
fig.show()
def judge(rng, threshold):
lower = np.min(threshold)
upper = np.max(threshold)
if rng > lower and rng < upper:
return True
else:
return False
def longBarLowerStrategy(market, timeframe, threshold, after_minutes):
out = []
for year in [2019, 2020, 2021]:
longBars = []
tend = None
values = []
for month in range(1, 13):
tohlc = importClickSec("../click_sec_data", market, year, month)
tohlc_dic = separate(tohlc, timeframe)
for (t, o, c) in zip(tohlc_dic[TIME], tohlc_dic[OPEN], tohlc_dic[CLOSE]):
rng = (c - o) / o * 100
if tend is None:
if judge(rng, threshold):
tend = t + timedelta(minutes= after_minutes)
values.append([t, o, c])
else:
if t > tend:
longBars.append(values)
tend = None
values = []
else:
values.append([t, o, c])
print('*** Year ', year)
for longBar in longBars:
closes = []
begin = None
for i, (t, o, c) in enumerate(longBar):
if i == 0:
begin = [t, c]
print('Begin: t: ', t, 'Range: ', (c - o) / o * 100, 'Close:', c)
else:
closes.append(c)
if i == len(longBar)-1:
end = [t, c]
print(' -> End: t: ', t, 'close: ', c, 'Profit:', c - begin[1])
if len(closes) > 0:
minv = np.min(closes)
maxv = np.max(closes)
is_short = threshold[0] < 0
if is_short:
profit = minv - begin[1]
else:
profit = maxv - begin[1]
print (' -> Min: ', minv, maxv, 'profit: ', profit)
out.append([year, begin[0], begin[1], end[0], end[1], end[1] - begin[1], minv, maxv, profit])
print ('')
data = []
s = 0.0
for d in out:
s += d[-1]
dd = d
dd.append(s)
data.append(dd)
df = pd.DataFrame(data=data, columns=['Year', 'tbegin', 'close', 'tend', 'close', 'profit', 'close-min', 'close-max', 'profit', 'profit-sum'])
#df.to_excel(market + 'LongBarStrategy.xlsx', index=False)
return s
def test1():
market = "JP225"
timeframe = Timeframe("M10")
year = 2021
month = 10
show(market, timeframe, year, month)
def analyze():
market = "SPOT_GOLD" #"CHNA50" #"US30" #WTI" #SPOT_GOLD" #"JP225"
tf = "M5"
timeframe = Timeframe(tf)
#rangeHistogram(market, timeframe)
r1 = [[0.3, 0.6], [0.4, 0.6], [0.5, 0.7], [-0.3, -0.6], [-0.4, -0.6], [-0.5, -0.7]]
r2 = [[0.5, 1.0], [1.0, 2.0], [3.0, 5.0], [-0.5, -1.0], [-1.0, -2.0], [-3.0, -5.0]]
out = []
for threshold in r1:
for delay in [15, 30, 60, 90, 120]:
profit = longBarLowerStrategy(market, timeframe, threshold, delay)
out.append([market, tf, threshold, delay, profit])
df = pd.DataFrame(data= out, columns=['Market', 'Timeframe', 'threshold', 'delay', 'profit'])
df.to_excel('./docs/' + market + '-LongBarStragegySummary.xlsx', index=False)
def trade():
market = "SPOT_GOLD" #"JP225" #"CHNA50" #"US30" #WTI" #SPOT_GOLD" #"JP225"
tf = "M15"
timeframe = Timeframe(tf)
data_time_range = [[8, 0], [7, 0]]
params = [ {THRESHOLD: [0.25, 0.5], DELAY: 60, LOSSCUT: 0.5},
{THRESHOLD: [-0.25, -0.5], DELAY: 60, LOSSCUT: 0.5}]
out = []
for year in [2019]: #, 2020, 2021]:
for month in range(1, 13):
tohlc = importClickSec("../click_sec_data", market, year, month)
tohlc_dic = separate(tohlc, timeframe)
for day in dayRange(year, month):
date_str = str(year) + '-' + str(month) + '-' + str(day)
count, dic = timeFilter(tohlc_dic, year, month, day, data_time_range)
if count > 50:
sim = TradeSimulation.Simulation(dic, timeframe, data_time_range)
trades = []
for param in params:
profit, trade = sim.runLongBar(param)
if len(trade) > 0:
out += trade
trades += trade
title = market + " " + date_str
drawGraph(market, title, timeframe, dic, data_time_range, trades)
#df = pd.DataFrame(data=out, columns=['Status', 'OpenTime', 'OpenPrice', 'CloseTime', 'ClosePrice', 'Profit1', 'MaxTime', 'MaxPrice', 'profit2'])
#df.to_excel('./docs/' + market + '-tradeSummary.xlsx', index=False)
if __name__ == '__main__':
trade()
avg_line_length: 31.926882 | max_line_length: 201 | alphanum_fraction: 0.505725

hexsha: 2fa2e1721327d71cd3cc1aefb5183d97a913170a | size: 1,986 | ext: py | lang: Python
max_stars:  path=backend/api/admin/resource/claritylist.py | repo=blodstone/harness | head=048a15d5f971b5b87cf6e80db98c7f9dd7a2cdbc | licenses=["MIT"] | count=null | events null – null
max_issues: path=backend/api/admin/resource/claritylist.py | repo=blodstone/harness | head=048a15d5f971b5b87cf6e80db98c7f9dd7a2cdbc | licenses=["MIT"] | count=null | events null – null
max_forks:  path=backend/api/admin/resource/claritylist.py | repo=blodstone/harness | head=048a15d5f971b5b87cf6e80db98c7f9dd7a2cdbc | licenses=["MIT"] | count=null | events null – null
content:
import urllib.parse
from flask import request
from flask_restful import Resource, abort
from backend.model.project import ClarityProject, ClarityProjectSchema
from backend.model.project_status import ProjectStatus
from backend.model import ma
class ProgressObject(object):
def __init__(self, total, current):
self.total = total
self.current = current
class ProgressSchema(ma.Schema):
class Meta:
fields = ('total', 'current')
class ClarityUIObject(object):
def __init__(self, no, project, link, progress):
self.no = no
self.project = project
self.link = link
self.progress = progress
class ClarityUISchema(ma.Schema):
class Meta:
# Fields to expose
fields = ('no', 'project', 'link', 'progress')
project = ma.Nested(ClarityProjectSchema)
progress = ma.Nested(ProgressSchema)
class ClarityListResource(Resource):
def __get_project_progress(self, project):
total = ProjectStatus.query.filter_by(clarity_proj_id=project.id).count()
current = ProjectStatus.query.filter_by(
clarity_proj_id=project.id, is_finished=True).count()
return ProgressObject(total=total, current=current)
def get(self):
projects = ClarityProject.query.all()
if len(projects) == 0:
return abort(404, message=f"No clarity projects.")
else:
schema = ClarityUISchema(many=True)
clarity_ui_objs = []
for no, project in enumerate(projects):
progress = self.__get_project_progress(project)
link = urllib.parse.urljoin(
request.host_url,
f"#/Clarity/{project.id}"
)
clarity_ui_obj = ClarityUIObject(
no=no+1, project=project, link=link, progress=progress)
clarity_ui_objs.append(clarity_ui_obj)
return schema.dump(clarity_ui_objs)
avg_line_length: 32.557377 | max_line_length: 81 | alphanum_fraction: 0.638469

hexsha: 9a8a72241d1d1a945bb43b6e7167fa7a16148f29 | size: 17,540 | ext: py | lang: Python
max_stars:  path=psat_server_web/atlas/atlas/dbviews.py | repo=genghisken/psat-server-web | head=63c697f1d08dc2173328d3018aadf8efc1e8e14f | licenses=["MIT"] | count=null | events null – null
max_issues: path=psat_server_web/atlas/atlas/dbviews.py | repo=genghisken/psat-server-web | head=63c697f1d08dc2173328d3018aadf8efc1e8e14f | licenses=["MIT"] | count=11 | events 2021-03-11T17:28:29.000Z – 2022-01-05T11:35:14.000Z
max_forks:  path=psat_server_web/atlas/atlas/dbviews.py | repo=genghisken/psat-server-web | head=63c697f1d08dc2173328d3018aadf8efc1e8e14f | licenses=["MIT"] | count=null | events null – null
content:
from django.db import models
#from atlas.utils import *
from math import log10
from atlas.models import TcsCrossMatchesExternal, TcsDetectionLists, TcsImages
from gkutils.commonutils import ra_to_sex, dec_to_sex, getFlagDefs, getDateFractionMJD, FLAGS, transform, J2000toGalactic
class CustomLCPoints(models.Model):
"""CustomLCPoints.
"""
id = models.IntegerField(primary_key=True, db_column='id')
mag = models.FloatField(db_column='mag')
magerr = models.FloatField(db_column='magerr')
mjd = models.FloatField(db_column='mjd')
exptime = models.FloatField(db_column='exptime')
filter = models.CharField(max_length=90, db_column='filter')
zp = models.FloatField(db_column='zp')
expname = models.CharField(max_length=90, db_column='expname')
ra = models.FloatField(db_column='ra')
dec = models.FloatField(db_column='dec')
atlas_metadata_id = models.IntegerField(db_column='atlas_metadata_id')
class CustomLCBlanks(models.Model):
"""CustomLCBlanks.
"""
id = models.IntegerField(primary_key=True, db_column='id')
mjd = models.FloatField(db_column='mjd')
exptime = models.FloatField(db_column='exptime')
filter = models.CharField(max_length=90, db_column='filter')
zp = models.FloatField(db_column='zp')
expname = models.CharField(max_length=90, db_column='expname')
filename = models.CharField(max_length=765, db_column='filename')
input = models.CharField(max_length=765, db_column='input')
reference = models.CharField(max_length=765, db_column='reference')
pointing = models.CharField(max_length=765, db_column='pointing')
class CustomFollowupLCData(models.Model):
"""CustomFollowupLCData.
"""
id = models.IntegerField(primary_key=True, db_column='id')
transient_object_id = models.IntegerField(db_column='transient_object_id')
mjd = models.FloatField(db_column='mjd')
mag = models.FloatField(db_column='mag')
magerr = models.FloatField(db_column='magerr')
filter = models.CharField(max_length=90, db_column='filter')
telescope_name = models.CharField(max_length=90, db_column='telescope_name')
telescope_description = models.CharField(max_length=180, db_column='telescope_description')
instrument_name = models.CharField(max_length=90, db_column='instrument_name')
instrument_description = models.CharField(max_length=180, db_column='instrument_description')
class FollowupRaw(models.Model):
"""FollowupRaw.
"""
rank = models.IntegerField(db_column='rank')
id = models.BigIntegerField(primary_key=True, db_column='id')
atlas_designation = models.CharField(max_length=60, db_column='atlas_designation')
other_designation = models.CharField(max_length=60, db_column='other_designation')
ra = models.FloatField(db_column='ra')
dec = models.FloatField(db_column='dec')
object_classification = models.IntegerField(db_column='object_classification')
followup_flag_date = models.DateField(db_column='followup_flag_date')
observation_status = models.CharField(max_length=40, db_column='observation_status')
current_trend = models.CharField(max_length=40, db_column='current_trend')
earliest_mjd = models.FloatField(db_column='earliest_mjd')
earliest_mag = models.FloatField(db_column='earliest_mag')
earliest_filter = models.CharField(max_length=80, db_column='earliest_filter')
latest_mjd = models.FloatField(db_column='latest_mjd')
latest_mag = models.FloatField(db_column='latest_mag')
latest_filter = models.CharField(max_length=80, db_column='latest_filter')
catalogue = models.CharField(max_length=60, db_column='catalogue')
catalogue_object_id = models.CharField(max_length=30, db_column='catalogue_object_id')
separation = models.FloatField(db_column='separation')
realbogus_factor = models.FloatField(db_column='realbogus_factor')
rb_pix = models.FloatField(db_column='zooniverse_score')
date_modified = models.FloatField(db_column='date_modified')
external_crossmatches = models.CharField(max_length=1500, db_column='external_crossmatches')
discovery_target = models.CharField(max_length=90, db_column='discovery_target')
@property
def ra_sex(self):
"""ra_sex.
"""
ra_in_sex = ra_to_sex (self.ra)
return ra_in_sex
@property
def dec_sex(self):
"""dec_sex.
"""
dec_in_sex = dec_to_sex (self.dec)
return dec_in_sex
@property
def decode_flag_bits(self):
"""decode_flag_bits.
"""
object_definition = getFlagDefs(self.object_classification, FLAGS, delimiter = ' ')
return object_definition
@property
def externalXMs(self):
"""This is a Hack to get all the external crossmatches per row. Note that
it only gets executed 100 times (for each page) so it is not disastrous
for database performance.
"""
xms = TcsCrossMatchesExternal.objects.filter(transient_object_id__id=self.id).order_by('external_designation')
#names = xms.values_list("external_designation", flat=True)
#nameColumn = ", ".join(names)
return xms
class WebViewAbstractFollowup(models.Model):
"""WebViewAbstractFollowup.
"""
rank = models.IntegerField(db_column='rank')
id = models.BigIntegerField(primary_key=True, db_column='id')
atlas_designation = models.CharField(max_length=60, db_column='atlas_designation')
other_designation = models.CharField(max_length=60, db_column='other_designation')
ra = models.FloatField(db_column='ra')
dec = models.FloatField(db_column='dec')
ra_avg = models.FloatField(db_column='ra_avg')
dec_avg = models.FloatField(db_column='dec_avg')
object_classification = models.IntegerField(db_column='object_classification')
sherlockClassification = models.CharField(max_length=120, db_column='sherlockClassification')
followup_flag_date = models.DateField(db_column='followup_flag_date')
observation_status = models.CharField(max_length=40, db_column='observation_status')
current_trend = models.CharField(max_length=40, db_column='current_trend')
earliest_mjd = models.FloatField(db_column='earliest_mjd')
earliest_mag = models.FloatField(db_column='earliest_mag')
earliest_filter = models.CharField(max_length=80, db_column='earliest_filter')
latest_mjd = models.FloatField(db_column='latest_mjd')
latest_mag = models.FloatField(db_column='latest_mag')
latest_filter = models.CharField(max_length=80, db_column='latest_filter')
catalogue = models.CharField(max_length=60, db_column='catalogue')
catalogue_object_id = models.CharField(max_length=30, db_column='catalogue_object_id')
separation = models.FloatField(db_column='separation')
realbogus_factor = models.FloatField(db_column='realbogus_factor')
rb_pix = models.FloatField(db_column='zooniverse_score')
date_modified = models.FloatField(db_column='date_modified')
external_crossmatches = models.CharField(max_length=1500, db_column='external_crossmatches')
discovery_target = models.CharField(max_length=90, db_column='discovery_target')
rms = models.FloatField(db_column='rms')
detection_list_id = models.ForeignKey(TcsDetectionLists, null=True, to_field='id', db_column='detection_list_id', on_delete=models.CASCADE)
images_id = models.ForeignKey(TcsImages, to_field='id', db_column='images_id', on_delete=models.CASCADE)
class Meta:
"""Meta.
"""
abstract = True
@property
def ra_sex(self):
"""ra_sex.
"""
if self.ra_avg:
ra_in_sex = ra_to_sex (self.ra_avg)
else:
ra_in_sex = ra_to_sex (self.ra)
return ra_in_sex
@property
def dec_sex(self):
"""dec_sex.
"""
if self.dec_avg:
dec_in_sex = dec_to_sex (self.dec_avg)
else:
dec_in_sex = dec_to_sex (self.dec)
return dec_in_sex
@property
def decode_flag_bits(self):
"""decode_flag_bits.
"""
object_definition = getFlagDefs(self.object_classification, FLAGS, delimiter = ' ')
return object_definition
@property
def externalXMs(self):
"""This is a Hack to get all the external crossmatches per row. Note that
it only gets executed 100 times (for each page) so it is not disastrous
for database performance.
"""
xms = TcsCrossMatchesExternal.objects.filter(transient_object_id__id=self.id).order_by('external_designation')
#names = xms.values_list("external_designation", flat=True)
#nameColumn = ", ".join(names)
#sys.stderr.write('\nOBJECT (%s) = %s\n' % (self.ID, nameColumn))
return xms
@property
def galactic(self):
"""galactic.
"""
if self.ra_avg and self.dec_avg:
g = transform([self.ra_avg, self.dec_avg], J2000toGalactic)
else:
g = transform([self.ra, self.dec], J2000toGalactic)
return g
class WebViewFollowupTransients(WebViewAbstractFollowup):
"""WebViewFollowupTransients.
"""
class Meta(WebViewAbstractFollowup.Meta):
"""Meta.
"""
db_table = 'atlas_v_followupall'
class WebViewFollowupTransients0(WebViewAbstractFollowup):
"""WebViewFollowupTransients0.
"""
class Meta(WebViewAbstractFollowup.Meta):
"""Meta.
"""
db_table = 'atlas_v_followup0'
class WebViewFollowupTransients1(WebViewAbstractFollowup):
"""WebViewFollowupTransients1.
"""
class Meta(WebViewAbstractFollowup.Meta):
"""Meta.
"""
db_table = 'atlas_v_followup1'
class WebViewFollowupTransients2(WebViewAbstractFollowup):
"""WebViewFollowupTransients2.
"""
class Meta(WebViewAbstractFollowup.Meta):
"""Meta.
"""
db_table = 'atlas_v_followup2'
class WebViewFollowupTransients3(WebViewAbstractFollowup):
"""WebViewFollowupTransients3.
"""
class Meta(WebViewAbstractFollowup.Meta):
"""Meta.
"""
db_table = 'atlas_v_followup3'
class WebViewFollowupTransients4(WebViewAbstractFollowup):
"""WebViewFollowupTransients4.
"""
class Meta(WebViewAbstractFollowup.Meta):
"""Meta.
"""
db_table = 'atlas_v_followup4'
class WebViewFollowupTransients5(WebViewAbstractFollowup):
"""WebViewFollowupTransients5.
"""
class Meta(WebViewAbstractFollowup.Meta):
"""Meta.
"""
db_table = 'atlas_v_followup5'
class WebViewFollowupTransients6(WebViewAbstractFollowup):
"""WebViewFollowupTransients6.
"""
class Meta(WebViewAbstractFollowup.Meta):
"""Meta.
"""
db_table = 'atlas_v_followup6'
class WebViewFollowupTransients7(WebViewAbstractFollowup):
"""WebViewFollowupTransients7.
"""
class Meta(WebViewAbstractFollowup.Meta):
"""Meta.
"""
db_table = 'atlas_v_followup7'
class WebViewFollowupTransients8(WebViewAbstractFollowup):
"""WebViewFollowupTransients8.
"""
class Meta(WebViewAbstractFollowup.Meta):
"""Meta.
"""
db_table = 'atlas_v_followup8'
class WebViewFollowupTransientsGeneric(WebViewAbstractFollowup):
"""WebViewFollowupTransientsGeneric.
"""
class Meta(WebViewAbstractFollowup.Meta):
"""Meta.
"""
db_table = 'atlas_v_followup'
class WebViewFollowupTransientsGenericGW(WebViewAbstractFollowup):
"""WebViewFollowupTransientsGenericGW.
"""
class Meta(WebViewAbstractFollowup.Meta):
"""Meta.
"""
db_table = 'atlas_v_followup_gw'
# 2011-04-14 KWS New model for User Defined Lists.
# 2013-10-23 KWS Added confidence_factor.
# 2014-02-20 KWS Added external_crossmatches and discovery_target.
class WebViewAbstractUserDefined(models.Model):
"""WebViewAbstractUserDefined.
"""
rank = models.IntegerField(db_column='rank')
id = models.BigIntegerField(db_column='id', primary_key=True)
atlas_designation = models.CharField(max_length=60, db_column='atlas_designation')
other_designation = models.CharField(max_length=60, db_column='other_designation')
local_comments = models.CharField(max_length=768, db_column='local_comments')
ra = models.FloatField(db_column='ra')
dec = models.FloatField(db_column='dec')
ra_avg = models.FloatField(db_column='ra_avg')
dec_avg = models.FloatField(db_column='dec_avg')
object_classification = models.IntegerField(db_column='object_classification')
sherlockClassification = models.CharField(max_length=120, db_column='sherlockClassification')
followup_flag_date = models.DateField(db_column='followup_flag_date')
observation_status = models.CharField(max_length=40, db_column='observation_status')
current_trend = models.CharField(max_length=40, db_column='current_trend')
earliest_mjd = models.FloatField(db_column='earliest_mjd')
earliest_mag = models.FloatField(db_column='earliest_mag')
earliest_filter = models.CharField(max_length=80, db_column='earliest_filter')
latest_mjd = models.FloatField(db_column='latest_mjd')
latest_mag = models.FloatField(db_column='latest_mag')
latest_filter = models.CharField(max_length=80, db_column='latest_filter')
catalogue = models.CharField(max_length=60, db_column='catalogue')
catalogue_object_id = models.CharField(max_length=30, db_column='catalogue_object_id')
separation = models.FloatField(db_column='separation')
# Extra columns for the user defined object list table
object_group_id = models.IntegerField(db_column='object_group_id')
detection_list_id = models.ForeignKey(TcsDetectionLists, null=True, to_field='id', db_column='detection_list_id', on_delete=models.CASCADE)
realbogus_factor = models.FloatField(db_column='realbogus_factor')
rb_pix = models.FloatField(db_column='zooniverse_score')
date_modified = models.DateTimeField(db_column='date_modified')
external_crossmatches = models.CharField(max_length=1500, db_column='external_crossmatches')
discovery_target = models.CharField(max_length=90, db_column='discovery_target')
rms = models.FloatField(db_column='rms')
class Meta:
"""Meta.
"""
abstract = True
@property
def ra_sex(self):
"""ra_sex.
"""
        ra_in_sex = ra_to_sex(self.ra)
return ra_in_sex
@property
def dec_sex(self):
"""dec_sex.
"""
        dec_in_sex = dec_to_sex(self.dec)
return dec_in_sex
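    # Illustrative only: ra_to_sex / dec_to_sex are helpers defined elsewhere; for example
    # ra = 187.70593 degrees would come back as roughly '12:30:49' (hours:minutes:seconds),
    # the exact formatting being up to those helpers.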
# 2013-12-20 KWS Added option to grab space-delimited RA and DEC (e.g. for producing catalogues)
@property
def ra_sex_spaces(self):
"""ra_sex_spaces.
"""
        ra_in_sex = ra_to_sex(self.ra, delimiter=' ')
return ra_in_sex
@property
def dec_sex_spaces(self):
"""dec_sex_spaces.
"""
        dec_in_sex = dec_to_sex(self.dec, delimiter=' ')
return dec_in_sex
@property
def decode_flag_bits(self):
"""decode_flag_bits.
"""
        object_definition = getFlagDefs(self.object_classification, FLAGS, delimiter=' ')
return object_definition
@property
def externalXMs(self):
"""This is a Hack to get all the external crossmatches per row. Note that
it only gets executed 100 times (for each page) so it is not disastrous
for database performance.
"""
xms = TcsCrossMatchesExternal.objects.filter(transient_object_id__id=self.id).order_by('external_designation')
#names = xms.values_list("external_designation", flat=True)
#nameColumn = ", ".join(names)
#sys.stderr.write('\nOBJECT (%s) = %s\n' % (self.ID, nameColumn))
return xms
# 2015-03-13 KWS New methods to retrieve the earliest and latest dates
# in date format.
@property
def getEarliestDate(self):
"""getEarliestDate.
"""
dateFraction = getDateFractionMJD(self.earliest_mjd)
return dateFraction
@property
def getLatestDate(self):
"""getLatestDate.
"""
dateFraction = getDateFractionMJD(self.latest_mjd)
return dateFraction
class WebViewUserDefined(WebViewAbstractUserDefined):
"""WebViewUserDefined.
"""
class Meta(WebViewAbstractUserDefined.Meta):
"""Meta.
"""
db_table = 'atlas_v_followup_userdefined'
# 2018-08-01 KWS Added custom view for getting ATLAS recurrences for PESSTO.
# This is much faster than the ORM query and doesn't take up
# all the memory!
class AtlasVRecurrencesddcPessto(models.Model):
"""AtlasVRecurrencesddcPessto.
"""
rank = models.IntegerField(db_column='rank')
id = models.BigIntegerField(db_column='id', primary_key=True)
name = models.CharField(max_length=90, db_column='name')
tns_name = models.CharField(max_length=90, db_column='tns_name')
ra = models.FloatField(db_column='ra')
dec = models.FloatField(db_column='dec')
expname = models.CharField(max_length=90, db_column='expname')
mag = models.FloatField(db_column='mag')
dm = models.FloatField(db_column='dm')
filter = models.CharField(max_length=90, db_column='filter')
mjd = models.FloatField(db_column='mjd')
class Meta:
"""Meta.
"""
managed = False
db_table = 'atlas_v_recurrencesddc_pessto'
| 35.869121
| 143
| 0.701767
|
f0c8b35da052c54719297dabd73fad3e8d3007b7
| 1,902
|
py
|
Python
|
tests/materials_test.py
|
hammy4815/EMpy
|
64f2c356fbfb783277f69c2a69e020272b91df5d
|
[
"MIT"
] | null | null | null |
tests/materials_test.py
|
hammy4815/EMpy
|
64f2c356fbfb783277f69c2a69e020272b91df5d
|
[
"MIT"
] | null | null | null |
tests/materials_test.py
|
hammy4815/EMpy
|
64f2c356fbfb783277f69c2a69e020272b91df5d
|
[
"MIT"
] | null | null | null |
# pylint: disable=no-self-use
from unittest import TestCase
from numpy import array
from numpy.testing import assert_almost_equal, assert_raises
import EMpy_gpu.materials as mat
class RefractiveIndexTest(TestCase):
def test_all_nones(self):
with assert_raises(ValueError):
mat.RefractiveIndex()
def test_const(self):
test_rix = 1.50
a = mat.RefractiveIndex(n0_const=test_rix)
self.assertEqual(a.get_rix(1.0)[0], array([test_rix]))
def test_poly(self):
test_poly = [1, 1] # n(wl) = 1 * wl + 1
test_rix = 2.0 # n(1) = 1 * 1 + 1 = 2
a = mat.RefractiveIndex(n0_poly=test_poly)
assert_almost_equal(a.get_rix(1.0)[0], array([test_rix]))
def test_smcoeffs(self):
test_poly = [1] * 6
''' 6-coeffs:
n(wls) = 1. +
B1 * wls ** 2 / (wls ** 2 - C1) +
B2 * wls ** 2 / (wls ** 2 - C2) +
B3 * wls ** 2 / (wls ** 2 - C3)
'''
test_rix = 1.0536712127723509e-08
a = mat.RefractiveIndex(n0_smcoeffs=test_poly)
assert_almost_equal(a.get_rix(0.5)[0], array([test_rix]))
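        # Sanity check of the expected value: with all six coefficients equal to 1 and
        # wls = 0.5 (so wls**2 = 0.25), each term is 0.25 / (0.25 - 1) = -1/3, the three
        # terms sum to -1 and the formula above gives ~0; the tiny value of test_rix is
        # just floating-point round-off.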
def test_func(self):
test_rix = 1.50
def test_func_const(x):
# returns a const
return 0.0 * x + test_rix
a = mat.RefractiveIndex(n0_func=test_func_const)
assert_almost_equal(a.get_rix([1.0, 1.5]), array([1.5, 1.5]))
def test_func_var(x):
            # returns a value that varies linearly with x
return 1.0 * x + test_rix
b = mat.RefractiveIndex(n0_func=test_func_var)
assert_almost_equal(b.get_rix([1.0, 1.5]), array([2.5, 3.0]))
def test_known(self):
test_rix = 1.50
test_wl = 1.0
n0_known = {
test_wl: test_rix
}
a = mat.RefractiveIndex(n0_known=n0_known)
self.assertEqual(a.get_rix(test_wl)[0], array([test_rix]))
| 29.71875
| 69
| 0.572555
|
cbe82cebba1975db85cc22bf6e450519f57c004b
| 3,914
|
py
|
Python
|
message_families/audit_proof/aca_py_audit_proof/manager.py
|
ianco/aca-py-audit-plugin
|
2280e79bca0cc785865b08b8e8cbcb5829d57e8e
|
[
"Apache-2.0"
] | null | null | null |
message_families/audit_proof/aca_py_audit_proof/manager.py
|
ianco/aca-py-audit-plugin
|
2280e79bca0cc785865b08b8e8cbcb5829d57e8e
|
[
"Apache-2.0"
] | null | null | null |
message_families/audit_proof/aca_py_audit_proof/manager.py
|
ianco/aca-py-audit-plugin
|
2280e79bca0cc785865b08b8e8cbcb5829d57e8e
|
[
"Apache-2.0"
] | null | null | null |
"""Classes to support proof audit."""
import logging
from aries_cloudagent.core.error import BaseError
from aries_cloudagent.revocation.models.revocation_registry import RevocationRegistry
from aries_cloudagent.core.profile import Profile
from aries_cloudagent.ledger.base import BaseLedger
from aries_cloudagent.indy.verifier import IndyVerifier
class AuditProofManagerError(BaseError):
"""Audit Proof error."""
class AuditProofManager:
"""Class for providing proof audits."""
def __init__(self, profile: Profile):
"""
Initialize an AuditProofManager.
Args:
profile: The profile for this proof audit
"""
self._profile = profile
self._logger = logging.getLogger(__name__)
@property
def profile(self) -> Profile:
"""
Accessor for the current injection profile.
Returns:
            The injection profile for this manager
"""
return self._profile
async def verify_presentation(
self, presentation_request: dict, presentation: dict
):
"""
Verify a presentation.
Args:
presentation_request: indy presentation request
presentation: indy presentation to verify
Returns:
verification status
"""
indy_proof_request = presentation_request
indy_proof = presentation
schema_ids = []
credential_definition_ids = []
schemas = {}
credential_definitions = {}
rev_reg_defs = {}
rev_reg_entries = {}
identifiers = indy_proof["identifiers"]
ledger = self._profile.inject(BaseLedger)
async with ledger:
for identifier in identifiers:
schema_ids.append(identifier["schema_id"])
credential_definition_ids.append(identifier["cred_def_id"])
# Build schemas for anoncreds
if identifier["schema_id"] not in schemas:
schemas[identifier["schema_id"]] = await ledger.get_schema(
identifier["schema_id"]
)
if identifier["cred_def_id"] not in credential_definitions:
credential_definitions[
identifier["cred_def_id"]
] = await ledger.get_credential_definition(
identifier["cred_def_id"]
)
if identifier.get("rev_reg_id"):
if identifier["rev_reg_id"] not in rev_reg_defs:
rev_reg_defs[
identifier["rev_reg_id"]
] = await ledger.get_revoc_reg_def(identifier["rev_reg_id"])
if identifier.get("timestamp"):
rev_reg_entries.setdefault(identifier["rev_reg_id"], {})
if (
identifier["timestamp"]
not in rev_reg_entries[identifier["rev_reg_id"]]
):
(
found_rev_reg_entry,
_found_timestamp,
) = await ledger.get_revoc_reg_entry(
identifier["rev_reg_id"], identifier["timestamp"]
)
rev_reg_entries[identifier["rev_reg_id"]][
identifier["timestamp"]
] = found_rev_reg_entry
verifier = self._profile.inject(IndyVerifier)
verified = await verifier.verify_presentation(
indy_proof_request,
indy_proof,
schemas,
credential_definitions,
rev_reg_defs,
rev_reg_entries,
)
return verified
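# Minimal usage sketch (illustrative; `profile` is an ACA-Py Profile and `pres_req` /
# `pres` are indy proof-request / proof dicts obtained elsewhere):
#
#   manager = AuditProofManager(profile)
#   verified = await manager.verify_presentation(pres_req, pres)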
| 32.616667
| 85
| 0.553654
|
9ec37c0a2d5ef0fddf647b6b8005092800c7fc4a
| 1,279
|
py
|
Python
|
cifar10_test.py
|
z-a-f/PyTorch_CIFAR10
|
3233d8f86c546de438d0d117ff6698c85a0ca841
|
[
"MIT"
] | 1
|
2020-07-12T23:18:09.000Z
|
2020-07-12T23:18:09.000Z
|
cifar10_test.py
|
z-a-f/PyTorch_CIFAR10
|
3233d8f86c546de438d0d117ff6698c85a0ca841
|
[
"MIT"
] | null | null | null |
cifar10_test.py
|
z-a-f/PyTorch_CIFAR10
|
3233d8f86c546de438d0d117ff6698c85a0ca841
|
[
"MIT"
] | null | null | null |
import os, shutil
import torch
from argparse import ArgumentParser
from pytorch_lightning import Trainer
from cifar10_module import CIFAR10_Module
def main(hparams):
    # If training on only one GPU, we must call set_device; otherwise PyTorch always stores the model on GPU 0 first
if type(hparams.gpus) == str:
if len(hparams.gpus) == 2: # GPU number and comma e.g. '0,' or '1,'
torch.cuda.set_device(int(hparams.gpus[0]))
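            # e.g. --gpus '1,' makes GPU 1 the default CUDA device before the model is built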
    kThisPath = os.path.dirname(os.path.abspath(__file__))  # base directory for the temporary test output
    save_to_path = os.path.join(kThisPath, 'test_temp')
os.makedirs(save_to_path, exist_ok=True)
model = CIFAR10_Module(hparams, pretrained=True)
trainer = Trainer(gpus=hparams.gpus, default_save_path=save_to_path)
trainer.test(model)
shutil.rmtree(save_to_path)
if __name__ == '__main__':
parser = ArgumentParser()
parser.add_argument('--classifier', type=str, default='resnet18')
parser.add_argument('--data_dir', type=str, default='/data/huy/cifar10/')
parser.add_argument('--gpus', default='0,')
parser.add_argument('--max_epochs', type=int, default=100)
parser.add_argument('--batch_size', type=int, default=256)
parser.add_argument('--learning_rate', type=float, default=1e-2)
parser.add_argument('--weight_decay', type=float, default=1e-2)
args = parser.parse_args()
main(args)
| 39.96875
| 97
| 0.713057
|
43d64d215f00c56a0967f1ff60395d9b841a365f
| 3,920
|
py
|
Python
|
pyiArduinoI2Cexpander/examples/FindDevices.py
|
tremaru/pyiArduinoI2Cexpander
|
01154070bb1696346897113930379b52680b5669
|
[
"MIT"
] | null | null | null |
pyiArduinoI2Cexpander/examples/FindDevices.py
|
tremaru/pyiArduinoI2Cexpander
|
01154070bb1696346897113930379b52680b5669
|
[
"MIT"
] | 1
|
2021-09-16T14:05:26.000Z
|
2021-09-16T14:05:26.000Z
|
pyiArduinoI2Cexpander/examples/FindDevices.py
|
tremaru/pyiArduinoI2Cexpander
|
01154070bb1696346897113930379b52680b5669
|
[
"MIT"
] | 1
|
2021-03-15T08:47:38.000Z
|
2021-03-15T08:47:38.000Z
|
# This example looks for modules on the bus and prints information about them.
#
import smbus                                        # Import the smbus (i2c) bus module
choices = {                                         # Build the table of known devices
    0x01: "a button",                               # Value 1 means the device is a button.
    0x02: "an LED",                                 # Value 2 means an RGB LED.
    0x03: "a potentiometer",                        # Value 3 means a potentiometer.
    0x04: "a buzzer",                               # Value 4 means a sound emitter (buzzer).
    0x05: "a DHT sensor",                           # Value 5 means a humidity and temperature sensor.
    0x06: "a light sensor",                         # Value 6 means a light sensor.
    0x07: "a pin expander",                         # Value 7 means a pin (I/O) expander.
    0x08: "an LED matrix",                          # Value 8 means an LED matrix.
    0x09: "an encoder",                             # Value 9 means an encoder.
    0x0A: "a 2-channel relay",                      # Value A means a 2-channel electromechanical relay.
    0x0B: "a 4-channel relay",                      # Value B means a 4-channel solid-state relay.
    0x0C: "a 4 N-channel power switch",             # Value C means a power switch with 4 N-channels.
    0x0D: "a 4 P-channel power switch",             # Value D means a power switch with 4 P-channels.
    0x0E: "a 4 N-channel power switch",             # Value E means a power switch with 4 N-channels and current measurement.
    0x0F: "a 4 P-channel power switch"              # Value F means a power switch with 4 P-channels and current measurement.
}                                                   #
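# Illustrative example of how a reply is interpreted below (the byte values are hypothetical):
#   k = bus.read_i2c_block_data(0x09, 0x04)  ->  [0x09, 2, 0x12, 0xC3, ...]
#   k[0] = 0x09 -> "an encoder", k[1] = 2 -> firmware version 2,
#   k[2] >> 1 = 0x09 -> matches the device address, k[3] = 0xC3 -> Metro module identifier.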
try:                                                #
    bus = smbus.SMBus(1)                            # Try to open the i2c bus device file
except FileNotFoundError:                           # If the file is not found
    print("The i2c bus is not enabled. Run "        #
          "`raspi-config` and enable the bus.")     # Print the message to stdout
except:                                             # Any other error
    print("unknown error")                          # Print the message to stdout
else:                                               # If no error occurred
    for i in range(7, 127):                         # Walk over every valid address on the I2C bus ...
        try:                                        # Try to address the device
            bus.write_quick(i)                      # at the current address on the bus
        except OSError:                             # If no device answers at the current address
            continue                                # Go back to the start of the for loop with the next address
        except:                                     # Any other error
            print("unknown error")                  # Print the message to stdout
        else:                                       # A device was found
            print("Device at address "
                  "%#.2X" % i, end='')              # Print the address to stdout
            k = bus.read_i2c_block_data(i, 0x04)    # Read the registers, starting at register 4
            if k[2]>>1 == i and (k[3]==0xc3
                or k[3]==0x3c):                     # If k[2] (shifted right by one) matches the device address, and k[3] holds the identifier 0xC3 (Metro module) or 0x3C (Flash module), then ...
                print(" is ", end='')               # Print the text to stdout
                model = choices.get(k[0],
                    "an unknown module")            # Look the module model (k[0]) up in the table; fall back to the default string if nothing matches
                print("%s with firmware version %d."
                      % (model, k[1]))              # Print the model and firmware version to stdout
            else:                                   # Print a message if the device is not recognised,
                print(" not recognised")            # or if two devices share the same address on the bus
| 80
| 213
| 0.539286
|
880310388e6c829f07bf86dfcb9346436ea5303f
| 602
|
py
|
Python
|
Sketchbots/sw/labqueue/lask/server/xmp.py
|
rlugojr/ChromeWebLab
|
60f964b3f283c15704b7a04b7bb50cb15791e2e4
|
[
"Apache-2.0"
] | 306
|
2015-01-09T14:03:44.000Z
|
2017-09-16T13:03:35.000Z
|
Sketchbots/sw/labqueue/lask/server/xmp.py
|
rlugojr/ChromeWebLab
|
60f964b3f283c15704b7a04b7bb50cb15791e2e4
|
[
"Apache-2.0"
] | 90
|
2019-03-26T05:36:00.000Z
|
2021-07-28T05:30:16.000Z
|
Sketchbots/sw/labqueue/lask/server/xmp.py
|
rlugojr/ChromeWebLab
|
60f964b3f283c15704b7a04b7bb50cb15791e2e4
|
[
"Apache-2.0"
] | 119
|
2015-01-26T15:04:33.000Z
|
2017-09-13T09:30:53.000Z
|
# Copyright 2013 Google Inc
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| 43
| 77
| 0.727575
|
9c0e21d216903c0dc6ad9111c1cc62f2b7ef12f6
| 198
|
py
|
Python
|
ems_wheniwork/views/api/exceptions.py
|
uw-asa/django-ems-wheniwork
|
6a19f4a860fda369c68b8cb48518d61904d98683
|
[
"Apache-2.0"
] | null | null | null |
ems_wheniwork/views/api/exceptions.py
|
uw-asa/django-ems-wheniwork
|
6a19f4a860fda369c68b8cb48518d61904d98683
|
[
"Apache-2.0"
] | null | null | null |
ems_wheniwork/views/api/exceptions.py
|
uw-asa/django-ems-wheniwork
|
6a19f4a860fda369c68b8cb48518d61904d98683
|
[
"Apache-2.0"
] | null | null | null |
"""
Custom exceptions used by EMS Scheduler.
"""
class MissingParamException(Exception):
pass
class InvalidParamException(Exception):
pass
class NotFoundException(Exception):
pass
| 12.375
| 40
| 0.742424
|
9529d7ae50c90c42cc3229363fd381e4e4993d31
| 6,428
|
py
|
Python
|
cfg_exporter/helper.py
|
dong50252409/cfg_exporter
|
8e6fdfef00dbb228eb34ffdf4c1c23a74f1d7e18
|
[
"MIT"
] | 3
|
2021-12-10T10:26:15.000Z
|
2022-02-11T08:34:31.000Z
|
cfg_exporter/helper.py
|
dong50252409/cfg_exporter
|
8e6fdfef00dbb228eb34ffdf4c1c23a74f1d7e18
|
[
"MIT"
] | null | null | null |
cfg_exporter/helper.py
|
dong50252409/cfg_exporter
|
8e6fdfef00dbb228eb34ffdf4c1c23a74f1d7e18
|
[
"MIT"
] | 1
|
2022-02-11T09:16:00.000Z
|
2022-02-11T09:16:00.000Z
|
import os
from argparse import RawTextHelpFormatter, ArgumentTypeError, ArgumentParser
from cfg_exporter.const import ExportType, ExtensionType, TEMPLATE_EXTENSION
def valid_source(source):
if os.path.exists(source):
return source
else:
        raise ArgumentTypeError(_('the source path does not exist `{source}`').format(source=source))
def valid_export(export):
if export in ExportType.__members__:
return ExportType[export]
else:
        raise ArgumentTypeError(_('the export file type does not exist {export}').format(export=export))
def valid_table(row_num):
try:
row_num = int(row_num)
assert row_num > 0
return row_num
except (ValueError, AssertionError):
raise ArgumentTypeError(_('{row_num} is not a valid line number').format(row_num=row_num))
def valid_lang_template(lang_template):
if os.path.exists(lang_template):
return lang_template
else:
        raise ArgumentTypeError(_('the lang template path does not exist `{lang_template}`')
                                .format(lang_template=lang_template))
parser = ArgumentParser(description=_('Configuration table export toolset'), formatter_class=RawTextHelpFormatter)
base_group = parser.add_argument_group(title=_('Base options'))
base_group.add_argument('--clear_dir', default=False, action='store_true',
help=_('clear the output directory.'))
base_group.add_argument('--exclude_files', default=[], nargs="*",
help=_('specify a list of file names not to load.'))
base_group.add_argument('-e', '--export_type', type=valid_export,
metavar=f'[{",".join(ExportType.__members__.keys())}]',
help=_('specify the configuration table export type.'))
base_group.add_argument('--file_prefix', default='',
help=_('specify the prefix of the output filename.'))
base_group.add_argument('--force', default=False, action='store_true',
help=_('force all configuration tables to be generated.'))
base_group.add_argument('-o', '--output', type=str, default="",
help=_('specify the configuration table output path.'))
base_group.add_argument('-r', '--recursive', default=False, action='store_true',
help=_('recursively search the source path.'))
base_group.add_argument('--verification', default=False, action='store_true',
help=_('verify only the correctness of the configuration table.'))
base_group.add_argument('-s', '--source', type=valid_source, required=True,
help=_(
'specify the configuration table source path.\nsupported file types [{extensions}]').format(
extensions=",".join(ExtensionType.__members__.keys())))
base_group.add_argument('--template_path',
help=_('specify the extension template path.\n'
'the template name consists of the table name, export type, '
'and {template_extension} extension\n'
'e.g:\n'
'`item.erl.{template_extension}` `item.hrl.{template_extension}` '
'`item.lua.{template_extension}`\n'
'loads the template based on the specified export type\n'
'e.g:\n'
'`--export_type erl` templates ending with `.erl.{template_extension}` '
'and `.hrl.{template_extension}` will be loaded\n'
'`--export_type lua` templates ending with `.lua.{template_extension}` will be loaded'
).format(template_extension=TEMPLATE_EXTENSION))
base_group.add_argument('--verbose', default=False, action='store_true',
help=_('show the details.'))
table_group = parser.add_argument_group(title=_('Table options'))
table_group.add_argument('--data_row', type=valid_table, required=True,
help=_('specify the start line number of the configuration table body data.'))
table_group.add_argument('--desc_row', type=valid_table,
help=_('specify the line number of the configuration table column description.'))
table_group.add_argument('--field_row', type=valid_table, required=True,
help=_('specify the line number of the configuration table field name.'))
table_group.add_argument('--rule_row', type=valid_table,
help=_('specify the line number of the configuration table check rule.'))
table_group.add_argument('--type_row', type=valid_table, required=True,
help=_('specify the line number of the configuration table data type.'))
lang_group = parser.add_argument_group(title=_('Multi languages options'))
lang_group.add_argument('--lang_template', type=valid_lang_template,
help=_('specify the language template path.'))
lang_group.add_argument('--export_lang_template',
help=_('output language template.'))
csv_group = parser.add_argument_group(title=_('CSV options'))
csv_group.add_argument('--csv_encoding', default='utf-8-sig', metavar='ENCODING',
help=_('specify the default encoding format for CSV files.\nDEFAULT UTF-8'))
erl_group = parser.add_argument_group(title=_('Erlang options'))
erl_group.add_argument('--erl_dir', default='',
help=_('specify output directory for where to generate the .erl.'))
erl_group.add_argument('--hrl_dir', default='',
help=_('specify output directory for where to generate the .hrl.'))
lua_group = parser.add_argument_group(title=_('LUA options'))
lua_group.add_argument('--lua_optimize', default=False, action='store_true',
help=_('remove default value fields ( store them into metatable ) '
'and reuse all table values to save memory'))
py_group = parser.add_argument_group(title=_('PYTHON options'))
py_group.add_argument('--py_optimize', default=False, action='store_true',
help=_('remove default value fields and reuse all table values to save memory'))
args = parser.parse_args()
__all__ = ('args',)
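# Illustrative invocation (hypothetical paths and row numbers; the actual entry point of the
# package is not shown in this module, and the export type must be a member of ExportType):
#   <entry-point> -s ./cfg -o ./out -e erl --field_row 1 --type_row 2 --data_row 4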
| 45.588652
| 120
| 0.632078
|
12ca0ab0e9e224776635b85b9cccd063e505e9d0
| 10,281
|
py
|
Python
|
image_widget.py
|
zhangkaisong/YoloAll
|
7bb8c0ce11b6c033f0e6fd15621fdb5d5a2f1787
|
[
"Apache-2.0"
] | 1
|
2021-12-05T07:52:50.000Z
|
2021-12-05T07:52:50.000Z
|
image_widget.py
|
zhangkaisong/YoloAll
|
7bb8c0ce11b6c033f0e6fd15621fdb5d5a2f1787
|
[
"Apache-2.0"
] | null | null | null |
image_widget.py
|
zhangkaisong/YoloAll
|
7bb8c0ce11b6c033f0e6fd15621fdb5d5a2f1787
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
import sys
import os
if hasattr(sys, 'frozen'):
os.environ['PATH'] = sys._MEIPASS + ";" + os.environ['PATH']
from PyQt5.QtWidgets import *
from PyQt5 import QtCore, QtGui, uic
from PyQt5.QtCore import *
from PyQt5.QtGui import *
import copy
import xml.etree.cElementTree as et
import os
import cv2
import math
from PIL import Image
# UI layout definition file
cUi, cBase = uic.loadUiType("image_widget.ui")
# Main widget
class ImageWidget(QWidget, cUi):
def __init__(self): #, main_widget=None):
        # Set up the UI
QMainWindow.__init__(self)
cUi.__init__(self)
self.setupUi(self)
self.comboBoxCamera.addItem('0')
self.comboBoxCamera.addItem('1')
self.comboBoxCamera.addItem('2')
self.timer = QTimer()
self.video_cap = None
self.camera_cap = None
self.qpixmap = None
self.qpixmap_bg = None
self.cAlg = None
self.infer = None
self.class_map = None
self.alg_time = None
self.color_list = [QColor(255,0,0),
QColor(0,255,0),
QColor(0,0,255),
QColor(0,255,255),
QColor(255,0,255),
QColor(8,46,84),
QColor(199,97,20),
QColor(255,227,132),
QColor(255,255,0),
QColor(128,138,135)]
self.change_background('normal')
@pyqtSlot()
def on_btnPhoto_clicked(self):
print('on_btnPhoto_clicked')
        img_path = QFileDialog.getOpenFileName(self, "Select image", "./", "Images (*.jpg);;Images (*.png)")
img_path = img_path[0]
if img_path != '':
self.slot_photo_frame(img_path)
@pyqtSlot()
def on_btnVideo_clicked(self):
print('on_btnVideo_clicked')
        video_path = QFileDialog.getOpenFileName(self, "Select video", "./", "Videos (*.mp4);;Videos (*.3gp)")
video_path = video_path[0]
if video_path != '':
self.video_cap = cv2.VideoCapture(video_path)
self.timer.start()
self.timer.setInterval(int(1000 / float(30.0)))
self.timer.timeout.connect(self.slot_video_frame)
@pyqtSlot()
def on_btnCamera_clicked(self):
print('on_btnCamera_clicked')
if self.camera_cap is None:
self.camera_cap = cv2.VideoCapture(int(0))
self.timer.start()
self.timer.setInterval(int(1000 / float(30.0)))
self.timer.timeout.connect(self.slot_camera_frame)
else:
self.camera_cap.release()
self.camera_cap = None
self.timer.stop()
@pyqtSlot()
def on_btnStop_clicked(self):
self.stop_all()
def slot_photo_frame(self, photo_path):
img = cv2.imread(photo_path)
self.cAlg.add_img(img)
def slot_camera_frame(self):
if self.camera_cap is not None:
# get a frame
ret, img = self.camera_cap.read()
if ret is False:
self.stop_all()
return
self.cAlg.add_img(img)
def slot_video_frame(self):
if self.video_cap is not None:
ret, img = self.video_cap.read()
if ret is False:
self.stop_all()
return
self.cAlg.add_img(img)
def slot_alg_result(self, img, result, time_spend):
if result['type'] == 'info':
print(result['result'])
return
elif result['type'] == 'img':
img = result['result']
self.infer = None
else:
self.infer = result
height, width, bytesPerComponent = img.shape
bytesPerLine = bytesPerComponent * width
cv2.cvtColor(img, cv2.COLOR_BGR2RGB, img)
image = QImage(img.data, width, height, bytesPerLine, QImage.Format_RGB888)
self.qpixmap = QPixmap.fromImage(image)
self.alg_time = time_spend
self.update()
def stop_all(self):
self.timer.stop()
self.qpixmap = None
if self.camera_cap is not None:
self.camera_cap.release()
self.camera_cap = None
if self.video_cap is not None:
self.video_cap.release()
self.video_cap = None
def set_alg_handle(self, handle):
self.cAlg = handle
def change_background(self, bg_name):
self.qpixmap_bg = None
bg_path = './icons/bg_' + bg_name + '.png'
self.qpixmap_bg = QPixmap(bg_path)
self.update()
def draw_image(self, painter):
pen = QPen()
font = QFont("Microsoft YaHei")
if self.qpixmap is not None:
painter.drawPixmap(QtCore.QRect(0, 0, self.width(), self.height()), self.qpixmap)
pen.setColor(self.getColor(0))
painter.setPen(pen)
pointsize = font.pointSize()
font.setPixelSize(pointsize*180/72)
painter.setFont(font)
painter.drawText(10, 30, 'time=%.4f seconds fps=%.4f' % (self.alg_time, 1 / self.alg_time))
else:
if self.qpixmap_bg is not None:
painter.drawPixmap(QtCore.QRect(0, 0, self.width(), self.height()), self.qpixmap_bg)
pen.setColor(QColor(0, 0, 0))
pen.setWidth(4)
painter.setPen(pen)
painter.drawRect(0, 0, self.width(), self.height())
def draw_infer(self, painter):
if self.infer is None:
return
# class
if self.infer['type'] == 'classify':
self.draw_infer_class(painter)
# det
elif self.infer['type'] == 'detection':
self.draw_infer_det(painter)
# kp
elif self.infer['type'] == 'keypoint':
self.draw_infer_kp(painter)
else:
print('unknown info type')
assert(False)
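    # Expected shapes of self.infer, inferred from the draw_infer_* methods below
    # (an assumption based on this file alone):
    #   classify:  {'type': 'classify',  'result': (top1_index, name, score)}
    #   detection: {'type': 'detection', 'result': ...}   # drawing not implemented here
    #   keypoint:  {'type': 'keypoint',  'result': [kps, ...]}, kps an array of at least
    #              17 (y, x) keypoints in COCO order.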
def draw_infer_class(self, painter):
font = QFont("宋体")
pointsize = font.pointSize()
font.setPixelSize(pointsize*90/72)
painter.setFont(font)
pen = QPen()
pen.setWidth(1)
pen.setColor(QColor(0, 255, 0))
painter.setPen(pen)
top1 = self.infer['result'][0]
name = self.infer['result'][1]
score = self.infer['result'][2]
painter.drawText(10, 50, 'top1=%s(%.4f)' % (name, score))
def draw_infer_det(self, painter):
pass
def draw_infer_kp(self, painter):
x_scale = self.width() / self.qpixmap.width()
y_scale = self.height() / self.qpixmap.height()
for kps in self.infer['result']:
kps[:,0] = kps[:,0] * y_scale
kps[:,1] = kps[:,1] * x_scale
nose = kps[0]
left_shoulder = kps[5]
right_shoulder = kps[6]
center_shoulder = (left_shoulder + right_shoulder) / 2
right_shoulder = kps[6]
left_elbow = kps[7]
right_elbow = kps[8]
left_wrist = kps[9]
right_wrist = kps[10]
left_hip = kps[11]
right_hip = kps[12]
center_hip = (left_hip + right_hip) / 2
left_knee = kps[13]
right_knee = kps[14]
left_ankle = kps[15]
right_ankle = kps[16]
pen = QPen()
pen.setColor(self.getColor(0))
pen.setWidth(3)
painter.setPen(pen)
painter.drawLine(nose[1], nose[0], center_shoulder[1], center_shoulder[0])
pen.setColor(self.getColor(1))
pen.setWidth(3)
painter.setPen(pen)
painter.drawLine(center_shoulder[1], center_shoulder[0], center_hip[1], center_hip[0])
pen.setColor(self.getColor(2))
pen.setWidth(3)
painter.setPen(pen)
painter.drawLine(left_shoulder[1], left_shoulder[0], right_shoulder[1], right_shoulder[0])
pen.setColor(self.getColor(3))
pen.setWidth(3)
painter.setPen(pen)
painter.drawLine(left_shoulder[1], left_shoulder[0], left_elbow[1], left_elbow[0])
pen.setColor(self.getColor(4))
pen.setWidth(3)
painter.setPen(pen)
painter.drawLine(left_elbow[1], left_elbow[0], left_wrist[1], left_wrist[0])
pen.setColor(self.getColor(5))
pen.setWidth(3)
painter.setPen(pen)
painter.drawLine(right_shoulder[1], right_shoulder[0], right_elbow[1], right_elbow[0])
pen.setColor(self.getColor(6))
pen.setWidth(3)
painter.setPen(pen)
painter.drawLine(right_elbow[1], right_elbow[0], right_wrist[1], right_wrist[0])
pen.setColor(self.getColor(7))
pen.setWidth(3)
painter.setPen(pen)
painter.drawLine(left_hip[1], left_hip[0], right_hip[1], right_hip[0])
pen.setColor(self.getColor(8))
pen.setWidth(3)
painter.setPen(pen)
painter.drawLine(left_hip[1], left_hip[0], left_knee[1], left_knee[0])
pen.setColor(self.getColor(9))
pen.setWidth(3)
painter.setPen(pen)
painter.drawLine(left_knee[1], left_knee[0], left_ankle[1], left_ankle[0])
pen.setColor(self.getColor(10))
pen.setWidth(3)
painter.setPen(pen)
painter.drawLine(right_hip[1], right_hip[0], right_knee[1], right_knee[0])
pen.setColor(self.getColor(11))
pen.setWidth(3)
painter.setPen(pen)
painter.drawLine(right_knee[1], right_knee[0], right_ankle[1], right_ankle[0])
def paintEvent(self, event):
painter = QtGui.QPainter(self)
self.draw_image(painter)
self.draw_infer(painter)
def getColor(self, index):
return self.color_list[index % len(self.color_list)]
if __name__ == "__main__":
cApp = QApplication(sys.argv)
cImageWidget = ImageWidget()
cImageWidget.show()
sys.exit(cApp.exec_())
| 35.329897
| 104
| 0.548779
|
06ba49e42e54f7ee4c7e52a132b320670ecd9e9f
| 10,348
|
py
|
Python
|
rubikenv/rubikgym.py
|
Forbu/rubikenv
|
2dcf156b4dd03541c176c430675d8ddd5653825f
|
[
"Apache-2.0"
] | null | null | null |
rubikenv/rubikgym.py
|
Forbu/rubikenv
|
2dcf156b4dd03541c176c430675d8ddd5653825f
|
[
"Apache-2.0"
] | null | null | null |
rubikenv/rubikgym.py
|
Forbu/rubikenv
|
2dcf156b4dd03541c176c430675d8ddd5653825f
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Fri Dec 21 15:51:06 2018
@author: adrien
"""
import numpy as np
import pandas as pd
import gym
from gym import spaces
class rubik_cube:
"""
This is a rubik's cube class simulator
Attributes :
- state : a 9x6 array of value between 1 and 6
"""
number_of_face = 6
number_of_element_in_sideface = 3
def __init__(self, init_state=None):
"""
Initialisation of the rubik
"""
# init state initialisation
if init_state is not None:
init_state = init_state.astype(int)
self.state = init_state
self.init_state = np.copy(init_state)
else:
# perfect cube
init_state = np.zeros((self.number_of_element_in_sideface,
self.number_of_element_in_sideface,self.number_of_face))
for i in range(self.number_of_face):
init_state[:,:,i] = i
init_state = init_state.astype(int)
self.state = init_state
self.init_state = np.copy(init_state)
# other ?
def setInit(self):
# perfect cube
init_state = np.zeros((self.number_of_element_in_sideface,
self.number_of_element_in_sideface,self.number_of_face))
for i in range(self.number_of_face):
init_state[:, :, i] = i
init_state = init_state.astype(int)
self.state = init_state
self.init_state = np.copy(init_state)
    def move(self, index_move):
        """
        By convention there are exactly 12 possible moves;
        the moves are indexed between 0 and 11.
        The index is in
        [X Y Z] with
        X : 0 1 2 3
        Y : 4 5 6 7
        Z : 8 9 10 11
        The first two numbers of each group are the moves corresponding to a certain
        position on the face.
        The last two numbers of each group are the inverses of those moves (the first two).
        X, Y and Z correspond to the rotation axis.
        """
        value_side = index_move % 2  # between 0 and 1: the position of the rotated layer on the face
        value_side_rotation = index_move // 4  # between 0 and 2: the index of the rotation axis
        value_side_inverse = (index_move % 4)//2  # between 0 and 1: whether this is the inverse move
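        # Worked example of the decomposition above (illustrative only):
        #   index_move = 7  ->  value_side = 7 % 2 = 1, value_side_rotation = 7 // 4 = 1,
        #                       value_side_inverse = (7 % 4) // 2 = 1,
        #   i.e. the inverse of the second move around the Y axis.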
#print("value_side= ", str(value_side))
#print("value_side_rotation= ", str(value_side_rotation))
#print("value_side_inverse= ", str(value_side_inverse))
if value_side == 1:
value_side = 2 # correction to simplify the calculation
if value_side_rotation == 0:
# inversion value
if value_side_inverse == 0:
self.state[:,value_side,[5,1,4,3]] = self.state[:,value_side,[1,4,3,5]]
if value_side == 0:
self.state[:,:,0] = np.rot90(self.state[:,:,0],k=3)
else:
self.state[:,:,2] = np.rot90(self.state[:,:,2])
else:
self.state[:,value_side,[5,1,4,3]] = self.state[:,value_side,[3,5,1,4]]
if value_side == 0:
self.state[:,:,0] = np.rot90(self.state[:,:,0])
else:
self.state[:,:,2] = np.rot90(self.state[:,:,2], k=3)
elif value_side_rotation == 1:
# inversion value
if value_side_inverse == 0:
self.state[:,value_side,[5,0,4,2]] = self.state[:,value_side,[0,4,2,5]]
if value_side == 0:
self.state[:,:,1] = np.rot90(self.state[:,:,1],k=3)
else:
self.state[:,:,3] = np.rot90(self.state[:,:,3])
else:
self.state[:,value_side,[5,0,4,2]] = self.state[:,value_side,[2,5,0,4]]
if value_side == 0:
self.state[:,:,1] = np.rot90(self.state[:,:,1])
else:
self.state[:,:,3] = np.rot90(self.state[:,:,3], k=3)
# TODO again
elif value_side_rotation == 2:
tmp_state = np.copy(self.state)
# inversion value
if value_side_inverse == 0:
# TODO more complex
self.state[:,value_side,0] = tmp_state[value_side,:,1][::-1]
self.state[2-value_side,:,3] = tmp_state[:,value_side,0]
self.state[:,2-value_side,2] = tmp_state[2-value_side,:,3][::-1]
self.state[value_side,:,1] = tmp_state[:,2-value_side,2]
if value_side == 0:
self.state[:,:,4] = np.rot90(self.state[:,:,4],k=3)
else:
self.state[:,:,5] = np.rot90(self.state[:,:,5])
else:
self.state[value_side,:,1] = tmp_state[:,value_side,0][::-1]
self.state[:,value_side,0] = tmp_state[2-value_side,:,3]
self.state[2-value_side,:,3] = tmp_state[:,2-value_side,2][::-1]
self.state[:,2-value_side,2] = tmp_state[value_side,:,1]
if value_side == 0:
self.state[:,:,4] = np.rot90(self.state[:,:,4])
else:
self.state[:,:,5] = np.rot90(self.state[:,:,5], k=3)
    def move_cube(self, index_move, state):
        """
        By convention there are exactly 12 possible moves;
        the moves are indexed between 0 and 11.
        The index is in
        [X Y Z] with
        X : 0 1 2 3
        Y : 4 5 6 7
        Z : 8 9 10 11
        The first two numbers of each group are the moves corresponding to a certain
        position on the face.
        The last two numbers of each group are the inverses of those moves (the first two).
        X, Y and Z correspond to the rotation axis.
        """
        value_side = index_move % 2  # between 0 and 1: the position of the rotated layer on the face
        value_side_rotation = index_move // 4  # between 0 and 2: the index of the rotation axis
        value_side_inverse = (index_move % 4)//2  # between 0 and 1: whether this is the inverse move
#print("value_side= ", str(value_side))
#print("value_side_rotation= ", str(value_side_rotation))
#print("value_side_inverse= ", str(value_side_inverse))
if value_side == 1:
value_side = 2 # correction to simplify the calculation
if value_side_rotation == 0:
# inversion value
if value_side_inverse == 0:
state[:,value_side,[5,1,4,3]] = state[:,value_side,[1,4,3,5]]
if value_side == 0:
state[:,:,0] = np.rot90(state[:,:,0],k=3)
else:
state[:,:,2] = np.rot90(state[:,:,2])
else:
state[:,value_side,[5,1,4,3]] = state[:,value_side,[3,5,1,4]]
if value_side == 0:
state[:,:,0] = np.rot90(state[:,:,0])
else:
state[:,:,2] = np.rot90(state[:,:,2], k=3)
elif value_side_rotation == 1:
# inversion value
if value_side_inverse == 0:
state[:,value_side,[5,0,4,2]] = state[:,value_side,[0,4,2,5]]
if value_side == 0:
state[:,:,1] = np.rot90(state[:,:,1],k=3)
else:
state[:,:,3] = np.rot90(state[:,:,3])
else:
state[:,value_side,[5,0,4,2]] = state[:,value_side,[2,5,0,4]]
if value_side == 0:
state[:,:,1] = np.rot90(state[:,:,1])
else:
state[:,:,3] = np.rot90(state[:,:,3], k=3)
# TODO again
elif value_side_rotation == 2:
tmp_state = np.copy(state)
# inversion value
if value_side_inverse == 0:
# TODO more complex
state[:,value_side,0] = tmp_state[value_side,:,1][::-1]
state[2-value_side,:,3] = tmp_state[:,value_side,0]
state[:,2-value_side,2] = tmp_state[2-value_side,:,3][::-1]
state[value_side,:,1] = tmp_state[:,2-value_side,2]
if value_side == 0:
state[:,:,4] = np.rot90(state[:,:,4],k=3)
else:
state[:,:,5] = np.rot90(state[:,:,5])
else:
state[value_side,:,1] = tmp_state[:,value_side,0][::-1]
state[:,value_side,0] = tmp_state[2-value_side,:,3]
state[2-value_side,:,3] = tmp_state[:,2-value_side,2][::-1]
state[:,2-value_side,2] = tmp_state[value_side,:,1]
if value_side == 0:
state[:,:,4] = np.rot90(state[:,:,4])
else:
state[:,:,5] = np.rot90(state[:,:,5], k=3)
return state
class rubikgym(gym.Env, rubik_cube):
reward_range = (-1, 1)
spec = None
# Set these in ALL subclasses
action_space = spaces.Discrete(12)
# flatten discret space
observation_space = spaces.MultiDiscrete([6 for _ in range(3*3*6)])
def __init__(self):
gym.Env.__init__(self)
rubik_cube.__init__(self)
    def step(self, action):
        self.move(action)
        # reward and done are placeholders; return idiomatic gym types
        return self.state, 0.0, False, {}
    def reset(self):
        self.setInit()
        return self.state
def render(self, mode='human'):
print(self.state)
def set_init(self, state):
self.init_state = state
self.state = state
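if __name__ == "__main__":
    # Minimal smoke test (illustrative; it relies only on the move convention
    # documented in rubik_cube.move above).
    cube = rubik_cube()
    cube.move(0)                                   # one quarter turn
    cube.move(2)                                   # its inverse within the same axis group
    assert (cube.state == cube.init_state).all(), "move 2 should undo move 0"
    print("rubik_cube move/inverse round-trip OK")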
| 35.560137
| 92
| 0.469463
|
71ccaeec04285c9d8689dbfa9fe0eb1fe7d9922c
| 4,762
|
py
|
Python
|
once_upon_a_repository/utilities.py
|
allegroai/trains-blogs
|
19a1bc971f9eb5f31a0d08dd553ea0d1f5437e9d
|
[
"Apache-2.0"
] | 12
|
2020-02-06T10:02:36.000Z
|
2022-01-15T19:38:16.000Z
|
once_upon_a_repository/utilities.py
|
allegroai/trains-blogs
|
19a1bc971f9eb5f31a0d08dd553ea0d1f5437e9d
|
[
"Apache-2.0"
] | 1
|
2021-09-08T01:54:46.000Z
|
2021-09-08T01:54:46.000Z
|
once_upon_a_repository/utilities.py
|
allegroai/trains-blogs
|
19a1bc971f9eb5f31a0d08dd553ea0d1f5437e9d
|
[
"Apache-2.0"
] | 3
|
2020-02-10T00:19:44.000Z
|
2020-05-31T13:51:06.000Z
|
import json
import attr
import cv2
import numpy as np
import torch
import torchvision
from torchvision.models.detection.faster_rcnn import FastRCNNPredictor
from torchvision.models.detection.mask_rcnn import MaskRCNNPredictor
from torchvision.transforms import functional as F
from torchvision_references import utils
def safe_collate(batch):
batch = list(filter(lambda x: x is not None, batch))
return utils.collate_fn(batch)
def draw_boxes(im, boxes, labels, color=(150, 0, 0)):
for box, draw_label in zip(boxes, labels):
draw_box = box.astype('int')
im = cv2.rectangle(im, tuple(draw_box[:2]), tuple(draw_box[2:]), color, 2)
im = cv2.putText(im, str(draw_label), (draw_box[0], max(0, draw_box[1]-5)),
cv2.FONT_HERSHEY_COMPLEX, 0.8, color, 2)
return im
def draw_debug_images(images, targets, predictions=None, score_thr=0.3):
debug_images = []
for image, target in zip(images, targets):
img = draw_boxes(np.array(F.to_pil_image(image.cpu())),
[box.cpu().numpy() for box in target['boxes']],
[label.item() for label in target['labels']])
if predictions:
img = draw_boxes(img,
[box.cpu().numpy() for box, score in
zip(predictions[target['image_id'].item()]['boxes'],
predictions[target['image_id'].item()]['scores']) if score >= score_thr],
[label.item() for label, score in
zip(predictions[target['image_id'].item()]['labels'],
predictions[target['image_id'].item()]['scores']) if score >= score_thr],
color=(0, 150, 0))
debug_images.append(img)
return debug_images
def draw_mask(target):
masks = [channel*label for channel, label in zip(target['masks'].cpu().numpy(), target['labels'].cpu().numpy())]
masks_sum = sum(masks)
masks_out = masks_sum + 25*(masks_sum > 0)
return (masks_out*int(255/masks_out.max())).astype('uint8')
def get_model_instance_segmentation(num_classes, hidden_layer):
# load an instance segmentation model pre-trained on COCO
model = torchvision.models.detection.maskrcnn_resnet50_fpn(pretrained=True)
# get number of input features for the classifier
in_features = model.roi_heads.box_predictor.cls_score.in_features
# replace the pre-trained head with a new one
model.roi_heads.box_predictor = FastRCNNPredictor(in_features, num_classes)
# now get the number of input features for the mask classifier
in_features_mask = model.roi_heads.mask_predictor.conv5_mask.in_channels
# and replace the mask predictor with a new one
model.roi_heads.mask_predictor = MaskRCNNPredictor(in_features_mask, hidden_layer, num_classes)
return model
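# Example call (hypothetical class count and hidden-layer size):
#   model = get_model_instance_segmentation(num_classes=2, hidden_layer=256)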
def get_iou_types(model):
model_without_ddp = model
if isinstance(model, torch.nn.parallel.DistributedDataParallel):
model_without_ddp = model.module
iou_types = ["bbox"]
if isinstance(model_without_ddp, torchvision.models.detection.MaskRCNN):
iou_types.append("segm")
if isinstance(model_without_ddp, torchvision.models.detection.KeypointRCNN):
iou_types.append("keypoints")
return iou_types
@attr.s(auto_attribs=True)
class CocoLikeAnnotations():
def __attrs_post_init__(self):
self.coco_like_json: dict = {'images': [], 'annotations': []}
self._ann_id: int = 0
def update_images(self, file_name, height, width, id):
self.coco_like_json['images'].append({'file_name': file_name,
'height': height, 'width': width,
'id': id})
def update_annotations(self, box, label_id, image_id, is_crowd=0):
segmentation, bbox, area = self.extract_coco_info(box)
self.coco_like_json['annotations'].append({'segmentation': segmentation, 'bbox': bbox, 'area': area,
'category_id': int(label_id), 'id': self._ann_id, 'iscrowd': is_crowd,
'image_id': image_id})
self._ann_id += 1
@staticmethod
def extract_coco_info(box):
segmentation = list(map(int, [box[0], box[1], box[0], box[3], box[2], box[3], box[2], box[1]]))
bbox = list(map(int, np.append(box[:2], (box[2:] - box[:2]))))
area = int(bbox[2] * bbox[3])
return segmentation, bbox, area
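    # Worked example for extract_coco_info (illustrative values, box as a numpy array):
    #   box = [10, 20, 50, 80]  (x1, y1, x2, y2)  ->
    #     segmentation = [10, 20, 10, 80, 50, 80, 50, 20]
    #     bbox         = [10, 20, 40, 60]   # (x, y, width, height)
    #     area         = 2400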
def dump_to_json(self, path_to_json='/tmp/inference_results/inference_results.json'):
with open(path_to_json, "w") as write_file:
json.dump(self.coco_like_json, write_file)
| 42.517857
| 116
| 0.634817
|
dfd7c5f562d84ddb6236ae21284c21d516b30548
| 7,751
|
py
|
Python
|
configs/trainval/daotad_eccv2022/7.b.ii.py
|
klauscc/vedatad
|
c59f5ddc8fb227ef08baccbb513948bb1bb23857
|
[
"Apache-2.0"
] | null | null | null |
configs/trainval/daotad_eccv2022/7.b.ii.py
|
klauscc/vedatad
|
c59f5ddc8fb227ef08baccbb513948bb1bb23857
|
[
"Apache-2.0"
] | null | null | null |
configs/trainval/daotad_eccv2022/7.b.ii.py
|
klauscc/vedatad
|
c59f5ddc8fb227ef08baccbb513948bb1bb23857
|
[
"Apache-2.0"
] | null | null | null |
# 1. data
dataset_type = "Thumos14Dataset"
data_root = "data/thumos14/"
img_norm_cfg = dict(
mean=[122.7709, 116.7460, 104.0937], std=[68.5005, 66.6322, 70.3232], to_rgb=True
)
num_frames = 480
chunk_size = 1
img_shape = (224, 224)
overlap_ratio = 0.25
keep_ratio = 0.2
feat_downsample = 1
expid = "7.b.ii"
data = dict(
samples_per_gpu=2,
workers_per_gpu=6,
train=dict(
typename=dataset_type,
ann_file=data_root + "annotations/val.json",
video_prefix=data_root + "frames_15fps_256x256/val",
pipeline=[
dict(typename="LoadMetaInfo"),
dict(typename="LoadAnnotations"),
dict(typename="Time2Frame"),
dict(typename="TemporalRandomCrop", num_frames=num_frames, iof_th=0.75),
dict(typename="LoadFrames", to_float32=True),
dict(typename="SpatialRandomCrop", crop_size=img_shape),
dict(
typename="PhotoMetricDistortion",
brightness_delta=32,
contrast_range=(0.5, 1.5),
saturation_range=(0.5, 1.5),
hue_delta=18,
p=0.5,
),
dict(typename="Rotate", limit=(-45, 45), border_mode="reflect101", p=0.5),
dict(typename="SpatialRandomFlip", flip_ratio=0.5),
dict(typename="Normalize", **img_norm_cfg),
dict(typename="Pad", size=(num_frames, *img_shape)),
dict(typename="DefaultFormatBundle"),
dict(
typename="Collect",
keys=["imgs", "gt_segments", "gt_labels", "gt_segments_ignore"],
),
],
),
val=dict(
typename=dataset_type,
ann_file=data_root + "annotations/test.json",
video_prefix=data_root + "frames_15fps_256x256/test",
pipeline=[
dict(typename="LoadMetaInfo"),
dict(typename="Time2Frame"),
dict(
typename="OverlapCropAug",
num_frames=num_frames,
overlap_ratio=overlap_ratio,
transforms=[
dict(typename="TemporalCrop"),
dict(typename="LoadFrames", to_float32=True),
dict(typename="SpatialCenterCrop", crop_size=img_shape),
dict(typename="Normalize", **img_norm_cfg),
dict(typename="Pad", size=(num_frames, *img_shape)),
dict(typename="DefaultFormatBundle"),
dict(typename="Collect", keys=["imgs"]),
],
),
],
),
)
# 2. model
num_classes = 20
strides = [8, 16, 32, 64, 128]
use_sigmoid = True
scales_per_octave = 5
octave_base_scale = 2
num_anchors = scales_per_octave
model = dict(
typename="MemSingleStageDetector",
chunk_size=chunk_size,
backbone=dict(
typename="ChunkActionClip",
chunk_size=chunk_size,
forward_mode="batch",
pretrained_model="data/pretrained_models/action-clip/vit-b-16-32f.pt",
),
neck=[
dict(
typename="SRMResizeFeature",
srm_cfg=dict(
kernel_size=2,
),
),
dict(
typename="SRMSwin",
srm_cfg=dict(
in_channels=512,
out_channels=512,
with_transformer=False,
),
),
dict(
typename="Transformer1DRelPos",
encoder_layer_cfg=dict(
dim=512,
num_heads=16,
max_seq_len=num_frames // strides[0],
drop_path=0.1,
),
num_layers=3,
),
dict(
typename="SelfAttnTDM",
in_channels=512,
out_channels=512,
strides=2,
num_heads=8,
kernel_sizes=(7, 7, 5, 5),
stage_layers=(1, 1, 1, 1),
out_indices=(0, 1, 2, 3, 4),
out_order="bct",
),
dict(
typename="FPN",
in_channels=[512, 512, 512, 512, 512],
out_channels=256,
num_outs=5,
start_level=0,
conv_cfg=dict(typename="Conv1d"),
norm_cfg=dict(typename="SyncBN"),
),
],
head=dict(
typename="RetinaHead",
num_classes=num_classes,
num_anchors=num_anchors,
in_channels=256,
stacked_convs=4,
feat_channels=256,
use_sigmoid=use_sigmoid,
conv_cfg=dict(typename="Conv1d"),
norm_cfg=dict(typename="SyncBN"),
),
)
# 3. engines
meshgrid = dict(
typename="SegmentAnchorMeshGrid",
strides=strides,
base_anchor=dict(
typename="SegmentBaseAnchor",
base_sizes=strides,
octave_base_scale=octave_base_scale,
scales_per_octave=scales_per_octave,
),
)
segment_coder = dict(
typename="DeltaSegmentCoder", target_means=[0.0, 0.0], target_stds=[1.0, 1.0]
)
train_engine = dict(
typename="MemBankTrainEngine",
membank=dict(
chunk_size=chunk_size,
keep_ratio=keep_ratio,
feat_downsample=feat_downsample,
mode="random",
mem_bank_meta_file=f"data/tmp/eccv2022/thumos14/memory_mechanism/{expid}/actionclip_vitb16_15fps_256x256_crop224x224/meta_val.json",
mem_bank_dir=f"data/tmp/eccv2022/thumos14/memory_mechanism/{expid}/actionclip_vitb16_15fps_256x256_crop224x224/val",
),
model=model,
criterion=dict(
typename="SegmentAnchorCriterion",
num_classes=num_classes,
meshgrid=meshgrid,
segment_coder=segment_coder,
reg_decoded_segment=True,
loss_cls=dict(
typename="FocalLoss",
use_sigmoid=use_sigmoid,
gamma=2.0,
alpha=0.25,
loss_weight=1.0,
),
loss_segment=dict(typename="DIoULoss", loss_weight=1.0),
train_cfg=dict(
assigner=dict(
typename="MaxIoUAssigner",
pos_iou_thr=0.6,
neg_iou_thr=0.4,
min_pos_iou=0,
ignore_iof_thr=-1,
ignore_wrt_candidates=True,
iou_calculator=dict(typename="SegmentOverlaps"),
),
allowed_border=-1,
pos_weight=-1,
debug=False,
),
),
optimizer=dict(
typename="SGD",
lr=0.01,
momentum=0.9,
weight_decay=0.0001,
paramwise_cfg=dict(custom_keys=dict(backbone={"lr_mult": 0.4})),
),
)
# 3.2 val engine
val_engine = dict(
typename="ValEngine",
model=model,
meshgrid=meshgrid,
converter=dict(
typename="SegmentAnchorConverter",
num_classes=num_classes,
segment_coder=segment_coder,
nms_pre=1000,
use_sigmoid=use_sigmoid,
),
num_classes=num_classes,
test_cfg=dict(
score_thr=0.005, nms=dict(typename="nmw", iou_thr=0.5), max_per_video=1200
),
use_sigmoid=use_sigmoid,
)
# 4. hooks
hooks = [
dict(typename="OptimizerHook"),
dict(
typename="CosineRestartLrSchedulerHook",
periods=[100] * 12,
restart_weights=[1] * 12,
warmup="linear",
warmup_iters=500,
warmup_ratio=1e-1,
min_lr_ratio=1e-2,
),
dict(typename="EvalHook", eval_cfg=dict(mode="anet")),
dict(typename="SnapshotHook", interval=100),
dict(typename="LoggerHook", interval=10),
]
# 5. work modes
modes = ["train"]
max_epochs = 1000
# 6. checkpoint
# optimizer = dict(filepath='epoch_900_optim.pth')
# meta = dict(filepath='epoch_900_meta.pth')
# 7. misc
seed = 10
dist_params = dict(backend="nccl")
log_level = "INFO"
find_unused_parameters = False
# gpu_mem_fraction = 0.2
| 29.249057
| 140
| 0.56883
|
fdd52d1a1220d7207f5871e3fcdca365b1fa0e6c
| 7,964
|
py
|
Python
|
livvkit/components/numerics_tests/ismip.py
|
jhkennedy/LIVVkit
|
680120cd437e408673e62e535fc0a246c7fc17db
|
[
"BSD-3-Clause"
] | null | null | null |
livvkit/components/numerics_tests/ismip.py
|
jhkennedy/LIVVkit
|
680120cd437e408673e62e535fc0a246c7fc17db
|
[
"BSD-3-Clause"
] | null | null | null |
livvkit/components/numerics_tests/ismip.py
|
jhkennedy/LIVVkit
|
680120cd437e408673e62e535fc0a246c7fc17db
|
[
"BSD-3-Clause"
] | null | null | null |
# coding=utf-8
# Copyright (c) 2015-2017, UT-BATTELLE, LLC
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
Utilities to provide numerical verification for the ISMIP test cases
"""
from __future__ import absolute_import, division, print_function, unicode_literals
import six
import os
import numpy as np
import matplotlib.pyplot as plt
import livvkit
from livvkit.util.LIVVDict import LIVVDict
from livvkit.util import elements
from livvkit.util import functions
case_color = {'bench': '#d7191c',
'test': '#fc8d59'}
line_style = {'bench': 'o-',
'test': '-'}
setup = None
def set_up():
global setup
setup = functions.read_json(os.path.join(os.path.dirname(__file__), 'ismip.json'))
for exp, size in [('ismip-hom-a', '005'), ('ismip-hom-c', '005'), ('ismip-hom-f', '000')]:
recreate_file = os.path.join(livvkit.__path__[0], setup[exp]["data_dir"],
setup[exp]['pattern'][0].replace('???', size))
setup[exp]['interp_points'] = \
np.genfromtxt(recreate_file, delimiter=',', missing_values='nan',
usecols=(0,), unpack=True)
if exp == 'ismip-hom-f':
setup[exp]['interp_points'] = setup[exp]['interp_points']*100 - 50
def get_case_length(case):
return str(int(case.split('-')[-1][1:])).zfill(3)
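# Illustrative only: for a case id ending in, say, '-L5' (hypothetical suffix format),
# the expression above strips the leading letter and zero-pads the number, giving '005'.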
def run(config, analysis_data):
case = config['name']
if case in ['ismip-hom-a', 'ismip-hom-c', 'ismip-hom-f']:
coord = 'x_hat'
else:
coord = 'y_hat'
lengths = list(set(
[get_case_length(d) for d in six.iterkeys(analysis_data)]
))
plot_list = []
for p, pattern in enumerate(sorted(setup[case]['pattern'])):
fig_label = pattern.split('_')[1]
description = ''
for l in sorted(lengths):
plt.figure(figsize=(10, 8), dpi=150)
plt.xlabel(setup[case]['xlabel'][p])
plt.ylabel(setup[case]['ylabel'][p])
if case in ['ismip-hom-a', 'ismip-hom-c']:
plt.title(str(int(l))+' km')
title = fig_label[0:-1]+'. '+fig_label[-1]+': '+str(int(l))+' km'
else:
plt.title('No-Slip Bed')
title = fig_label[0:-2]+'. '+fig_label[-2:]+': No-Slip Bed'
plot_file = os.path.join(config["plot_dir"], config['name']+'_'+fig_label+'_'+l+'.png')
recreate_file = os.path.join(
livvkit.__path__[0], setup[case]["data_dir"], pattern
).replace('???', l)
axis, fs_amin, fs_amax, fs_mean, fs_std, ho_amin, ho_amax, ho_mean, ho_std = \
np.genfromtxt(recreate_file, delimiter=',', missing_values='nan', unpack=True)
if case in ['ismip-hom-f']:
axis = axis*100.0 - 50.0
plt.fill_between(axis, ho_amin, ho_amax, facecolor='green', alpha=0.5)
plt.fill_between(axis, fs_amin, fs_amax, facecolor='blue', alpha=0.5)
plt.plot(axis, fs_mean, 'b-', linewidth=2, label='Full stokes')
plt.plot(axis, ho_mean, 'g-', linewidth=2, label='Higher order')
analysis = {}
for a in six.iterkeys(analysis_data):
if int(l) == int(a.split('-')[-1][1:]):
analysis[a] = analysis_data[a]
for a in six.iterkeys(analysis):
for model in sorted(six.iterkeys(analysis[a])):
plt.plot(analysis[a][model][coord],
analysis[a][model][config['plot_vars'][p]],
line_style[model],
color=case_color[model],
linewidth=2,
label=a+'-'+model)
plt.legend(loc='best')
if livvkit.publish:
plt.savefig(os.path.splitext(plot_file)[0]+'.eps', dpi=600)
plt.savefig(plot_file)
plt.close()
plot_list.append(elements.image(title, description, os.path.basename(plot_file)))
return elements.gallery("Numerics Plots", plot_list)
def summarize_result(data, config):
case = config['name']
summary = LIVVDict()
lengths = list(set([get_case_length(d) for d in six.iterkeys(data)]))
for p, pattern in enumerate(sorted(setup[case]['pattern'])):
for l in sorted(lengths):
recreate_file = os.path.join(
livvkit.__path__[0], setup[case]["data_dir"], pattern
).replace('???', l)
axis, fs_amin, fs_amax, fs_mean, fs_std, ho_amin, ho_amax, ho_mean, ho_std = \
np.genfromtxt(recreate_file, delimiter=',', missing_values='nan', unpack=True)
analysis = {}
for a in six.iterkeys(data):
if int(l) == int(a.split('-')[-1][1:]):
analysis[a] = data[a]
for a in six.iterkeys(analysis):
for model in sorted(six.iterkeys(analysis[a])):
if setup[case]['ylabel'][p].split(" ")[0].lower() == 'surface':
percent_errors = np.divide(analysis[a][model][config['plot_vars'][p]]
- ho_mean, ho_mean+1000)
coefficient = np.divide(ho_std, ho_mean+1000)
else:
percent_errors = np.divide(analysis[a][model][config['plot_vars'][p]]
- ho_mean, ho_mean)
coefficient = np.divide(ho_std, ho_mean)
label = a+' '+setup[case]['ylabel'][p].split(" ")[0]
if model.lower() == 'bench':
summary[label]['Bench mean % error'] = \
'{:3.2%}'.format(np.nanmean(percent_errors))
else:
summary[label]['Test mean % error'] = \
'{:3.2%}'.format(np.nanmean(percent_errors))
summary[label]['Coefficient of variation'] = \
'{:3.2%}'.format(np.nanmean(coefficient))
return summary
def print_summary(case, summary):
""" Show some statistics from the run """
for subcase in six.iterkeys(summary):
message = case + " " + subcase
print(" " + message)
print(" " + "-"*len(message))
for key, val in summary[subcase].items():
print(" "*4 + key.ljust(25) + ":" + val.rjust(7))
print("")
| 40.632653
| 99
| 0.573707
|
ff82373210dc1842acf9ffc54515bb4033c6bbaf
| 14,270
|
py
|
Python
|
ml_service/pipelines/img_class_build_parallel_batchscore_pipeline.py
|
MFG-Azure-MLOps-Hub/MLOpsImgClass
|
4a1bbeb292590d12d2f46da4f0b993b86730e4eb
|
[
"MIT"
] | 1
|
2020-12-08T03:10:35.000Z
|
2020-12-08T03:10:35.000Z
|
ml_service/pipelines/img_class_build_parallel_batchscore_pipeline.py
|
MFG-Azure-MLOps-Hub/MLOpsImgClass
|
4a1bbeb292590d12d2f46da4f0b993b86730e4eb
|
[
"MIT"
] | null | null | null |
ml_service/pipelines/img_class_build_parallel_batchscore_pipeline.py
|
MFG-Azure-MLOps-Hub/MLOpsImgClass
|
4a1bbeb292590d12d2f46da4f0b993b86730e4eb
|
[
"MIT"
] | 1
|
2020-12-08T03:10:37.000Z
|
2020-12-08T03:10:37.000Z
|
"""
Copyright (C) Microsoft Corporation. All rights reserved.
Microsoft Corporation (“Microsoft”) grants you a nonexclusive, perpetual,
royalty-free right to use, copy, and modify the software code provided by us
("Software Code"). You may not sublicense the Software Code or any use of it
(except to your affiliates and to vendors to perform work on your behalf)
through distribution, network access, service agreement, lease, rental, or
otherwise. This license does not purport to express any claim of ownership over
data you may have shared with Microsoft in the creation of the Software Code.
Unless applicable law gives you more rights, Microsoft reserves all other
rights not expressly granted herein, whether by implication, estoppel or
otherwise.
THE SOFTWARE CODE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS
OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
MICROSOFT OR ITS LICENSORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER
IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THE SOFTWARE CODE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
"""
import os
from azureml.pipeline.steps import ParallelRunConfig, ParallelRunStep
from ml_service.util.manage_environment import get_environment
from ml_service.pipelines.load_sample_data import create_sample_data_csv
from ml_service.util.env_variables import Env
from ml_service.util.attach_compute import get_compute
from azureml.core import (
Workspace,
Dataset,
Datastore,
RunConfiguration,
)
from azureml.pipeline.core import Pipeline, PipelineData, PipelineParameter
from azureml.core.compute import ComputeTarget
from azureml.data.datapath import DataPath
from azureml.pipeline.steps import PythonScriptStep
from typing import Tuple
def get_or_create_datastore(
datastorename: str, ws: Workspace, env: Env, input: bool = True
) -> Datastore:
"""
Obtains a datastore with matching name. Creates it if none exists.
:param datastorename: Name of the datastore
:param ws: Current AML Workspace
:param env: Environment variables
:param input: Datastore points to the input container if
this is True(default) or the output storage container otherwise
:returns: Datastore
:raises: ValueError
"""
if datastorename is None:
raise ValueError("Datastore name is required.")
containername = (
env.scoring_datastore_input_container
if input
else env.scoring_datastore_output_container
)
if datastorename in ws.datastores:
datastore = ws.datastores[datastorename]
# the datastore is not registered but we have all details to register it
elif (
env.scoring_datastore_access_key is not None
and containername is not None # NOQA: E501
): # NOQA:E501
datastore = Datastore.register_azure_blob_container(
workspace=ws,
datastore_name=datastorename,
account_name=env.scoring_datastore_storage_name,
account_key=env.scoring_datastore_access_key,
container_name=containername,
)
else:
raise ValueError(
"No existing datastore named {} nor was enough information supplied to create one.".format( # NOQA: E501
datastorename
)
)
return datastore
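# --- Illustrative usage sketch (an assumption, not part of the original module) ---
# Shows how get_or_create_datastore() above might be called to obtain the input
# datastore; the datastore name "scoring_in" is hypothetical.
def _example_get_input_datastore() -> Datastore:
    env = Env()
    ws = Workspace.get(
        name=env.workspace_name,
        subscription_id=env.subscription_id,
        resource_group=env.resource_group,
    )
    return get_or_create_datastore("scoring_in", ws, env, input=True)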
def get_input_dataset(ws: Workspace, ds: Datastore, env: Env) -> Dataset:
"""
Gets an input dataset wrapped around an input data file. The input
data file is assumed to exist in the supplied datastore.
:param ws: AML Workspace
:param ds: Datastore containing the data file
:param env: Environment variables
:returns: Input Dataset
"""
scoringinputds = Dataset.Tabular.from_delimited_files(
path=DataPath(ds, env.scoring_datastore_input_filename)
)
scoringinputds = scoringinputds.register(
ws,
name=env.scoring_dataset_name,
tags={"purpose": "scoring input", "format": "csv"},
create_new_version=True,
).as_named_input(env.scoring_dataset_name)
return scoringinputds
def get_fallback_input_dataset(ws: Workspace, env: Env) -> Dataset:
"""
Called when an input datastore does not exist or no input data file exists
at that location. Create a sample dataset using the img_class dataset from
scikit-learn. Useful when debugging this code in the absence of the input
data location Azure blob.
:param ws: AML Workspace
:param env: Environment Variables
:returns: Fallback input dataset
:raises: FileNotFoundError
"""
# This call creates an example CSV from sklearn sample data. If you
# have already bootstrapped your project, you can comment this line
# out and use your own CSV.
create_sample_data_csv(
file_name=env.scoring_datastore_input_filename, for_scoring=True
)
if not os.path.exists(env.scoring_datastore_input_filename):
error_message = (
"Could not find CSV dataset for scoring at {}. "
+ "No alternate data store location was provided either.".format(
env.scoring_datastore_input_filename
) # NOQA: E501
)
raise FileNotFoundError(error_message)
# upload the input data to the workspace default datastore
default_datastore = ws.get_default_datastore()
scoreinputdataref = default_datastore.upload_files(
[env.scoring_datastore_input_filename],
target_path="scoringinput",
overwrite=False,
)
scoringinputds = (
Dataset.Tabular.from_delimited_files(scoreinputdataref)
.register(ws, env.scoring_dataset_name, create_new_version=True)
.as_named_input(env.scoring_dataset_name)
)
return scoringinputds
def get_output_location(
ws: Workspace, env: Env, outputdatastore: Datastore = None
) -> PipelineData:
"""
Returns a Datastore wrapped as a PipelineData instance suitable
for passing into a pipeline step. Represents the location where
the scoring output should be written. Uses the default workspace
blob store if no output datastore is supplied.
:param ws: AML Workspace
:param env: Environment Variables
:param outputdatastore: AML Datastore, optional, default is None
:returns: PipelineData wrapping the output datastore
"""
if outputdatastore is None:
output_loc = PipelineData(
name="defaultoutput", datastore=ws.get_default_datastore()
)
else:
output_loc = PipelineData(
name=outputdatastore.name, datastore=outputdatastore
) # NOQA: E501
return output_loc
def get_inputds_outputloc(
ws: Workspace, env: Env
) -> Tuple[Dataset, PipelineData]: # NOQA: E501
"""
Prepare the input and output for the scoring step. Input is a tabular
dataset wrapped around the scoring data. Output is PipelineData
representing a location to write the scores down.
:param ws: AML Workspace
:param env: Environment Variables
:returns: Input dataset and output location
"""
if env.scoring_datastore_storage_name is None:
# fall back to default
scoringinputds = get_fallback_input_dataset(ws, env)
output_loc = get_output_location(ws, env)
else:
inputdatastore = get_or_create_datastore(
"{}_in".format(env.scoring_datastore_storage_name), ws, env
)
outputdatastore = get_or_create_datastore(
"{}_out".format(env.scoring_datastore_storage_name),
ws,
env,
input=False, # NOQA: E501
)
scoringinputds = get_input_dataset(ws, inputdatastore, env)
output_loc = get_output_location(ws, env, outputdatastore)
return (scoringinputds, output_loc)
def get_run_configs(
ws: Workspace, computetarget: ComputeTarget, env: Env
) -> Tuple[ParallelRunConfig, RunConfiguration]:
"""
Creates the necessary run configurations required by the
pipeline to enable parallelized scoring.
:param ws: AML Workspace
:param computetarget: AML Compute target
:param env: Environment Variables
:returns: Tuple[Scoring Run configuration, Score copy run configuration]
"""
# get a conda environment for scoring
environment = get_environment(
ws,
env.aml_env_name_scoring,
conda_dependencies_file=env.aml_env_score_conda_dep_file,
enable_docker=True,
use_gpu=env.use_gpu_for_scoring,
create_new=env.rebuild_env_scoring,
)
score_run_config = ParallelRunConfig(
entry_script=env.batchscore_script_path,
source_directory=env.sources_directory_train,
error_threshold=10,
output_action="append_row",
compute_target=computetarget,
node_count=env.max_nodes_scoring,
environment=environment,
run_invocation_timeout=300,
)
copy_run_config = RunConfiguration()
copy_run_config.environment = get_environment(
ws,
env.aml_env_name_score_copy,
conda_dependencies_file=env.aml_env_scorecopy_conda_dep_file,
enable_docker=True,
use_gpu=env.use_gpu_for_scoring,
create_new=env.rebuild_env_scoring,
)
return (score_run_config, copy_run_config)
def get_scoring_pipeline(
scoring_dataset: Dataset,
output_loc: PipelineData,
score_run_config: ParallelRunConfig,
copy_run_config: RunConfiguration,
computetarget: ComputeTarget,
ws: Workspace,
env: Env,
) -> Pipeline:
"""
Creates the scoring pipeline.
:param scoring_dataset: Data to score
:param output_loc: Location to save the scoring results
:param score_run_config: Parallel Run configuration to support
parallelized scoring
:param copy_run_config: Script Run configuration to support
score copying
:param computetarget: AML Compute target
:param ws: AML Workspace
:param env: Environment Variables
:returns: Scoring pipeline instance
"""
# To help filter the model make the model name, model version and a
# tag/value pair bindable parameters so that they can be passed to
# the pipeline when invoked either over REST or via the AML SDK.
model_name_param = PipelineParameter(
"model_name", default_value=" "
) # NOQA: E501
model_version_param = PipelineParameter(
"model_version", default_value=" "
) # NOQA: E501
model_tag_name_param = PipelineParameter(
"model_tag_name", default_value=" "
) # NOQA: E501
model_tag_value_param = PipelineParameter(
"model_tag_value", default_value=" "
) # NOQA: E501
scoring_step = ParallelRunStep(
name="scoringstep",
inputs=[scoring_dataset],
output=output_loc,
arguments=[
"--model_name",
model_name_param,
"--model_version",
model_version_param,
"--model_tag_name",
model_tag_name_param,
"--model_tag_value",
model_tag_value_param,
],
parallel_run_config=score_run_config,
allow_reuse=False,
)
copying_step = PythonScriptStep(
name="scorecopystep",
script_name=env.batchscore_copy_script_path,
source_directory=env.sources_directory_train,
arguments=[
"--output_path",
output_loc,
"--scoring_output_filename",
env.scoring_datastore_output_filename
if env.scoring_datastore_output_filename is not None
else "",
"--scoring_datastore",
env.scoring_datastore_storage_name
if env.scoring_datastore_storage_name is not None
else "",
"--score_container",
env.scoring_datastore_output_container
if env.scoring_datastore_output_container is not None
else "",
"--scoring_datastore_key",
env.scoring_datastore_access_key
if env.scoring_datastore_access_key is not None
else "",
],
inputs=[output_loc],
allow_reuse=False,
compute_target=computetarget,
runconfig=copy_run_config,
)
return Pipeline(workspace=ws, steps=[scoring_step, copying_step])
def build_batchscore_pipeline():
"""
Main method that builds and publishes a scoring pipeline.
"""
try:
env = Env()
# Get Azure machine learning workspace
aml_workspace = Workspace.get(
name=env.workspace_name,
subscription_id=env.subscription_id,
resource_group=env.resource_group,
)
# Get Azure machine learning cluster
aml_compute_score = get_compute(
aml_workspace,
env.compute_name_scoring,
env.vm_size_scoring,
for_batch_scoring=True,
)
input_dataset, output_location = get_inputds_outputloc(
aml_workspace, env
) # NOQA: E501
scoring_runconfig, score_copy_runconfig = get_run_configs(
aml_workspace, aml_compute_score, env
)
scoring_pipeline = get_scoring_pipeline(
input_dataset,
output_location,
scoring_runconfig,
score_copy_runconfig,
aml_compute_score,
aml_workspace,
env,
)
published_pipeline = scoring_pipeline.publish(
name=env.scoring_pipeline_name,
description="Diabetes Batch Scoring Pipeline",
)
pipeline_id_string = "##vso[task.setvariable variable=pipeline_id;isOutput=true]{}".format( # NOQA: E501
published_pipeline.id
)
print(pipeline_id_string)
except Exception as e:
print(e)
exit(1)
if __name__ == "__main__":
build_batchscore_pipeline()
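# --- Illustrative usage sketch (not part of the original pipeline code) ---
# The scoring step exposes model_name / model_version / model_tag_name /
# model_tag_value as bindable PipelineParameters. The sketch below shows how the
# published pipeline might be triggered with those parameters via the AML SDK;
# the experiment name and parameter values are assumptions.
def submit_scoring_run(ws: Workspace, published_pipeline_id: str):
    from azureml.pipeline.core import PublishedPipeline

    published = PublishedPipeline.get(workspace=ws, id=published_pipeline_id)
    # Bind the model-filter parameters for this run of the published pipeline.
    return published.submit(
        workspace=ws,
        experiment_name="img-class-batchscoring",  # assumed experiment name
        pipeline_parameters={
            "model_name": "img_class_model",  # assumed registered model name
            "model_version": "1",
            "model_tag_name": " ",
            "model_tag_value": " ",
        },
    )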
| 33.263403
| 117
| 0.688788
|
1a2af3e295ac20af3f8bd5d081d8eed841351846
| 7,520
|
py
|
Python
|
src/.history/HiwinRA605_socket_ros_20190604110804.py
|
SamKaiYang/2019_Hiwin_Shaking
|
d599f8c87dc4da89eae266990d12eb3a8b0f3e16
|
[
"MIT"
] | null | null | null |
src/.history/HiwinRA605_socket_ros_20190604110804.py
|
SamKaiYang/2019_Hiwin_Shaking
|
d599f8c87dc4da89eae266990d12eb3a8b0f3e16
|
[
"MIT"
] | null | null | null |
src/.history/HiwinRA605_socket_ros_20190604110804.py
|
SamKaiYang/2019_Hiwin_Shaking
|
d599f8c87dc4da89eae266990d12eb3a8b0f3e16
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# license removed for brevity
#Receive commands from the strategy side and forward them over a socket to the control-side computer
import socket
##multithreading
import threading
import time
##
import sys
import os
import numpy as np
import rospy
import matplotlib as plot
from std_msgs.msg import String
from ROS_Socket.srv import *
from ROS_Socket.msg import *
import HiwinRA605_socket_TCPcmd as TCP
import HiwinRA605_socket_Taskcmd as Taskcmd
import talker as talk
import enum
data = '0' #initial value of the data to be transmitted
Arm_feedback = 1 #assume the arm is busy
state_feedback = 0
NAME = 'socket_server'
client_response = 0 #initial value of the response counter
##------------class pos-------
class pos():
def __init__(self, x, y, z, pitch, roll, yaw):
self.x = x
self.y = y
self.z = z
self.pitch = pitch
self.roll = roll
self.yaw = yaw
##------------class socket_cmd---------
class socket_cmd():
def __init__(self, grip, setvel, ra, delay, setboth, action):
self.grip = grip
self.setvel = setvel
self.ra = ra
self.delay = delay
self.setboth = setboth
self.action = action
##-----------switch define------------##
class switch(object):
def __init__(self, value):
self.value = value
self.fall = False
def __iter__(self):
"""Return the match method once, then stop"""
yield self.match
        return  # end the generator; raising StopIteration here fails under PEP 479 (Python 3.7+)
def match(self, *args):
"""Indicate whether or not to enter a case suite"""
if self.fall or not args:
return True
elif self.value in args: # changed for v1.5, see below
self.fall = True
return True
else:
return False
##-----------client feedback arm state----------
def socket_client_arm_state(Arm_state):
global state_feedback
rospy.wait_for_service('arm_state')
try:
Arm_state_client = rospy.ServiceProxy('arm_state', arm_state)
state_feedback = Arm_state_client(Arm_state)
#pos_feedback_times = pos_feedback.response
return state_feedback
except rospy.ServiceException as e:
print ("Service call failed: %s"%e)
##-----------client feedback arm state end----------
##------------server -------
##--------touch strategy--------###
def point_data(req):
global client_response
pos.x = '%s'%req.x
pos.y = '%s'%req.y
pos.z = '%s'%req.z
pos.pitch = '%s'%req.pitch
pos.roll = '%s'%req.roll
pos.yaw = '%s'%req.yaw
client_response = client_response + 1
return(client_response)
##----------Arm Mode-------------###
def Arm_Mode(req):
socket_cmd.action = int('%s'%req.action)
socket_cmd.grip = int('%s'%req.grip)
socket_cmd.ra = int('%s'%req.ra)
socket_cmd.setvel = int('%s'%req.vel)
socket_cmd.setboth = int('%s'%req.both)
return(1)
##--------touch strategy end--------###
def socket_server():
rospy.init_node(NAME)
a = rospy.Service('arm_mode',arm_mode, Arm_Mode) ##server arm mode data
s = rospy.Service('arm_pos',arm_data, point_data) ##server arm point data
print ("Ready to connect")
rospy.spin() ## spin one
##------------server end-------
##----------socket packet transmission--------------##
##-----------socket client--------
def socket_client():
global Arm_feedback,data
try:
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect(('192.168.0.1', 8080))#iclab 5
#s.connect(('192.168.1.102', 8080))#iclab computerx
except socket.error as msg:
print(msg)
sys.exit(1)
print('Connection has been successful')
print(s.recv(1024))
    start_input=int(input('Press 1 to start transmission, press 3 to exit: '))
#start_input = 1
if start_input==1:
while 1:
            ##---------------send arm commands over the socket-----------------
for case in switch(socket_cmd.action):
if case(Taskcmd.Action_Type.PtoP):
for case in switch(socket_cmd.setboth):
if case(Taskcmd.Ctrl_Mode.CTRL_POS):
data = TCP.SetPtoP(socket_cmd.grip,Taskcmd.RA.ABS,Taskcmd.Ctrl_Mode.CTRL_POS,pos.x,pos.y,pos.z,pos.pitch,pos.roll,pos.yaw,socket_cmd.setvel)
break
if case(Taskcmd.Ctrl_Mode.CTRL_EULER):
data = TCP.SetPtoP(socket_cmd.grip,Taskcmd.RA.ABS,Taskcmd.Ctrl_Mode.CTRL_EULER,pos.x,pos.y,pos.z,pos.pitch,pos.roll,pos.yaw,socket_cmd.setvel)
break
if case(Taskcmd.Ctrl_Mode.CTRL_BOTH):
data = TCP.SetPtoP(socket_cmd.grip,Taskcmd.RA.ABS,Taskcmd.Ctrl_Mode.CTRL_BOTH,pos.x,pos.y,pos.z,pos.pitch,pos.roll,pos.yaw,socket_cmd.setvel)
break
break
if case(Taskcmd.Action_Type.Line):
for case in switch(socket_cmd.setboth):
if case(Taskcmd.Ctrl_Mode.CTRL_POS):
data = TCP.SetLine(socket_cmd.grip,Taskcmd.RA.ABS,Taskcmd.Ctrl_Mode.CTRL_POS,pos.x,pos.y,pos.z,pos.pitch,pos.roll,pos.yaw,socket_cmd.setvel)
break
if case(Taskcmd.Ctrl_Mode.CTRL_EULER):
data = TCP.SetLine(socket_cmd.grip,Taskcmd.RA.ABS,Taskcmd.Ctrl_Mode.CTRL_EULER,pos.x,pos.y,pos.z,pos.pitch,pos.roll,pos.yaw,socket_cmd.setvel )
break
if case(Taskcmd.Ctrl_Mode.CTRL_BOTH):
data = TCP.SetLine(socket_cmd.grip,Taskcmd.RA.ABS,Taskcmd.Ctrl_Mode.CTRL_BOTH,pos.x,pos.y,pos.z,pos.pitch,pos.roll,pos.yaw,socket_cmd.setvel )
break
break
if case(Taskcmd.Action_Type.SetVel):
data = TCP.SetVel(socket_cmd.grip, socket_cmd.setvel)
break
if case(Taskcmd.Action_Type.Delay):
data = TCP.SetDelay(socket_cmd.grip,0)
break
if case(Taskcmd.Action_Type.Mode):
data = TCP.SetMode(socket_cmd.grip,0)
break
socket_cmd.action= 5
            s.send(data.encode('utf-8'))# send over the socket; the str is UTF-8 encoded for transmission
feedback_str = s.recv(1024)
            # the arm side reports its state back
###test 0403
if str(feedback_str[2]) == '70':# F
feedback = 0
socket_client_arm_state(feedback)
print("isbusy false")
if str(feedback_str[2]) == '84':# T
feedback = 1
socket_client_arm_state(feedback)
print("isbusy true")
if str(feedback_str[2]) == '54':# 6
feedback = 6
socket_client_arm_state(feedback)
print("shutdown")
#Arm_feedback = TCP.Is_busy(feedback)
###test 0403
            ##---------------send arm commands over the socket end-----------------
if Arm_feedback == Taskcmd.Arm_feedback_Type.shutdown:
rospy.on_shutdown(myhook)
break
if start_input == 3:
pass
s.close()
##-----------socket client end--------
##-------------socket packet transmission end--------------##
## multithreading
def thread_test():
socket_client()
## multithreading end
def myhook():
print ("shutdown time!")
if __name__ == '__main__':
socket_cmd.action = 5
t = threading.Thread(target=thread_test)
t.start()
socket_server()
t.join()
# Ctrl+K Ctrl+C  Add line comment
# Ctrl+K Ctrl+U  Remove line comment
# Ctrl+] / Ctrl+[  Indent/outdent line
| 36.862745
| 171
| 0.562234
|
e93e4b2d340bfe7b131b6091d0ecd8f0008f2f8b
| 10,106
|
py
|
Python
|
src/5_Upload-to-aws.py
|
biomage-ltd/data-ingest
|
cbac0d5aae262afa6afdd2ee74b8ef7c58e745f6
|
[
"MIT"
] | 2
|
2020-10-23T17:41:10.000Z
|
2021-02-10T20:50:49.000Z
|
src/5_Upload-to-aws.py
|
biomage-ltd/data-ingest
|
cbac0d5aae262afa6afdd2ee74b8ef7c58e745f6
|
[
"MIT"
] | 10
|
2021-01-07T11:34:57.000Z
|
2021-06-22T15:46:46.000Z
|
src/5_Upload-to-aws.py
|
biomage-ltd/data-ingest
|
cbac0d5aae262afa6afdd2ee74b8ef7c58e745f6
|
[
"MIT"
] | 1
|
2020-11-10T23:17:30.000Z
|
2020-11-10T23:17:30.000Z
|
#!/usr/bin/python3
################################################
## 5_upload-to-aws.py
## - Getting ready samples-table and experiments-table
## - Upload tables to DynamoDB
## - Upload experiment.rds to S3
################################################
import hashlib
import os
import pandas
from scipy.io import mmread
import matplotlib.pyplot as plt
import boto3
import json
from decimal import Decimal
from datetime import datetime
import uuid
COLOR_POOL = []
CLUSTER_ENVS = [os.getenv("CLUSTER_ENV", "staging")]
if CLUSTER_ENVS[0] == "all": CLUSTER_ENVS = ['staging', 'production']
WARN_TXT_COL = "\033[93m"
RESET_TXT_COL = "\033[0m"
ERR_TXT_COL = "\033[91m"
for CLUSTER_ENV in CLUSTER_ENVS:
print(f"{WARN_TXT_COL}Deploying to {CLUSTER_ENV}{RESET_TXT_COL}")
for CLUSTER_ENV in CLUSTER_ENVS:
if not (CLUSTER_ENV in ["staging", "production"]):
print(f"{ERR_TXT_COL}{CLUSTER_ENV} does not exists{RESET_TXT_COL}")
exit(1)
with open("/data-ingest/src/color_pool.json") as f:
COLOR_POOL = json.load(f)
def calculate_checksum(filenames):
hash = hashlib.md5()
for fn in filenames:
if os.path.isfile(fn):
hash.update(open(fn, "rb").read())
return hash.hexdigest()
# This function creates the table information for the samples. It requires the experiment id and the config as input.
def create_samples_table(config, experiment_id):
# In samples_table we are going to add the core of the information
samples_table = {}
# Getting flag_filtered information
df_prefilered = pandas.read_csv(
"/output/df_flag_filtered.txt",
sep="\t",
na_values=["None"],
)
    # First, identify the sample names: either take them directly from the config or
    # fetch the folder names under /input (assuming each folder name matches a sample name).
if len(config["samples"]) > 1:
samples = config["samples"]
else:
samples = [
name
for name in os.listdir("/input")
if os.path.isdir(os.path.join("/input", name))
]
samples_table["ids"] = ["sample-" + sample for sample in samples]
    # Some of the current datasets may not be gzipped, so the alternative tsv (and mtx) formats are also accepted.
mime_options = {
"tsv": "application/tsv",
"gz": "application/gzip",
"mtx": "application/mtx",
}
for sample in samples:
# flag filtered
preFiltered = (
df_prefilered.loc[
df_prefilered.samples == sample, "flag_filtered"
].tolist()[0]
== "Filtered"
)
# Identify datetime
createdDate = datetime.now()
lastModified = datetime.now()
fileNames = {}
        # Look for the files that are not hidden (hidden file names start with ".")
sample_files = [
sample + "/" + f
for f in os.listdir("/input/" + sample)
if not f.startswith(".")
]
# Iterate over each file to create the slot
for sample_file in sample_files:
fileNames[sample_file] = {
"objectKey": "",
"name": sample_file,
"size": os.stat("/input/" + sample_file).st_size,
"mime": mime_options[sample_file.split(".")[-1]],
"success": True,
"error": False,
}
# Add the whole information to each sample
samples_table["sample-" + sample] = {
"name": sample,
"uuid": str(uuid.uuid4()),
"species": config["organism"],
"type": config["input"]["type"],
"createdDate": createdDate.isoformat(),
"lastModified": lastModified.isoformat(),
"complete": True,
"error": False,
"fileNames": sample_files,
"files": fileNames,
"preFiltered": preFiltered,
}
return {"experimentId": experiment_id, "samples": samples_table}
# cell_sets fn for seurat samples name
def samples_sets():
# construct new cell set group
samples_annotations = pandas.read_csv(
"/output/samples-cells.csv",
sep="\t",
names=["Cells_ID", "Value"],
na_values=["None"],
)
cell_set = {
"key": "sample",
"name": "Samples",
"rootNode": True,
"children": [],
"type": "metadataCategorical",
}
for sample in samples_annotations["Value"].unique():
view = samples_annotations[samples_annotations.Value == sample]["Cells_ID"]
cell_set["children"].append(
{
"key": f"sample-{sample}",
"name": f"{sample}",
"color": COLOR_POOL.pop(0),
"cellIds": [int(d) for d in view.tolist()],
}
)
return cell_set
# cell_sets fn for seurat metadata information
def meta_sets():
meta_annotations = pandas.read_csv(
"/output/metadata-cells.csv",
sep="\t",
header=0,
)
cell_set_list = list()
# The first column is the cells_id, the rest is the metadata information
for i in range(1, len(meta_annotations.columns)):
        # use the column name as both the key and the display name
key = meta_annotations.columns[i]
name = meta_annotations.columns[i]
cell_set = {
"key": key,
"name": name,
"rootNode": True,
"children": [],
"type": "metadataCategorical",
}
for value in meta_annotations.iloc[:, i].unique():
view = meta_annotations[meta_annotations.iloc[:, i] == value]["cells_id"]
cell_set["children"].append(
{
"key": key + f"-{value}",
"name": f"{value}",
"color": COLOR_POOL.pop(0),
"cellIds": [int(d) for d in view.tolist()],
}
)
cell_set_list.append(cell_set)
return cell_set_list
def main():
experiment_id = calculate_checksum(
[
"/output/r-out-raw.mtx",
"/output/r-out-normalized.mtx",
"/output/r-out-cells.tsv",
]
)
# save experiment_id for record-keeping
with open("/output/experiment_id.txt", "w") as text_file:
text_file.write(experiment_id)
config = None
with open("/input/meta.json", "r") as f:
config = json.load(f)
# read config related with QC pipeline
config_dataProcessing = None
with open("/output/config_dataProcessing.json", "r") as f:
config_dataProcessing = json.load(f)
# Design cell_set scratchpad for DynamoDB
scratchpad = {
"key": "scratchpad",
"name": "Scratchpad",
"rootNode": True,
"children": [],
"type": "cellSets",
}
samples_data = create_samples_table(config, experiment_id)
samples_set = samples_sets()
if "metadata" in config.keys():
# Design cell_set meta_data for DynamoDB
meta_set = meta_sets()
cellSets = [samples_set, scratchpad] + meta_set
else:
# Design cell_set meta_data for DynamoDB
cellSets = [scratchpad, samples_set]
print("Experiment name is", config["name"])
FILE_NAMES = [
f"biomage-source-{CLUSTER_ENV}/{experiment_id}/r.rds"
for CLUSTER_ENV in CLUSTER_ENVS]
experiment_data = {
"apiVersion": "2.0.0-data-ingest-seurat-rds-automated",
"experimentId": experiment_id,
"experimentName": config["name"],
"meta": {
"organism": config["organism"],
"type": config["input"]["type"],
},
"processingConfig": config_dataProcessing,
}
cellSetsObject = {"cellSets": cellSets}
cell_sets_data = json.dumps(cellSetsObject)
    # Re-parse the JSON so that all decimal numbers become Decimal values (required by DynamoDB)
experiment_data = json.loads(json.dumps(experiment_data), parse_float=Decimal)
if CLUSTER_ENV == "production":
rbac_write = "a07c6615-d982-413b-9fdc-48bd85182e83"
else:
rbac_write = "70c213d4-e7b6-4920-aefb-706ce8606ee2"
experiment_data["rbac_can_write"] = set([rbac_write])
samples_data = json.loads(json.dumps(samples_data), parse_float=Decimal)
access_key = os.getenv("AWS_ACCESS_KEY_ID")
secret_access_key = os.getenv("AWS_SECRET_ACCESS_KEY")
for CLUSTER_ENV, FILE_NAME in zip(CLUSTER_ENVS, FILE_NAMES):
r_object_bucket, r_object_key = FILE_NAME.split("/", 1)
dynamo = boto3.resource(
"dynamodb",
aws_access_key_id=access_key,
aws_secret_access_key=secret_access_key,
region_name="eu-west-1",
).Table(f"experiments-{CLUSTER_ENV}")
dynamo.put_item(Item=experiment_data)
dynamo = boto3.resource(
"dynamodb",
aws_access_key_id=access_key,
aws_secret_access_key=secret_access_key,
region_name="eu-west-1",
).Table(f"samples-{CLUSTER_ENV}")
dynamo.put_item(Item=samples_data)
s3 = boto3.client(
"s3",
aws_access_key_id=access_key,
aws_secret_access_key=secret_access_key,
region_name="eu-west-1",
)
s3.put_object(
Body=cell_sets_data, Bucket=f"cell-sets-{CLUSTER_ENV}", Key=experiment_id
)
s3 = boto3.client(
"s3",
aws_access_key_id=access_key,
aws_secret_access_key=secret_access_key,
region_name="eu-west-1",
)
with open("/output/experiment.rds", "rb") as f:
s3.put_object(Body=f, Bucket=r_object_bucket, Key=r_object_key)
if CLUSTER_ENV == "production":
print("successful. experiment is now accessible at:")
print(f"https://scp.biomage.net/experiments/{experiment_id}/data-exploration")
elif CLUSTER_ENV == "staging":
print(f"successful. Experiment ID: {experiment_id} uploaded to staging.")
main()
print("Step 5 completed.")
| 30.439759
| 122
| 0.586483
|
f693863e5efd6d3824327951bced428042da4edc
| 1,046
|
py
|
Python
|
tests/bind_tests/boolean_tests/sweep_event_tests/strategies.py
|
skrat/martinez
|
86db48324cb50ecb52be8ab2e4278a6d5cdd562b
|
[
"MIT"
] | 7
|
2020-05-07T08:13:44.000Z
|
2021-12-17T07:33:51.000Z
|
tests/bind_tests/boolean_tests/sweep_event_tests/strategies.py
|
skrat/martinez
|
86db48324cb50ecb52be8ab2e4278a6d5cdd562b
|
[
"MIT"
] | 17
|
2019-11-29T23:17:26.000Z
|
2020-12-20T15:47:17.000Z
|
tests/bind_tests/boolean_tests/sweep_event_tests/strategies.py
|
skrat/martinez
|
86db48324cb50ecb52be8ab2e4278a6d5cdd562b
|
[
"MIT"
] | 1
|
2020-12-17T22:44:21.000Z
|
2020-12-17T22:44:21.000Z
|
from hypothesis import strategies
from tests.bind_tests.factories import (to_acyclic_bound_sweep_events,
to_bound_sweep_events,
to_nested_bound_sweep_events,
to_plain_bound_sweep_events)
from tests.bind_tests.hints import BoundPoint
from tests.bind_tests.utils import (bound_edges_types,
bound_polygons_types)
from tests.strategies import (booleans,
floats,
unsigned_integers)
booleans = booleans
unsigned_integers = unsigned_integers
points = strategies.builds(BoundPoint, floats, floats)
polygons_types = bound_polygons_types
edges_types = bound_edges_types
leaf_sweep_events = to_plain_bound_sweep_events(strategies.none())
acyclic_sweep_events = to_acyclic_bound_sweep_events()
sweep_events = to_bound_sweep_events()
nested_sweep_events = to_nested_bound_sweep_events()
maybe_sweep_events = strategies.none() | sweep_events
| 43.583333
| 70
| 0.68738
|
d2222ceb694665f3958bcd9c395bbc4372b26251
| 606
|
py
|
Python
|
example_project/blog/factories.py
|
allran/djangorestframework-appapi
|
5e843b70910ccd55d787096ee08eb85315c80000
|
[
"BSD-2-Clause"
] | 4
|
2019-10-15T06:47:29.000Z
|
2019-11-11T13:16:15.000Z
|
example_project/blog/factories.py
|
allran/djangorestframework-appapi
|
5e843b70910ccd55d787096ee08eb85315c80000
|
[
"BSD-2-Clause"
] | null | null | null |
example_project/blog/factories.py
|
allran/djangorestframework-appapi
|
5e843b70910ccd55d787096ee08eb85315c80000
|
[
"BSD-2-Clause"
] | null | null | null |
# -*- encoding: utf-8 -*-
import factory
from faker import Factory as FakerFactory
from .models import (
Author,
Blog,
)
faker = FakerFactory.create()
faker.seed(883843)
class BlogFactory(factory.django.DjangoModelFactory):
class Meta:
model = Blog
title = factory.LazyAttribute(lambda x: faker.name())
content = factory.LazyAttribute(lambda x: faker.name())
class AuthorFactory(factory.django.DjangoModelFactory):
class Meta:
model = Author
name = factory.LazyAttribute(lambda x: faker.name())
email = factory.LazyAttribute(lambda x: faker.email())
| 21.642857
| 59
| 0.70132
|
8a27adb3e8e25e0949f740704a115c233df5e798
| 290
|
py
|
Python
|
src/camera-test.py
|
parhamzm/OpenCV-FaceDetection_Filtering
|
61cb497adeebac3d0c5062885078b4ba239ed963
|
[
"MIT"
] | null | null | null |
src/camera-test.py
|
parhamzm/OpenCV-FaceDetection_Filtering
|
61cb497adeebac3d0c5062885078b4ba239ed963
|
[
"MIT"
] | null | null | null |
src/camera-test.py
|
parhamzm/OpenCV-FaceDetection_Filtering
|
61cb497adeebac3d0c5062885078b4ba239ed963
|
[
"MIT"
] | null | null | null |
import numpy as np
import cv2
cap = cv2.VideoCapture(0)
while True:
ret, frame = cap.read()
grayscale = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
cv2.imshow('frame', frame)
cv2.imshow('frame2', grayscale)
    if cv2.waitKey(20) & 0xFF == ord('q'):
        break

# release the camera and close the preview windows once the loop exits
cap.release()
cv2.destroyAllWindows()
| 22.307692
| 56
| 0.617241
|
af72e2f5e1e65273b8dd9fc984cfe188e06a8f1a
| 1,690
|
py
|
Python
|
indico/modules/events/sessions/models/types.py
|
uxmaster/indico
|
ecd19f17ef6fdc9f5584f59c87ec647319ce5d31
|
[
"MIT"
] | null | null | null |
indico/modules/events/sessions/models/types.py
|
uxmaster/indico
|
ecd19f17ef6fdc9f5584f59c87ec647319ce5d31
|
[
"MIT"
] | null | null | null |
indico/modules/events/sessions/models/types.py
|
uxmaster/indico
|
ecd19f17ef6fdc9f5584f59c87ec647319ce5d31
|
[
"MIT"
] | null | null | null |
# This file is part of Indico.
# Copyright (C) 2002 - 2019 CERN
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see the
# LICENSE file for more details.
from __future__ import unicode_literals
from sqlalchemy.ext.declarative import declared_attr
from indico.core.db import db
from indico.util.locators import locator_property
from indico.util.string import format_repr, return_ascii
class SessionType(db.Model):
__tablename__ = 'session_types'
@declared_attr
def __table_args__(cls):
return (db.Index('ix_uq_session_types_event_id_name_lower', cls.event_id, db.func.lower(cls.name),
unique=True),
{'schema': 'events'})
id = db.Column(
db.Integer,
primary_key=True
)
event_id = db.Column(
db.Integer,
db.ForeignKey('events.events.id'),
index=True,
nullable=False
)
name = db.Column(
db.String,
nullable=False
)
code = db.Column(
db.String,
nullable=False,
default=''
)
is_poster = db.Column(
db.Boolean,
nullable=False,
default=False
)
event = db.relationship(
'Event',
lazy=True,
backref=db.backref(
'session_types',
cascade='all, delete-orphan',
lazy=True
)
)
# relationship backrefs:
# - sessions (Session.type)
@return_ascii
def __repr__(self):
return format_repr(self, 'id', _text=self.name)
@locator_property
def locator(self):
return dict(self.event.locator, session_type_id=self.id)
| 23.802817
| 106
| 0.616568
|
c4f8370c65c932a5e676049d43b43fc639fa4ef9
| 3,782
|
py
|
Python
|
PyCapture2-2.13.31/examples/python3/SaveImageToAVIEx.py
|
sjtu-automatic-maritime-system/PengZhenghao
|
c063294d44ea9be972114b2c144dab3d9a2de863
|
[
"MIT"
] | 1
|
2019-04-16T09:07:26.000Z
|
2019-04-16T09:07:26.000Z
|
PyCapture2-2.13.31/examples/python3/SaveImageToAVIEx.py
|
sjtu-automatic-maritime-system/PengZhenghao
|
c063294d44ea9be972114b2c144dab3d9a2de863
|
[
"MIT"
] | null | null | null |
PyCapture2-2.13.31/examples/python3/SaveImageToAVIEx.py
|
sjtu-automatic-maritime-system/PengZhenghao
|
c063294d44ea9be972114b2c144dab3d9a2de863
|
[
"MIT"
] | null | null | null |
# =============================================================================
# Copyright (c) 2001-2018 FLIR Systems, Inc. All Rights Reserved.
#
# This software is the confidential and proprietary information of FLIR
# Integrated Imaging Solutions, Inc. ("Confidential Information"). You
# shall not disclose such Confidential Information and shall use it only in
# accordance with the terms of the license agreement you entered into
# with FLIR Integrated Imaging Solutions, Inc. (FLIR).
#
# FLIR MAKES NO REPRESENTATIONS OR WARRANTIES ABOUT THE SUITABILITY OF THE
# SOFTWARE, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
# PURPOSE, OR NON-INFRINGEMENT. FLIR SHALL NOT BE LIABLE FOR ANY DAMAGES
# SUFFERED BY LICENSEE AS A RESULT OF USING, MODIFYING OR DISTRIBUTING
# THIS SOFTWARE OR ITS DERIVATIVES.
# =============================================================================
import PyCapture2
def print_build_info():
lib_ver = PyCapture2.getLibraryVersion()
print('PyCapture2 library version: %d %d %d %d' % (lib_ver[0], lib_ver[1], lib_ver[2], lib_ver[3]))
print()
def print_camera_info(cam):
cam_info = cam.getCameraInfo()
print('\n*** CAMERA INFORMATION ***\n')
    print('Serial number - %d' % cam_info.serialNumber)
    print('Camera model - %s' % cam_info.modelName)
    print('Camera vendor - %s' % cam_info.vendorName)
    print('Sensor - %s' % cam_info.sensorInfo)
    print('Resolution - %s' % cam_info.sensorResolution)
    print('Firmware version - %s' % cam_info.firmwareVersion)
    print('Firmware build time - %s' % cam_info.firmwareBuildTime)
print()
def save_video_helper(cam, file_format, filename, framerate):
num_images = 100
video = PyCapture2.FlyCapture2Video()
for i in range(num_images):
try:
image = cam.retrieveBuffer()
except PyCapture2.Fc2error as fc2Err:
print('Error retrieving buffer : %s' % fc2Err)
continue
print('Grabbed image {}'.format(i))
if (i == 0):
if file_format == 'AVI':
video.AVIOpen(filename, framerate)
elif file_format == 'MJPG':
video.MJPGOpen(filename, framerate, 75)
elif file_format == 'H264':
video.H264Open(filename, framerate, image.getCols(), image.getRows(), 1000000)
else:
print('Specified format is not available.')
return
video.append(image)
print('Appended image %d...' % i)
print('Appended {} images to {} file: {}...'.format(num_images, file_format, filename))
video.close()
#
# Example Main
#
# Print PyCapture2 Library Information
print_build_info()
# Ensure sufficient cameras are found
bus = PyCapture2.BusManager()
num_cams = bus.getNumOfCameras()
print('Number of cameras detected: %d' % num_cams)
if not num_cams:
print('Insufficient number of cameras. Exiting...')
exit()
# Select camera on 0th index
cam = PyCapture2.Camera()
cam.connect(bus.getCameraFromIndex(0))
# Print camera details
print_camera_info(cam)
print('Starting capture...')
cam.startCapture()
print('Detecting frame rate from Camera')
fRateProp = cam.getProperty(PyCapture2.PROPERTY_TYPE.FRAME_RATE)
framerate = fRateProp.absValue
print('Using frame rate of {}'.format(framerate))
for file_format in ('AVI','H264','MJPG'):
filename = 'SaveImageToAviEx_{}.avi'.format(file_format)
save_video_helper(cam, file_format, filename.encode('utf-8'), framerate)
print('Stopping capture...')
cam.stopCapture()
cam.disconnect()
input('Done! Press Enter to exit...\n')
| 35.345794
| 104
| 0.641988
|
ac818486ddb8df5fc9b2b1c4d429452d1fecff71
| 4,614
|
py
|
Python
|
ark-tweet-nlp-0.3.2/postagger.py
|
enlighter/twitter-disasters-info-retrieval
|
0a362fa68c1604152709dc25922c5611b0d1a484
|
[
"MIT"
] | null | null | null |
ark-tweet-nlp-0.3.2/postagger.py
|
enlighter/twitter-disasters-info-retrieval
|
0a362fa68c1604152709dc25922c5611b0d1a484
|
[
"MIT"
] | null | null | null |
ark-tweet-nlp-0.3.2/postagger.py
|
enlighter/twitter-disasters-info-retrieval
|
0a362fa68c1604152709dc25922c5611b0d1a484
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Simple Python wrapper for runTagger.sh script for CMU's Tweet Tokeniser and Part of Speech tagger: http://www.ark.cs.cmu.edu/TweetNLP/
Usage:
results=runtagger_parse(['example tweet 1', 'example tweet 2'])
results will contain a list of lists (one per tweet) of triples, each triple represents (term, type, confidence)
"""
import subprocess
import shlex
# The only relevant source I've found is here:
# http://m1ked.com/post/12304626776/pos-tagger-for-twitter-successfully-implemented-in
# which is a very simple implementation, my implementation is a bit more
# useful (but not much).
# NOTE this command is directly lifted from runTagger.sh
RUN_TAGGER_CMD = "java -XX:ParallelGCThreads=2 -Xmx500m -jar ark-tweet-nlp-0.3.2.jar"
def _split_results(rows):
"""Parse the tab-delimited returned lines, modified from: https://github.com/brendano/ark-tweet-nlp/blob/master/scripts/show.py"""
for line in rows:
line = line.strip() # remove '\n'
if len(line) > 0:
if line.count('\t') == 2:
parts = line.split('\t')
tokens = parts[0]
tags = parts[1]
confidence = float(parts[2])
yield tokens, tags, confidence
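# Example (illustrative): a raw line "hello\t!\t0.9858" yields the triple
# ('hello', '!', 0.9858) from _split_results above.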
def _call_runtagger(tweets, run_tagger_cmd=RUN_TAGGER_CMD):
"""Call runTagger.sh using a named input file"""
# remove carriage returns as they are tweet separators for the stdin
# interface
tweets_cleaned = [tw.replace('\n', ' ') for tw in tweets]
message = "\n".join(tweets_cleaned)
# force UTF-8 encoding (from internal unicode type) to avoid .communicate encoding error as per:
# http://stackoverflow.com/questions/3040101/python-encoding-for-pipe-communicate
message = message.encode('utf-8')
# build a list of args
args = shlex.split(run_tagger_cmd)
args.append('--output-format')
args.append('conll')
po = subprocess.Popen(args, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
# old call - made a direct call to runTagger.sh (not Windows friendly)
#po = subprocess.Popen([run_tagger_cmd, '--output-format', 'conll'], stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
result = po.communicate(message)
# expect a tuple of 2 items like:
# ('hello\t!\t0.9858\nthere\tR\t0.4168\n\n',
# 'Listening on stdin for input. (-h for help)\nDetected text input format\nTokenized and tagged 1 tweets (2 tokens) in 7.5 seconds: 0.1 tweets/sec, 0.3 tokens/sec\n')
pos_result = result[0].strip('\n\n') # get first line, remove final double carriage return
pos_result = pos_result.split('\n\n') # split messages by double carriage returns
pos_results = [pr.split('\n') for pr in pos_result] # split parts of message by each carriage return
return pos_results
def runtagger_parse(tweets, run_tagger_cmd=RUN_TAGGER_CMD):
"""Call runTagger.sh on a list of tweets, parse the result, return lists of tuples of (term, type, confidence)"""
pos_raw_results = _call_runtagger(tweets, run_tagger_cmd)
pos_result = []
for pos_raw_result in pos_raw_results:
pos_result.append([x for x in _split_results(pos_raw_result)])
return pos_result
def check_script_is_present(run_tagger_cmd=RUN_TAGGER_CMD):
"""Simple test to make sure we can see the script"""
success = False
try:
args = shlex.split(run_tagger_cmd)
args.append("--help")
po = subprocess.Popen(args, stdout=subprocess.PIPE)
# old call - made a direct call to runTagger.sh (not Windows friendly)
#po = subprocess.Popen([run_tagger_cmd, '--help'], stdout=subprocess.PIPE)
while not po.poll():
lines = [l for l in po.stdout]
# we expected the first line of --help to look like the following:
assert "RunTagger [options]" in lines[0]
success = True
except OSError as err:
print "Caught an OSError, have you specified the correct path to runTagger.sh? We are using \"%s\". Exception: %r" % (run_tagger_cmd, repr(err))
return success
if __name__ == "__main__":
print "Checking that we can see \"%s\", this will crash if we can't" % (RUN_TAGGER_CMD)
success = check_script_is_present()
if success:
print "Success."
print "Now pass in two messages, get a list of tuples back:"
tweets = ['I predict I won\'t win a single game I bet on. Got Cliff Lee today, so if he loses its on me RT @e_one: Texas (cont) http:\//tl.gd\/6meogh', 'and a second message']
print runtagger_parse(tweets)
| 47.081633
| 183
| 0.683572
|
771d228a71da476ad35e7b87c9c46d55499dcd42
| 8,851
|
py
|
Python
|
great_expectations/exceptions.py
|
ap3xx/great_expectations
|
67251ff3fcb60b1a52a6ece1bec98fb8e96f6a96
|
[
"Apache-2.0"
] | null | null | null |
great_expectations/exceptions.py
|
ap3xx/great_expectations
|
67251ff3fcb60b1a52a6ece1bec98fb8e96f6a96
|
[
"Apache-2.0"
] | 47
|
2020-07-15T06:32:50.000Z
|
2022-03-29T12:03:23.000Z
|
great_expectations/exceptions.py
|
ap3xx/great_expectations
|
67251ff3fcb60b1a52a6ece1bec98fb8e96f6a96
|
[
"Apache-2.0"
] | null | null | null |
import importlib
import json
from marshmallow import ValidationError
class GreatExpectationsError(Exception):
def __init__(self, message):
self.message = message
super().__init__(message)
class GreatExpectationsValidationError(ValidationError, GreatExpectationsError):
def __init__(self, message, validation_error):
self.message = message
self.messages = validation_error.messages
class SuiteEditNotebookCustomTemplateModuleNotFoundError(ModuleNotFoundError):
def __init__(self, custom_module):
message = f"The custom module '{custom_module}' could not be found"
super().__init__(message)
class DataContextError(GreatExpectationsError):
pass
class CheckpointError(DataContextError):
pass
class CheckpointNotFoundError(CheckpointError):
pass
class StoreBackendError(DataContextError):
pass
class UnavailableMetricError(GreatExpectationsError):
pass
class ParserError(GreatExpectationsError):
pass
class InvalidConfigurationYamlError(DataContextError):
pass
class InvalidTopLevelConfigKeyError(GreatExpectationsError):
pass
class MissingTopLevelConfigKeyError(GreatExpectationsValidationError):
pass
class InvalidDataContextConfigError(GreatExpectationsValidationError):
pass
class InvalidBatchKwargsError(GreatExpectationsError):
pass
class InvalidBatchIdError(GreatExpectationsError):
pass
class InvalidDataContextKeyError(DataContextError):
pass
class UnsupportedConfigVersionError(DataContextError):
pass
class EvaluationParameterError(GreatExpectationsError):
pass
class ProfilerError(GreatExpectationsError):
pass
class InvalidConfigError(DataContextError):
def __init__(self, message):
self.message = message
super().__init__(self.message)
class MissingConfigVariableError(InvalidConfigError):
def __init__(self, message, missing_config_variable=None):
if not missing_config_variable:
missing_config_variable = []
self.message = message
self.missing_config_variable = missing_config_variable
super().__init__(self.message)
class AmbiguousDataAssetNameError(DataContextError):
def __init__(self, message, candidates=None):
self.message = message
self.candidates = candidates
super().__init__(self.message)
class StoreConfigurationError(DataContextError):
pass
class InvalidExpectationKwargsError(GreatExpectationsError):
pass
class InvalidExpectationConfigurationError(GreatExpectationsError):
pass
class InvalidValidationResultError(GreatExpectationsError):
pass
class GreatExpectationsTypeError(TypeError):
pass
class StoreError(DataContextError):
pass
class InvalidKeyError(StoreError):
pass
class InvalidCacheValueError(GreatExpectationsError):
def __init__(self, result_dict):
template = """\
Invalid result values were found when trying to instantiate an ExpectationValidationResult.
- Invalid result values are likely caused by inconsistent cache values.
- Great Expectations enables caching by default.
- Please ensure that caching behavior is consistent between the underlying Dataset (e.g. Spark) and Great Expectations.
Result: {}
"""
self.message = template.format(json.dumps(result_dict, indent=2))
super().__init__(self.message)
class ConfigNotFoundError(DataContextError):
"""The great_expectations dir could not be found."""
def __init__(self):
self.message = """Error: No great_expectations directory was found here!
- Please check that you are in the correct directory or have specified the correct directory.
- If you have never run Great Expectations in this project, please run `great_expectations init` to get started.
"""
super().__init__(self.message)
class PluginModuleNotFoundError(GreatExpectationsError):
"""A module import failed."""
def __init__(self, module_name):
template = """\
No module named `{}` could be found in your plugins directory.
- Please verify your plugins directory is configured correctly.
- Please verify you have a module named `{}` in your plugins directory.
"""
self.message = template.format(module_name, module_name)
colored_template = "<red>" + template + "</red>"
module_snippet = "</red><yellow>" + module_name + "</yellow><red>"
self.cli_colored_message = colored_template.format(
module_snippet, module_snippet
)
super().__init__(self.message)
class PluginClassNotFoundError(DataContextError, AttributeError):
"""A module import failed."""
def __init__(self, module_name, class_name):
class_name_changes = {
"FixedLengthTupleFilesystemStoreBackend": "TupleFilesystemStoreBackend",
"FixedLengthTupleS3StoreBackend": "TupleS3StoreBackend",
"FixedLengthTupleGCSStoreBackend": "TupleGCSStoreBackend",
"InMemoryEvaluationParameterStore": "EvaluationParameterStore",
"DatabricksTableGenerator": "DatabricksTableBatchKwargsGenerator",
"GlobReaderGenerator": "GlobReaderBatchKwargsGenerator",
"SubdirReaderGenerator": "SubdirReaderBatchKwargsGenerator",
"QueryGenerator": "QueryBatchKwargsGenerator",
"TableGenerator": "TableBatchKwargsGenerator",
"S3Generator": "S3GlobReaderBatchKwargsGenerator",
"ExtractAndStoreEvaluationParamsAction": "StoreEvaluationParametersAction",
"StoreAction": "StoreValidationResultAction",
}
if class_name_changes.get(class_name):
template = """The module: `{}` does not contain the class: `{}`.
The class name `{}` has changed to `{}`."""
self.message = template.format(
module_name, class_name, class_name, class_name_changes.get(class_name)
)
else:
template = """The module: `{}` does not contain the class: `{}`.
- Please verify that the class named `{}` exists."""
self.message = template.format(module_name, class_name, class_name)
colored_template = "<red>" + template + "</red>"
module_snippet = "</red><yellow>" + module_name + "</yellow><red>"
class_snippet = "</red><yellow>" + class_name + "</yellow><red>"
if class_name_changes.get(class_name):
new_class_snippet = (
"</red><yellow>" + class_name_changes.get(class_name) + "</yellow><red>"
)
self.cli_colored_message = colored_template.format(
module_snippet, class_snippet, class_snippet, new_class_snippet
)
else:
self.cli_colored_message = colored_template.format(
module_snippet, class_snippet, class_snippet,
)
super().__init__(self.message)
class ClassInstantiationError(GreatExpectationsError):
def __init__(self, module_name, package_name, class_name):
module_spec = importlib.util.find_spec(module_name, package=package_name)
if not module_spec:
if not package_name:
package_name = ""
self.message = f"""No module named "{package_name + module_name}" could be found in the repository. \
Please make sure that the file, corresponding to this package and module, exists and that dynamic loading of code \
modules, templates, and assets is supported in your execution environment. This error is unrecoverable.
"""
else:
self.message = f"""The module "{module_name}" exists; however, the system is unable to create an instance \
of the class "{class_name}", searched for inside this module. Please make sure that the class named "{class_name}" is \
properly defined inside its intended module and declared correctly by the calling entity. This error is unrecoverable.
"""
super().__init__(self.message)
class ExpectationSuiteNotFoundError(GreatExpectationsError):
def __init__(self, data_asset_name):
self.data_asset_name = data_asset_name
self.message = (
"No expectation suite found for data_asset_name %s" % data_asset_name
)
super().__init__(self.message)
class BatchKwargsError(DataContextError):
def __init__(self, message, batch_kwargs=None):
self.message = message
self.batch_kwargs = batch_kwargs
super().__init__(self.message)
class DatasourceInitializationError(GreatExpectationsError):
def __init__(self, datasource_name, message):
self.message = "Cannot initialize datasource %s, error: %s" % (
datasource_name,
message,
)
super().__init__(self.message)
class InvalidConfigValueTypeError(DataContextError):
pass
| 32.30292
| 120
| 0.707604
|
71945fb988133992fb441fa7ac8bb7f8654a6e1b
| 1,792
|
py
|
Python
|
python/pascal_triangle.py
|
shub0/leetcode
|
8221d10f201d001abcb15b27c9cf4b8cd5060f1f
|
[
"BSD-3-Clause"
] | null | null | null |
python/pascal_triangle.py
|
shub0/leetcode
|
8221d10f201d001abcb15b27c9cf4b8cd5060f1f
|
[
"BSD-3-Clause"
] | null | null | null |
python/pascal_triangle.py
|
shub0/leetcode
|
8221d10f201d001abcb15b27c9cf4b8cd5060f1f
|
[
"BSD-3-Clause"
] | null | null | null |
#! /usr/bin/python
'''
Given numRows, generate the first numRows of Pascal's triangle.
For example, given numRows = 5,
Return
[
[1],
[1,1],
[1,2,1],
[1,3,3,1],
[1,4,6,4,1]
]
'''
import math
class Solution:
# @param numRows, an integer
# @return a list of lists of integers
def generate(self, numRows):
if numRows == 0:
return []
if numRows == 1:
return [[1]]
pascal_triangle = list()
curr_row = [1]
pascal_triangle.append(curr_row)
for row_index in range(1, numRows):
curr_row = [0] * (row_index + 1)
curr_row[0] = 1
curr_row[-1] = 1
for column_index in range(1, row_index):
curr_row[column_index] = pascal_triangle[row_index - 1][column_index - 1] + pascal_triangle[row_index - 1][column_index]
pascal_triangle.append(curr_row)
return pascal_triangle
# @param n, an integer
# @param k, an integer
# @return an integer
def getCombinational(self, n, k):
return math.factorial(n) / math.factorial(n - k) / math.factorial(k)
# @param rowIndex, an integer
# @return a list of integers
def getRow(self, rowIndex):
if rowIndex == 0:
return [1]
prev_row = [1]
for curr_row_index in range(1, rowIndex + 1):
curr_row = [0] * (curr_row_index + 1)
curr_row[0] = 1
curr_row[-1] = 1
for column_index in range(1, curr_row_index):
curr_row[column_index] = prev_row[column_index - 1] + prev_row[column_index]
prev_row = curr_row
return prev_row
if __name__ == '__main__':
solution = Solution()
print solution.generate(7)
print solution.getRow(3)
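# Expected output (illustrative): generate(7) prints seven rows ending with
# [1, 6, 15, 20, 15, 6, 1], and getRow(3) prints [1, 3, 3, 1].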
| 28.444444
| 136
| 0.573103
|
2432cea86c791ca43e4b7bb4d8923916aa0be20d
| 122
|
py
|
Python
|
Python/School/Project/q2.py
|
abdalrhmanyasser/Abdalrhman_Rep
|
e0fc3caa2cc04e92f591ccd7934586986d194000
|
[
"CC0-1.0"
] | null | null | null |
Python/School/Project/q2.py
|
abdalrhmanyasser/Abdalrhman_Rep
|
e0fc3caa2cc04e92f591ccd7934586986d194000
|
[
"CC0-1.0"
] | null | null | null |
Python/School/Project/q2.py
|
abdalrhmanyasser/Abdalrhman_Rep
|
e0fc3caa2cc04e92f591ccd7934586986d194000
|
[
"CC0-1.0"
] | null | null | null |
new_text = []
for i in input("enter your text : ").split():
new_text.append(i.capitalize())
print(" ".join(new_text))
| 24.4
| 45
| 0.647541
|
2229d633063975601d8b69179326163a31b1091d
| 7,976
|
py
|
Python
|
PathPlanning/AStar/a_star.py
|
robberwick/PythonRobotics
|
e8ffc01cc7975e02e226b547583d955dda8d0150
|
[
"MIT"
] | 2
|
2020-03-07T11:04:57.000Z
|
2020-04-10T03:34:47.000Z
|
PathPlanning/AStar/a_star.py
|
JeffLIrion/PythonRobotics
|
487a7e4141dc4e2a0ae887e7fec98251900362b7
|
[
"MIT"
] | null | null | null |
PathPlanning/AStar/a_star.py
|
JeffLIrion/PythonRobotics
|
487a7e4141dc4e2a0ae887e7fec98251900362b7
|
[
"MIT"
] | 1
|
2022-03-07T10:30:07.000Z
|
2022-03-07T10:30:07.000Z
|
"""
A* grid planning
author: Atsushi Sakai(@Atsushi_twi)
Nikos Kanargias (nkana@tee.gr)
See Wikipedia article (https://en.wikipedia.org/wiki/A*_search_algorithm)
"""
import math
import matplotlib.pyplot as plt
show_animation = True
class AStarPlanner:
def __init__(self, ox, oy, reso, rr):
"""
Initialize grid map for a star planning
ox: x position list of Obstacles [m]
oy: y position list of Obstacles [m]
reso: grid resolution [m]
rr: robot radius[m]
"""
self.reso = reso
self.rr = rr
self.calc_obstacle_map(ox, oy)
self.motion = self.get_motion_model()
class Node:
def __init__(self, x, y, cost, pind):
self.x = x # index of grid
self.y = y # index of grid
self.cost = cost
self.pind = pind
def __str__(self):
return str(self.x) + "," + str(self.y) + "," + str(self.cost) + "," + str(self.pind)
def planning(self, sx, sy, gx, gy):
"""
A star path search
input:
sx: start x position [m]
sy: start y position [m]
gx: goal x position [m]
gy: goal y position [m]
output:
rx: x position list of the final path
ry: y position list of the final path
"""
nstart = self.Node(self.calc_xyindex(sx, self.minx),
self.calc_xyindex(sy, self.miny), 0.0, -1)
ngoal = self.Node(self.calc_xyindex(gx, self.minx),
self.calc_xyindex(gy, self.miny), 0.0, -1)
open_set, closed_set = dict(), dict()
open_set[self.calc_grid_index(nstart)] = nstart
while 1:
if len(open_set) == 0:
print("Open set is empty..")
break
c_id = min(
open_set, key=lambda o: open_set[o].cost + self.calc_heuristic(ngoal, open_set[o]))
current = open_set[c_id]
# show graph
if show_animation: # pragma: no cover
plt.plot(self.calc_grid_position(current.x, self.minx),
self.calc_grid_position(current.y, self.miny), "xc")
# for stopping simulation with the esc key.
plt.gcf().canvas.mpl_connect('key_release_event',
lambda event: [exit(0) if event.key == 'escape' else None])
if len(closed_set.keys()) % 10 == 0:
plt.pause(0.001)
if current.x == ngoal.x and current.y == ngoal.y:
print("Find goal")
ngoal.pind = current.pind
ngoal.cost = current.cost
break
# Remove the item from the open set
del open_set[c_id]
# Add it to the closed set
closed_set[c_id] = current
# expand_grid search grid based on motion model
for i, _ in enumerate(self.motion):
node = self.Node(current.x + self.motion[i][0],
current.y + self.motion[i][1],
current.cost + self.motion[i][2], c_id)
n_id = self.calc_grid_index(node)
# If the node is not safe, do nothing
if not self.verify_node(node):
continue
if n_id in closed_set:
continue
if n_id not in open_set:
open_set[n_id] = node # discovered a new node
else:
if open_set[n_id].cost > node.cost:
# This path is the best until now. record it
open_set[n_id] = node
rx, ry = self.calc_final_path(ngoal, closed_set)
return rx, ry
def calc_final_path(self, ngoal, closedset):
# generate final course
rx, ry = [self.calc_grid_position(ngoal.x, self.minx)], [
self.calc_grid_position(ngoal.y, self.miny)]
pind = ngoal.pind
while pind != -1:
n = closedset[pind]
rx.append(self.calc_grid_position(n.x, self.minx))
ry.append(self.calc_grid_position(n.y, self.miny))
pind = n.pind
return rx, ry
@staticmethod
def calc_heuristic(n1, n2):
w = 1.0 # weight of heuristic
d = w * math.hypot(n1.x - n2.x, n1.y - n2.y)
return d
def calc_grid_position(self, index, minp):
"""
calc grid position
:param index:
:param minp:
:return:
"""
pos = index * self.reso + minp
return pos
def calc_xyindex(self, position, min_pos):
return round((position - min_pos) / self.reso)
def calc_grid_index(self, node):
return (node.y - self.miny) * self.xwidth + (node.x - self.minx)
def verify_node(self, node):
px = self.calc_grid_position(node.x, self.minx)
py = self.calc_grid_position(node.y, self.miny)
if px < self.minx:
return False
elif py < self.miny:
return False
elif px >= self.maxx:
return False
elif py >= self.maxy:
return False
# collision check
if self.obmap[node.x][node.y]:
return False
return True
def calc_obstacle_map(self, ox, oy):
self.minx = round(min(ox))
self.miny = round(min(oy))
self.maxx = round(max(ox))
self.maxy = round(max(oy))
print("minx:", self.minx)
print("miny:", self.miny)
print("maxx:", self.maxx)
print("maxy:", self.maxy)
self.xwidth = round((self.maxx - self.minx) / self.reso)
self.ywidth = round((self.maxy - self.miny) / self.reso)
print("xwidth:", self.xwidth)
print("ywidth:", self.ywidth)
# obstacle map generation
self.obmap = [[False for i in range(self.ywidth)]
for i in range(self.xwidth)]
for ix in range(self.xwidth):
x = self.calc_grid_position(ix, self.minx)
for iy in range(self.ywidth):
y = self.calc_grid_position(iy, self.miny)
for iox, ioy in zip(ox, oy):
d = math.hypot(iox - x, ioy - y)
if d <= self.rr:
self.obmap[ix][iy] = True
break
@staticmethod
def get_motion_model():
# dx, dy, cost
motion = [[1, 0, 1],
[0, 1, 1],
[-1, 0, 1],
[0, -1, 1],
[-1, -1, math.sqrt(2)],
[-1, 1, math.sqrt(2)],
[1, -1, math.sqrt(2)],
[1, 1, math.sqrt(2)]]
return motion
def main():
print(__file__ + " start!!")
# start and goal position
sx = 10.0 # [m]
sy = 10.0 # [m]
gx = 50.0 # [m]
gy = 50.0 # [m]
grid_size = 2.0 # [m]
robot_radius = 1.0 # [m]
    # set obstacle positions
ox, oy = [], []
for i in range(-10, 60):
ox.append(i)
oy.append(-10.0)
for i in range(-10, 60):
ox.append(60.0)
oy.append(i)
for i in range(-10, 61):
ox.append(i)
oy.append(60.0)
for i in range(-10, 61):
ox.append(-10.0)
oy.append(i)
for i in range(-10, 40):
ox.append(20.0)
oy.append(i)
for i in range(0, 40):
ox.append(40.0)
oy.append(60.0 - i)
if show_animation: # pragma: no cover
plt.plot(ox, oy, ".k")
plt.plot(sx, sy, "og")
plt.plot(gx, gy, "xb")
plt.grid(True)
plt.axis("equal")
a_star = AStarPlanner(ox, oy, grid_size, robot_radius)
rx, ry = a_star.planning(sx, sy, gx, gy)
if show_animation: # pragma: no cover
plt.plot(rx, ry, "-r")
plt.show()
if __name__ == '__main__':
main()
| 29.109489
| 99
| 0.503009
|
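A note on the grid planner in the record above: calc_xyindex and calc_grid_position convert between world coordinates and grid indices via pos = index * reso + minp, and calc_heuristic is a weighted Euclidean distance. A minimal standalone sketch of those pieces (the resolution and origin values below are assumed, not taken from a real run):

import math

reso, minx = 2.0, -10.0  # assumed grid resolution [m] and map origin [m]

def world_to_index(position, min_pos):
    # same rounding rule as AStarPlanner.calc_xyindex
    return round((position - min_pos) / reso)

def index_to_world(index, min_pos):
    # same formula as AStarPlanner.calc_grid_position: pos = index * reso + minp
    return index * reso + min_pos

ix = world_to_index(10.0, minx)          # -> 10
assert index_to_world(ix, minx) == 10.0  # round-trips back to the start pose

def heuristic(x1, y1, x2, y2, w=1.0):
    # weighted Euclidean distance, as in AStarPlanner.calc_heuristic
    return w * math.hypot(x1 - x2, y1 - y2)

print(heuristic(10, 10, 50, 50))         # ~56.57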
7d040410ad52288978ee523cffe1c8bda5eea87a
| 1,840
|
py
|
Python
|
retinopathy/models/heads/gwap.py
|
RamsteinWR/Diabetic-Retinopathy-Blindness-Detection
|
24390aeefd197600255a961189872dd4dfc77092
|
[
"MIT"
] | 68
|
2019-09-08T20:04:23.000Z
|
2021-05-05T10:05:14.000Z
|
retinopathy/models/heads/gwap.py
|
RamsteinWR/Diabetic-Retinopathy-Blindness-Detection
|
24390aeefd197600255a961189872dd4dfc77092
|
[
"MIT"
] | 1
|
2019-09-24T06:40:33.000Z
|
2019-10-04T09:13:35.000Z
|
retinopathy/models/heads/gwap.py
|
RamsteinWR/Diabetic-Retinopathy-Blindness-Detection
|
24390aeefd197600255a961189872dd4dfc77092
|
[
"MIT"
] | 25
|
2019-09-09T04:42:51.000Z
|
2022-03-28T15:01:30.000Z
|
from pytorch_toolbelt.modules.pooling import GWAP
from torch import nn
class GlobalWeightedAvgPoolHead(nn.Module):
"""
    1) Pool the last feature map with global weighted average pooling
    2) Project the pooled features into num_classes logits
"""
def __init__(self, feature_maps, num_classes: int, dropout=0.):
super().__init__()
self.features_size = feature_maps[-1]
self.gwap = GWAP(self.features_size)
self.dropout = nn.Dropout(dropout)
self.logits = nn.Linear(self.features_size, num_classes)
# Regression to grade using SSD-like module
self.regression = nn.Sequential(
nn.Linear(self.features_size, 16),
nn.ELU(inplace=True),
nn.Linear(16, 16),
nn.ELU(inplace=True),
nn.Linear(16, 16),
nn.ELU(inplace=True),
nn.Linear(16, 1),
nn.ELU(inplace=True),
)
self.ordinal = nn.Sequential(
nn.Linear(self.features_size, 16),
nn.ELU(inplace=True),
nn.Linear(16, 16),
nn.ELU(inplace=True),
nn.Linear(16, 16),
nn.ELU(inplace=True),
nn.Linear(16, num_classes - 1),
)
def forward(self, feature_maps):
# Take last feature map
features = feature_maps[-1]
features = self.gwap(features)
features = features.view(features.size(0), features.size(1))
features = self.dropout(features)
logits = self.logits(features)
regression = self.regression(features)
if regression.size(1) == 1:
regression = regression.squeeze(1)
ordinal = self.ordinal(features).sigmoid().sum(dim=1)
return {
'features': features,
'logits': logits,
'regression': regression,
'ordinal': ordinal
}
| 29.677419
| 68
| 0.571739
|
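The head in the record above pools only the last feature map with global weighted average pooling and returns a dict of classification logits plus regression and ordinal estimates. A minimal usage sketch; the channel sizes, batch size and spatial sizes are assumed for illustration, and it presumes pytorch_toolbelt is installed and the class is importable:

import torch

head = GlobalWeightedAvgPoolHead(feature_maps=[256, 512], num_classes=5, dropout=0.1)
dummy_maps = [torch.randn(2, 256, 16, 16), torch.randn(2, 512, 8, 8)]  # only the last map is used
out = head(dummy_maps)
print(out['logits'].shape)      # torch.Size([2, 5])
print(out['regression'].shape)  # torch.Size([2])
print(out['ordinal'].shape)     # torch.Size([2])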
092905028cc957eb5b398e024528b38074f0b84c
| 1,088
|
py
|
Python
|
02-files-lab2/read_write_file.py
|
iproduct/course-robotics-npmg
|
0feb2ded46007ba87b8128f1f2e039036ef274bd
|
[
"Apache-2.0"
] | null | null | null |
02-files-lab2/read_write_file.py
|
iproduct/course-robotics-npmg
|
0feb2ded46007ba87b8128f1f2e039036ef274bd
|
[
"Apache-2.0"
] | null | null | null |
02-files-lab2/read_write_file.py
|
iproduct/course-robotics-npmg
|
0feb2ded46007ba87b8128f1f2e039036ef274bd
|
[
"Apache-2.0"
] | 1
|
2021-03-17T09:08:02.000Z
|
2021-03-17T09:08:02.000Z
|
"""File read write demo script"""
def find_count(substring, string):
"""finds the number of occurences of substring in string"""
counter = 0
index = string.find(substring)
while index >= 0:
counter += 1
index = string.find(substring, index + 1)
return counter
if __name__ == "__main__":
"""main script"""
with open("read_write_file.py", "rt") as f: # read file with automatic closing
with open("comments.txt", "wt") as out:
for line in f: # read one line
start_of_comment = line.find("#")
str_start = line[:start_of_comment]
quote_count = find_count("'", str_start)
double_quote_count = find_count('"', str_start)
find_count('"', str_start) % 2 == 0
if start_of_comment > 0 and quote_count % 2 == 0 and double_quote_count % 2 == 0:
comment = line[start_of_comment:] # get comment only string
print(comment, end="") # print comment
out.write(comment) # write to file
| 41.846154
| 97
| 0.571691
|
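In the comment-extraction script above, find_count counts (possibly overlapping) occurrences of a substring and the quote-parity check keeps a '#' inside a string literal from being mistaken for a comment. A quick illustration with a made-up line, assuming find_count is in scope:

line = 'x = "no # comment here"  # a real comment'
start_of_comment = line.find("#")  # index of the first '#'
prefix = line[:start_of_comment]
print(find_count('"', prefix))     # 1 -> odd, so this '#' sits inside a string literal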
be61eced7489304f1a225d0dbe7102f6434c5a6b
| 2,975
|
py
|
Python
|
sdk/recoveryservices/azure-mgmt-recoveryservicesbackup/azure/mgmt/recoveryservicesbackup/models/azure_vm_workload_sap_hana_system_workload_item.py
|
SanjayHukumRana/azure-sdk-for-python
|
0669a0f07aaead29852f9d59cce8bc2d6085a7a2
|
[
"MIT"
] | null | null | null |
sdk/recoveryservices/azure-mgmt-recoveryservicesbackup/azure/mgmt/recoveryservicesbackup/models/azure_vm_workload_sap_hana_system_workload_item.py
|
SanjayHukumRana/azure-sdk-for-python
|
0669a0f07aaead29852f9d59cce8bc2d6085a7a2
|
[
"MIT"
] | null | null | null |
sdk/recoveryservices/azure-mgmt-recoveryservicesbackup/azure/mgmt/recoveryservicesbackup/models/azure_vm_workload_sap_hana_system_workload_item.py
|
SanjayHukumRana/azure-sdk-for-python
|
0669a0f07aaead29852f9d59cce8bc2d6085a7a2
|
[
"MIT"
] | null | null | null |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from .azure_vm_workload_item import AzureVmWorkloadItem
class AzureVmWorkloadSAPHanaSystemWorkloadItem(AzureVmWorkloadItem):
"""Azure VM workload-specific workload item representing SAP HANA System.
All required parameters must be populated in order to send to Azure.
:param backup_management_type: Type of backup management to backup an
item.
:type backup_management_type: str
:param workload_type: Type of workload for the backup management
:type workload_type: str
:param friendly_name: Friendly name of the backup item.
:type friendly_name: str
:param protection_state: State of the back up item. Possible values
include: 'Invalid', 'NotProtected', 'Protecting', 'Protected',
'ProtectionFailed'
:type protection_state: str or
~azure.mgmt.recoveryservicesbackup.models.ProtectionStatus
:param workload_item_type: Required. Constant filled by server.
:type workload_item_type: str
:param parent_name: Name for instance or AG
:type parent_name: str
:param server_name: Host/Cluster Name for instance or AG
:type server_name: str
:param is_auto_protectable: Indicates if workload item is auto-protectable
:type is_auto_protectable: bool
:param subinquireditemcount: For instance or AG, indicates number of DB's
present
:type subinquireditemcount: int
:param sub_workload_item_count: For instance or AG, indicates number of
DB's to be protected
:type sub_workload_item_count: int
"""
_validation = {
'workload_item_type': {'required': True},
}
_attribute_map = {
'backup_management_type': {'key': 'backupManagementType', 'type': 'str'},
'workload_type': {'key': 'workloadType', 'type': 'str'},
'friendly_name': {'key': 'friendlyName', 'type': 'str'},
'protection_state': {'key': 'protectionState', 'type': 'str'},
'workload_item_type': {'key': 'workloadItemType', 'type': 'str'},
'parent_name': {'key': 'parentName', 'type': 'str'},
'server_name': {'key': 'serverName', 'type': 'str'},
'is_auto_protectable': {'key': 'isAutoProtectable', 'type': 'bool'},
'subinquireditemcount': {'key': 'subinquireditemcount', 'type': 'int'},
'sub_workload_item_count': {'key': 'subWorkloadItemCount', 'type': 'int'},
}
def __init__(self, **kwargs):
super(AzureVmWorkloadSAPHanaSystemWorkloadItem, self).__init__(**kwargs)
self.workload_item_type = 'SAPHanaSystem'
| 43.75
| 82
| 0.665882
|
3affa5d4bec0cc1e46177bddedea7a476ca9552b
| 9,970
|
py
|
Python
|
game/sprites.py
|
coder489/Freestyle
|
d681bc839dd4b085f31e9b471edc5211388ddf83
|
[
"MIT"
] | null | null | null |
game/sprites.py
|
coder489/Freestyle
|
d681bc839dd4b085f31e9b471edc5211388ddf83
|
[
"MIT"
] | null | null | null |
game/sprites.py
|
coder489/Freestyle
|
d681bc839dd4b085f31e9b471edc5211388ddf83
|
[
"MIT"
] | null | null | null |
import pygame as pg
from settings import *
from os import path
vec = pg.math.Vector2
class Player(pg.sprite.Sprite):
"""
Creates the Player class to provide a template for players in the game.
"""
def __init__(self, game, img):
"""
Initializes (sets up) the player class.
Parameters:
self (self): keyword we can access the attributes and methods
of the class in python
game: used to reference items in the game class
img (.png file): png file that has an image for the player
Source: YouTube Videos KidsCanCode provided information needed for initial setup of code, though code was majorly altered to tailor to project
Source Link: https://www.youtube.com/watch?v=uWvb3QzA48c
"""
self.game = game
pg.sprite.Sprite.__init__(self)
self.image = pg.Surface((32,32))
self.image = pg.image.load(path.join(img_dir, img)).convert_alpha()
self.rect = self.image.get_rect()
self.rect.center = (WIDTH / 2, HEIGHT / 2)
self.pos = vec(WIDTH/2, HEIGHT/2)
self.vel = vec(0,0)
self.acc = vec(0,0)
self.health = PLAYER_HEALTH
self.radius = 15
def jump(self):
"""
Defines rules for the player action of jumping.
Parameters:
self (self): keyword we can access the attributes and methods
of the class in python
Source: YouTube Videos KidsCanCode provided information needed for initial setup of code, though code was majorly altered to tailor to project
Source Link: https://www.youtube.com/watch?v=uWvb3QzA48c
"""
self.rect.y += 1
hits = pg.sprite.spritecollide(self,self.game.platforms, False)
self.rect.y -= 1
if hits:
self.vel.y = -PLAYER_JUMP
def update(self):
"""
Method to control sprite's behavior (player movement).
Parameters:
self (self): keyword we can access the attributes and methods
of the class in python
Source: YouTube Videos KidsCanCode provided information needed for initial setup of code, though code was majorly altered to tailor to project
Source Link: https://www.youtube.com/watch?v=uWvb3QzA48c
"""
self.acc = vec(0,PLAYER_GRAV)
keys = pg.key.get_pressed()
if keys[pg.K_LEFT]:
self.acc.x = -PLAYER_ACC
if keys[pg.K_RIGHT]:
self.acc.x = PLAYER_ACC
# apply friction
self.acc.x += self.vel.x * PLAYER_FRICTION
# equations of motion
self.vel += self.acc
self.pos += self.vel + 0.5 * self.acc
# Nothing passed the sides
self.rect.midbottom = self.pos
if self.pos.x > WIDTH:
self.pos.x = WIDTH
if self.pos.x < 0:
self.pos.x = 0
class Platform(pg.sprite.Sprite):
"""
Creates the Platform class to provide a template for platforms in the game.
"""
def __init__(self, x, y, w, h):
"""
Initializes (sets up) the platform class.
Parameters:
self (self): keyword we can access the attributes and methods
of the class in python
x (int): x coordinate of the platform on the screen (changing
                the coordinate moves the platform horizontally)
y (int): y coordinate of the platform on the screen (changing
                the coordinate moves the platform vertically)
w (int): length of the platform (changing the coordinate makes
the platform longer)
h (int): height of the platform (changing the coordinate makes
the platform taller)
Source: YouTube Videos KidsCanCode provided information needed for initial setup of code, though code was majorly altered to tailor to project
Source Link: https://www.youtube.com/watch?v=uWvb3QzA48c
"""
pg.sprite.Sprite.__init__(self)
self.image = pg.Surface((w,h))
self.image.fill(BLACK)
self.rect = self.image.get_rect()
self.rect.x = x
self.rect.y = y
class Enemy(pg.sprite.Sprite):
"""
Creates the Enemy class to provide a template for enemies in the game.
"""
def __init__(self,x,y, img):
"""
Initializes (sets up) the enemy class.
Parameters:
self (self): keyword we can access the attributes and methods
of the class in python
x (int): x coordinate of the platform on the screen (changing
the coordinate moves the platform horizontally)
y (int): y coordinate of the platform on the screen (changing
the coordinate moves the platform vertically)
img (.png file): png file that has an image for the enemy
"""
pg.sprite.Sprite.__init__(self)
self.image = pg.image.load(path.join(img_dir, img)).convert_alpha()
self.rect = self.image.get_rect()
self.rect.x = x
self.rect.y = y
self.health = ENEMY_HEALTH
def update(self):
"""
Method to control sprite's behavior (enemy health).
Parameters:
self (self): keyword we can access the attributes and methods
of the class in python
"""
if self.health <= 0:
self.kill()
            # forward slashes avoid the invalid "\s"/"\e" escape sequences and work on all platforms
            death_sound = pg.mixer.Sound('game/sounds/explode.ogg')
pg.mixer.Sound.play(death_sound)
def draw_health(self):
"""
Used to draw the enemy health bars.
Parameters:
self (self): keyword we can access the attributes and methods
of the class in python
"""
if self.health > 60:
col = GREEN
elif self.health > 30:
col = YELLOW
else:
col = RED
width = int(self.rect.width * self.health/ENEMY_HEALTH)
width2 = int(self.rect.width)
self.health_bar = pg.Rect(0, 0, width, 7)
self.total = pg.Rect(0,0, width2, 7)
if self.health < ENEMY_HEALTH:
pg.draw.rect(self.image, BLACK, self.total)
pg.draw.rect(self.image, col, self.health_bar)
class Arrow(pg.sprite.Sprite):
"""
Creates the Arrow class to provide a template for arrows (player weapons) in the game.
"""
def __init__(self, x, y, img):
"""
Initializes (sets up) the arrow class.
Parameters:
self (self): keyword we can access the attributes and methods
of the class in python
x (int): x coordinate of the arrow on the screen
y (int): y coordinate of the arrow on the screen
img (.png file): png file that has an image for the enemy
Source: YouTube Videos KidsCanCode provided information needed for initial setup of code, though code was majorly altered to tailor to project
Source Link: https://www.youtube.com/watch?v=uWvb3QzA48c
"""
pg.sprite.Sprite.__init__(self)
self.image = pg.image.load(path.join(img_dir, img)).convert_alpha()
self.image.set_colorkey(BLACK)
self.rect = self.image.get_rect()
self.rect.centery = y
self.rect.centerx = x
self.pos = vec(x, y)
self.vel = vec(ARROW_SPEED,-3)
self.acc = vec(0,0)
def update(self):
"""
Method to control sprite's behavior (arrow movement and impact).
Parameters:
self (self): keyword we can access the attributes and methods
of the class in python
Source: YouTube Videos KidsCanCode provided information needed for initial setup of code, though code was majorly altered to tailor to project
Source Link: https://www.youtube.com/watch?v=uWvb3QzA48c
"""
# equations of motion
self.acc = vec(0, PLAYER_GRAV)
self.acc.x += self.vel.x
self.vel.y += self.acc.y
self.pos += self.vel + 0.5 * self.acc
self.rect.x = self.pos.x
self.rect.y = self.pos.y - 32
if self.rect.x > WIDTH + 100:
self.kill()
if self.rect.y > HEIGHT + 100:
self.kill()
class Fireball(pg.sprite.Sprite):
"""
Creates the Fireball class to provide a template for fireballs (enemy weapons) in the game.
"""
def __init__(self, x, y, img):
"""
Initializes (sets up) the fireball class.
Parameters:
self (self): keyword we can access the attributes and methods
of the class in python
x (int): x coordinate of the fireball on the screen
y (int): y coordinate of the fireball on the screen
img (.png file): png file that has an image for the enemy
"""
pg.sprite.Sprite.__init__(self)
self.image = pg.image.load(path.join(img_dir, img)).convert_alpha()
self.image.set_colorkey(BLACK)
self.rect = self.image.get_rect()
self.rect.centery = y
self.rect.centerx = x
self.pos = vec(x, y)
self.vel = vec(-FIREBALL_SPEED,0)
self.acc = vec(0,0)
def update(self):
"""
Method to control sprite's behavior (fireball movement and impact).
Parameters:
self (self): keyword we can access the attributes and methods
of the class in python
"""
# equations of motion
self.acc = vec(0, 0.006)
self.acc.x += self.vel.x
self.vel.y += self.acc.y
self.pos += self.vel + 0.5 * self.acc
self.rect.x = self.pos.x
self.rect.y = self.pos.y - 64
| 31.550633
| 150
| 0.577332
|
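Player.update in the sprites module above integrates motion each frame: acceleration is built from input and gravity, friction is applied to the horizontal component, then vel += acc and pos += vel + 0.5 * acc. One frame of that update in isolation; the PLAYER_* constants are assumed values, not the ones from settings.py:

import pygame as pg

vec = pg.math.Vector2
PLAYER_ACC, PLAYER_FRICTION, PLAYER_GRAV = 0.5, -0.12, 0.8  # assumed tuning values

pos, vel = vec(100, 200), vec(0, 0)
acc = vec(PLAYER_ACC, PLAYER_GRAV)  # e.g. the right arrow is held this frame
acc.x += vel.x * PLAYER_FRICTION    # friction opposes horizontal velocity
vel += acc
pos += vel + 0.5 * acc              # same integration step as Player.update
print(pos)                          # [100.75, 201.2]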
6c2220b72b2e87c7fb5f4421687f68764fe2beaa
| 4,580
|
py
|
Python
|
modules/tests/org/change_user_roles.py
|
nursix/STL
|
682d8455c8e1c761f48542dad96da08767301923
|
[
"MIT"
] | 1
|
2017-11-16T14:50:19.000Z
|
2017-11-16T14:50:19.000Z
|
modules/tests/org/change_user_roles.py
|
vpccalderara/sahana
|
6eb3f9798879dfa51bbe5d2b84829b1402671499
|
[
"MIT"
] | null | null | null |
modules/tests/org/change_user_roles.py
|
vpccalderara/sahana
|
6eb3f9798879dfa51bbe5d2b84829b1402671499
|
[
"MIT"
] | null | null | null |
""" Sahana Eden Automated Test - ORG010 Change User Roles
@copyright: 2011-2017 (c) Sahana Software Foundation
@license: MIT
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
from selenium.webdriver.support.ui import Select
from gluon import current
from tests.web2unittest import SeleniumUnitTest
class ChangeUserRole(SeleniumUnitTest):
"""
Desc: Change User Roles
Case: ORG010
TO DO: Check if works and upgrade to new test system framework.
"""
def org010():
"""
1. Log in as admin
2. Give test user org admin rights over Timor-Leste
3. Give user margarida.martins@redcross.tl some access on Timor-Leste
4. Log in as test user
5. Revoke all access for margarida.martins@redcross.tl on Timor-Leste
"""
as_admin()
logout()
as_orgadmin()
logout()
def as_admin():
"""
Run the tests as an administrator
"""
config = current.test_config
browser = config.browser
driver = browser
login(account='admin')
make_user_orgadmin()
open_organisation_roles()
select_user()
# Set some new access levels
driver.find_element_by_id('role_volvol_reader').click()
driver.find_element_by_id('role_projectproject_data_entry').click()
driver.find_element_by_id('role_projectproject_data_entry').submit()
# @todo: check the values of the matrix
def as_orgadmin():
"""
Run the tests as an org admin
"""
config = current.test_config
browser = config.browser
driver = browser
login()
open_organisation_roles(action="Details")
select_user()
# Reset those access levels back to None
driver.find_element_by_id('role_volNone').click()
driver.find_element_by_id('role_projectNone').click()
driver.find_element_by_id('role_projectNone').submit()
# @todo: check the values of the matrix
def make_user_orgadmin():
config = current.test_config
browser = config.browser
driver = browser
browser.get("%s/admin/user" % config.url)
    # Open the roles page for the test@example.com user account
dt_filter("test@example.com")
dt_action(action="Roles")
# Give org admin rights to Test User on Timor-Leste Red Cross Society
Select(driver.find_element_by_name("group_id")).select_by_visible_text("Organisation Admin")
Select(driver.find_element_by_name("pe_id")).select_by_visible_text("Timor-Leste Red Cross Society (Organization)")
driver.find_element_by_id("submit_add_button").click()
def open_organisation_roles(action="Open"):
config = current.test_config
browser = config.browser
driver = browser
# Go to the organisation list
browser.get("%s/org/organisation" % config.url)
# Open the Timor-Leste organisation
dt_filter("Timor-Leste")
dt_action(action=action)
# Go to the organisations' User Roles tab
driver.find_element_by_link_text("User Roles").click()
def select_user():
config = current.test_config
browser = config.browser
driver = browser
# Select a user from the drop-down list
Select(driver.find_element_by_name("user")).select_by_visible_text("test@example.com")
driver.find_element_by_xpath("//input[@type='submit']").click()
| 33.925926
| 123
| 0.671397
|
bbd5c01695ae5b038f6fa40a26b2f8255461d01d
| 10,370
|
py
|
Python
|
sympy/functions/special/spherical_harmonics.py
|
ovolve/sympy
|
0a15782f20505673466b940454b33b8014a25c13
|
[
"BSD-3-Clause"
] | 4
|
2018-07-04T17:20:12.000Z
|
2019-07-14T18:07:25.000Z
|
sympy/functions/special/spherical_harmonics.py
|
ovolve/sympy
|
0a15782f20505673466b940454b33b8014a25c13
|
[
"BSD-3-Clause"
] | 7
|
2017-05-01T14:15:32.000Z
|
2017-09-06T20:44:24.000Z
|
sympy/functions/special/spherical_harmonics.py
|
ovolve/sympy
|
0a15782f20505673466b940454b33b8014a25c13
|
[
"BSD-3-Clause"
] | 1
|
2020-09-09T15:20:27.000Z
|
2020-09-09T15:20:27.000Z
|
from __future__ import print_function, division
from sympy import pi, I
from sympy.core.singleton import S
from sympy.core import Dummy, sympify
from sympy.core.function import Function, ArgumentIndexError
from sympy.functions import assoc_legendre
from sympy.functions.elementary.trigonometric import sin, cos, cot
from sympy.functions.combinatorial.factorials import factorial
from sympy.functions.elementary.complexes import Abs
from sympy.functions.elementary.exponential import exp
from sympy.functions.elementary.miscellaneous import sqrt
_x = Dummy("x")
class Ynm(Function):
r"""
Spherical harmonics defined as
.. math::
Y_n^m(\theta, \varphi) := \sqrt{\frac{(2n+1)(n-m)!}{4\pi(n+m)!}}
\exp(i m \varphi)
\mathrm{P}_n^m\left(\cos(\theta)\right)
Ynm() gives the spherical harmonic function of order `n` and `m`
in `\theta` and `\varphi`, `Y_n^m(\theta, \varphi)`. The four
parameters are as follows: `n \geq 0` an integer and `m` an integer
such that `-n \leq m \leq n` holds. The two angles are real-valued
with `\theta \in [0, \pi]` and `\varphi \in [0, 2\pi]`.
Examples
========
>>> from sympy import Ynm, Symbol
>>> from sympy.abc import n,m
>>> theta = Symbol("theta")
>>> phi = Symbol("phi")
>>> Ynm(n, m, theta, phi)
Ynm(n, m, theta, phi)
Several symmetries are known, for the order
>>> from sympy import Ynm, Symbol
>>> from sympy.abc import n,m
>>> theta = Symbol("theta")
>>> phi = Symbol("phi")
>>> Ynm(n, -m, theta, phi)
(-1)**m*exp(-2*I*m*phi)*Ynm(n, m, theta, phi)
as well as for the angles
>>> from sympy import Ynm, Symbol, simplify
>>> from sympy.abc import n,m
>>> theta = Symbol("theta")
>>> phi = Symbol("phi")
>>> Ynm(n, m, -theta, phi)
Ynm(n, m, theta, phi)
>>> Ynm(n, m, theta, -phi)
exp(-2*I*m*phi)*Ynm(n, m, theta, phi)
    For specific integers n and m we can evaluate the harmonics
to more useful expressions
>>> simplify(Ynm(0, 0, theta, phi).expand(func=True))
1/(2*sqrt(pi))
>>> simplify(Ynm(1, -1, theta, phi).expand(func=True))
sqrt(6)*exp(-I*phi)*sin(theta)/(4*sqrt(pi))
>>> simplify(Ynm(1, 0, theta, phi).expand(func=True))
sqrt(3)*cos(theta)/(2*sqrt(pi))
>>> simplify(Ynm(1, 1, theta, phi).expand(func=True))
-sqrt(6)*exp(I*phi)*sin(theta)/(4*sqrt(pi))
>>> simplify(Ynm(2, -2, theta, phi).expand(func=True))
sqrt(30)*exp(-2*I*phi)*sin(theta)**2/(8*sqrt(pi))
>>> simplify(Ynm(2, -1, theta, phi).expand(func=True))
sqrt(30)*exp(-I*phi)*sin(2*theta)/(8*sqrt(pi))
>>> simplify(Ynm(2, 0, theta, phi).expand(func=True))
sqrt(5)*(3*cos(theta)**2 - 1)/(4*sqrt(pi))
>>> simplify(Ynm(2, 1, theta, phi).expand(func=True))
-sqrt(30)*exp(I*phi)*sin(2*theta)/(8*sqrt(pi))
>>> simplify(Ynm(2, 2, theta, phi).expand(func=True))
sqrt(30)*exp(2*I*phi)*sin(theta)**2/(8*sqrt(pi))
We can differentiate the functions with respect
to both angles
>>> from sympy import Ynm, Symbol, diff
>>> from sympy.abc import n,m
>>> theta = Symbol("theta")
>>> phi = Symbol("phi")
>>> diff(Ynm(n, m, theta, phi), theta)
m*cot(theta)*Ynm(n, m, theta, phi) + sqrt((-m + n)*(m + n + 1))*exp(-I*phi)*Ynm(n, m + 1, theta, phi)
>>> diff(Ynm(n, m, theta, phi), phi)
I*m*Ynm(n, m, theta, phi)
Further we can compute the complex conjugation
>>> from sympy import Ynm, Symbol, conjugate
>>> from sympy.abc import n,m
>>> theta = Symbol("theta")
>>> phi = Symbol("phi")
>>> conjugate(Ynm(n, m, theta, phi))
(-1)**(2*m)*exp(-2*I*m*phi)*Ynm(n, m, theta, phi)
To get back the well known expressions in spherical
coordinates we use full expansion
>>> from sympy import Ynm, Symbol, expand_func
>>> from sympy.abc import n,m
>>> theta = Symbol("theta")
>>> phi = Symbol("phi")
>>> expand_func(Ynm(n, m, theta, phi))
sqrt((2*n + 1)*factorial(-m + n)/factorial(m + n))*exp(I*m*phi)*assoc_legendre(n, m, cos(theta))/(2*sqrt(pi))
See Also
========
Ynm_c, Znm
References
==========
.. [1] http://en.wikipedia.org/wiki/Spherical_harmonics
.. [2] http://mathworld.wolfram.com/SphericalHarmonic.html
.. [3] http://functions.wolfram.com/Polynomials/SphericalHarmonicY/
.. [4] http://dlmf.nist.gov/14.30
"""
@classmethod
def eval(cls, n, m, theta, phi):
n, m, theta, phi = [sympify(x) for x in (n, m, theta, phi)]
# Handle negative index m and arguments theta, phi
if m.could_extract_minus_sign():
m = -m
return S.NegativeOne**m * exp(-2*I*m*phi) * Ynm(n, m, theta, phi)
if theta.could_extract_minus_sign():
theta = -theta
return Ynm(n, m, theta, phi)
if phi.could_extract_minus_sign():
phi = -phi
return exp(-2*I*m*phi) * Ynm(n, m, theta, phi)
        # TODO Add more simplification here
def _eval_expand_func(self, **hints):
n, m, theta, phi = self.args
rv = (sqrt((2*n + 1)/(4*pi) * factorial(n - m)/factorial(n + m)) *
exp(I*m*phi) * assoc_legendre(n, m, cos(theta)))
# We can do this because of the range of theta
return rv.subs(sqrt(-cos(theta)**2 + 1), sin(theta))
def fdiff(self, argindex=4):
if argindex == 1:
# Diff wrt n
raise ArgumentIndexError(self, argindex)
elif argindex == 2:
# Diff wrt m
raise ArgumentIndexError(self, argindex)
elif argindex == 3:
# Diff wrt theta
n, m, theta, phi = self.args
return (m * cot(theta) * Ynm(n, m, theta, phi) +
sqrt((n - m)*(n + m + 1)) * exp(-I*phi) * Ynm(n, m + 1, theta, phi))
elif argindex == 4:
# Diff wrt phi
n, m, theta, phi = self.args
return I * m * Ynm(n, m, theta, phi)
else:
raise ArgumentIndexError(self, argindex)
def _eval_rewrite_as_polynomial(self, n, m, theta, phi):
# TODO: Make sure n \in N
        # TODO: Assert |m| <= n otherwise we should return 0
return self.expand(func=True)
def _eval_rewrite_as_sin(self, n, m, theta, phi):
return self.rewrite(cos)
def _eval_rewrite_as_cos(self, n, m, theta, phi):
# This method can be expensive due to extensive use of simplification!
from sympy.simplify import simplify, trigsimp
# TODO: Make sure n \in N
        # TODO: Assert |m| <= n otherwise we should return 0
term = simplify(self.expand(func=True))
# We can do this because of the range of theta
term = term.xreplace({Abs(sin(theta)):sin(theta)})
return simplify(trigsimp(term))
def _eval_conjugate(self):
# TODO: Make sure theta \in R and phi \in R
n, m, theta, phi = self.args
return S.NegativeOne**m * self.func(n, -m, theta, phi)
def as_real_imag(self, deep=True, **hints):
# TODO: Handle deep and hints
n, m, theta, phi = self.args
re = (sqrt((2*n + 1)/(4*pi) * factorial(n - m)/factorial(n + m)) *
cos(m*phi) * assoc_legendre(n, m, cos(theta)))
im = (sqrt((2*n + 1)/(4*pi) * factorial(n - m)/factorial(n + m)) *
sin(m*phi) * assoc_legendre(n, m, cos(theta)))
return (re, im)
def _eval_evalf(self, prec):
# Note: works without this function by just calling
# mpmath for Legendre polynomials. But using
# the dedicated function directly is cleaner.
from mpmath import mp, workprec
from sympy import Expr
n = self.args[0]._to_mpmath(prec)
m = self.args[1]._to_mpmath(prec)
theta = self.args[2]._to_mpmath(prec)
phi = self.args[3]._to_mpmath(prec)
with workprec(prec):
res = mp.spherharm(n, m, theta, phi)
return Expr._from_mpmath(res, prec)
def _sage_(self):
import sage.all as sage
return sage.spherical_harmonic(self.args[0]._sage_(),
self.args[1]._sage_(),
self.args[2]._sage_(),
self.args[3]._sage_())
def Ynm_c(n, m, theta, phi):
r"""Conjugate spherical harmonics defined as
.. math::
\overline{Y_n^m(\theta, \varphi)} := (-1)^m Y_n^{-m}(\theta, \varphi)
See Also
========
Ynm, Znm
References
==========
.. [1] http://en.wikipedia.org/wiki/Spherical_harmonics
.. [2] http://mathworld.wolfram.com/SphericalHarmonic.html
.. [3] http://functions.wolfram.com/Polynomials/SphericalHarmonicY/
"""
from sympy import conjugate
return conjugate(Ynm(n, m, theta, phi))
class Znm(Function):
r"""
Real spherical harmonics defined as
.. math::
Z_n^m(\theta, \varphi) :=
\begin{cases}
\frac{Y_n^m(\theta, \varphi) + \overline{Y_n^m(\theta, \varphi)}}{\sqrt{2}} &\quad m > 0 \\
Y_n^m(\theta, \varphi) &\quad m = 0 \\
\frac{Y_n^m(\theta, \varphi) - \overline{Y_n^m(\theta, \varphi)}}{i \sqrt{2}} &\quad m < 0 \\
\end{cases}
which gives in simplified form
.. math::
Z_n^m(\theta, \varphi) =
\begin{cases}
\frac{Y_n^m(\theta, \varphi) + (-1)^m Y_n^{-m}(\theta, \varphi)}{\sqrt{2}} &\quad m > 0 \\
Y_n^m(\theta, \varphi) &\quad m = 0 \\
\frac{Y_n^m(\theta, \varphi) - (-1)^m Y_n^{-m}(\theta, \varphi)}{i \sqrt{2}} &\quad m < 0 \\
\end{cases}
See Also
========
Ynm, Ynm_c
References
==========
.. [1] http://en.wikipedia.org/wiki/Spherical_harmonics
.. [2] http://mathworld.wolfram.com/SphericalHarmonic.html
.. [3] http://functions.wolfram.com/Polynomials/SphericalHarmonicY/
"""
@classmethod
def eval(cls, n, m, theta, phi):
n, m, th, ph = [sympify(x) for x in (n, m, theta, phi)]
if m.is_positive:
zz = (Ynm(n, m, th, ph) + Ynm_c(n, m, th, ph)) / sqrt(2)
return zz
elif m.is_zero:
return Ynm(n, m, th, ph)
elif m.is_negative:
zz = (Ynm(n, m, th, ph) - Ynm_c(n, m, th, ph)) / (sqrt(2)*I)
return zz
| 33.237179
| 113
| 0.563934
|
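Znm in the record above builds real spherical harmonics from Ynm and its conjugate, and for m = 0 it reduces to Ynm itself. A short sanity check, assuming sympy is installed (the numeric value is rounded):

from sympy import Ynm, Znm, Symbol

theta = Symbol("theta", real=True)
phi = Symbol("phi", real=True)

# Y_1^0 = sqrt(3)*cos(theta)/(2*sqrt(pi)); at theta = 0 this is sqrt(3/(4*pi))
print(Ynm(1, 0, 0, 0).expand(func=True).evalf())  # ~ 0.4886
# for m = 0 the real harmonic is just Ynm
print(Znm(1, 0, theta, phi))                      # Ynm(1, 0, theta, phi)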
5679f6a65c616ae6169ecfe361080267cb71eb97
| 1,544
|
gyp
|
Python
|
ui/events/platform/x11/x11_events_platform.gyp
|
Wzzzx/chromium-crosswalk
|
768dde8efa71169f1c1113ca6ef322f1e8c9e7de
|
[
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 2
|
2019-01-28T08:09:58.000Z
|
2021-11-15T15:32:10.000Z
|
ui/events/platform/x11/x11_events_platform.gyp
|
maidiHaitai/haitaibrowser
|
a232a56bcfb177913a14210e7733e0ea83a6b18d
|
[
"BSD-3-Clause"
] | null | null | null |
ui/events/platform/x11/x11_events_platform.gyp
|
maidiHaitai/haitaibrowser
|
a232a56bcfb177913a14210e7733e0ea83a6b18d
|
[
"BSD-3-Clause"
] | 6
|
2020-09-23T08:56:12.000Z
|
2021-11-18T03:40:49.000Z
|
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
{
'variables': {
'chromium_code': 1,
},
'targets': [{
# GN version: //ui/events/platform/x11
'target_name': 'x11_events_platform',
'type': '<(component)',
'defines': [
'EVENTS_IMPLEMENTATION',
],
'dependencies': [
'../../../../base/base.gyp:base',
'../../../../build/linux/system.gyp:x11',
'../../../gfx/gfx.gyp:gfx',
'../../../gfx/x/gfx_x11.gyp:gfx_x11',
'../../devices/events_devices.gyp:events_devices',
'../../devices/x11/events_devices_x11.gyp:events_devices_x11',
'../../events.gyp:events',
'../../events.gyp:events_base',
'../../x/events_x.gyp:events_x',
'../events_platform.gyp:events_platform',
],
'sources': [
'x11_event_source.cc',
'x11_event_source.h',
'x11_hotplug_event_handler.cc',
'x11_hotplug_event_handler.h',
],
'conditions': [
['use_glib==1', {
'dependencies': [
'../../../../build/linux/system.gyp:glib',
],
'sources': [
'x11_event_source_glib.cc',
'x11_event_source_glib.h',
],
}, {
# use_glib == 0
'sources': [
'x11_event_source_libevent.cc',
'x11_event_source_libevent.h',
],
'dependencies': [
'../../keycodes/events_keycodes.gyp:keycodes_x11',
],
}],
],
}],
}
| 27.571429
| 72
| 0.538212
|
03487e44a28d3a0c86bea22fff1834f5072df4e0
| 1,820
|
py
|
Python
|
internal/notes/builtin-SAVE/packages/xinit/package.py
|
HPCToolkit/hpctest
|
5ff4455582bf39e75530a31badcf6142081b386b
|
[
"BSD-3-Clause"
] | 1
|
2019-01-17T20:07:19.000Z
|
2019-01-17T20:07:19.000Z
|
internal/notes/builtin-SAVE/packages/xinit/package.py
|
HPCToolkit/hpctest
|
5ff4455582bf39e75530a31badcf6142081b386b
|
[
"BSD-3-Clause"
] | null | null | null |
internal/notes/builtin-SAVE/packages/xinit/package.py
|
HPCToolkit/hpctest
|
5ff4455582bf39e75530a31badcf6142081b386b
|
[
"BSD-3-Clause"
] | 2
|
2019-08-06T18:13:57.000Z
|
2021-11-05T18:19:49.000Z
|
##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class Xinit(AutotoolsPackage):
"""The xinit program is used to start the X Window System server and a
first client program on systems that are not using a display manager
such as xdm."""
homepage = "http://cgit.freedesktop.org/xorg/app/xinit"
url = "https://www.x.org/archive/individual/app/xinit-1.3.4.tar.gz"
version('1.3.4', '91c5697345016ec7841f5e5fccbe7a4c')
depends_on('libx11')
depends_on('xproto@7.0.17:', type='build')
depends_on('pkg-config@0.9.0:', type='build')
depends_on('util-macros', type='build')
| 42.325581
| 78
| 0.674725
|
1d5702806c84daf7efc2d8c470d6a56aa7c91b9e
| 439
|
py
|
Python
|
terrathings/connection/deployment_status.py
|
elangenhan/terrathings
|
d54c9dce28762602475f5a77a6b07165efd2d5b0
|
[
"MIT"
] | null | null | null |
terrathings/connection/deployment_status.py
|
elangenhan/terrathings
|
d54c9dce28762602475f5a77a6b07165efd2d5b0
|
[
"MIT"
] | null | null | null |
terrathings/connection/deployment_status.py
|
elangenhan/terrathings
|
d54c9dce28762602475f5a77a6b07165efd2d5b0
|
[
"MIT"
] | null | null | null |
class Deployment:
def __init__(self, id: str, sha256: str) -> None:
self.id = id
self.sha256 = sha256
class Runtime:
def __init__(self, id: str, sha256: str) -> None:
self.id = id
self.sha256 = sha256
class Status:
def __init__(
self,
runtime: Runtime,
deployment: Deployment | None,
) -> None:
self.runtime = runtime
self.deployment = deployment
| 20.904762
| 53
| 0.571754
|
51c7fcc1545daf287ecd45a8baeb94709cb59837
| 485
|
py
|
Python
|
src/unicon/plugins/asa/ASAv/service_implementation.py
|
tahigash/unicon.plugins
|
1b43a5a61244ea9312387fd855442ace37c65db9
|
[
"Apache-2.0"
] | null | null | null |
src/unicon/plugins/asa/ASAv/service_implementation.py
|
tahigash/unicon.plugins
|
1b43a5a61244ea9312387fd855442ace37c65db9
|
[
"Apache-2.0"
] | null | null | null |
src/unicon/plugins/asa/ASAv/service_implementation.py
|
tahigash/unicon.plugins
|
1b43a5a61244ea9312387fd855442ace37c65db9
|
[
"Apache-2.0"
] | null | null | null |
from unicon.plugins.generic.service_implementation import Reload
from unicon.eal.dialogs import Dialog, Statement
from unicon.plugins.asa.ASAv.service_statements import asa_reload_stmt_list
class ASAReload(Reload):
def __init__(self, connection, context, **kwargs):
super().__init__(connection, context, **kwargs)
self.start_state = 'enable'
self.end_state = 'enable'
self.service_name = 'reload'
self.dialog = Dialog(asa_reload_stmt_list)
| 40.416667
| 75
| 0.740206
|
d25afc1a1b0b1bbb5107951ac4f4021c58f8de6c
| 420
|
py
|
Python
|
packages/python/plotly/plotly/validators/table/cells/fill/_colorsrc.py
|
mastermind88/plotly.py
|
efa70710df1af22958e1be080e105130042f1839
|
[
"MIT"
] | null | null | null |
packages/python/plotly/plotly/validators/table/cells/fill/_colorsrc.py
|
mastermind88/plotly.py
|
efa70710df1af22958e1be080e105130042f1839
|
[
"MIT"
] | null | null | null |
packages/python/plotly/plotly/validators/table/cells/fill/_colorsrc.py
|
mastermind88/plotly.py
|
efa70710df1af22958e1be080e105130042f1839
|
[
"MIT"
] | null | null | null |
import _plotly_utils.basevalidators
class ColorsrcValidator(_plotly_utils.basevalidators.SrcValidator):
def __init__(
self, plotly_name="colorsrc", parent_name="table.cells.fill", **kwargs
):
super(ColorsrcValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "none"),
**kwargs,
)
| 30
| 78
| 0.652381
|
b2c33a10ede2d4331c9365e19ff4fc966dcb5722
| 632
|
py
|
Python
|
setup.py
|
Chitanda-Satou/avocadopy
|
b6287416b5bfd6c283e848c9acdece1a8eade126
|
[
"MIT"
] | null | null | null |
setup.py
|
Chitanda-Satou/avocadopy
|
b6287416b5bfd6c283e848c9acdece1a8eade126
|
[
"MIT"
] | null | null | null |
setup.py
|
Chitanda-Satou/avocadopy
|
b6287416b5bfd6c283e848c9acdece1a8eade126
|
[
"MIT"
] | null | null | null |
import setuptools
with open("README.md", "r") as fh:
long_description = fh.read()
setuptools.setup(
name="avocadopy",
version="0.0.2",
author='B Niu',
author_email='shinji006@126.com',
description='Tools for medical statistics.',
long_description=long_description,
long_description_content_type="text/markdown",
url='https://github.com/b-niu/avocadopy',
packages=setuptools.find_packages(),
classifiers=[
"Programming Language :: Python :: 3",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
],
python_requires='>=3.6',
)
| 27.478261
| 50
| 0.655063
|
ba7ffa54998c0e9d9575d3ee2f7b6876bad29405
| 1,731
|
py
|
Python
|
sequniq/records.py
|
standage/sequniq
|
f77430402e52efeed62a6b41f9529d1a2ac4245b
|
[
"0BSD"
] | 2
|
2017-03-24T09:46:50.000Z
|
2017-03-29T16:52:33.000Z
|
sequniq/records.py
|
standage/sequniq
|
f77430402e52efeed62a6b41f9529d1a2ac4245b
|
[
"0BSD"
] | 3
|
2015-06-30T18:01:50.000Z
|
2015-06-30T18:02:22.000Z
|
sequniq/records.py
|
standage/sequniq
|
f77430402e52efeed62a6b41f9529d1a2ac4245b
|
[
"0BSD"
] | null | null | null |
# -----------------------------------------------------------------------------
# Copyright (C) Daniel Standage, 2015. It is licensed under the ISC license,
# see LICENSE.txt. Contact: daniel.standage@gmail.com
# -----------------------------------------------------------------------------
"""
Utilities for working with complete sequence records from a database of
Fast[aq] sequences. SHA1 hash of each sequence is stored in memory, instead of
the sequence itself. Therefore, these utilities are very space efficient when
working with thousands of long(ish) sequences such as scaffolds and contigs.
The memory savings will not be quite as drastic when working with millions of
short sequences, such as from an RNA-seq experiment.
"""
import sequniq
import hashlib
def uniqseqs(seqdata, trimdefline=False, checkrevcom=False, fastq=True,
paired=True):
"""
Given a file of Fast[aq] sequences `seqdata`, retrieve unique sequences.
Generator function yields complete Fast[aq] records.
"""
seqs = {}
parsefunc = sequniq.parse.get_parser(fastq=fastq, paired=paired)
for record in parsefunc(seqdata):
sequniq.parse.check_record(record, fastq=fastq, paired=paired)
seq = record[1]
if paired:
if fastq:
seq += record[4]
else:
seq += record[3]
seqsha = hashlib.sha1(seq).hexdigest()
if seqsha not in seqs:
if checkrevcom:
rseqsha = hashlib.sha1(sequniq.revcomp(seq)).hexdigest()
if rseqsha not in seqs:
seqs[seqsha] = 1
yield record
else:
seqs[seqsha] = 1
yield record
| 36.829787
| 79
| 0.581745
|
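The point of records.py above is to keep only a SHA1 digest per record instead of the full sequence. The same de-duplication idea in isolation, on plain strings and without the sequniq parsers (note that in Python 3 the text must be encoded to bytes before hashing):

import hashlib

seen = set()
for seq in ["ACGTACGT", "TTTTACGT", "ACGTACGT"]:            # toy records
    digest = hashlib.sha1(seq.encode("ascii")).hexdigest()  # keep the 40-char digest, not the sequence
    if digest not in seen:
        seen.add(digest)
        print("unique:", seq)
# the duplicate third record is skipped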
262074695f7a0f3b74b5c985eaab8eee3ad89550
| 2,748
|
py
|
Python
|
vp_suite/model_blocks/conv_lstm_hzzone.py
|
angelvillar96/vp-suite
|
3e7c7d852862bad09a771d754fc56a71abf0a25f
|
[
"MIT"
] | null | null | null |
vp_suite/model_blocks/conv_lstm_hzzone.py
|
angelvillar96/vp-suite
|
3e7c7d852862bad09a771d754fc56a71abf0a25f
|
[
"MIT"
] | null | null | null |
vp_suite/model_blocks/conv_lstm_hzzone.py
|
angelvillar96/vp-suite
|
3e7c7d852862bad09a771d754fc56a71abf0a25f
|
[
"MIT"
] | null | null | null |
from torch import nn
import torch
from vp_suite.base.base_model_block import ModelBlock
class ConvLSTM(ModelBlock):
NAME = "ConvLSTM (Shi et al.)"
PAPER_REFERENCE = "https://arxiv.org/abs/1506.04214"
CODE_REFERENCE = "https://github.com/Hzzone/Precipitation-Nowcasting"
MATCHES_REFERENCE = "Yes"
def __init__(self, device, in_c, enc_c, state_h, state_w, kernel_size, stride=1, padding=1):
super().__init__()
self.device = device
self._conv = nn.Conv2d(in_channels=in_c + enc_c,
out_channels=enc_c * 4,
kernel_size=kernel_size,
stride=stride,
padding=padding)
self.state_h = state_h
self.state_w = state_w
        # If these were plain tensors created with only the requires_grad flag, torch.save
        # would not actually save them even though they may be updated every epoch; and an
        # optimizer declared as Adam(model.parameters()) would never update them at all.
        # create the peephole weights directly on the target device so they stay registered
        # as nn.Parameter (moving a Parameter with .to() can return a plain, unregistered tensor)
        self.Wci = nn.Parameter(torch.zeros(1, enc_c, self.state_h, self.state_w, device=self.device))
        self.Wcf = nn.Parameter(torch.zeros(1, enc_c, self.state_h, self.state_w, device=self.device))
        self.Wco = nn.Parameter(torch.zeros(1, enc_c, self.state_h, self.state_w, device=self.device))
self.in_c = in_c
self.enc_c = enc_c
# inputs and states should not be all none
# inputs: [b, t, c, h, w]
def forward(self, inputs, states, seq_len):
if states is None:
b = inputs.shape[0]
c = torch.zeros((b, self.enc_c, self.state_h, self.state_w),
dtype=torch.float, device=self.device)
h = torch.zeros((b, self.enc_c, self.state_h, self.state_w),
dtype=torch.float, device=self.device)
else:
h, c = states
b = h.shape[0]
T = seq_len
outputs = []
for t in range(T):
# initial inputs
if inputs is None:
x = torch.zeros((b, self.in_c, self.state_h,
self.state_w), dtype=torch.float, device=self.device)
else:
x = inputs[:, t] # mustn't be None. Should be zero on first decoder step
cat_x = torch.cat([x, h], dim=1)
conv_x = self._conv(cat_x)
i, f, tmp_c, o = torch.chunk(conv_x, 4, dim=1)
i = torch.sigmoid(i+self.Wci*c)
f = torch.sigmoid(f+self.Wcf*c)
c = f*c + i*torch.tanh(tmp_c)
o = torch.sigmoid(o+self.Wco*c)
h = o*torch.tanh(c)
outputs.append(h)
return torch.stack(outputs, dim=1), (h, c)
| 40.411765
| 122
| 0.561863
|
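ConvLSTM.forward in the record above splits a single convolution of [x, h] into four chunks and applies the peephole LSTM gate equations. The cell update in isolation on tiny tensors; the batch, channel and grid sizes are assumed:

import torch

b, hc, hgt, wid = 1, 4, 8, 8
h = torch.zeros(b, hc, hgt, wid)
c = torch.zeros(b, hc, hgt, wid)
Wci = Wcf = Wco = torch.zeros(1, hc, hgt, wid)  # peephole weights (zero-initialised, as above)
conv_x = torch.randn(b, 4 * hc, hgt, wid)       # stand-in for self._conv(torch.cat([x, h], dim=1))

i, f, tmp_c, o = torch.chunk(conv_x, 4, dim=1)
i = torch.sigmoid(i + Wci * c)     # input gate, peeking at the old cell state
f = torch.sigmoid(f + Wcf * c)     # forget gate
c = f * c + i * torch.tanh(tmp_c)  # new cell state
o = torch.sigmoid(o + Wco * c)     # output gate, peeking at the new cell state
h = o * torch.tanh(c)              # new hidden state
print(h.shape)                     # torch.Size([1, 4, 8, 8])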
ec4a45263b13b7d7ddc5e6f6a6c9d7aa3ffb460f
| 78,553
|
py
|
Python
|
constants/commands.py
|
Pure-Peace/gulag
|
d46c3c54ae8d224b62141c5e237b8ea988033864
|
[
"MIT"
] | 1
|
2021-09-22T03:15:01.000Z
|
2021-09-22T03:15:01.000Z
|
constants/commands.py
|
Pure-Peace/gulag
|
d46c3c54ae8d224b62141c5e237b8ea988033864
|
[
"MIT"
] | null | null | null |
constants/commands.py
|
Pure-Peace/gulag
|
d46c3c54ae8d224b62141c5e237b8ea988033864
|
[
"MIT"
] | 1
|
2022-02-07T02:32:00.000Z
|
2022-02-07T02:32:00.000Z
|
# -*- coding: utf-8 -*-
import asyncio
import copy
import importlib
import os
import pprint
import random
import secrets
import signal
import struct
import time
import uuid
from collections import Counter
from dataclasses import dataclass
from datetime import datetime
from importlib.metadata import version as pkg_version
from time import perf_counter_ns as clock_ns
from typing import Callable
from typing import NamedTuple
from typing import Optional
from typing import Sequence
from typing import TYPE_CHECKING
from typing import Union
from pathlib import Path
import aiomysql
import cmyui.utils
import psutil
from peace_performance_python import Beatmap as PeaceBeatmap, Calculator
import packets
import utils.misc
from constants import regexes, mixed_calculator
from constants.gamemodes import GameMode
from constants.mods import Mods
from constants.mods import SPEED_CHANGING_MODS
from constants.privileges import Privileges
from objects import glob
from objects.beatmap import Beatmap
from objects.beatmap import ensure_local_osu_file
from objects.beatmap import RankedStatus
from objects.clan import Clan
from objects.clan import ClanPrivileges
from objects.match import Match
from objects.match import MapPool
from objects.match import MatchTeams
from objects.match import MatchTeamTypes
from objects.match import MatchWinConditions
from objects.match import SlotStatus
from objects.player import Player
from objects.score import SubmissionStatus
from utils.misc import seconds_readable
if TYPE_CHECKING:
from objects.channel import Channel
BEATMAPS_PATH = Path.cwd() / '.data/osu'
Messageable = Union['Channel', Player]
CommandResponse = dict[str, str]
@dataclass
class Context:
player: Player
trigger: str
args: Sequence[str]
recipient: Optional[Messageable] = None
match: Optional[Match] = None
class Command(NamedTuple):
triggers: list[str]
callback: Callable[[Context], str]
priv: Privileges
hidden: bool
doc: str
class CommandSet:
__slots__ = ('trigger', 'doc', 'commands')
def __init__(self, trigger: str, doc: str) -> None:
self.trigger = trigger
self.doc = doc
self.commands: list[Command] = []
def add(self, priv: Privileges, aliases: list[str] = [],
hidden: bool = False) -> Callable:
def wrapper(f: Callable):
self.commands.append(Command(
# NOTE: this method assumes that functions without any
# triggers will be named like '{self.trigger}_{trigger}'.
triggers = (
[f.__name__.removeprefix(f'{self.trigger}_').strip()] +
aliases
),
callback = f, priv = priv,
hidden = hidden, doc = f.__doc__
))
return f
return wrapper
# TODO: refactor help commands into some base ver
# since they're all the same anyways lol.
# not sure if this should be in glob or not,
# trying to think of some use cases lol..
regular_commands = []
command_sets = [
mp_commands := CommandSet('mp', 'Multiplayer commands.'),
pool_commands := CommandSet('pool', 'Mappool commands.'),
clan_commands := CommandSet('clan', 'Clan commands.')
]
glob.commands = {
'regular': regular_commands,
'sets': command_sets
}
def command(priv: Privileges, aliases: list[str] = [],
hidden: bool = False) -> Callable:
def wrapper(f: Callable):
regular_commands.append(Command(
callback = f,
priv = priv,
hidden = hidden,
triggers = [f.__name__.strip('_')] + aliases,
doc = f.__doc__
))
return f
return wrapper
""" User commands
# The commands below are not considered dangerous,
# and are granted to any unbanned players.
"""
@command(Privileges.Normal, aliases=['', 'h'], hidden=True)
async def _help(ctx: Context) -> str:
"""Show all documented commands the player can access."""
prefix = glob.config.command_prefix
l = ['Individual commands',
'-----------']
for cmd in regular_commands:
if not cmd.doc or ctx.player.priv & cmd.priv != cmd.priv:
# no doc, or insufficient permissions.
continue
l.append(f'{prefix}{cmd.triggers[0]}: {cmd.doc}')
l.append('') # newline
l.extend(['Command sets',
'-----------'])
for cmd_set in command_sets:
l.append(f'{prefix}{cmd_set.trigger}: {cmd_set.doc}')
return '\n'.join(l)
@command(Privileges.Normal)
async def roll(ctx: Context) -> str:
"""Roll an n-sided die where n is the number you write (100 default)."""
if ctx.args and ctx.args[0].isdecimal():
max_roll = min(int(ctx.args[0]), 0x7fff)
else:
max_roll = 100
if max_roll == 0:
return "Roll what?"
points = random.randrange(0, max_roll)
return f'{ctx.player.name} rolls {points} points!'
@command(Privileges.Normal, hidden=True)
async def block(ctx: Context) -> str:
"""Block another user from communicating with you."""
target = await glob.players.get_ensure(name=' '.join(ctx.args))
if not target:
return 'User not found.'
if (
target is glob.bot or
target is ctx.player
):
return 'What?'
if target.id in ctx.player.blocks:
return f'{target.name} already blocked!'
if target.id in ctx.player.friends:
ctx.player.friends.remove(target.id)
await ctx.player.add_block(target)
return f'Added {target.name} to blocked users.'
@command(Privileges.Normal, hidden=True)
async def unblock(ctx: Context) -> str:
"""Unblock another user from communicating with you."""
target = await glob.players.get_ensure(name=' '.join(ctx.args))
if not target:
return 'User not found.'
if (
target is glob.bot or
target is ctx.player
):
return 'What?'
if target.id not in ctx.player.blocks:
return f'{target.name} not blocked!'
await ctx.player.remove_block(target)
return f'Removed {target.name} from blocked users.'
@command(Privileges.Normal)
async def reconnect(ctx: Context) -> str:
"""Disconnect and reconnect to the server."""
ctx.player.logout()
@command(Privileges.Normal)
async def changename(ctx: Context) -> str:
"""Change your username."""
name = ' '.join(ctx.args).strip()
if not regexes.username.match(name):
return 'Must be 2-15 characters in length.'
if '_' in name and ' ' in name:
return 'May contain "_" and " ", but not both.'
if name in glob.config.disallowed_names:
return 'Disallowed username; pick another.'
if await glob.db.fetch('SELECT 1 FROM users WHERE name = %s', [name]):
return 'Username already taken by another player.'
# all checks passed, update their name
safe_name = name.lower().replace(' ', '_')
await glob.db.execute(
'UPDATE users '
'SET name = %s, safe_name = %s '
'WHERE id = %s',
[name, safe_name, ctx.player.id]
)
ctx.player.enqueue(
packets.notification(f'Your username has been changed to {name}!')
)
ctx.player.logout()
@command(Privileges.Normal, aliases=['bloodcat', 'beatconnect', 'chimu', 'q'])
async def maplink(ctx: Context) -> str:
"""Return a download link to the user's current map (situation dependant)."""
bmap = None
# priority: multiplayer -> spectator -> last np
match = ctx.player.match
spectating = ctx.player.spectating
if match and match.map_id:
bmap = await Beatmap.from_md5(match.map_md5)
elif spectating and spectating.status.map_id:
bmap = await Beatmap.from_md5(spectating.status.map_md5)
elif time.time() < ctx.player.last_np['timeout']:
bmap = ctx.player.last_np['bmap']
else:
return 'No map found!'
# gatari.pw & nerina.pw are pretty much the only
# reliable mirrors i know of? perhaps beatconnect
return f'[https://osu.gatari.pw/d/{bmap.set_id} {bmap.full}]'
@command(Privileges.Normal, aliases=['last', 'r'])
async def recent(ctx: Context) -> str:
"""Show information about your most recent score."""
if ctx.args:
if not (target := glob.players.get(name=' '.join(ctx.args))):
return 'Player not found.'
else:
target = ctx.player
if not (s := target.recent_score):
return 'No scores found :o (only saves per play session)'
l = [f'[{s.mode!r}] {s.bmap.embed}', f'{s.acc:.2f}%']
if s.mods:
l.insert(1, f'+{s.mods!r}')
l = [' '.join(l)]
if s.passed:
rank = s.rank if s.status == SubmissionStatus.BEST else 'NA'
l.append(f'PASS {{{s.pp:.2f}pp #{rank}}}')
else:
# XXX: prior to v3.2.0, gulag didn't parse total_length from
# the osu!api, and thus this can do some zerodivision moments.
# this can probably be removed in the future, or better yet
# replaced with a better system to fix the maps.
if s.bmap.total_length != 0:
completion = s.time_elapsed / (s.bmap.total_length * 1000)
            l.append(f'FAIL {{{completion * 100:.2f}% complete}}')
else:
l.append('FAIL')
return ' | '.join(l)
# TODO: !top (get top #1 score)
# TODO: !compare (compare to previous !last/!top post's map)
@command(Privileges.Normal, aliases=['w'], hidden=True)
async def _with(ctx: Context) -> str:
"""Specify custom accuracy & mod combinations with `/np`."""
if ctx.recipient is not glob.bot:
return 'This command can only be used in DM with bot.'
if time.time() >= ctx.player.last_np['timeout']:
return 'Please /np a map first!'
bmap: Beatmap = ctx.player.last_np['bmap']
osu_file_path = BEATMAPS_PATH / f'{bmap.id}.osu'
if not await ensure_local_osu_file(osu_file_path, bmap.id, bmap.md5):
return ('Mapfile could not be found; '
'this incident has been reported.')
mode_vn = ctx.player.last_np['mode_vn']
if mode_vn in (0, 1): # osu, taiko
if not ctx.args or len(ctx.args) > 4:
return 'Invalid syntax: !with <acc/nmiss/combo/mods ...>'
# !with 95% 1m 429x hddt
acc = mods = combo = nmiss = None
# parse acc, misses, combo and mods from arguments.
# tried to balance complexity vs correctness here
for arg in map(str.lower, ctx.args):
# mandatory suffix, combo & nmiss
if (
combo is None and
arg.endswith('x') and
arg[:-1].isdecimal()
):
combo = int(arg[:-1])
if combo > bmap.max_combo:
return 'Invalid combo.'
elif (
nmiss is None and
arg.endswith('m') and
arg[:-1].isdecimal()
):
nmiss = int(arg[:-1])
# TODO: store nobjects?
if nmiss > bmap.max_combo:
return 'Invalid misscount.'
else:
# optional prefix/suffix, mods & accuracy
arg_stripped = arg.removeprefix('+').removesuffix('%')
if (
mods is None and
arg_stripped.isalpha() and
len(arg_stripped) % 2 == 0
):
mods = Mods.from_modstr(arg_stripped)
mods = mods.filter_invalid_combos(mode_vn)
elif (
acc is None and
arg_stripped.replace('.', '', 1).isdecimal()
):
acc = float(arg_stripped)
if not 0 <= acc <= 100:
return 'Invalid accuracy.'
else:
return f'Unknown argument: {arg}'
msg = []
combo = bmap.max_combo
c = Calculator()
if mods is not None:
c.set_mods(int(mods))
msg.append(f'{mods!r}')
if nmiss is not None:
c.set_miss(nmiss)
msg.append(f'{nmiss}m')
if combo is not None:
c.set_combo(combo)
msg.append(f'{combo}x')
if acc is not None:
c.set_acc(acc)
msg.append(f'{acc:.2f}%')
result = mixed_calculator.simple_calculate(mode_vn, nmiss, combo, acc, mods, osu_file_path, 0)
pp, sr = result[0], result[1]
return f"{' '.join(msg)}: {pp:.2f}pp ({sr:.2f}*)"
else: # mania
if not ctx.args or len(ctx.args) > 2:
return 'Invalid syntax: !with <score/mods ...>'
score = 1000
mods = Mods.NOMOD
for param in (p.strip('+k') for p in ctx.args):
if param.isdecimal(): # acc
if not 0 <= (score := int(param)) <= 1000:
return 'Invalid score.'
if score <= 500:
return '<=500k score is always 0pp.'
elif len(param) % 2 == 0:
mods = Mods.from_modstr(param)
mods = mods.filter_invalid_combos(mode_vn)
else:
return 'Invalid syntax: !with <score/mods ...>'
result = mixed_calculator.simple_calculate(mode_vn, 0, 0, 0, mods, osu_file_path, score)
return f'{score}k {mods!r}: {result[0]:.2f}pp ({result[1]:.2f}*)'
@command(Privileges.Normal, aliases=['req'])
async def request(ctx: Context) -> str:
"""Request a beatmap for nomination."""
if ctx.args:
return 'Invalid syntax: !request'
if time.time() >= ctx.player.last_np['timeout']:
return 'Please /np a map first!'
bmap = ctx.player.last_np['bmap']
if bmap.status != RankedStatus.Pending:
return 'Only pending maps may be requested for status change.'
await glob.db.execute(
'INSERT INTO map_requests '
'(map_id, player_id, datetime, active) '
'VALUES (%s, %s, NOW(), 1)',
[bmap.id, ctx.player.id]
)
return 'Request submitted.'
@command(Privileges.Normal)
async def get_apikey(ctx: Context) -> str:
"""Generate a new api key & assign it to the player."""
if ctx.recipient is not glob.bot:
return f'Command only available in DMs with {glob.bot.name}.'
# remove old token
if ctx.player.api_key:
glob.api_keys.pop(ctx.player.api_key)
# generate new token
ctx.player.api_key = str(uuid.uuid4())
await glob.db.execute(
'UPDATE users '
'SET api_key = %s '
'WHERE id = %s',
[ctx.player.api_key, ctx.player.id]
)
glob.api_keys[ctx.player.api_key] = ctx.player.id
ctx.player.enqueue(packets.notification('/savelog & click popup for an easy copy.'))
return f'Your API key is now: {ctx.player.api_key}'
""" Nominator commands
# The commands below allow users to
# manage the server's state of beatmaps.
"""
@command(Privileges.Nominator, aliases=['reqs'], hidden=True)
async def requests(ctx: Context) -> str:
"""Check the nomination request queue."""
if ctx.args:
return 'Invalid syntax: !requests'
res = await glob.db.fetchall(
'SELECT map_id, player_id, datetime '
'FROM map_requests WHERE active = 1',
_dict=False # return rows as tuples
)
if not res:
return 'The queue is clean! (0 map request(s))'
l = [f'Total requests: {len(res)}']
for (map_id, player_id, dt) in res:
# find player & map for each row, and add to output.
if not (p := await glob.players.get_ensure(id=player_id)):
l.append(f'Failed to find requesting player ({player_id})?')
continue
if not (bmap := await Beatmap.from_bid(map_id)):
l.append(f'Failed to find requested map ({map_id})?')
continue
l.append(f'[{p.embed} @ {dt:%b %d %I:%M%p}] {bmap.embed}.')
return '\n'.join(l)
_status_str_to_int_map = {
'unrank': 0,
'rank': 2,
'love': 5
}
def status_to_id(s: str) -> int:
return _status_str_to_int_map[s]
@command(Privileges.Nominator)
async def _map(ctx: Context) -> str:
"""Changes the ranked status of the most recently /np'ed map."""
if (
len(ctx.args) != 2 or
ctx.args[0] not in ('rank', 'unrank', 'love') or
ctx.args[1] not in ('set', 'map')
):
return 'Invalid syntax: !map <rank/unrank/love> <map/set>'
if time.time() >= ctx.player.last_np['timeout']:
return 'Please /np a map first!'
bmap = ctx.player.last_np['bmap']
new_status = RankedStatus(status_to_id(ctx.args[0]))
if bmap.status == new_status:
return f'{bmap.embed} is already {new_status!s}!'
# update sql & cache based on scope
# XXX: not sure if getting md5s from sql
# for updating cache would be faster?
# surely this will not scale as well..
async with glob.db.pool.acquire() as conn:
async with conn.cursor() as db_cursor:
if ctx.args[1] == 'set':
# update whole set
await db_cursor.execute(
'UPDATE maps SET status = %s, '
'frozen = 1 WHERE set_id = %s',
[new_status, bmap.set_id]
)
# select all map ids for clearing map requests.
await db_cursor.execute(
'SELECT id FROM maps '
'WHERE set_id = %s',
[bmap.set_id]
)
map_ids = [row[0] async for row in db_cursor]
for bmap in glob.cache['beatmapset'][bmap.set_id].maps:
bmap.status = new_status
else:
# update only map
await db_cursor.execute(
'UPDATE maps SET status = %s, '
'frozen = 1 WHERE id = %s',
[new_status, bmap.id]
)
map_ids = [bmap.id]
if bmap.md5 in glob.cache['beatmap']:
glob.cache['beatmap'][bmap.md5].status = new_status
# deactivate rank requests for all ids
for map_id in map_ids:
await db_cursor.execute(
'UPDATE map_requests '
'SET active = 0 '
'WHERE map_id = %s',
[map_id]
)
return f'{bmap.embed} updated to {new_status!s}.'
""" Mod commands
# The commands below are somewhat dangerous,
# and are generally for managing players.
"""
@command(Privileges.Mod, hidden=True)
async def notes(ctx: Context) -> str:
"""Retrieve the logs of a specified player by name."""
if len(ctx.args) != 2 or not ctx.args[1].isdecimal():
return 'Invalid syntax: !notes <name> <days_back>'
if not (t := await glob.players.get_ensure(name=ctx.args[0])):
return f'"{ctx.args[0]}" not found.'
days = int(ctx.args[1])
if days > 365:
return 'Please contact a developer to fetch >365 day old information.'
elif days <= 0:
return 'Invalid syntax: !notes <name> <days_back>'
res = await glob.db.fetchall(
'SELECT `msg`, `time` '
'FROM `logs` WHERE `to` = %s '
'AND UNIX_TIMESTAMP(`time`) >= UNIX_TIMESTAMP(NOW()) - %s '
'ORDER BY `time` ASC',
[t.id, days * 86400]
)
if not res:
return f'No notes found on {t} in the past {days} days.'
return '\n'.join(['[{time}] {msg}'.format(**row) for row in res])
@command(Privileges.Mod, hidden=True)
async def addnote(ctx: Context) -> str:
"""Add a note to a specified player by name."""
if len(ctx.args) < 2:
return 'Invalid syntax: !addnote <name> <note ...>'
if not (t := await glob.players.get_ensure(name=ctx.args[0])):
return f'"{ctx.args[0]}" not found.'
log_msg = f'{ctx.player} added note: {" ".join(ctx.args[1:])}'
await glob.db.execute(
'INSERT INTO logs '
'(`from`, `to`, `msg`, `time`) '
'VALUES (%s, %s, %s, NOW())',
[ctx.player.id, t.id, log_msg]
)
return f'Added note to {t}.'
# some shorthands that can be used as
# reasons in many moderation commands.
SHORTHAND_REASONS = {
'aa': 'having their appeal accepted',
'cc': 'using a modified osu! client',
'3p': 'using 3rd party programs',
'rx': 'using 3rd party programs (relax)',
'tw': 'using 3rd party programs (timewarp)',
'au': 'using 3rd party programs (auto play)'
}
DURATION_MULTIPLIERS = {
's': 1, 'm': 60, 'h': 3600,
'd': 86400, 'w': 604800
}
@command(Privileges.Mod, hidden=True)
async def silence(ctx: Context) -> str:
"""Silence a specified player with a specified duration & reason."""
if len(ctx.args) < 3:
return 'Invalid syntax: !silence <name> <duration> <reason>'
if not (t := await glob.players.get_ensure(name=ctx.args[0])):
return f'"{ctx.args[0]}" not found.'
if (
t.priv & Privileges.Staff and
not ctx.player.priv & Privileges.Dangerous
):
return 'Only developers can manage staff members.'
if not (r_match := regexes.scaled_duration.match(ctx.args[1])):
return 'Invalid syntax: !silence <name> <duration> <reason>'
multiplier = DURATION_MULTIPLIERS[r_match['scale']]
duration = int(r_match['duration']) * multiplier
reason = ' '.join(ctx.args[2:])
if reason in SHORTHAND_REASONS:
reason = SHORTHAND_REASONS[reason]
await t.silence(ctx.player, duration, reason)
return f'{t} was silenced.'
@command(Privileges.Mod, hidden=True)
async def unsilence(ctx: Context) -> str:
"""Unsilence a specified player."""
if len(ctx.args) != 1:
return 'Invalid syntax: !unsilence <name>'
if not (t := await glob.players.get_ensure(name=ctx.args[0])):
return f'"{ctx.args[0]}" not found.'
if not t.silenced:
return f'{t} is not silenced.'
if (
t.priv & Privileges.Staff and
not ctx.player.priv & Privileges.Dangerous
):
return 'Only developers can manage staff members.'
await t.unsilence(ctx.player)
return f'{t} was unsilenced.'
""" Admin commands
# The commands below are relatively dangerous,
# and are generally for managing players.
"""
@command(Privileges.Admin, aliases=['u'], hidden=True)
async def user(ctx: Context) -> str:
"""Return general information about a given user."""
if not ctx.args:
# no username specified, use ctx.player
p = ctx.player
else:
# username given, fetch the player
p = await glob.players.get_ensure(name=' '.join(ctx.args))
if not p:
return 'Player not found.'
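# build a readable privilege list from single-bit flags only (composite masks are skipped)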
priv_readable = '|'.join(reversed([
priv.name for priv in Privileges
if p.priv & priv and bin(priv).count('1') == 1
]))
current_time = time.time()
login_delta = current_time - p.login_time
last_recv_delta = current_time - p.last_recv_time
if current_time < p.last_np['timeout']:
last_np = p.last_np['bmap'].embed
else:
last_np = None
return '\n'.join((
f'[{"Bot" if p.bot_client else "Player"}] {p.full_name} ({p.id})',
f'Privileges: {priv_readable}',
f'Channels: {[c._name for c in p.channels]}',
f'Logged in: {login_delta:.2f} sec ago',
f'Last server interaction: {last_recv_delta:.2f} sec ago',
f'osu! build: {p.osu_ver} | Tourney: {p.tourney_client}',
f'Silenced: {p.silenced} | Spectating: {p.spectating}',
f'Last /np: {last_np}',
f'Recent score: {p.recent_score}',
f'Match: {p.match}',
f'Spectators: {p.spectators}'
))
@command(Privileges.Admin, hidden=True)
async def restrict(ctx: Context) -> str:
"""Restrict a specified player's account, with a reason."""
if len(ctx.args) < 2:
return 'Invalid syntax: !restrict <name> <reason>'
# find any user matching (including offline).
if not (t := await glob.players.get_ensure(name=ctx.args[0])):
return f'"{ctx.args[0]}" not found.'
if (
t.priv & Privileges.Staff and
not ctx.player.priv & Privileges.Dangerous
):
return 'Only developers can manage staff members.'
if t.restricted:
return f'{t} is already restricted!'
reason = ' '.join(ctx.args[1:])
if reason in SHORTHAND_REASONS:
reason = SHORTHAND_REASONS[reason]
await t.restrict(admin=ctx.player, reason=reason)
return f'{t} was restricted.'
@command(Privileges.Admin, hidden=True)
async def unrestrict(ctx: Context) -> str:
"""Unrestrict a specified player's account, with a reason."""
if len(ctx.args) < 2:
return 'Invalid syntax: !unrestrict <name> <reason>'
# find any user matching (including offline).
if not (t := await glob.players.get_ensure(name=ctx.args[0])):
return f'"{ctx.args[0]}" not found.'
if (
t.priv & Privileges.Staff and
not ctx.player.priv & Privileges.Dangerous
):
return 'Only developers can manage staff members.'
if not t.restricted:
return f'{t} is not restricted!'
reason = ' '.join(ctx.args[1:])
if reason in SHORTHAND_REASONS:
reason = SHORTHAND_REASONS[reason]
await t.unrestrict(ctx.player, reason)
return f'{t} was unrestricted.'
@command(Privileges.Admin, hidden=True)
async def alert(ctx: Context) -> str:
"""Send a notification to all players."""
if len(ctx.args) < 1:
return 'Invalid syntax: !alert <msg>'
notif_txt = ' '.join(ctx.args)
glob.players.enqueue(packets.notification(notif_txt))
return 'Alert sent.'
@command(Privileges.Admin, aliases=['alertu'], hidden=True)
async def alertuser(ctx: Context) -> str:
"""Send a notification to a specified player by name."""
if len(ctx.args) < 2:
return 'Invalid syntax: !alertu <name> <msg>'
if not (t := glob.players.get(name=ctx.args[0])):
return 'Could not find a user by that name.'
notif_txt = ' '.join(ctx.args[1:])
t.enqueue(packets.notification(notif_txt))
return 'Alert sent.'
# NOTE: this is pretty useless since it doesn't switch anything other
# than the c[e4-6].ppy.sh domains; it exists on bancho as a tournament
# server switch mechanism, perhaps we could leverage this in the future.
@command(Privileges.Admin, hidden=True)
async def switchserv(ctx: Context) -> str:
"""Switch your client's internal endpoints to a specified IP address."""
if len(ctx.args) != 1:
return 'Invalid syntax: !switchserv <endpoint>'
new_bancho_ip = ctx.args[0]
ctx.player.enqueue(packets.switchTournamentServer(new_bancho_ip))
return 'Have a nice journey..'
@command(Privileges.Admin, aliases=['restart'])
async def shutdown(ctx: Context) -> str:
"""Gracefully shutdown the server."""
if ctx.trigger == 'restart':
_signal = signal.SIGUSR1
else:
_signal = signal.SIGTERM
if ctx.args: # shutdown after a delay
if not (r_match := regexes.scaled_duration.match(ctx.args[0])):
return f'Invalid syntax: !{ctx.trigger} <delay> <msg ...>'
multiplier = DURATION_MULTIPLIERS[r_match['scale']]
delay = int(r_match['duration']) * multiplier
if delay < 15:
return 'Minimum delay is 15 seconds.'
if len(ctx.args) > 1:
# alert all online players of the reboot.
alert_msg = (f'The server will {ctx.trigger} in {ctx.args[0]}.\n\n'
f'Reason: {" ".join(ctx.args[1:])}')
glob.players.enqueue(packets.notification(alert_msg))
glob.loop.call_later(delay, os.kill, os.getpid(), _signal)
return f'Enqueued {ctx.trigger}.'
else: # shutdown immediately
os.kill(os.getpid(), _signal)
return ':D'
""" Developer commands
# The commands below are either dangerous or
# simply not useful for any other roles.
"""
_fake_users = []
@command(Privileges.Dangerous, aliases=['fu'])
async def fakeusers(ctx: Context) -> str:
"""Add fake users to the online player list (for testing)."""
# NOTE: this is mostly just for speedtesting things
# regarding presences/stats. its implementation is
# indeed quite cursed, but rather efficient.
if (
len(ctx.args) != 2 or
ctx.args[0] not in ('add', 'rm') or
not ctx.args[1].isdecimal()
):
return 'Invalid syntax: !fakeusers <add/rm> <amount>'
action = ctx.args[0]
amount = int(ctx.args[1])
if not 0 < amount <= 100_000:
return 'Amount must be in range 0-100k.'
# we start halfway through
# the i32 space for fake user ids.
FAKE_ID_START = 0x7fffffff >> 1
# data to send to clients (all new user info)
# we'll send all the packets together at the end (more efficient)
data = bytearray()
if action == 'add':
const_uinfo = { # non important stuff
'utc_offset': 0,
'osu_ver': 'dn',
'pm_private': False,
'clan': None,
'clan_priv': None,
'priv': Privileges.Normal | Privileges.Verified,
'silence_end': 0,
'login_time': 0x7fffffff # never auto-dc
}
_stats = packets.userStats(ctx.player)
if _fake_users:
current_fakes = max([x.id for x in _fake_users]) - (FAKE_ID_START - 1)
else:
current_fakes = 0
start_id = FAKE_ID_START + current_fakes
end_id = start_id + amount
vn_std = GameMode.vn_std
base_player = Player(id=0, name='', **const_uinfo)
base_player.stats[vn_std] = copy.copy(ctx.player.stats[vn_std])
new_fakes = []
# static part of the presence packet,
# no need to redo this every iteration.
static_presence = struct.pack(
'<BBBffi',
19, # -5 (EST) + 24
38, # country (canada)
0b11111, # all in-game privs
0.0, 0.0, # lat, lon
1 # rank #1
)
for i in range(start_id, end_id):
# create new fake player from base
name = f'fake #{i - (FAKE_ID_START - 1)}'
fake = copy.copy(base_player)
fake.id = i
fake.name = name
# append userpresence packet
data += struct.pack(
'<HxIi',
83, # packetid
21 + len(name), # packet len
i # userid
)
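# osu! string format: 0x0b marker + uleb128 length + utf-8 data (names here are < 128 chars, so one length byte)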
data += f'\x0b{chr(len(name))}{name}'.encode()
data += static_presence
data += _stats
new_fakes.append(fake)
# extend all added fakes to the real list
_fake_users.extend(new_fakes)
glob.players.extend(new_fakes)
del new_fakes
msg = 'Added.'
else: # remove
len_fake_users = len(_fake_users)
if amount > len_fake_users:
return f'Too many! Only {len_fake_users} remaining.'
to_remove = _fake_users[len_fake_users - amount:]
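# bancho logout packet header: packet id 12 (u16) + pad byte + body length 5 (u32)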
logout_packet_header = b'\x0c\x00\x00\x05\x00\x00\x00'
for fake in to_remove:
if not fake.online:
# already auto-dced
_fake_users.remove(fake)
continue
data += logout_packet_header
data += fake.id.to_bytes(4, 'little') # 4 bytes pid
data += b'\x00' # 1 byte 0
glob.players.remove(fake)
_fake_users.remove(fake)
msg = 'Removed.'
data = bytes(data) # bytearray -> bytes
# only enqueue data to real users.
for o in [x for x in glob.players if x.id < FAKE_ID_START]:
o.enqueue(data)
return msg
@command(Privileges.Dangerous)
async def stealth(ctx: Context) -> str:
"""Toggle the developer's stealth, allowing them to be hidden."""
# NOTE: this command is a large work in progress and currently
# half works; eventually it will be moved to the Admin level.
ctx.player.stealth = not ctx.player.stealth
return f'Stealth {"enabled" if ctx.player.stealth else "disabled"}.'
@command(Privileges.Dangerous)
async def recalc(ctx: Context) -> str:
"""Recalculate pp for a given map, or all maps."""
# NOTE: at the moment this command isn't very optimal and re-parses
# the beatmap file each iteration; this will be heavily improved.
if len(ctx.args) != 1 or ctx.args[0] not in ('map', 'all'):
return 'Invalid syntax: !recalc <map/all>'
if ctx.args[0] == 'map':
# by specific map, use their last /np
if time.time() >= ctx.player.last_np['timeout']:
return 'Please /np a map first!'
bmap: Beatmap = ctx.player.last_np['bmap']
osu_file_path = BEATMAPS_PATH / f'{bmap.id}.osu'
if not await ensure_local_osu_file(osu_file_path, bmap.id, bmap.md5):
return ('Mapfile could not be found; '
'this incident has been reported.')
async with glob.db.pool.acquire() as conn:
async with conn.cursor(aiomysql.DictCursor) as select_cursor, conn.cursor(aiomysql.Cursor) as update_cursor:
for table in ('scores_vn', 'scores_rx', 'scores_ap'):
await select_cursor.execute(
'SELECT id, acc, mods, max_combo, nmiss, mode, score '
f'FROM {table} '
'WHERE map_md5 = %s',
[bmap.md5]
)
async for row in select_cursor:
result = mixed_calculator.simple_calculate(row['mode'], row['nmiss'], row['max_combo'], row['acc'], row['mods'], osu_file_path, row['score'])
await update_cursor.execute(
f'UPDATE {table} '
'SET pp = %s '
'WHERE id = %s',
[result[0], row['id']]
)
return 'Map recalculated.'
else:
# recalc all plays on the server, on all maps
staff_chan = glob.channels['#staff'] # log any errs here
async def recalc_all() -> None:
staff_chan.send_bot(f'{ctx.player} started a full recalculation.')
st = time.time()
async with glob.db.pool.acquire() as conn:
async with conn.cursor(aiomysql.Cursor) as bmap_select_cursor, conn.cursor(aiomysql.DictCursor) as score_select_cursor, conn.cursor(aiomysql.Cursor) as update_cursor:
await bmap_select_cursor.execute(
'SELECT id, md5 '
'FROM maps '
'WHERE plays > 0'
)
map_count = bmap_select_cursor.rowcount
staff_chan.send_bot(f'Recalculating {map_count} maps.')
async for bmap_row in bmap_select_cursor:
bmap_id, bmap_md5 = bmap_row
osu_file_path = BEATMAPS_PATH / f'{bmap_id}.osu'
''' for a better performance
if not await ensure_local_osu_file(osu_file_path, bmap_id, bmap_md5):
staff_chan.send_bot("[Recalc] Couldn't find "
f"{bmap_id} / {bmap_md5}")
continue
'''
for table in ('scores_vn', 'scores_rx', 'scores_ap'):
await score_select_cursor.execute(
'SELECT id, acc, mods, max_combo, nmiss, mode, score '
f'FROM {table} '
'WHERE map_md5 = %s AND status = 2',
[bmap_md5]
)
async for row in score_select_cursor:
result = mixed_calculator.simple_calculate(row['mode'], row['nmiss'], row['max_combo'], row['acc'], row['mods'], osu_file_path, row['score'])
await update_cursor.execute(
f'UPDATE {table} '
'SET pp = %s '
'WHERE id = %s',
[result[0], row['id']]
)
# leave at least 1/100th of
# a second for handling conns.
await asyncio.sleep(0.01)
elapsed = utils.misc.seconds_readable(int(time.time() - st))
staff_chan.send_bot(f'Recalculation complete. | Elapsed: {elapsed}')
glob.loop.create_task(recalc_all())
return 'Starting a full recalculation.'
@command(Privileges.Dangerous, hidden=True)
async def debug(ctx: Context) -> str:
"""Toggle the console's debug setting."""
glob.app.debug = not glob.app.debug
return f"Toggled {'on' if glob.app.debug else 'off'}."
# NOTE: these commands will likely be removed
# with the addition of a good frontend.
str_priv_dict = {
'normal': Privileges.Normal,
'verified': Privileges.Verified,
'whitelisted': Privileges.Whitelisted,
'supporter': Privileges.Supporter,
'premium': Privileges.Premium,
'alumni': Privileges.Alumni,
'tournament': Privileges.Tournament,
'nominator': Privileges.Nominator,
'mod': Privileges.Mod,
'admin': Privileges.Admin,
'dangerous': Privileges.Dangerous
}
@command(Privileges.Dangerous, hidden=True)
async def addpriv(ctx: Context) -> str:
"""Set privileges for a specified player (by name)."""
if len(ctx.args) < 2:
return 'Invalid syntax: !addpriv <name> <role1 role2 role3 ...>'
bits = Privileges(0)
for m in [m.lower() for m in ctx.args[1:]]:
if m not in str_priv_dict:
return f'Not found: {m}.'
bits |= str_priv_dict[m]
if not (t := await glob.players.get_ensure(name=ctx.args[0])):
return 'Could not find user.'
await t.add_privs(bits)
return f"Updated {t}'s privileges."
@command(Privileges.Dangerous, hidden=True)
async def rmpriv(ctx: Context) -> str:
"""Set privileges for a specified player (by name)."""
if len(ctx.args) < 2:
return 'Invalid syntax: !rmpriv <name> <role1 role2 role3 ...>'
bits = Privileges(0)
for m in [m.lower() for m in ctx.args[1:]]:
if m not in str_priv_dict:
return f'Not found: {m}.'
bits |= str_priv_dict[m]
if not (t := await glob.players.get_ensure(name=ctx.args[0])):
return 'Could not find user.'
await t.remove_privs(bits)
return f"Updated {t}'s privileges."
@command(Privileges.Dangerous)
async def wipemap(ctx: Context) -> str:
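"""Wipe all scores on the most recently /np'ed map."""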
if ctx.args:
return 'Invalid syntax: !wipemap'
if time.time() >= ctx.player.last_np['timeout']:
return 'Please /np a map first!'
map_md5 = ctx.player.last_np['bmap'].md5
# delete scores from all tables
async with glob.db.pool.acquire() as conn:
async with conn.cursor() as db_cursor:
for t in ('vn', 'rx', 'ap'):
await db_cursor.execute(
f'DELETE FROM scores_{t} '
'WHERE map_md5 = %s',
[map_md5]
)
return 'Scores wiped.'
@command(Privileges.Dangerous, hidden=True)
async def menu(ctx: Context) -> str:
"""Temporary command to illustrate the menu option idea."""
ctx.player.send_current_menu()
@command(Privileges.Dangerous, aliases=['re'])
async def reload(ctx: Context) -> str:
"""Reload a python module."""
if len(ctx.args) != 1:
return 'Invalid syntax: !reload <module>'
parent, *children = ctx.args[0].split('.')
try:
mod = __import__(parent)
except ModuleNotFoundError:
return 'Module not found.'
try:
for child in children:
mod = getattr(mod, child)
except AttributeError:
return f'Failed at {child}.'
try:
mod = importlib.reload(mod)
except TypeError as exc:
return f'{exc.args[0]}.'
return f'Reloaded {mod.__name__}'
@command(Privileges.Normal)
async def server(ctx: Context) -> str:
"""Retrieve performance data about the server."""
build_str = f'gulag v{glob.version!r} ({glob.config.domain})'
# get info about this process
proc = psutil.Process(os.getpid())
uptime = int(time.time() - proc.create_time())
# get info about our cpu
with open('/proc/cpuinfo') as f:
header = 'model name\t: '
trailer = '\n'
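# count how many cores report each model name, stripping the prefix & trailing newline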
model_names = Counter(
line[len(header):-len(trailer)]
for line in f.readlines()
if line.startswith('model name')
)
# list of all cpus installed with thread count
cpus_info = ' | '.join([f'{v}x {k}' for k, v in model_names.most_common()])
# get system-wide ram usage
sys_ram = psutil.virtual_memory()
# output ram usage as `{gulag_used}MB / {sys_used}MB / {sys_total}MB`
gulag_ram = proc.memory_info()[0]
ram_values = (gulag_ram, sys_ram.used, sys_ram.total)
ram_info = ' / '.join([f'{v // 1024 ** 2}MB' for v in ram_values])
# divide up pkg versions, 3 displayed per line, e.g.
# aiohttp v3.6.3 | aiomysql v0.0.21 | bcrypt v3.2.0
# cmyui v1.7.3 | datadog v0.40.1 | geoip2 v4.1.0
# mysql-connector-python v8.0.23 | orjson v3.5.1
# psutil v5.8.0 | py3rijndael v0.3.3 | uvloop v0.15.2
reqs = (Path.cwd() / 'ext/requirements.txt').read_text().splitlines()
pkg_sections = [reqs[i:i+3] for i in range(0, len(reqs), 3)]
mirror_url = glob.config.mirror
using_osuapi = glob.config.osu_api_key != ''
advanced_mode = glob.config.advanced
auto_logging = glob.config.automatically_report_problems
return '\n'.join([
f'{build_str} | uptime: {seconds_readable(uptime)}',
f'cpu(s): {cpus_info}',
f'ram: {ram_info}',
f'mirror: {mirror_url} | osu!api connection: {using_osuapi}',
f'advanced mode: {advanced_mode} | auto logging: {auto_logging}',
'',
'requirements',
'\n'.join([' | '.join([
f'{pkg} v{pkg_version(pkg)}'
for pkg in section
]) for section in pkg_sections])
])
""" Advanced commands (only allowed with `advanced = True` in config) """
# NOTE: some of these commands are potentially dangerous, and only
# really intended for advanced users looking for access to lower level
# utilities. Some may give direct access to utilities that could perform
# harmful tasks on the underlying machine, so use at your own risk.
if glob.config.advanced:
from sys import modules as installed_mods
__py_namespace = globals() | {
mod: __import__(mod) for mod in (
'asyncio', 'dis', 'os', 'sys', 'struct', 'discord',
'cmyui', 'datetime', 'time', 'inspect', 'math',
'importlib'
) if mod in installed_mods
}
@command(Privileges.Dangerous)
async def py(ctx: Context) -> str:
"""Allow for (async) access to the python interpreter."""
# This can be very good for getting used to gulag's API; just look
# around the codebase and find things to play with in your server.
# Ex: !py return (await glob.players.get(name='cmyui')).status.action
if not ctx.args:
return 'owo'
# turn our input args into a coroutine definition string.
definition = '\n '.join([
'async def __py(ctx):',
' '.join(ctx.args)
])
try: # def __py(ctx)
exec(definition, __py_namespace) # add to namespace
ret = await __py_namespace['__py'](ctx) # await its return
except Exception as exc: # return exception in osu! chat
ret = f'{exc.__class__}: {exc}'
if '__py' in __py_namespace:
del __py_namespace['__py']
if ret is None:
return 'Success'
# TODO: perhaps size checks?
if not isinstance(ret, str):
ret = pprint.pformat(ret, compact=True)
return ret
""" Multiplayer commands
# The commands below are for multiplayer match management.
# Most commands are open to player usage.
"""
@mp_commands.add(Privileges.Normal, aliases=['h'])
async def mp_help(ctx: Context) -> str:
"""Show all documented multiplayer commands the player can access."""
prefix = glob.config.command_prefix
cmds = []
for cmd in mp_commands.commands:
if not cmd.doc or ctx.player.priv & cmd.priv != cmd.priv:
# no doc, or insufficient permissions.
continue
cmds.append(f'{prefix}mp {cmd.triggers[0]}: {cmd.doc}')
return '\n'.join(cmds)
@mp_commands.add(Privileges.Normal, aliases=['st'])
async def mp_start(ctx: Context) -> str:
"""Start the current multiplayer match, with any players ready."""
if len(ctx.args) > 1:
return 'Invalid syntax: !mp start <force/seconds>'
# this command can be used in a few different ways;
# !mp start: start the match now (make sure all players are ready)
# !mp start force: start the match now (don't check for ready)
# !mp start N: start the match in N seconds (don't check for ready)
# !mp start cancel: cancel the current match start timer
if not ctx.args:
# !mp start
if ctx.match.starting['start'] is not None:
time_remaining = int(ctx.match.starting['time'] - time.time())
return f'Match starting in {time_remaining} seconds.'
if any([s.status == SlotStatus.not_ready for s in ctx.match.slots]):
return 'Not all players are ready (`!mp start force` to override).'
else:
if ctx.args[0].isdecimal():
# !mp start N
if ctx.match.starting['start'] is not None:
time_remaining = int(ctx.match.starting['time'] - time.time())
return f'Match starting in {time_remaining} seconds.'
# !mp start <seconds>
duration = int(ctx.args[0])
if not 0 < duration <= 300:
return 'Timer range is 1-300 seconds.'
def _start() -> None:
"""Remove any pending timers & start the match."""
# remove start & alert timers
ctx.match.starting['start'] = None
ctx.match.starting['alerts'] = None
ctx.match.starting['time'] = None
# make sure player didn't leave the
# match since queueing this start lol..
if ctx.player not in ctx.match:
ctx.match.chat.send_bot('Player left match? (cancelled)')
return
ctx.match.start()
ctx.match.chat.send_bot('Starting match.')
def _alert_start(t: int) -> None:
"""Alert the match of the impending start."""
ctx.match.chat.send_bot(f'Match starting in {t} seconds.')
# add timers to our match object,
# so we can cancel them if needed.
ctx.match.starting['start'] = glob.loop.call_later(duration, _start)
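# each alert lambda binds t as a default argument so it keeps its own value (avoids late binding)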
ctx.match.starting['alerts'] = [
glob.loop.call_later(duration - t, lambda t=t: _alert_start(t))
for t in (60, 30, 10, 5, 4, 3, 2, 1) if t < duration
]
ctx.match.starting['time'] = time.time() + duration
return f'Match will start in {duration} seconds.'
elif ctx.args[0] in ('cancel', 'c'):
# !mp start cancel
if ctx.match.starting['start'] is None:
return 'Match timer not active!'
ctx.match.starting['start'].cancel()
for alert in ctx.match.starting['alerts']:
alert.cancel()
ctx.match.starting['start'] = None
ctx.match.starting['alerts'] = None
ctx.match.starting['time'] = None
return 'Match timer cancelled.'
elif ctx.args[0] not in ('force', 'f'):
return 'Invalid syntax: !mp start <force/seconds>'
# !mp start force simply passes through
ctx.match.start()
return 'Good luck!'
@mp_commands.add(Privileges.Normal, aliases=['a'])
async def mp_abort(ctx: Context) -> str:
"""Abort the current in-progress multiplayer match."""
if not ctx.match.in_progress:
return 'Abort what?'
ctx.match.unready_players(expected=SlotStatus.playing)
ctx.match.in_progress = False
ctx.match.enqueue(packets.matchAbort())
ctx.match.enqueue_state()
return 'Match aborted.'
@mp_commands.add(Privileges.Normal)
async def mp_map(ctx: Context) -> str:
"""Set the current match's current map by id."""
if len(ctx.args) != 1 or not ctx.args[0].isdecimal():
return 'Invalid syntax: !mp map <beatmapid>'
map_id = int(ctx.args[0])
if map_id == ctx.match.map_id:
return 'Map already selected.'
if not (bmap := await Beatmap.from_bid(map_id)):
return 'Beatmap not found.'
ctx.match.map_id = bmap.id
ctx.match.map_md5 = bmap.md5
ctx.match.map_name = bmap.full
ctx.match.mode = bmap.mode
ctx.match.enqueue_state()
return f'Selected: {bmap.embed}.'
@mp_commands.add(Privileges.Normal)
async def mp_mods(ctx: Context) -> str:
"""Set the current match's mods, from string form."""
if len(ctx.args) != 1 or len(ctx.args[0]) % 2 != 0:
return 'Invalid syntax: !mp mods <mods>'
mods = Mods.from_modstr(ctx.args[0])
mods = mods.filter_invalid_combos(ctx.match.mode.as_vanilla)
if ctx.match.freemods:
if ctx.player is ctx.match.host:
# allow host to set speed-changing mods.
ctx.match.mods = mods & SPEED_CHANGING_MODS
# set slot mods
ctx.match.get_slot(ctx.player).mods = mods & ~SPEED_CHANGING_MODS
else:
# not freemods, set match mods.
ctx.match.mods = mods
ctx.match.enqueue_state()
return 'Match mods updated.'
@mp_commands.add(Privileges.Normal, aliases=['fm', 'fmods'])
async def mp_freemods(ctx: Context) -> str:
"""Toggle freemods status for the match."""
if len(ctx.args) != 1 or ctx.args[0] not in ('on', 'off'):
return 'Invalid syntax: !mp freemods <on/off>'
if ctx.args[0] == 'on':
# central mods -> all players mods.
ctx.match.freemods = True
for s in ctx.match.slots:
if s.status & SlotStatus.has_player:
# the slot takes any non-speed
# changing mods from the match.
s.mods = ctx.match.mods & ~SPEED_CHANGING_MODS
ctx.match.mods &= SPEED_CHANGING_MODS
else:
# host mods -> central mods.
ctx.match.freemods = False
host = ctx.match.get_host_slot() # should always exist
# the match keeps any speed-changing mods,
# and also takes any mods the host has enabled.
ctx.match.mods &= SPEED_CHANGING_MODS
ctx.match.mods |= host.mods
for s in ctx.match.slots:
if s.status & SlotStatus.has_player:
s.mods = Mods.NOMOD
ctx.match.enqueue_state()
return 'Match freemod status updated.'
@mp_commands.add(Privileges.Normal)
async def mp_host(ctx: Context) -> str:
"""Set the current match's current host by id."""
if len(ctx.args) != 1:
return 'Invalid syntax: !mp host <name>'
if not (t := glob.players.get(name=ctx.args[0])):
return 'Could not find a user by that name.'
if t is ctx.match.host:
return "They're already host, silly!"
if t not in ctx.match:
return 'Found no such player in the match.'
ctx.match.host = t
ctx.match.host.enqueue(packets.matchTransferHost())
ctx.match.enqueue_state(lobby=False)
return 'Match host updated.'
@mp_commands.add(Privileges.Normal)
async def mp_randpw(ctx: Context) -> str:
"""Randomize the current match's password."""
ctx.match.passwd = secrets.token_hex(8)
return 'Match password randomized.'
@mp_commands.add(Privileges.Normal, aliases=['inv'])
async def mp_invite(ctx: Context) -> str:
"""Invite a player to the current match by name."""
if len(ctx.args) != 1:
return 'Invalid syntax: !mp invite <name>'
if not (t := glob.players.get(name=ctx.args[0])):
return 'Could not find a user by that name.'
if t is glob.bot:
return "I'm too busy!"
if t is ctx.player:
return "You can't invite yourself!"
t.enqueue(packets.matchInvite(ctx.player, t.name))
return f'Invited {t} to the match.'
@mp_commands.add(Privileges.Normal)
async def mp_addref(ctx: Context) -> str:
"""Add a referee to the current match by name."""
if len(ctx.args) != 1:
return 'Invalid syntax: !mp addref <name>'
if not (t := glob.players.get(name=ctx.args[0])):
return 'Could not find a user by that name.'
if t not in ctx.match:
return 'User must be in the current match!'
if t in ctx.match.refs:
return f'{t} is already a match referee!'
ctx.match._refs.add(t)
return f'{t.name} added to match referees.'
@mp_commands.add(Privileges.Normal)
async def mp_rmref(ctx: Context) -> str:
"""Remove a referee from the current match by name."""
if len(ctx.args) != 1:
return 'Invalid syntax: !mp rmref <name>'
if not (t := glob.players.get(name=ctx.args[0])):
return 'Could not find a user by that name.'
if t not in ctx.match.refs:
return f'{t} is not a match referee!'
if t is ctx.match.host:
return 'The host is always a referee!'
ctx.match._refs.remove(t)
return f'{t.name} removed from match referees.'
@mp_commands.add(Privileges.Normal)
async def mp_listref(ctx: Context) -> str:
"""List all referees from the current match."""
return ', '.join(map(str, ctx.match.refs)) + '.'
@mp_commands.add(Privileges.Normal)
async def mp_lock(ctx: Context) -> str:
"""Lock all unused slots in the current match."""
for slot in ctx.match.slots:
if slot.status == SlotStatus.open:
slot.status = SlotStatus.locked
ctx.match.enqueue_state()
return 'All unused slots locked.'
@mp_commands.add(Privileges.Normal)
async def mp_unlock(ctx: Context) -> str:
"""Unlock locked slots in the current match."""
for slot in ctx.match.slots:
if slot.status == SlotStatus.locked:
slot.status = SlotStatus.open
ctx.match.enqueue_state()
return 'All locked slots unlocked.'
@mp_commands.add(Privileges.Normal)
async def mp_teams(ctx: Context) -> str:
"""Change the team type for the current match."""
if len(ctx.args) != 1:
return 'Invalid syntax: !mp teams <type>'
team_type = ctx.args[0]
if team_type in ('ffa', 'freeforall', 'head-to-head'):
ctx.match.team_type = MatchTeamTypes.head_to_head
elif team_type in ('tag', 'coop', 'co-op', 'tag-coop'):
ctx.match.team_type = MatchTeamTypes.tag_coop
elif team_type in ('teams', 'team-vs', 'teams-vs'):
ctx.match.team_type = MatchTeamTypes.team_vs
elif team_type in ('tag-teams', 'tag-team-vs', 'tag-teams-vs'):
ctx.match.team_type = MatchTeamTypes.tag_team_vs
else:
return 'Unknown team type. (ffa, tag, teams, tag-teams)'
# find the new appropriate default team.
# defaults are (ffa: neutral, teams: red).
if ctx.match.team_type in (
MatchTeamTypes.head_to_head,
MatchTeamTypes.tag_coop
):
new_t = MatchTeams.neutral
else:
new_t = MatchTeams.red
# change each active slots team to
# fit the corresponding team type.
for s in ctx.match.slots:
if s.status & SlotStatus.has_player:
s.team = new_t
if ctx.match.is_scrimming:
# reset score if scrimming.
ctx.match.reset_scrim()
ctx.match.enqueue_state()
return 'Match team type updated.'
@mp_commands.add(Privileges.Normal, aliases=['cond'])
async def mp_condition(ctx: Context) -> str:
"""Change the win condition for the match."""
if len(ctx.args) != 1:
return 'Invalid syntax: !mp condition <type>'
cond = ctx.args[0]
if cond == 'pp':
# special case - pp can't actually be used as an ingame
# win condition, but gulag allows it to be passed into
# this command during a scrim to use pp as a win cond.
if not ctx.match.is_scrimming:
return 'PP is only useful as a win condition during scrims.'
if ctx.match.use_pp_scoring:
return 'PP scoring already enabled.'
ctx.match.use_pp_scoring = True
else:
if ctx.match.use_pp_scoring:
ctx.match.use_pp_scoring = False
if cond == 'score':
ctx.match.win_condition = MatchWinConditions.score
elif cond in ('accuracy', 'acc'):
ctx.match.win_condition = MatchWinConditions.accuracy
elif cond == 'combo':
ctx.match.win_condition = MatchWinConditions.combo
elif cond in ('scorev2', 'v2'):
ctx.match.win_condition = MatchWinConditions.scorev2
else:
return 'Invalid win condition. (score, acc, combo, scorev2, *pp)'
ctx.match.enqueue_state(lobby=False)
return 'Match win condition updated.'
@mp_commands.add(Privileges.Normal, aliases=['autoref'])
async def mp_scrim(ctx: Context) -> str:
"""Start a scrim in the current match."""
if (
len(ctx.args) != 1 or
not (r_match := regexes.best_of.fullmatch(ctx.args[0]))
):
return 'Invalid syntax: !mp scrim <bo#>'
if not 0 <= (best_of := int(r_match[1])) < 16:
return 'Best of must be in range 0-15.'
winning_pts = (best_of // 2) + 1 if best_of else 0 # bo0 cancels the scrim
if winning_pts != 0:
# setting to real num
if ctx.match.is_scrimming:
return 'Already scrimming!'
if best_of % 2 == 0:
return 'Best of must be an odd number!'
ctx.match.is_scrimming = True
msg = (f'A scrimmage has been started by {ctx.player.name}; '
f'first to {winning_pts} points wins. Best of luck!')
else:
# setting to 0
if not ctx.match.is_scrimming:
return 'Not currently scrimming!'
ctx.match.is_scrimming = False
ctx.match.reset_scrim()
msg = 'Scrimming cancelled.'
ctx.match.winning_pts = winning_pts
return msg
@mp_commands.add(Privileges.Normal, aliases=['end'])
async def mp_endscrim(ctx: Context) -> str:
"""End the current matches ongoing scrim."""
if not ctx.match.is_scrimming:
return 'Not currently scrimming!'
ctx.match.is_scrimming = False
ctx.match.reset_scrim()
return 'Scrimmage ended.' # TODO: final score (get_score method?)
@mp_commands.add(Privileges.Normal, aliases=['rm'])
async def mp_rematch(ctx: Context) -> str:
"""Restart a scrim, or roll back previous match point."""
if ctx.args:
return 'Invalid syntax: !mp rematch'
if ctx.player is not ctx.match.host:
return 'Only available to the host.'
if not ctx.match.is_scrimming:
if ctx.match.winning_pts == 0:
msg = 'No scrim to rematch; to start one, use !mp scrim.'
else:
# re-start scrimming with old points
ctx.match.is_scrimming = True
msg = (
f'A rematch has been started by {ctx.player.name}; '
f'first to {ctx.match.winning_pts} points wins. Best of luck!'
)
else:
# reset the last match point awarded
if not ctx.match.winners:
return "No match points have yet been awarded!"
if (recent_winner := ctx.match.winners[-1]) is None:
return 'The last point was a tie!'
ctx.match.match_points[recent_winner] -= 1 # TODO: team name
ctx.match.winners.pop()
msg = f'A point has been deducted from {recent_winner}.'
return msg
@mp_commands.add(Privileges.Admin, aliases=['f'], hidden=True)
async def mp_force(ctx: Context) -> str:
"""Force a player into the current match by name."""
# NOTE: this overrides any limits such as silences or passwd.
if len(ctx.args) != 1:
return 'Invalid syntax: !mp force <name>'
if not (t := glob.players.get(name=ctx.args[0])):
return 'Could not find a user by that name.'
t.join_match(ctx.match, ctx.match.passwd)
return 'Welcome.'
# mappool-related mp commands
@mp_commands.add(Privileges.Normal, aliases=['lp'])
async def mp_loadpool(ctx: Context) -> str:
"""Load a mappool into the current match."""
if len(ctx.args) != 1:
return 'Invalid syntax: !mp loadpool <name>'
if ctx.player is not ctx.match.host:
return 'Only available to the host.'
name = ctx.args[0]
if not (pool := glob.pools.get(name)):
return 'Could not find a pool by that name!'
if ctx.match.pool is pool:
return f'{pool!r} already selected!'
ctx.match.pool = pool
return f'{pool!r} selected.'
@mp_commands.add(Privileges.Normal, aliases=['ulp'])
async def mp_unloadpool(ctx: Context) -> str:
"""Unload the current matches mappool."""
if ctx.args:
return 'Invalid syntax: !mp unloadpool'
if ctx.player is not ctx.match.host:
return 'Only available to the host.'
if not ctx.match.pool:
return 'No mappool currently selected!'
ctx.match.pool = None
return 'Mappool unloaded.'
@mp_commands.add(Privileges.Normal)
async def mp_ban(ctx: Context) -> str:
"""Ban a pick in the currently loaded mappool."""
if len(ctx.args) != 1:
return 'Invalid syntax: !mp ban <pick>'
if not ctx.match.pool:
return 'No pool currently selected!'
mods_slot = ctx.args[0]
# separate mods & slot
if not (r_match := regexes.mappool_pick.fullmatch(mods_slot)):
return 'Invalid pick syntax; correct example: HD2'
# not calling mods.filter_invalid_combos here intentionally.
mods = Mods.from_modstr(r_match[1])
slot = int(r_match[2])
if (mods, slot) not in ctx.match.pool.maps:
return f'Found no {mods_slot} pick in the pool.'
if (mods, slot) in ctx.match.bans:
return 'That pick is already banned!'
ctx.match.bans.add((mods, slot))
return f'{mods_slot} banned.'
@mp_commands.add(Privileges.Normal)
async def mp_unban(ctx: Context) -> str:
"""Unban a pick in the currently loaded mappool."""
if len(ctx.args) != 1:
return 'Invalid syntax: !mp unban <pick>'
if not ctx.match.pool:
return 'No pool currently selected!'
mods_slot = ctx.args[0]
# separate mods & slot
if not (r_match := regexes.mappool_pick.fullmatch(mods_slot)):
return 'Invalid pick syntax; correct example: HD2'
# not calling mods.filter_invalid_combos here intentionally.
mods = Mods.from_modstr(r_match[1])
slot = int(r_match[2])
if (mods, slot) not in ctx.match.pool.maps:
return f'Found no {mods_slot} pick in the pool.'
if (mods, slot) not in ctx.match.bans:
return 'That pick is not currently banned!'
ctx.match.bans.remove((mods, slot))
return f'{mods_slot} unbanned.'
@mp_commands.add(Privileges.Normal)
async def mp_pick(ctx: Context) -> str:
"""Pick a map from the currently loaded mappool."""
if len(ctx.args) != 1:
return 'Invalid syntax: !mp pick <pick>'
if not ctx.match.pool:
return 'No pool currently loaded!'
mods_slot = ctx.args[0]
# separate mods & slot
if not (r_match := regexes.mappool_pick.fullmatch(mods_slot)):
return 'Invalid pick syntax; correct example: HD2'
# not calling mods.filter_invalid_combos here intentionally.
mods = Mods.from_modstr(r_match[1])
slot = int(r_match[2])
if (mods, slot) not in ctx.match.pool.maps:
return f'Found no {mods_slot} pick in the pool.'
if (mods, slot) in ctx.match.bans:
return f'{mods_slot} has been banned from being picked.'
# update match beatmap to the picked map.
bmap = ctx.match.pool.maps[(mods, slot)]
ctx.match.map_md5 = bmap.md5
ctx.match.map_id = bmap.id
ctx.match.map_name = bmap.full
# TODO: some kind of abstraction allowing
# for something like !mp pick fm.
if ctx.match.freemods:
# if freemods are enabled, disable them.
ctx.match.freemods = False
for s in ctx.match.slots:
if s.status & SlotStatus.has_player:
s.mods = Mods.NOMOD
# update match mods to the picked map.
ctx.match.mods = mods
ctx.match.enqueue_state()
return f'Picked {bmap.embed}. ({mods_slot})'
""" Mappool management commands
# The commands below are for event managers
# and tournament hosts/referees to help automate
# tedious processes of running tournaments.
"""
@pool_commands.add(Privileges.Tournament, aliases=['h'], hidden=True)
async def pool_help(ctx: Context) -> str:
"""Show all documented mappool commands the player can access."""
prefix = glob.config.command_prefix
cmds = []
for cmd in pool_commands.commands:
if not cmd.doc or ctx.player.priv & cmd.priv != cmd.priv:
# no doc, or insufficient permissions.
continue
cmds.append(f'{prefix}pool {cmd.triggers[0]}: {cmd.doc}')
return '\n'.join(cmds)
@pool_commands.add(Privileges.Tournament, aliases=['c'], hidden=True)
async def pool_create(ctx: Context) -> str:
"""Add a new mappool to the database."""
if len(ctx.args) != 1:
return 'Invalid syntax: !pool create <name>'
name = ctx.args[0]
if glob.pools.get(name):
return 'Pool already exists by that name!'
# insert pool into db
await glob.db.execute(
'INSERT INTO tourney_pools '
'(name, created_at, created_by) '
'VALUES (%s, NOW(), %s)',
[name, ctx.player.id]
)
# add to cache (get from sql for id & time)
res = await glob.db.fetch('SELECT * FROM tourney_pools '
'WHERE name = %s', [name])
res['created_by'] = await glob.players.get_ensure(id=res['created_by'])
glob.pools.append(MapPool(**res))
return f'{name} created.'
@pool_commands.add(Privileges.Tournament, aliases=['del', 'd'], hidden=True)
async def pool_delete(ctx: Context) -> str:
"""Remove a mappool from the database."""
if len(ctx.args) != 1:
return 'Invalid syntax: !pool delete <name>'
name = ctx.args[0]
if not (pool := glob.pools.get(name)):
return 'Could not find a pool by that name!'
# delete from db
await glob.db.execute(
'DELETE FROM tourney_pools '
'WHERE name = %s',
[name]
)
# remove from cache
glob.pools.remove(pool)
return f'{name} deleted.'
@pool_commands.add(Privileges.Tournament, aliases=['a'], hidden=True)
async def pool_add(ctx: Context) -> str:
"""Add a new map to a mappool in the database."""
if len(ctx.args) != 2:
return 'Invalid syntax: !pool add <name> <pick>'
if time.time() >= ctx.player.last_np['timeout']:
return 'Please /np a map first!'
name, mods_slot = ctx.args
mods_slot = mods_slot.upper() # ocd
bmap = ctx.player.last_np['bmap']
# separate mods & slot
if not (r_match := regexes.mappool_pick.fullmatch(mods_slot)):
return 'Invalid pick syntax; correct example: HD2'
if len(r_match[1]) % 2 != 0:
return 'Invalid mods.'
# not calling mods.filter_invalid_combos here intentionally.
mods = Mods.from_modstr(r_match[1])
slot = int(r_match[2])
if not (pool := glob.pools.get(name)):
return 'Could not find a pool by that name!'
if (mods, slot) in pool.maps:
return f'{mods_slot} is already {pool.maps[(mods, slot)].embed}!'
if bmap in pool.maps.values():
return 'Map is already in the pool!'
# insert into db
await glob.db.execute(
'INSERT INTO tourney_pool_maps '
'(map_id, pool_id, mods, slot) '
'VALUES (%s, %s, %s, %s)',
[bmap.id, pool.id, mods, slot]
)
# add to cache
pool.maps[(mods, slot)] = bmap
return f'{bmap.embed} added to {name}.'
@pool_commands.add(Privileges.Tournament, aliases=['rm', 'r'], hidden=True)
async def pool_remove(ctx: Context) -> str:
"""Remove a map from a mappool in the database."""
if len(ctx.args) != 2:
return 'Invalid syntax: !pool remove <name> <pick>'
name, mods_slot = ctx.args
mods_slot = mods_slot.upper() # ocd
# separate mods & slot
if not (r_match := regexes.mappool_pick.fullmatch(mods_slot)):
return 'Invalid pick syntax; correct example: HD2'
# not calling mods.filter_invalid_combos here intentionally.
mods = Mods.from_modstr(r_match[1])
slot = int(r_match[2])
if not (pool := glob.pools.get(name)):
return 'Could not find a pool by that name!'
if (mods, slot) not in pool.maps:
return f'Found no {mods_slot} pick in the pool.'
# delete from db
await glob.db.execute(
'DELETE FROM tourney_pool_maps '
'WHERE pool_id = %s AND mods = %s AND slot = %s',
[pool.id, mods, slot]
)
# remove from cache
del pool.maps[(mods, slot)]
return f'{mods_slot} removed from {name}.'
@pool_commands.add(Privileges.Tournament, aliases=['l'], hidden=True)
async def pool_list(ctx: Context) -> str:
"""List all existing mappools information."""
if not (pools := glob.pools):
return 'There are currently no pools!'
l = [f'Mappools ({len(pools)})']
for pool in pools:
l.append(
f'[{pool.created_at:%Y-%m-%d}] {pool.id}. '
f'{pool.name}, by {pool.created_by}.'
)
return '\n'.join(l)
@pool_commands.add(Privileges.Tournament, aliases=['i'], hidden=True)
async def pool_info(ctx: Context) -> str:
"""Get all information for a specific mappool."""
if len(ctx.args) != 1:
return 'Invalid syntax: !pool info <name>'
name = ctx.args[0]
if not (pool := glob.pools.get(name)):
return 'Could not find a pool by that name!'
_time = pool.created_at.strftime('%H:%M:%S%p')
_date = pool.created_at.strftime('%Y-%m-%d')
datetime_fmt = f'Created at {_time} on {_date}'
l = [f'{pool.id}. {pool.name}, by {pool.created_by} | {datetime_fmt}.']
for (mods, slot), bmap in pool.maps.items():
l.append(f'{mods!r}{slot}: {bmap.embed}')
return '\n'.join(l)
""" Clan managment commands
# The commands below are for managing gulag
# clans, for users, clan staff, and server staff.
"""
@clan_commands.add(Privileges.Normal, aliases=['h'])
async def clan_help(ctx: Context) -> str:
"""Show all documented clan commands the player can access."""
prefix = glob.config.command_prefix
cmds = []
for cmd in clan_commands.commands:
if not cmd.doc or ctx.player.priv & cmd.priv != cmd.priv:
# no doc, or insufficient permissions.
continue
cmds.append(f'{prefix}clan {cmd.triggers[0]}: {cmd.doc}')
return '\n'.join(cmds)
@clan_commands.add(Privileges.Normal, aliases=['c'])
async def clan_create(ctx: Context) -> str:
"""Create a clan with a given tag & name."""
if len(ctx.args) < 2:
return 'Invalid syntax: !clan create <tag> <name>'
if not 1 <= len(tag := ctx.args[0].upper()) <= 6:
return 'Clan tag may be 1-6 characters long.'
if not 2 <= len(name := ' '.join(ctx.args[1:])) <= 16:
return 'Clan name may be 2-16 characters long.'
if ctx.player.clan:
return f"You're already a member of {ctx.player.clan}!"
if glob.clans.get(name=name):
return 'That name has already been claimed by another clan.'
if glob.clans.get(tag=tag):
return 'That tag has already been claimed by another clan.'
created_at = datetime.now()
# add clan to sql (generates id)
clan_id = await glob.db.execute(
'INSERT INTO clans '
'(name, tag, created_at, owner) '
'VALUES (%s, %s, %s, %s)',
[name, tag, created_at, ctx.player.id]
)
# add clan to cache
clan = Clan(id=clan_id, name=name, tag=tag,
created_at=created_at, owner=ctx.player.id)
glob.clans.append(clan)
# set owner's clan & clan priv (cache & sql)
ctx.player.clan = clan
ctx.player.clan_priv = ClanPrivileges.Owner
clan.owner = ctx.player.id
clan.members.add(ctx.player.id)
if 'full_name' in ctx.player.__dict__:
del ctx.player.full_name # wipe cached_property
await glob.db.execute(
'UPDATE users '
'SET clan_id = %s, '
'clan_priv = 3 ' # ClanPrivileges.Owner
'WHERE id = %s',
[clan_id, ctx.player.id]
)
# announce clan creation
if announce_chan := glob.channels['#announce']:
msg = f'\x01ACTION founded {clan!r}.'
announce_chan.send(msg, sender=ctx.player, to_self=True)
return f'{clan!r} created.'
@clan_commands.add(Privileges.Normal, aliases=['delete', 'd'])
async def clan_disband(ctx: Context) -> str:
"""Disband a clan (admins may disband others clans)."""
if ctx.args:
# disband a specified clan by tag
if ctx.player not in glob.players.staff:
return 'Only staff members may disband the clans of others.'
if not (clan := glob.clans.get(tag=' '.join(ctx.args).upper())):
return 'Could not find a clan by that tag.'
else:
# disband the player's clan
if not (clan := ctx.player.clan):
return "You're not a member of a clan!"
# delete clan from sql
await glob.db.execute(
'DELETE FROM clans '
'WHERE id = %s',
[clan.id]
)
# remove all members from the clan,
# reset their clan privs (cache & sql).
# NOTE: only online players need to be uncached.
for member_id in clan.members:
if member := glob.players.get(id=member_id):
member.clan = None
member.clan_priv = None
if 'full_name' in member.__dict__:
del member.full_name # wipe cached_property
await glob.db.execute(
'UPDATE users '
'SET clan_id = 0, '
'clan_priv = 0 '
'WHERE clan_id = %s',
[clan.id]
)
# remove clan from cache
glob.clans.remove(clan)
# announce clan disbanding
if announce_chan := glob.channels['#announce']:
msg = f'\x01ACTION disbanded {clan!r}.'
announce_chan.send(msg, sender=ctx.player, to_self=True)
return f'{clan!r} disbanded.'
@clan_commands.add(Privileges.Normal, aliases=['i'])
async def clan_info(ctx: Context) -> str:
"""Lookup information of a clan by tag."""
if not ctx.args:
return 'Invalid syntax: !clan info <tag>'
if not (clan := glob.clans.get(tag=' '.join(ctx.args).upper())):
return 'Could not find a clan by that tag.'
msg = [f"{clan!r} | Founded {clan.created_at:%b %d, %Y}."]
# get members privs from sql
res = await glob.db.fetchall(
'SELECT name, clan_priv '
'FROM users '
'WHERE clan_id = %s '
'ORDER BY clan_priv DESC',
[clan.id], _dict=False
)
for member_name, clan_priv in res:
priv_str = ('Member', 'Officer', 'Owner')[clan_priv - 1]
msg.append(f'[{priv_str}] {member_name}')
return '\n'.join(msg)
# TODO: !clan inv, !clan join, !clan leave
@clan_commands.add(Privileges.Normal, aliases=['l'])
async def clan_list(ctx: Context) -> str:
"""List all existing clans information."""
if ctx.args:
if len(ctx.args) != 1 or not ctx.args[0].isdecimal():
return 'Invalid syntax: !clan list (page)'
else:
offset = 25 * int(ctx.args[0])
else:
offset = 0
if offset >= (total_clans := len(glob.clans)):
return 'No clans found.'
msg = [f'gulag clans listing ({total_clans} total).']
for idx, clan in enumerate(glob.clans[offset:offset + 25], offset): # 25 clans per page
msg.append(f'{idx + 1}. {clan!r}')
return '\n'.join(msg)
async def process_commands(p: Player, t: Messageable,
msg: str) -> Optional[CommandResponse]:
# response is either a CommandResponse if we hit a command,
# or simply False if we don't have any command hits.
start_time = clock_ns()
prefix_len = len(glob.config.command_prefix)
trigger, *args = msg[prefix_len:].strip().split(' ')
# case-insensitive triggers
trigger = trigger.lower()
for cmd_set in command_sets:
# check if any command sets match.
if trigger == cmd_set.trigger:
# matching set found;
if not args:
args = ['help']
if trigger == 'mp':
# multi set is a bit of a special case,
# as we do some additional checks.
if not (m := p.match):
# player not in a match
return
if t is not m.chat:
# message not in match channel
return
if args[0] != 'help' and (p not in m.refs and
not p.priv & Privileges.Tournament):
# doesn't have privs to use !mp commands (allow help).
return
t = m # send match for mp commands instead of chan
trigger, *args = args # get subcommand
# case-insensitive triggers
trigger = trigger.lower()
commands = cmd_set.commands
break
else:
# no set commands matched, check normal commands.
commands = regular_commands
for cmd in commands:
if (
trigger in cmd.triggers and
p.priv & cmd.priv == cmd.priv
):
# found matching trigger with sufficient privs
ctx = Context(player=p, trigger=trigger, args=args)
if isinstance(t, Match):
ctx.match = t
else:
ctx.recipient = t
# command found & we have privileges, run it.
if res := await cmd.callback(ctx):
elapsed = cmyui.utils.magnitude_fmt_time(clock_ns() - start_time)
return {
'resp': f'{res} | Elapsed: {elapsed}',
'hidden': cmd.hidden
}
return {'hidden': False}
| 32.730417
| 182
| 0.597495
|
fa4a994e9123f80a551a9ea97895f95e25326e4d
| 12,805
|
py
|
Python
|
tests/test_create.py
|
fschleich/mhl
|
5ae1f083c05f44a4e9751dc233f7d393b01e2210
|
[
"MIT"
] | null | null | null |
tests/test_create.py
|
fschleich/mhl
|
5ae1f083c05f44a4e9751dc233f7d393b01e2210
|
[
"MIT"
] | null | null | null |
tests/test_create.py
|
fschleich/mhl
|
5ae1f083c05f44a4e9751dc233f7d393b01e2210
|
[
"MIT"
] | null | null | null |
"""
__author__ = "Alexander Sahm"
__copyright__ = "Copyright 2020, Pomfort GmbH"
__license__ = "MIT"
__maintainer__ = "Patrick Renner, Alexander Sahm"
__email__ = "opensource@pomfort.com"
"""
import os
from freezegun import freeze_time
from click.testing import CliRunner
from ascmhl.history import MHLHistory
import ascmhl.commands
scenario_output_path = "examples/scenarios/Output"
fake_ref_path = "/ref"
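# NOTE: the `fs` argument in the tests below is the pyfakefs fixture; it gives each test an in-memory filesystem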
@freeze_time("2020-01-16 09:15:00")
def test_create_succeed(fs):
fs.create_file("/root/Stuff.txt", contents="stuff\n")
fs.create_file("/root/A/A1.txt", contents="A1\n")
runner = CliRunner()
result = runner.invoke(ascmhl.commands.create, ["/root", "-h", "xxh64", "-v"])
assert not result.exception
assert os.path.exists("/root/ascmhl/0001_root_2020-01-16_091500.mhl")
# with open('/root/ascmhl/0001_root_2020-01-16_091500.mhl', 'r') as fin:
# print(fin.read())
assert os.path.exists("/root/ascmhl/ascmhl_chain.xml")
@freeze_time("2020-01-16 09:15:00")
def test_create_directory_hashes(fs):
fs.create_file("/root/Stuff.txt", contents="stuff\n")
fs.create_file("/root/A/A1.txt", contents="A1\n")
result = CliRunner().invoke(ascmhl.commands.create, ["/root", "-h", "xxh64", "-v"])
assert result.exit_code == 0
# a directory hash for the folder A was created
hash_list = MHLHistory.load_from_path("/root").hash_lists[0]
assert hash_list.find_media_hash_for_path("A").is_directory
assert hash_list.find_media_hash_for_path("A").hash_entries[0].hash_string == "95e230e90be29dd6"
# and the directory hash of the root folder is set in the header
assert hash_list.process_info.root_media_hash.hash_entries[0].hash_string == "36e824bc313f3b77"
# test that the directory-hash command creates the same directory hashes
# FIXME: command doesn't exist any more, replace with tests of verify directory hashes command?
# result = CliRunner().invoke(ascmhl.commands.directory_hash, ["/root", "-v"])
# assert result.exit_code == 0
# assert "directory hash for: /root/A xxh64: ee2c3b94b6eecb8d" in result.output
# assert "root hash: xxh64: 15ef0ade91fff267" in result.output
# add some more files and folders
fs.create_file("/root/B/B1.txt", contents="B1\n")
fs.create_file("/root/A/A2.txt", contents="A2\n")
fs.create_file("/root/A/AA/AA1.txt", contents="AA1\n")
os.mkdir("/root/emptyFolderA")
os.mkdir("/root/emptyFolderB")
os.mkdir("/root/emptyFolderC")
os.mkdir("/root/emptyFolderC/emptyFolderCA")
os.mkdir("/root/emptyFolderC/emptyFolderCB")
runner = CliRunner()
result = runner.invoke(ascmhl.commands.create, ["/root", "-v", "-h", "xxh64"])
assert result.exit_code == 0
hash_list = MHLHistory.load_from_path("/root").hash_lists[-1]
# due to the additional content the directory hash of folder A and the root folder changed
assert hash_list.find_media_hash_for_path("A").hash_entries[0].hash_string == "a8d0ad812ab102bd"
assert hash_list.process_info.root_media_hash.hash_entries[0].hash_string == "d6b881fed0b325bd"
# empty folders all have the same directory hash
assert hash_list.find_media_hash_for_path("emptyFolderA").hash_entries[0].hash_string == "ef46db3751d8e999"
assert hash_list.find_media_hash_for_path("emptyFolderB").hash_entries[0].hash_string == "ef46db3751d8e999"
# but since the dir hashes also include the file names, an empty folder that contains other empty folders
# has a different directory structure hash
assert (
hash_list.find_media_hash_for_path("emptyFolderC").hash_entries[0].structure_hash_string == "a5e6b8f95dfe2762"
)
# the content hash stays the same
assert hash_list.find_media_hash_for_path("emptyFolderC").hash_entries[0].hash_string == "ef46db3751d8e999"
# test that the directory-hash command creates the same directory hashes
# FIXME: command doesn't exist any more, replace with tests of verify directory hashes command?
# result = CliRunner().invoke(ascmhl.commands.directory_hash, ["/root"])
# assert result.exit_code == 0
# assert " calculated root hash: xxh64: 5f4af3b3fd736415" in result.output
# altering the content of one file
with open("/root/A/A2.txt", "a") as file:
file.write("!!")
runner = CliRunner()
result = runner.invoke(ascmhl.commands.create, ["/root", "-v", "-h", "xxh64"])
assert "ERROR: hash mismatch for A/A2.txt" in result.output
hash_list = MHLHistory.load_from_path("/root").hash_lists[-1]
# an altered file leads to a different root directory hash
assert hash_list.process_info.root_media_hash.hash_entries[0].hash_string == "cae6659fc7b34c2f"
# structure hash stays the same
assert hash_list.process_info.root_media_hash.hash_entries[0].structure_hash_string == "2c99e94e8fa7d90c"
# test that the directory-hash command creates the same root hash
# FIXME: command doesn't exist any more, replace with tests of verify directory hashes command?
# result = CliRunner().invoke(ascmhl.commands.directory_hash, ["/root"])
# assert result.exit_code == 0
# assert "root hash: xxh64: adf18c910489663c" in result.output
assert hash_list.find_media_hash_for_path("B").hash_entries[0].hash_string == "51fb8fb099e92821"
assert hash_list.find_media_hash_for_path("B").hash_entries[0].structure_hash_string == "945ecf443295ffbd"
assert hash_list.process_info.root_media_hash.hash_entries[0].hash_string == "cae6659fc7b34c2f"
assert hash_list.process_info.root_media_hash.hash_entries[0].structure_hash_string == "2c99e94e8fa7d90c"
# rename one file
os.rename("/root/B/B1.txt", "/root/B/B2.txt")
runner = CliRunner()
result = runner.invoke(ascmhl.commands.create, ["/root", "-v", "-h", "xxh64"])
assert "ERROR: hash mismatch for A/A2.txt" in result.output
# in addition to the failing verification we also have a missing file B/B1.txt
assert "missing file(s):\n B/B1.txt" in result.output
hash_list = MHLHistory.load_from_path("/root").hash_lists[-1]
# the file name is part of the structure directory hash of the containing directory, so its structure hash changes
assert hash_list.find_media_hash_for_path("B").hash_entries[0].structure_hash_string == "fa4e99472911e118"
# .. and the content hash stays the same
assert hash_list.find_media_hash_for_path("B").hash_entries[0].hash_string == "51fb8fb099e92821"
# a renamed file also leads to a different root structure directory hash
assert hash_list.process_info.root_media_hash.hash_entries[0].structure_hash_string == "b758c9b165fb6c2a"
# and an unchanged content hash
assert hash_list.process_info.root_media_hash.hash_entries[0].hash_string == "cae6659fc7b34c2f"
# test that the directory-hash command creates the same root hash
# FIXME: command doesn't exist any more, replace with tests of verify directory hashes command?
# result = CliRunner().invoke(ascmhl.commands.directory_hash, ["/root"])
# assert result.exit_code == 0
# assert "root hash: xxh64: 01441cdf1803e2b8" in result.output
@freeze_time("2020-01-16 09:15:00")
def test_create_no_directory_hashes(fs):
fs.create_file("/root/Stuff.txt", contents="stuff\n")
fs.create_file("/root/A/A1.txt", contents="A1\n")
os.mkdir("/root/emptyFolder")
runner = CliRunner()
result = runner.invoke(ascmhl.commands.create, ["/root", "-v", "-n"])
assert result.exit_code == 0
# a directory entry without hash was created for the folder A
hash_list = MHLHistory.load_from_path("/root").hash_lists[0]
assert hash_list.find_media_hash_for_path("A").is_directory
assert len(hash_list.find_media_hash_for_path("A").hash_entries) == 0
# and no directory hash of the root folder is set in the header
assert len(hash_list.process_info.root_media_hash.hash_entries) == 0
# the empty folder is still referenced even if not creating directory hashes
assert hash_list.find_media_hash_for_path("emptyFolder").is_directory
# removing an empty folder will cause creating a new generation to fail
os.removedirs("/root/emptyFolder")
runner = CliRunner()
result = runner.invoke(ascmhl.commands.create, ["/root", "-v", "-n"])
assert result.exit_code == 15
assert "1 missing file(s):\n emptyFolder" in result.output
def test_create_fail_altered_file(fs, simple_mhl_history):
# alter a file
with open("/root/Stuff.txt", "a") as file:
file.write("!!")
result = CliRunner().invoke(ascmhl.commands.create, ["/root"])
assert result.exit_code == 12
assert "Stuff.txt" in result.output
# since the file is still altered, every subsequent seal will fail as well because we compare against the original hash
result = CliRunner().invoke(ascmhl.commands.create, ["/root"])
assert result.exit_code == 12
assert "Stuff.txt" in result.output
# when we now choose a new hash format we still fail but will add the new hash in the new format
result = CliRunner().invoke(ascmhl.commands.create, ["/root", "-h", "md5"])
assert result.exit_code == 12
assert "Stuff.txt" in result.output
root_history = MHLHistory.load_from_path("/root")
stuff_txt_latest_media_hash = root_history.hash_lists[-1].find_media_hash_for_path("Stuff.txt")
# the media hash for the Stuff.txt in the latest generation contains the failed xxh64 hash of the altered file
assert stuff_txt_latest_media_hash.hash_entries[0].hash_format == "xxh64"
assert stuff_txt_latest_media_hash.hash_entries[0].hash_string == "2346e97eb08788cc"
assert stuff_txt_latest_media_hash.hash_entries[0].action == "failed"
# and it contains NO new md5 hash value of the altered file
assert len(stuff_txt_latest_media_hash.hash_entries) == 1
# since we didn't add a new md5 hash for the failing file, creating a new generation will still fail for the altered file
result = CliRunner().invoke(ascmhl.commands.create, ["/root", "-h", "md5"])
assert result.exit_code == 12
assert "Stuff.txt" in result.output
def test_create_fail_missing_file(fs, nested_mhl_histories):
"""
test that creating a new generation fails if there is a file missing on the file system that is referenced by one of the histories
"""
root_history = MHLHistory.load_from_path("/root")
paths = root_history.set_of_file_paths()
assert paths == {"/root/B/B1.txt", "/root/B/BB/BB1.txt", "/root/Stuff.txt", "/root/A/AA/AA1.txt"}
os.remove("/root/A/AA/AA1.txt")
runner = CliRunner()
result = runner.invoke(ascmhl.commands.create, ["/root"])
assert result.exit_code == 15
assert "1 missing file(s):\n A/AA/AA1.txt" in result.output
# the actual seal has been written to disk anyway, so we expect the history to contain
# the new not yet referenced files (/root/B/BA/BA1.txt and /root/A/AB/AB1.txt) as well now
root_history = MHLHistory.load_from_path("/root")
paths = root_history.set_of_file_paths()
# since we scan all generations for file paths we now get old files, missing files and new files here
# as well as all entries for the directories
assert paths == {
"/root/B/B1.txt",
"/root/B/BA/BA1.txt",
"/root/B",
"/root/A/AA",
"/root/A/AB/AB1.txt",
"/root/B/BA",
"/root/A/AA/AA1.txt",
"/root/A/AB",
"/root/Stuff.txt",
"/root/B/BB",
"/root/A",
"/root/B/BB/BB1.txt",
}
# since the file /root/A/AA/AA1.txt is still missing, all further seal attempts will still fail
runner = CliRunner()
result = runner.invoke(ascmhl.commands.create, ["/root"])
assert result.exit_code == 15
assert "1 missing file(s):\n A/AA/AA1.txt" in result.output
def test_create_nested_new_format(fs, nested_mhl_histories):
"""
test that ensures that hashes in a new format are also verified in child histories
used to verify fix of bug: https://github.com/ascmitc/mhl/issues/48
"""
runner = CliRunner()
result = runner.invoke(ascmhl.commands.create, ["/root", "-h", "md5"])
assert result.exit_code == 0
# load one of the nested histories and check the first media hash of the last generation
nested_history = MHLHistory.load_from_path("/root/A/AA")
media_hash = nested_history.hash_lists[-1].media_hashes[0]
# assure that the first hash entry is the verification of the original hash
assert media_hash.hash_entries[0].action == "verified"
assert media_hash.hash_entries[0].hash_format == "xxh64"
# assure that the second hash entry is the new md5 hash
assert media_hash.hash_entries[1].action == "verified" # formerly 'new'
assert media_hash.hash_entries[1].hash_format == "md5"
| 47.425926
| 134
| 0.713081
|
b4906b3427dc03099d4bd0d3667aa733e3e4e636
| 6,818
|
py
|
Python
|
reinforcement_learning/rl_traveling_salesman_vehicle_routing_coach/src/TSP_env.py
|
jpmarques19/tensorflwo-test
|
0ff8b06e0415075c7269820d080284a42595bb2e
|
[
"Apache-2.0"
] | 5
|
2019-01-19T23:53:35.000Z
|
2022-01-29T14:04:31.000Z
|
reinforcement_learning/rl_traveling_salesman_vehicle_routing_coach/src/TSP_env.py
|
jpmarques19/tensorflwo-test
|
0ff8b06e0415075c7269820d080284a42595bb2e
|
[
"Apache-2.0"
] | 6
|
2020-01-28T23:08:49.000Z
|
2022-02-10T00:27:19.000Z
|
reinforcement_learning/rl_traveling_salesman_vehicle_routing_coach/src/TSP_env.py
|
jpmarques19/tensorflwo-test
|
0ff8b06e0415075c7269820d080284a42595bb2e
|
[
"Apache-2.0"
] | 8
|
2020-12-14T15:49:24.000Z
|
2022-03-23T18:38:36.000Z
|
import gym
import numpy as np
from gym import spaces
from TSP_view_2D import TSPView2D
class TSPEasyEnv(gym.Env):
def render(self, mode="human", close=False):
if self.tsp_view is None:
self.tsp_view = TSPView2D(self.n_orders, self.map_quad, grid_size=25)
return self.tsp_view.update(self.agt_at_restaurant, self.restaurant_x, self.restaurant_y, self.o_delivery,
self.o_x, self.o_y, self.agt_x, self.agt_y, mode)
def __init__(self, n_orders=4, map_quad=(2, 2), max_time=50,
randomized_orders=False):
self.tsp_view = None
self.map_quad = map_quad
self.o_y = []
self.o_x = []
self.randomized_orders = randomized_orders
self.n_orders = n_orders
self.restaurant_x = 0
self.restaurant_y = 0
self.agt_x = None
self.agt_y = None
self.o_delivery = []
self.o_time = []
self.agt_at_restaurant = None
self.agt_time = None
self.max_time = max_time
self.map_min_x = - map_quad[0]
self.map_max_x = + map_quad[0]
self.map_min_y = - map_quad[1]
self.map_max_y = + map_quad[1]
# agent x,
agt_x_min = [self.map_min_x]
agt_x_max = [self.map_max_x]
# agent y,
agt_y_min = [self.map_min_y]
agt_y_max = [self.map_max_y]
# n_orders for x positions of orders,
o_x_min = [self.map_min_x for i in range(n_orders)]
o_x_max = [self.map_max_x for i in range(n_orders)]
# n_orders for y positions of orders,
o_y_min = [self.map_min_y for i in range(n_orders)]
o_y_max = [self.map_max_y for i in range(n_orders)]
# whether delivered or not, 0 not delivered, 1 delivered
o_delivery_min = [0] * n_orders
o_delivery_max = [1] * n_orders
# whether agent is at restaurant or not
agt_at_restaurant_max = 1
agt_at_restaurant_min = 0
# Time since orders have been placed
o_time_min = [0] * n_orders
o_time_max = [max_time] * n_orders
# Time since start
agt_time_min = 0
agt_time_max = max_time
self.observation_space = spaces.Box(
low=np.array(
agt_x_min + agt_y_min + o_x_min + o_y_min + [0] + [0] + o_delivery_min + [
agt_at_restaurant_min] + o_time_min + [
agt_time_min] + [0]),
high=np.array(
agt_x_max + agt_y_max + o_x_max + o_y_max + [0] + [0] + o_delivery_max + [
agt_at_restaurant_max] + o_time_max + [
agt_time_max] + [self.max_time]),
dtype=np.int16
)
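# The resulting observation is a flat vector of length 4 * n_orders + 7, laid out as
# [agt_x, agt_y, o_x..., o_y..., restaurant_x, restaurant_y, o_delivery...,
#  agt_at_restaurant, o_time..., agt_time, time_remaining]
# (see __compute_state below for the matching construction).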
# Action space, UP, DOWN, LEFT, RIGHT
self.action_space = spaces.Discrete(4)
def reset(self):
self.restaurant_x = 0
self.restaurant_y = 0
self.agt_x = self.restaurant_x
self.agt_y = self.restaurant_y
if self.randomized_orders:
# Enforce uniqueness of orders, to prevent multiple orders being placed on the same points
# And ensure actual orders in the episode are always == n_orders as expected
orders = []
while len(orders) != self.n_orders:
orders += [self.__receive_order()]
orders = list(set(orders))
else:
orders = [(-2, -2), (1,1), (2,0), (0, -2)]
self.o_x = [x for x, y in orders]
self.o_y = [y for x, y in orders]
self.o_delivery = [0] * self.n_orders
self.o_time = [0] * self.n_orders
self.agt_at_restaurant = 1
self.agt_time = 0
return self.__compute_state()
def step(self, action):
done = False
reward_before_action = self.__compute_reward()
self.__play_action(action)
reward = self.__compute_reward() - reward_before_action
# If agent completed the route and returned back to start, give additional reward
if (np.sum(self.o_delivery) == self.n_orders) and self.agt_at_restaurant:
done = True
reward += self.max_time * 0.1
# If there is timeout, no additional reward
if self.agt_time >= self.max_time:
done = True
info = {}
return self.__compute_state(), reward, done, info
def __play_action(self, action):
if action == 0: # UP
self.agt_y = min(self.map_max_y, self.agt_y + 1)
elif action == 1: # DOWN
self.agt_y = max(self.map_min_y, self.agt_y - 1)
elif action == 2: # LEFT
self.agt_x = max(self.map_min_x, self.agt_x - 1)
elif action == 3: # RIGHT
self.agt_x = min(self.map_max_x, self.agt_x + 1)
else:
raise Exception("action: {action} is invalid")
# Check for deliveries
for ix in range(self.n_orders):
if self.o_delivery[ix] == 0:
if (self.o_x[ix] == self.agt_x) and (self.o_y[ix] == self.agt_y):
self.o_delivery[ix] = 1
# Update the time for the waiting orders
for ix in range(self.n_orders):
if self.o_delivery[ix] == 0:
self.o_time[ix] += 1
# Update time since agent left restaurant
self.agt_time += 1
# Check if agent is at restaurant
self.agt_at_restaurant = int((self.agt_x == self.restaurant_x) and (self.agt_y == self.restaurant_y))
def __compute_state(self):
return [self.agt_x] + [self.agt_y] + self.o_x + self.o_y + [self.restaurant_x] + [
self.restaurant_y] + self.o_delivery + [
self.agt_at_restaurant] + self.o_time + [
self.agt_time] + [(self.max_time - self.agt_time)]
def __receive_order(self):
# Generate a single order, not at the center (where the restaurant is)
self.order_x = \
np.random.choice([i for i in range(self.map_min_x, self.map_max_x + 1) if i != self.restaurant_x], 1)[0]
self.order_y = \
np.random.choice([i for i in range(self.map_min_y, self.map_max_y + 1) if i != self.restaurant_y], 1)[0]
return self.order_x, self.order_y
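# Reward shaping used below: each delivered order contributes
# max_time / (its waiting time + a small epsilon), so quicker deliveries are worth
# more, while the elapsed agent time is subtracted as a per-step penalty.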
def __compute_reward(self):
return np.sum(np.asarray(self.o_delivery) * self.max_time / (np.asarray(self.o_time) + 0.0001)) \
- self.agt_time
class TSPMediumEnv(TSPEasyEnv):
def __init__(self, n_orders=4, map_quad=(2, 2), max_time=50, randomized_orders=True):
super().__init__(n_orders, map_quad, max_time, randomized_orders)
class TSPHardEnv(TSPEasyEnv):
def __init__(self, n_orders=10, map_quad=(10, 10), max_time=5000, randomized_orders=True):
super().__init__(n_orders, map_quad, max_time, randomized_orders)
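# Minimal usage sketch (illustrative only, not part of the original module; assumes
# gym, numpy and TSP_view_2D are importable as above):
if __name__ == "__main__":
    env = TSPEasyEnv()
    obs = env.reset()
    done = False
    total_reward = 0.0
    while not done:
        obs, reward, done, info = env.step(env.action_space.sample())
        total_reward += reward
    print("episode finished, total reward:", total_reward)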
| 35.510417
| 116
| 0.588589
|
25a6bff3eb2f21d4cc8d8685672e3e4d3ec6bda2
| 451
|
py
|
Python
|
nautilus/graphql/query.py
|
LeptoSpira/nautilus-chambers
|
5aafd9eb599ed35d3e90c3ef7b84a25d28e60922
|
[
"MIT"
] | 1
|
2020-05-12T03:01:58.000Z
|
2020-05-12T03:01:58.000Z
|
nautilus/graphql/query.py
|
LeptoFlare/nautilus-chambers
|
5aafd9eb599ed35d3e90c3ef7b84a25d28e60922
|
[
"MIT"
] | 13
|
2020-05-05T01:06:01.000Z
|
2020-07-19T07:17:31.000Z
|
nautilus/graphql/query.py
|
LeptoFlare/nautilus-chambers
|
5aafd9eb599ed35d3e90c3ef7b84a25d28e60922
|
[
"MIT"
] | 1
|
2019-08-16T02:35:17.000Z
|
2019-08-16T02:35:17.000Z
|
"""Contains query resolvers."""
from ariadne import QueryType
from nautilus import utils
query_type = QueryType()
@query_type.field("readProfile")
def resolve_read_profile(*_, discord):
"""query readProfile"""
utils.logger.debug(f"readProfile | discord={discord}")
if not (error := utils.errors.check_for([utils.errors.missing], discord)):
return utils.dbh.find_profile(discord)
return {"status": False, "errors": [error]}
| 28.1875
| 78
| 0.711752
|
70a5578ba6fed6590e22243adcf2db40376f8e99
| 110
|
py
|
Python
|
1065-pares-entre-cinco-numeros.py
|
ErickSimoes/URI-Online-Judge
|
7e6f141db2647b1d0d69951b064bd95b0ce4ba1a
|
[
"MIT"
] | null | null | null |
1065-pares-entre-cinco-numeros.py
|
ErickSimoes/URI-Online-Judge
|
7e6f141db2647b1d0d69951b064bd95b0ce4ba1a
|
[
"MIT"
] | null | null | null |
1065-pares-entre-cinco-numeros.py
|
ErickSimoes/URI-Online-Judge
|
7e6f141db2647b1d0d69951b064bd95b0ce4ba1a
|
[
"MIT"
] | 1
|
2019-10-29T16:51:29.000Z
|
2019-10-29T16:51:29.000Z
|
par = 0
for _ in range(5):
num = int(input())
if num % 2 == 0:
par += 1
print(par, 'valores pares')
| 12.222222
| 27
| 0.527273
|
755448555b6e9730432590407eb688e632243c9c
| 1,204
|
py
|
Python
|
perfkitbenchmarker/linux_packages/iperf3.py
|
Nowasky/PerfKitBenchmarker
|
cfa88e269eb373780910896ed4bdc8db09469753
|
[
"Apache-2.0"
] | 3
|
2018-04-28T13:06:14.000Z
|
2020-06-09T02:39:44.000Z
|
perfkitbenchmarker/linux_packages/iperf3.py
|
Nowasky/PerfKitBenchmarker
|
cfa88e269eb373780910896ed4bdc8db09469753
|
[
"Apache-2.0"
] | 1
|
2021-09-09T07:43:25.000Z
|
2021-09-09T10:47:56.000Z
|
perfkitbenchmarker/linux_packages/iperf3.py
|
Nowasky/PerfKitBenchmarker
|
cfa88e269eb373780910896ed4bdc8db09469753
|
[
"Apache-2.0"
] | 6
|
2019-06-11T18:59:57.000Z
|
2021-03-02T19:14:42.000Z
|
# Copyright 2021 PerfKitBenchmarker Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Module containing iperf3 installation functions."""
def _Install(vm):
"""Install Cloud Harmory iperf benchmark on VM."""
# Follows instructions from https://software.es.net/iperf/obtaining.html.
vm.Install('wget')
vm.Install('build_tools')
vm.InstallPackages('lib32z1')
vm.RemoteCommand(
'wget https://downloads.es.net/pub/iperf/iperf-3.9.tar.gz --no-check-certificate'
)
vm.RemoteCommand('tar -xf iperf-3.9.tar.gz')
vm.RemoteCommand('cd iperf-3.9 && ./configure')
vm.RemoteCommand('cd iperf-3.9 && make')
vm.RemoteCommand('cd iperf-3.9 && sudo make install')
| 38.83871
| 87
| 0.737542
|
7bfed55a579da789231c48e03ad08b1ac860b67c
| 25,763
|
py
|
Python
|
keras/layers/wrappers.py
|
chasebrignac/keras
|
2ad932ba4ea501af7c3163573fce994ef878d8ef
|
[
"MIT"
] | 1
|
2020-03-03T08:56:34.000Z
|
2020-03-03T08:56:34.000Z
|
keras/layers/wrappers.py
|
chasebrignac/keras
|
2ad932ba4ea501af7c3163573fce994ef878d8ef
|
[
"MIT"
] | 10
|
2018-09-27T23:03:18.000Z
|
2018-12-05T23:32:33.000Z
|
keras/layers/wrappers.py
|
chasebrignac/keras
|
2ad932ba4ea501af7c3163573fce994ef878d8ef
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""Layers that augment the functionality of a base layer.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import copy
from ..engine.base_layer import Layer
from ..engine.base_layer import InputSpec
from ..utils.generic_utils import has_arg
from ..utils.generic_utils import object_list_uid
from .. import backend as K
from . import recurrent
class Wrapper(Layer):
"""Abstract wrapper base class.
Wrappers take another layer and augment it in various ways.
Do not use this class as a layer, it is only an abstract base class.
Two usable wrappers are the `TimeDistributed` and `Bidirectional` wrappers.
# Arguments
layer: The layer to be wrapped.
"""
def __init__(self, layer, **kwargs):
self.layer = layer
# Tracks mapping of Wrapper inputs to inner layer inputs. Useful when
# the inner layer has update ops that depend on its inputs (as opposed
# to the inputs to the Wrapper layer).
self._input_map = {}
super(Wrapper, self).__init__(**kwargs)
def build(self, input_shape=None):
self.built = True
@property
def activity_regularizer(self):
if hasattr(self.layer, 'activity_regularizer'):
return self.layer.activity_regularizer
else:
return None
@property
def trainable(self):
return self.layer.trainable
@trainable.setter
def trainable(self, value):
self.layer.trainable = value
@property
def trainable_weights(self):
return self.layer.trainable_weights
@property
def non_trainable_weights(self):
return self.layer.non_trainable_weights
@property
def updates(self):
if hasattr(self.layer, 'updates'):
return self.layer.updates
return []
def get_updates_for(self, inputs=None):
# If the wrapper modifies the inputs, use the modified inputs to
# get the updates from the inner layer.
inner_inputs = inputs
if inputs is not None:
uid = object_list_uid(inputs)
if uid in self._input_map:
inner_inputs = self._input_map[uid]
updates = self.layer.get_updates_for(inner_inputs)
updates += super(Wrapper, self).get_updates_for(inputs)
return updates
@property
def losses(self):
if hasattr(self.layer, 'losses'):
return self.layer.losses
return []
def get_losses_for(self, inputs=None):
if inputs is None:
losses = self.layer.get_losses_for(None)
return losses + super(Wrapper, self).get_losses_for(None)
return super(Wrapper, self).get_losses_for(inputs)
def get_weights(self):
return self.layer.get_weights()
def set_weights(self, weights):
self.layer.set_weights(weights)
def get_config(self):
config = {'layer': {'class_name': self.layer.__class__.__name__,
'config': self.layer.get_config()}}
base_config = super(Wrapper, self).get_config()
return dict(list(base_config.items()) + list(config.items()))
@classmethod
def from_config(cls, config, custom_objects=None):
from . import deserialize as deserialize_layer
layer = deserialize_layer(config.pop('layer'),
custom_objects=custom_objects)
return cls(layer, **config)
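# Serialization round-trip sketch (illustrative): the wrapped layer's class name and
# config are stored under the 'layer' key, so a wrapper can be rebuilt from its config:
#   td = TimeDistributed(Dense(8))
#   td_clone = TimeDistributed.from_config(td.get_config())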
class TimeDistributed(Wrapper):
"""This wrapper applies a layer to every temporal slice of an input.
The input should be at least 3D, and the dimension of index one
will be considered to be the temporal dimension.
Consider a batch of 32 samples,
where each sample is a sequence of 10 vectors of 16 dimensions.
The batch input shape of the layer is then `(32, 10, 16)`,
and the `input_shape`, not including the samples dimension, is `(10, 16)`.
You can then use `TimeDistributed` to apply a `Dense` layer
to each of the 10 timesteps, independently:
```python
# as the first layer in a model
model = Sequential()
model.add(TimeDistributed(Dense(8), input_shape=(10, 16)))
# now model.output_shape == (None, 10, 8)
```
The output will then have shape `(32, 10, 8)`.
In subsequent layers, there is no need for the `input_shape`:
```python
model.add(TimeDistributed(Dense(32)))
# now model.output_shape == (None, 10, 32)
```
The output will then have shape `(32, 10, 32)`.
`TimeDistributed` can be used with arbitrary layers, not just `Dense`,
for instance with a `Conv2D` layer:
```python
model = Sequential()
model.add(TimeDistributed(Conv2D(64, (3, 3)),
input_shape=(10, 299, 299, 3)))
```
# Arguments
layer: a layer instance.
"""
def __init__(self, layer, **kwargs):
super(TimeDistributed, self).__init__(layer, **kwargs)
self.supports_masking = True
def _get_shape_tuple(self, init_tuple, tensor, start_idx, int_shape=None):
"""Finds non-specific dimensions in the static shapes
and replaces them by the corresponding dynamic shapes of the tensor.
# Arguments
init_tuple: a tuple, the first part of the output shape
tensor: the tensor from which to get the (static and dynamic) shapes
as the last part of the output shape
start_idx: int, which indicates the first dimension to take from
the static shape of the tensor
int_shape: an alternative static shape to take as the last part
of the output shape
# Returns
The new int_shape with the first part from init_tuple
and the last part from either `int_shape` (if provided)
or K.int_shape(tensor), where every `None` is replaced by
the corresponding dimension from K.shape(tensor)
"""
# replace all None in int_shape by K.shape
if int_shape is None:
int_shape = K.int_shape(tensor)[start_idx:]
if not any(not s for s in int_shape):
return init_tuple + int_shape
tensor_shape = K.shape(tensor)
int_shape = list(int_shape)
for i, s in enumerate(int_shape):
if not s:
int_shape[i] = tensor_shape[start_idx + i]
return init_tuple + tuple(int_shape)
def build(self, input_shape):
assert len(input_shape) >= 3
self.input_spec = InputSpec(shape=input_shape)
child_input_shape = (input_shape[0],) + input_shape[2:]
if not self.layer.built:
self.layer.build(child_input_shape)
self.layer.built = True
super(TimeDistributed, self).build()
def compute_output_shape(self, input_shape):
child_input_shape = (input_shape[0],) + input_shape[2:]
child_output_shape = self.layer.compute_output_shape(child_input_shape)
timesteps = input_shape[1]
return (child_output_shape[0], timesteps) + child_output_shape[1:]
def call(self, inputs, training=None, mask=None):
kwargs = {}
if has_arg(self.layer.call, 'training'):
kwargs['training'] = training
uses_learning_phase = False
input_shape = K.int_shape(inputs)
if input_shape[0]:
# batch size matters, use rnn-based implementation
def step(x, _):
global uses_learning_phase
output = self.layer.call(x, **kwargs)
if hasattr(output, '_uses_learning_phase'):
uses_learning_phase = (output._uses_learning_phase or
uses_learning_phase)
return output, []
_, outputs, _ = K.rnn(step, inputs,
initial_states=[],
input_length=input_shape[1],
unroll=False)
y = outputs
else:
# No batch size specified, therefore the layer will be able
# to process batches of any size.
# We can go with reshape-based implementation for performance.
input_length = input_shape[1]
if not input_length:
input_length = K.shape(inputs)[1]
inner_input_shape = self._get_shape_tuple((-1,), inputs, 2)
# Shape: (num_samples * timesteps, ...). And track the
# transformation in self._input_map.
input_uid = object_list_uid(inputs)
inputs = K.reshape(inputs, inner_input_shape)
self._input_map[input_uid] = inputs
# (num_samples * timesteps, ...)
if has_arg(self.layer.call, 'mask') and mask is not None:
inner_mask_shape = self._get_shape_tuple((-1,), mask, 2)
kwargs['mask'] = K.reshape(mask, inner_mask_shape)
y = self.layer.call(inputs, **kwargs)
if hasattr(y, '_uses_learning_phase'):
uses_learning_phase = y._uses_learning_phase
# Shape: (num_samples, timesteps, ...)
output_shape = self.compute_output_shape(input_shape)
output_shape = self._get_shape_tuple(
(-1, input_length), y, 1, output_shape[2:])
y = K.reshape(y, output_shape)
# Apply activity regularizer if any:
if (hasattr(self.layer, 'activity_regularizer') and
self.layer.activity_regularizer is not None):
regularization_loss = self.layer.activity_regularizer(y)
self.add_loss(regularization_loss, inputs)
if uses_learning_phase:
y._uses_learning_phase = True
return y
def compute_mask(self, inputs, mask=None):
"""Computes an output mask tensor for Embedding layer
based on the inputs, mask, and the inner layer.
If batch size is specified:
Simply return the input `mask`. (An rnn-based implementation with
more than one rnn inputs is required but not supported in Keras yet.)
Otherwise we call `compute_mask` of the inner layer at each time step.
If the output mask at each time step is not `None`:
(E.g., inner layer is Masking or RNN)
Concatenate all of them and return the concatenation.
If the output mask at each time step is `None` and
the input mask is not `None`:
(E.g., inner layer is Dense)
Reduce the input_mask to 2 dimensions and return it.
Otherwise (both the output mask and the input mask are `None`):
(E.g., `mask` is not used at all)
Return `None`.
# Arguments
inputs: Tensor
mask: Tensor
# Returns
None or a tensor
"""
# cases need to call the layer.compute_mask when input_mask is None:
# Masking layer and Embedding layer with mask_zero
input_shape = K.int_shape(inputs)
if input_shape[0]:
# batch size matters, we currently do not handle mask explicitly
return mask
inner_mask = mask
if inner_mask is not None:
inner_mask_shape = self._get_shape_tuple((-1,), mask, 2)
inner_mask = K.reshape(inner_mask, inner_mask_shape)
input_uid = object_list_uid(inputs)
inner_inputs = self._input_map[input_uid]
output_mask = self.layer.compute_mask(inner_inputs, inner_mask)
if output_mask is None:
if mask is None:
return None
# input_mask is not None, and output_mask is None:
# we should return a not-None mask
output_mask = mask
for _ in range(2, len(K.int_shape(mask))):
output_mask = K.any(output_mask, axis=-1)
else:
# output_mask is not None. We need to reshape it
input_length = input_shape[1]
if not input_length:
input_length = K.shape(inputs)[1]
output_mask_int_shape = K.int_shape(output_mask)
if output_mask_int_shape is None:
# if the output_mask does not have a static shape,
# its shape must be the same as mask's
if mask is not None:
output_mask_int_shape = K.int_shape(mask)
else:
output_mask_int_shape = K.compute_output_shape(input_shape)[:-1]
output_mask_shape = self._get_shape_tuple(
(-1, input_length), output_mask, 1, output_mask_int_shape[1:])
output_mask = K.reshape(output_mask, output_mask_shape)
return output_mask
class Bidirectional(Wrapper):
"""Bidirectional wrapper for RNNs.
# Arguments
layer: `Recurrent` instance.
merge_mode: Mode by which outputs of the
forward and backward RNNs will be combined.
One of {'sum', 'mul', 'concat', 'ave', None}.
If None, the outputs will not be combined,
they will be returned as a list.
# Raises
ValueError: In case of invalid `merge_mode` argument.
# Examples
```python
model = Sequential()
model.add(Bidirectional(LSTM(10, return_sequences=True),
input_shape=(5, 10)))
model.add(Bidirectional(LSTM(10)))
model.add(Dense(5))
model.add(Activation('softmax'))
model.compile(loss='categorical_crossentropy', optimizer='rmsprop')
```
"""
def __init__(self, layer, merge_mode='concat', weights=None, **kwargs):
if merge_mode not in ['sum', 'mul', 'ave', 'concat', None]:
raise ValueError('Invalid merge mode. '
'Merge mode should be one of '
'{"sum", "mul", "ave", "concat", None}')
self.forward_layer = copy.copy(layer)
config = layer.get_config()
config['go_backwards'] = not config['go_backwards']
self.backward_layer = layer.__class__.from_config(config)
self.forward_layer.name = 'forward_' + self.forward_layer.name
self.backward_layer.name = 'backward_' + self.backward_layer.name
self.merge_mode = merge_mode
if weights:
nw = len(weights)
self.forward_layer.initial_weights = weights[:nw // 2]
self.backward_layer.initial_weights = weights[nw // 2:]
self.stateful = layer.stateful
self.return_sequences = layer.return_sequences
self.return_state = layer.return_state
self.supports_masking = True
self._trainable = True
super(Bidirectional, self).__init__(layer, **kwargs)
self.input_spec = layer.input_spec
self._num_constants = None
@property
def trainable(self):
return self._trainable
@trainable.setter
def trainable(self, value):
self._trainable = value
self.forward_layer.trainable = value
self.backward_layer.trainable = value
def get_weights(self):
return self.forward_layer.get_weights() + self.backward_layer.get_weights()
def set_weights(self, weights):
nw = len(weights)
self.forward_layer.set_weights(weights[:nw // 2])
self.backward_layer.set_weights(weights[nw // 2:])
def compute_output_shape(self, input_shape):
output_shape = self.forward_layer.compute_output_shape(input_shape)
if self.return_state:
state_shape = output_shape[1:]
output_shape = output_shape[0]
if self.merge_mode == 'concat':
output_shape = list(output_shape)
output_shape[-1] *= 2
output_shape = tuple(output_shape)
elif self.merge_mode is None:
output_shape = [output_shape, copy.copy(output_shape)]
if self.return_state:
if self.merge_mode is None:
return output_shape + state_shape + copy.copy(state_shape)
return [output_shape] + state_shape + copy.copy(state_shape)
return output_shape
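# For example, when wrapping LSTM(10): merge_mode='concat' yields a feature dimension
# of 20, 'sum'/'mul'/'ave' keep it at 10, and merge_mode=None returns the forward and
# backward 10-dim outputs as a list (illustrative reading of the shape logic above).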
def __call__(self, inputs, initial_state=None, constants=None, **kwargs):
inputs, initial_state, constants = recurrent._standardize_args(
inputs, initial_state, constants, self._num_constants)
if initial_state is None and constants is None:
return super(Bidirectional, self).__call__(inputs, **kwargs)
# Applies the same workaround as in `RNN.__call__`
additional_inputs = []
additional_specs = []
if initial_state is not None:
# Check if `initial_state` can be split in half
num_states = len(initial_state)
if num_states % 2 > 0:
raise ValueError(
'When passing `initial_state` to a Bidirectional RNN, '
'the state should be a list containing the states of '
'the underlying RNNs. '
'Found: ' + str(initial_state))
kwargs['initial_state'] = initial_state
additional_inputs += initial_state
state_specs = [InputSpec(shape=K.int_shape(state))
for state in initial_state]
self.forward_layer.state_spec = state_specs[:num_states // 2]
self.backward_layer.state_spec = state_specs[num_states // 2:]
additional_specs += state_specs
if constants is not None:
kwargs['constants'] = constants
additional_inputs += constants
constants_spec = [InputSpec(shape=K.int_shape(constant))
for constant in constants]
self.forward_layer.constants_spec = constants_spec
self.backward_layer.constants_spec = constants_spec
additional_specs += constants_spec
self._num_constants = len(constants)
self.forward_layer._num_constants = self._num_constants
self.backward_layer._num_constants = self._num_constants
is_keras_tensor = K.is_keras_tensor(additional_inputs[0])
for tensor in additional_inputs:
if K.is_keras_tensor(tensor) != is_keras_tensor:
raise ValueError('The initial state of a Bidirectional'
' layer cannot be specified with a mix of'
' Keras tensors and non-Keras tensors'
' (a "Keras tensor" is a tensor that was'
' returned by a Keras layer, or by `Input`)')
if is_keras_tensor:
# Compute the full input spec, including state
full_input = [inputs] + additional_inputs
full_input_spec = self.input_spec + additional_specs
# Perform the call with temporarily replaced input_spec
original_input_spec = self.input_spec
self.input_spec = full_input_spec
output = super(Bidirectional, self).__call__(full_input, **kwargs)
self.input_spec = original_input_spec
return output
else:
return super(Bidirectional, self).__call__(inputs, **kwargs)
def call(self,
inputs,
mask=None,
training=None,
initial_state=None,
constants=None):
kwargs = {}
if has_arg(self.layer.call, 'training'):
kwargs['training'] = training
if has_arg(self.layer.call, 'mask'):
kwargs['mask'] = mask
if has_arg(self.layer.call, 'constants'):
kwargs['constants'] = constants
if initial_state is not None and has_arg(self.layer.call, 'initial_state'):
forward_inputs = [inputs[0]]
backward_inputs = [inputs[0]]
pivot = len(initial_state) // 2 + 1
# add forward initial state
forward_state = inputs[1:pivot]
forward_inputs += forward_state
if self._num_constants is None:
# add backward initial state
backward_state = inputs[pivot:]
backward_inputs += backward_state
else:
# add backward initial state
backward_state = inputs[pivot:-self._num_constants]
backward_inputs += backward_state
# add constants for forward and backward layers
forward_inputs += inputs[-self._num_constants:]
backward_inputs += inputs[-self._num_constants:]
y = self.forward_layer.call(forward_inputs,
initial_state=forward_state, **kwargs)
y_rev = self.backward_layer.call(backward_inputs,
initial_state=backward_state, **kwargs)
else:
y = self.forward_layer.call(inputs, **kwargs)
y_rev = self.backward_layer.call(inputs, **kwargs)
if self.return_state:
states = y[1:] + y_rev[1:]
y = y[0]
y_rev = y_rev[0]
if self.return_sequences:
y_rev = K.reverse(y_rev, 1)
if self.merge_mode == 'concat':
output = K.concatenate([y, y_rev])
elif self.merge_mode == 'sum':
output = y + y_rev
elif self.merge_mode == 'ave':
output = (y + y_rev) / 2
elif self.merge_mode == 'mul':
output = y * y_rev
elif self.merge_mode is None:
output = [y, y_rev]
else:
raise ValueError('Unrecognized value for argument merge_mode: %s' % (self.merge_mode))
# Properly set learning phase
if (getattr(y, '_uses_learning_phase', False) or
getattr(y_rev, '_uses_learning_phase', False)):
if self.merge_mode is None:
for out in output:
out._uses_learning_phase = True
else:
output._uses_learning_phase = True
if self.return_state:
if self.merge_mode is None:
return output + states
return [output] + states
return output
def reset_states(self):
self.forward_layer.reset_states()
self.backward_layer.reset_states()
def build(self, input_shape):
with K.name_scope(self.forward_layer.name):
self.forward_layer.build(input_shape)
with K.name_scope(self.backward_layer.name):
self.backward_layer.build(input_shape)
self.built = True
def compute_mask(self, inputs, mask):
if isinstance(mask, list):
mask = mask[0]
if self.return_sequences:
if not self.merge_mode:
output_mask = [mask, mask]
else:
output_mask = mask
else:
output_mask = [None, None] if not self.merge_mode else None
if self.return_state:
states = self.forward_layer.states
state_mask = [None for _ in states]
if isinstance(output_mask, list):
return output_mask + state_mask * 2
return [output_mask] + state_mask * 2
return output_mask
@property
def trainable_weights(self):
if hasattr(self.forward_layer, 'trainable_weights'):
return (self.forward_layer.trainable_weights +
self.backward_layer.trainable_weights)
return []
@property
def non_trainable_weights(self):
if hasattr(self.forward_layer, 'non_trainable_weights'):
return (self.forward_layer.non_trainable_weights +
self.backward_layer.non_trainable_weights)
return []
@property
def updates(self):
if hasattr(self.forward_layer, 'updates'):
return self.forward_layer.updates + self.backward_layer.updates
return []
def get_updates_for(self, inputs=None):
forward_updates = self.forward_layer.get_updates_for(inputs)
backward_updates = self.backward_layer.get_updates_for(inputs)
return (super(Wrapper, self).get_updates_for(inputs) +
forward_updates + backward_updates)
@property
def losses(self):
if hasattr(self.forward_layer, 'losses'):
return self.forward_layer.losses + self.backward_layer.losses
return []
def get_losses_for(self, inputs=None):
forward_losses = self.forward_layer.get_losses_for(inputs)
backward_losses = self.backward_layer.get_losses_for(inputs)
return (super(Wrapper, self).get_losses_for(inputs) +
forward_losses + backward_losses)
@property
def constraints(self):
constraints = {}
if hasattr(self.forward_layer, 'constraints'):
constraints.update(self.forward_layer.constraints)
constraints.update(self.backward_layer.constraints)
return constraints
def get_config(self):
config = {'merge_mode': self.merge_mode}
if self._num_constants is not None:
config['num_constants'] = self._num_constants
base_config = super(Bidirectional, self).get_config()
return dict(list(base_config.items()) + list(config.items()))
@classmethod
def from_config(cls, config, custom_objects=None):
from . import deserialize as deserialize_layer
rnn_layer = deserialize_layer(config.pop('layer'),
custom_objects=custom_objects)
num_constants = config.pop('num_constants', None)
layer = cls(rnn_layer, **config)
layer._num_constants = num_constants
return layer
| 39.392966
| 98
| 0.608237
|
7d2eb81d14d6f696ce047ce29601b12652e9b220
| 286
|
py
|
Python
|
test/lex_doc1.py
|
pyarnold/ply
|
98bb0e095d72c8aed9de01c15b65fa096c745ce3
|
[
"Unlicense"
] | 1
|
2020-12-18T01:07:42.000Z
|
2020-12-18T01:07:42.000Z
|
test/lex_doc1.py
|
pyarnold/ply
|
98bb0e095d72c8aed9de01c15b65fa096c745ce3
|
[
"Unlicense"
] | null | null | null |
test/lex_doc1.py
|
pyarnold/ply
|
98bb0e095d72c8aed9de01c15b65fa096c745ce3
|
[
"Unlicense"
] | null | null | null |
# lex_doc1.py
#
# Missing documentation string
import sys
if ".." not in sys.path:
sys.path.insert(0, "..")
import ply.lex as lex
tokens = [
"PLUS",
"MINUS",
"NUMBER",
]
t_PLUS = r'\+'
t_MINUS = r'-'
def t_NUMBER(t):
pass
def t_error(t):
pass
lex.lex()
| 9.862069
| 30
| 0.56993
|
844419c97ccd0ed050ed5004ca38582cf0766427
| 2,779
|
py
|
Python
|
samples/basic/executor/models/ietf/ietf-netconf-monitoring/nc-execute-ietf-netconf-monitoring-20-ydk.py
|
deom119/ydk-py-samples
|
1ad6cc2b798f358ff835df93d12924df308b85fc
|
[
"Apache-2.0"
] | 104
|
2016-03-15T17:04:01.000Z
|
2021-12-31T06:09:35.000Z
|
samples/basic/executor/models/ietf/ietf-netconf-monitoring/nc-execute-ietf-netconf-monitoring-20-ydk.py
|
https-maxus-github-com/ydk-py-samples
|
1ad6cc2b798f358ff835df93d12924df308b85fc
|
[
"Apache-2.0"
] | 15
|
2016-03-15T23:09:47.000Z
|
2020-08-13T12:13:18.000Z
|
samples/basic/executor/models/ietf/ietf-netconf-monitoring/nc-execute-ietf-netconf-monitoring-20-ydk.py
|
https-maxus-github-com/ydk-py-samples
|
1ad6cc2b798f358ff835df93d12924df308b85fc
|
[
"Apache-2.0"
] | 87
|
2016-04-15T16:59:23.000Z
|
2021-09-18T18:05:47.000Z
|
#!/usr/bin/env python
#
# Copyright 2016 Cisco Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
Execute RPC for model ietf-netconf-monitoring.
usage: nc-execute-ietf-netconf-monitoring-20-ydk.py [-h] [-v] device
positional arguments:
device NETCONF device (ssh://user:password@host:port)
optional arguments:
-h, --help show this help message and exit
-v, --verbose print debugging messages
"""
from argparse import ArgumentParser
from urlparse import urlparse
from ydk.services import ExecutorService
from ydk.providers import NetconfServiceProvider
from ydk.models.ietf import ietf_netconf_monitoring \
as ietf_netconf_monitoring
import logging
def prepare_get_schema(get_schema):
"""Add RPC input data to get_schema object."""
get_schema.input.identifier = "openconfig-bgp"
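# Per RFC 6022, the <get-schema> RPC returns the source text of the requested YANG
# module; besides `identifier`, its input also accepts optional `version` and
# `format` leaves (not set in this sample).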
if __name__ == "__main__":
"""Execute main program."""
parser = ArgumentParser()
parser.add_argument("-v", "--verbose", help="print debugging messages",
action="store_true")
parser.add_argument("device",
help="NETCONF device (ssh://user:password@host:port)")
args = parser.parse_args()
device = urlparse(args.device)
# log debug messages if verbose argument specified
if args.verbose:
logger = logging.getLogger("ydk")
logger.setLevel(logging.INFO)
handler = logging.StreamHandler()
formatter = logging.Formatter(("%(asctime)s - %(name)s - "
"%(levelname)s - %(message)s"))
handler.setFormatter(formatter)
logger.addHandler(handler)
# create NETCONF provider
provider = NetconfServiceProvider(address=device.hostname,
port=device.port,
username=device.username,
password=device.password,
protocol=device.scheme)
# create executor service
executor = ExecutorService()
get_schema = ietf_netconf_monitoring.GetSchema() # create object
prepare_get_schema(get_schema) # add RPC input
# execute RPC on NETCONF device
print(executor.execute_rpc(provider, get_schema))
exit()
# End of script
| 33.481928
| 78
| 0.666067
|
d061354971c3542ad5859e145a366e0886c3430a
| 21,752
|
py
|
Python
|
dabl/plot/utils.py
|
j1c/dabl
|
c70fef7ca276a9d2378d84183b1df2d56008b187
|
[
"BSD-3-Clause"
] | null | null | null |
dabl/plot/utils.py
|
j1c/dabl
|
c70fef7ca276a9d2378d84183b1df2d56008b187
|
[
"BSD-3-Clause"
] | null | null | null |
dabl/plot/utils.py
|
j1c/dabl
|
c70fef7ca276a9d2378d84183b1df2d56008b187
|
[
"BSD-3-Clause"
] | null | null | null |
from warnings import warn
from functools import reduce
import itertools
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from matplotlib.patches import Rectangle
from seaborn.utils import despine
# from sklearn.dummy import DummyClassifier
# from sklearn.metrics import recall_score
from sklearn.tree import DecisionTreeClassifier
from sklearn.metrics import roc_curve
from sklearn.model_selection import cross_val_score, StratifiedShuffleSplit
from ..preprocessing import detect_types
def find_pretty_grid(n_plots, max_cols=5):
"""Determine a good grid shape for subplots.
Tries to find a way to arrange n_plots many subplots on a grid in a way
that fills as many grid-cells as possible, while keeping the number
of rows low and the number of columns below max_cols.
Parameters
----------
n_plots : int
Number of plots to arrange.
max_cols : int, default=5
Maximum number of columns.
Returns
-------
n_rows : int
Number of rows in grid.
n_cols : int
Number of columns in grid.
Examples
--------
>>> find_pretty_grid(16, 5)
(4, 4)
>>> find_pretty_grid(11, 5)
(3, 4)
>>> find_pretty_grid(10, 5)
(2, 5)
"""
# we could probably do something with prime numbers here
# but looks like that becomes a combinatorial problem again?
if n_plots % max_cols == 0:
# perfect fit!
# if max_cols is 6 do we prefer 6x1 over 3x2?
return int(n_plots / max_cols), max_cols
# min number of rows needed
min_rows = int(np.ceil(n_plots / max_cols))
best_empty = max_cols
best_cols = max_cols
for cols in range(max_cols, min_rows - 1, -1):
# we only allow getting narrower if we have more cols than rows
remainder = (n_plots % cols)
empty = cols - remainder if remainder != 0 else 0
if empty == 0:
return int(n_plots / cols), cols
if empty < best_empty:
best_empty = empty
best_cols = cols
return int(np.ceil(n_plots / best_cols)), best_cols
def plot_coefficients(coefficients, feature_names, n_top_features=10,
classname=None, ax=None):
"""Visualize coefficients of a linear model.
Parameters
----------
coefficients : nd-array, shape (n_features,)
Model coefficients.
feature_names : list or nd-array of strings, shape (n_features,)
Feature names for labeling the coefficients.
n_top_features : int, default=10
How many features to show. The function will show the largest (most
positive) and smallest (most negative) n_top_features coefficients,
for a total of 2 * n_top_features coefficients.
"""
coefficients = coefficients.squeeze()
feature_names = np.asarray(feature_names)
if coefficients.ndim > 1:
# this is not a row or column vector
raise ValueError("coefficients must be 1d array or column vector, got"
" shape {}".format(coefficients.shape))
coefficients = coefficients.ravel()
if len(coefficients) != len(feature_names):
raise ValueError("Number of coefficients {} doesn't match number of"
"feature names {}.".format(len(coefficients),
len(feature_names)))
# get coefficients with large absolute values
coef = coefficients.ravel()
mask = coef != 0
coef = coef[mask]
feature_names = feature_names[mask]
# FIXME this could be easier with pandas by sorting by a column
interesting_coefficients = np.argsort(np.abs(coef))[-n_top_features:]
new_inds = np.argsort(coef[interesting_coefficients])
interesting_coefficients = interesting_coefficients[new_inds]
# plot them
if ax is None:
plt.figure(figsize=(len(interesting_coefficients), 5))
ax = plt.gca()
colors = ['red' if c < 0 else 'blue'
for c in coef[interesting_coefficients]]
ax.bar(np.arange(len(interesting_coefficients)),
coef[interesting_coefficients],
color=colors)
feature_names = np.array(feature_names)
ax.set_xticks(np.arange(0, len(interesting_coefficients)))
ax.set_xticklabels(feature_names[interesting_coefficients], rotation=60,
ha="right")
ax.set_ylabel("Coefficient magnitude")
ax.set_xlabel("Feature")
ax.set_title(classname)
return feature_names[interesting_coefficients]
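# Annotated heatmap helper: draws `values` with pcolor and writes each cell's numeric
# value on top, switching between black and white text depending on the cell's
# brightness so the labels stay readable.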
def heatmap(values, xlabel, ylabel, xticklabels, yticklabels, cmap=None,
vmin=None, vmax=None, ax=None, fmt="%0.2f", origin='lower'):
if ax is None:
ax = plt.gca()
img = ax.pcolor(values, cmap=cmap, vmin=vmin, vmax=vmax)
img.update_scalarmappable()
ax.set_xlabel(xlabel)
ax.set_ylabel(ylabel)
ax.set_xticks(np.arange(len(xticklabels)) + .5)
ax.set_yticks(np.arange(len(yticklabels)) + .5)
ax.set_xticklabels(xticklabels)
ax.set_yticklabels(yticklabels)
ax.set_aspect(1)
if origin == 'upper':
ylim = ax.get_ylim()
ax.set_ylim(ylim[::-1])
for p, color, value in zip(img.get_paths(), img.get_facecolors(),
img.get_array()):
x, y = p.vertices[:-2, :].mean(0)
if np.mean(color[:3]) > 0.5:
c = 'k'
else:
c = 'w'
ax.text(x, y, fmt % value, color=c, ha="center", va="center")
return img
def _shortname(some_string, maxlen=20):
"""Shorten a string given a maximum length.
Longer strings will be shortened and the rest replaced by ...
Parameters
----------
some_string : string
Input string to shorten
maxlen : int, default=20
Returns
-------
return_string : string
Output string of size ``min(len(some_string), maxlen)``.
"""
some_string = str(some_string)
if len(some_string) > maxlen:
return some_string[:maxlen - 3] + "..."
else:
return some_string
def mosaic_plot(data, rows, cols, vary_lightness=False, ax=None):
"""Create a mosaic plot from a dataframe.
Right now only horizontal mosaic plots are supported,
i.e. rows are prioritized over columns.
Parameters
----------
data : pandas data frame
Data to tabulate.
rows : column specifier
Column in data to tabulate across rows.
cols : column specifier
Column in data to use to subpartition rows.
vary_lightness : bool, default=False
Whether to vary lightness across categories.
ax : matplotlib axes or None
Axes to plot into.
"""
cont = pd.crosstab(data[cols], data[rows])
sort = np.argsort((cont / cont.sum()).iloc[0])
cont = cont.iloc[:, sort]
if ax is None:
ax = plt.gca()
pos_y = 0
positions_y = []
n_cols = cont.shape[1]
for i, col in enumerate(cont.columns):
height = cont[col].sum()
positions_y.append(pos_y + height / 2)
pos_x = 0
for j, row in enumerate(cont[col]):
width = row / height
color = plt.cm.tab10(j)
if vary_lightness:
color = _lighten_color(color, (i + 1) / (n_cols + 1))
rect = Rectangle((pos_x, pos_y), width, height, edgecolor='k',
facecolor=color)
pos_x += width
ax.add_patch(rect)
pos_y += height
ax.set_ylim(0, pos_y)
ax.set_yticks(positions_y)
ax.set_yticklabels(cont.columns)
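# Usage sketch (illustrative; the column names are assumptions about the adult dataset):
#   from dabl.datasets import load_adult
#   data = load_adult()
#   mosaic_plot(data, rows='gender', cols='education')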
def _lighten_color(color, amount=0.5):
"""
Lightens the given color by multiplying (1-luminosity) by the given amount.
Input can be matplotlib color string, hex string, or RGB tuple.
https://stackoverflow.com/questions/37765197/darken-or-lighten-a-color-in-matplotlib
Examples:
>>> _lighten_color('g', 0.3)
>>> _lighten_color('#F034A3', 0.6)
>>> _lighten_color((.3,.55,.1), 0.5)
"""
import matplotlib.colors as mc
import colorsys
c = color
amount += 0.5
c = colorsys.rgb_to_hls(*mc.to_rgb(c))
return colorsys.hls_to_rgb(c[0], 1 - amount * (1 - c[1]), c[2])
def _get_n_top(features, name):
if features.shape[1] > 20:
print("Showing only top 10 of {} {} features".format(
features.shape[1], name))
# too many features, show just top 10
show_top = 10
else:
show_top = features.shape[1]
return show_top
def _prune_categories(series, max_categories=10):
series = series.astype('category')
small_categories = series.value_counts()[max_categories:].index
res = series.cat.remove_categories(small_categories)
res = res.cat.add_categories(['dabl_other']).fillna("dabl_other")
return res
def _prune_category_make_X(X, col, target_col, max_categories=20):
col_values = X[col]
if col_values.nunique() > max_categories:
# keep only top 10 categories if there are more than 20
col_values = _prune_categories(col_values,
max_categories=min(10, max_categories))
X_new = X[[target_col]].copy()
X_new[col] = col_values
else:
X_new = X.copy()
X_new[col] = X_new[col].astype('category')
return X_new
def _fill_missing_categorical(X):
# fill in missing values in categorical variables with new category
# ensure we use strings for object columns and number for integers
X = X.copy()
max_value = X.max(numeric_only=True).max()
for col in X.columns:
if X[col].dtype == 'object':
X[col].fillna("dabl_missing", inplace=True)
else:
X[col].fillna(max_value + 1, inplace=True)
return X
def _make_subplots(n_plots, max_cols=5, row_height=3):
"""Create a harmonious subplot grid.
"""
n_rows, n_cols = find_pretty_grid(n_plots, max_cols=max_cols)
fig, axes = plt.subplots(n_rows, n_cols,
figsize=(4 * n_cols, row_height * n_rows),
constrained_layout=True)
# we don't want ravel to fail, this is awkward!
axes = np.atleast_2d(axes)
return fig, axes
def _check_X_target_col(X, target_col, types=None, type_hints=None, task=None):
if types is None:
types = detect_types(X, type_hints=type_hints)
if (not isinstance(target_col, str) and hasattr(target_col, '__len__') and
len(target_col) > 1):
raise ValueError("target_col should be a column in X, "
"got {}".format(target_col))
if target_col not in X.columns:
raise ValueError("{} is not a valid column of X".format(target_col))
if X[target_col].nunique() < 2:
raise ValueError("Less than two classes present, {}, need at least two"
" for classification.".format(X.loc[0, target_col]))
# FIXME we get target types here with detect_types,
# but in the estimator with type_of_target
if task == "classification" and not types.loc[target_col, 'categorical']:
raise ValueError("Type for target column {} detected as {},"
" need categorical for classification.".format(
target_col, types.T.idxmax()[target_col]))
if task == "regression" and (not types.loc[target_col, 'continuous']):
raise ValueError("Type for target column {} detected as {},"
" need continuous for regression.".format(
target_col, types.T.idxmax()[target_col]))
return types
def _short_tick_names(ax):
ax.set_yticklabels([_shortname(t.get_text(), maxlen=10)
for t in ax.get_yticklabels()])
ax.set_xlabel(_shortname(ax.get_xlabel(), maxlen=20))
ax.set_ylabel(_shortname(ax.get_ylabel(), maxlen=20))
def _find_scatter_plots_classification(X, target, how_many=3,
random_state=None):
# input is continuous
# look at all pairs of features, find most promising ones
# dummy = DummyClassifier(strategy='prior').fit(X, target)
# baseline_score = recall_score(target, dummy.predict(X), average='macro')
scores = []
# converting to int here might save some time
_, target = np.unique(target, return_inverse=True)
# limit to 2000 training points for speed?
train_size = min(2000, int(.9 * X.shape[0]))
cv = StratifiedShuffleSplit(n_splits=3, train_size=train_size,
random_state=random_state)
for i, j in itertools.combinations(np.arange(X.shape[1]), 2):
this_X = X[:, [i, j]]
# assume this tree is simple enough to not be able to overfit in 2d
# so we don't bother with train/test split
tree = DecisionTreeClassifier(max_leaf_nodes=8)
scores.append((i, j, np.mean(cross_val_score(
tree, this_X, target, cv=cv, scoring='recall_macro'))))
scores = pd.DataFrame(scores, columns=['feature0', 'feature1', 'score'])
top = scores.sort_values(by='score').iloc[-how_many:][::-1]
return top
def discrete_scatter(x, y, c, unique_c=None, legend='first',
clip_outliers=True,
alpha='auto', s='auto', ax=None, **kwargs):
"""Scatter plot for categories.
Creates a scatter plot for x and y grouped by c.
Parameters
----------
x : array-like
x coordinates to scatter
y : array-like
y coordinates to scatter
c : array-like
Grouping of samples (similar to hue in seaborn)
legend : bool, or "first", default="first"
Whether to create a legend. "first" mean only the
first one in a given gridspec.
scatter_alpha : float, default='auto'
Alpha values for scatter plots. 'auto' is dirty hacks.
scatter_size : float, default='auto'.
Marker size for scatter plots. 'auto' is dirty hacks.
ax : matplotlib axes, default=None
Axes to plot into
kwargs :
Passed through to plt.scatter
"""
alpha = _get_scatter_alpha(alpha, x)
s = _get_scatter_size(s, x)
if ax is None:
ax = plt.gca()
if legend == "first":
legend = (ax.get_geometry()[2] == 1)
if unique_c is None:
unique_c = np.unique(c)
for i in unique_c:
mask = c == i
ax.scatter(x[mask], y[mask], label=i, s=s, alpha=alpha, **kwargs)
if clip_outliers:
x_low, x_high = _inlier_range(x)
y_low, y_high = _inlier_range(y)
xlims = ax.get_xlim()
ylims = ax.get_ylim()
ax.set_xlim(max(x_low, xlims[0]), min(x_high, xlims[1]))
ax.set_ylim(max(y_low, ylims[0]), min(y_high, ylims[1]))
if legend:
props = {}
if len(unique_c) > 15:
props['size'] = 6
legend = ax.legend(prop=props)
for handle in legend.legendHandles:
handle.set_alpha(1)
handle.set_sizes((100,))
def class_hists(data, column, target, bins="auto", ax=None, legend=False,
scale_separately=True):
"""Grouped univariate histograms.
Parameters
----------
data : pandas DataFrame
Input data to plot
column : column specifier
Column in the data to compute histograms over (must be continuous).
target : column specifier
Target column in data, must be categorical.
bins : string, int or array-like
Number of bins, 'auto' or bin edges. Passed to np.histogram_bin_edges.
We always show at least 5 bins for now.
ax : matplotlib axes
Axes to plot into
legend : boolean, default=False
Whether to create a legend.
scale_separately : boolean, default=True
Whether to scale each class separately.
Examples
--------
>>> from dabl.datasets import load_adult
>>> data = load_adult()
>>> class_hists(data, "age", "gender", legend=True)
"""
col_data = data[column].dropna()
if ax is None:
ax = plt.gca()
if col_data.nunique() > 10:
ordinal = False
# histograms
bin_edges = np.histogram_bin_edges(col_data, bins=bins)
if len(bin_edges) > 30:
bin_edges = np.histogram_bin_edges(col_data, bins=30)
counts = {}
for name, group in data.groupby(target)[column]:
this_counts, _ = np.histogram(group, bins=bin_edges)
counts[name] = this_counts
counts = pd.DataFrame(counts)
else:
ordinal = True
# ordinal data, count distinct values
counts = data.groupby(target)[column].value_counts().unstack(target)
if scale_separately:
# normalize by maximum
counts = counts / counts.max()
bottom = counts.max().max() * 1.1
for i, name in enumerate(counts.columns):
if ordinal:
ax.bar(range(counts.shape[0]), counts[name], width=.9,
bottom=bottom * i, tick_label=counts.index, linewidth=2,
edgecolor='k')
xmin, xmax = 0 - .5, counts.shape[0] - .5
else:
ax.bar(bin_edges[:-1], counts[name], bottom=bottom * i, label=name,
align='edge', width=(bin_edges[1] - bin_edges[0]) * .9)
xmin, xmax = bin_edges[0], bin_edges[-1]
ax.hlines(bottom * i, xmin=xmin, xmax=xmax,
linewidth=1)
if legend:
ax.legend()
ax.set_yticks(())
ax.set_xlabel(column)
return ax
def pairplot(data, target_col, columns=None, scatter_alpha='auto',
scatter_size='auto'):
"""Pairplot (scattermatrix)
Because there's already too many implementations of this.
This is meant for classification only.
This is very bare-bones right now :-/
Parameters
----------
data : pandas dataframe
Input data
target_col : column specifier
Target column in data.
columns : column specifiers, default=None.
Columns in data to include. None means all.
scatter_alpha : float, default='auto'
Alpha values for scatter plots. 'auto' is dirty hacks.
scatter_size : float, default='auto'.
Marker size for scatter plots. 'auto' is dirty hacks.
"""
if columns is None:
columns = data.columns.drop(target_col)
n_features = len(columns)
fig, axes = plt.subplots(n_features, n_features,
figsize=(n_features * 3, n_features * 3))
axes = np.atleast_2d(axes)
for ax, (i, j) in zip(axes.ravel(),
itertools.product(range(n_features), repeat=2)):
legend = i == 0 and j == n_features - 1
if i == j:
class_hists(data, columns[i], target_col, ax=ax.twinx())
else:
discrete_scatter(data[columns[j]], data[columns[i]],
c=data[target_col], legend=legend, ax=ax,
alpha=scatter_alpha,
s=scatter_size)
if j == 0:
ax.set_ylabel(columns[i])
else:
ax.set_ylabel("")
ax.set_yticklabels(())
if i == n_features - 1:
ax.set_xlabel(columns[j])
else:
ax.set_xlabel("")
ax.set_xticklabels(())
despine(fig)
if n_features > 1:
axes[0, 0].set_yticks(axes[0, 1].get_yticks())
axes[0, 0].set_ylim(axes[0, 1].get_ylim())
return axes
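# Usage sketch (illustrative, mirroring the class_hists doctest above; the extra
# column names are assumptions about the adult dataset):
#
#     from dabl.datasets import load_adult
#     data = load_adult()
#     axes = pairplot(data, target_col="gender", columns=["age", "hours-per-week"])
#     plt.show()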
def _inlier_range(series):
low = np.nanquantile(series, 0.01)
high = np.nanquantile(series, 0.99)
assert low <= high
# the two is a complete hack
inner_range = (high - low) / 2
return low - inner_range, high + inner_range
def _find_inliers(series):
low, high = _inlier_range(series)
mask = series.between(low, high)
mask = mask | series.isna()
dropped = len(mask) - mask.sum()
if dropped > 0:
warn("Dropped {} outliers in column {}.".format(
int(dropped), series.name), UserWarning)
return mask
def _clean_outliers(data):
def _find_outliers_series(series):
series = series.dropna()
low = series.quantile(0.01)
high = series.quantile(0.99)
# the two is a complete hack
inner_range = (high - low) / 2
return series.between(low - inner_range, high + inner_range)
mask = data.apply(_find_outliers_series)
mask = mask.apply(lambda x: reduce(np.logical_and, x), axis=1).fillna(True)
dropped = len(mask) - mask.sum()
if dropped > 0:
warn("Dropped {} outliers.".format(int(dropped)), UserWarning)
return mask
return None
def _get_scatter_alpha(scatter_alpha, x):
if scatter_alpha != "auto":
return scatter_alpha
if x.shape[0] < 100:
return .9
elif x.shape[0] < 1000:
return .5
elif x.shape[0] < 10000:
return .2
else:
return .1
def _get_scatter_size(scatter_size, x):
if scatter_size != "auto":
return scatter_size
if x.shape[0] < 100:
return 30
elif x.shape[0] < 1000:
return 30
elif x.shape[0] < 2000:
return 10
elif x.shape[0] < 10000:
return 2
else:
return 1
def plot_multiclass_roc_curve(estimator, X_val, y_val):
if len(estimator.classes_) < 3:
raise ValueError("Only for multi-class")
try:
y_score = estimator.predict_proba(X_val)
except AttributeError:
y_score = estimator.decision_function(X_val)
fig, axes = _make_subplots(len(estimator.classes_))
for i, (ax, c) in enumerate(zip(axes.ravel(), estimator.classes_)):
fpr, tpr, _ = roc_curve(y_val == c, y_score[:, i])
ax.plot(fpr, tpr)
ax.set_xlabel("False Positive Rate")
ax.set_ylabel("True Positive Rate (recall)")
ax.set_title("ROC curve for class {}".format(c))
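# Usage sketch (illustrative; assumes a fitted multi-class scikit-learn estimator and
# held-out validation data X_train, y_train, X_val, y_val defined elsewhere):
#
#     from sklearn.linear_model import LogisticRegression
#     clf = LogisticRegression(max_iter=1000).fit(X_train, y_train)
#     plot_multiclass_roc_curve(clf, X_val, y_val)
#     plt.show()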
| 34.363349
| 88
| 0.614426
|
c903650b451dbd241027420860a19244192fd8e9
| 1,233
|
py
|
Python
|
com.nast.email/src/main/resources/com/ur/urcap/mail/impl/daemon/MailDaemon.py
|
nagmik/URCap-EMail
|
fcf3fb269c06b4a8b66989bbf7a5b0c7b203d519
|
[
"Apache-2.0"
] | 3
|
2020-01-05T14:59:49.000Z
|
2020-12-14T21:06:10.000Z
|
com.nast.email/src/main/resources/com/ur/urcap/mail/impl/daemon/MailDaemon.py
|
nagmik/URCap-EMail
|
fcf3fb269c06b4a8b66989bbf7a5b0c7b203d519
|
[
"Apache-2.0"
] | null | null | null |
com.nast.email/src/main/resources/com/ur/urcap/mail/impl/daemon/MailDaemon.py
|
nagmik/URCap-EMail
|
fcf3fb269c06b4a8b66989bbf7a5b0c7b203d519
|
[
"Apache-2.0"
] | 1
|
2020-01-20T21:20:18.000Z
|
2020-01-20T21:20:18.000Z
|
#!/usr/bin/env python
import xmlrpclib
from SimpleXMLRPCServer import SimpleXMLRPCServer
import string
import smtplib
from email.MIMEMultipart import MIMEMultipart
from email.MIMEText import MIMEText
# *****************************
# send email
# *****************************
def sendMail( smtpHost, smtpPort, username, password, mailRecipient, mailFrom, subject, message ):
msg = MIMEMultipart()
msg['From'] = mailFrom
msg['To'] = mailRecipient
msg['Subject'] = subject
body = message
msg.attach(MIMEText(body, 'plain'))
## Connect to host
try:
server = smtplib.SMTP(smtpHost, smtpPort)
except smtplib.socket.gaierror:
return 400
## Login
try:
server.login(username, password)
except smtplib.SMTPAuthenticationError:
server.quit()
return 401
## Send message
try:
text = msg.as_string()
server.sendmail(mailFrom, mailRecipient, text)
except smtplib.SMTPException:
return 402
finally:
server.quit()
return 200
# *****************************
server = SimpleXMLRPCServer(("", 33015), allow_none=True)
server.register_function(sendMail, "sendMail")
server.serve_forever()
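# Usage sketch (illustrative host and credentials): a client can call the daemon over
# XML-RPC; sendMail returns 200 on success, 400 if the SMTP host is unreachable,
# 401 on failed authentication and 402 if sending the message fails:
#
#     proxy = xmlrpclib.ServerProxy("http://127.0.0.1:33015/")
#     status = proxy.sendMail("smtp.example.com", 587, "user", "secret",
#                             "to@example.com", "from@example.com",
#                             "Test subject", "Test body")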
| 24.66
| 98
| 0.623682
|
5cb01068cee0b6d808f74931f393bf2882f44796
| 1,779
|
py
|
Python
|
step1_run_time_series_converter.py
|
M2LabOrg/WRF_little_r
|
8f46e733387db4c62f39426a03b6a03b3b406b0e
|
[
"Apache-2.0"
] | 1
|
2021-09-14T06:41:02.000Z
|
2021-09-14T06:41:02.000Z
|
step1_run_time_series_converter.py
|
M2LabOrg/WRF_little_r
|
8f46e733387db4c62f39426a03b6a03b3b406b0e
|
[
"Apache-2.0"
] | null | null | null |
step1_run_time_series_converter.py
|
M2LabOrg/WRF_little_r
|
8f46e733387db4c62f39426a03b6a03b3b406b0e
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
"""
This script creates little_r formatted files to be used
in WRF. They are part of the input files needed for observation nudging.
The script makes use of the function time_series_to_little_r, which is a
FORTRAN wrapper found in time_series_converter.py and record.py
Wrapper sources:
https://github.com/tommz9/python-little_r
https://github.com/valcap/csv2little_r
Note that:
- You need to install the Python packages needed to run this script:
$ pip install -r requirements.txt
- After running this script, you need to convert it to the format needed by WRF
- You do that by running:
$ perl RT_fdda_reformat_obsnud.pl OUTPUT/obs:2021-04-14_02
$ perl RT_fdda_reformat_obsnud.pl OUTPUT/obs:2021-04-14_03
$ (and so on)
- This will produce files with extension .obsnud, which you will concatenate
(see example below)
- You will also need to change the file name to OBS_DOMAIN101 for domain 1,
and OBS_DOMAIN201 for domain 2, and so on, as described in the WRF Users' manual
$ cat *.obsnud >> OBS_DOMAIN101
Adapted here by: Michel Mesquita, Ph.D. (July 2021)
"""
import pandas as pd
from pandas.tseries.offsets import DateOffset
from time_series_converter import time_series_to_little_r
df = pd.read_csv('inputToyData.csv', sep=";")
df.set_index(pd.to_datetime(df['datetime']), drop = False, inplace = True)
df.index = df.index.tz_localize('GMT').tz_convert('Europe/Oslo')
time_series_to_little_r(
df.index,
df['temperature'],
'Bergen', # location
60.3971, # latitude
5.3244, # longitude
40, # altitude
'temperature', # variable
'OUTPUT_STEP1/obs', # start of filename or path + start of filename
convert_to_kelvin=True)
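# Input sketch (illustrative values): inputToyData.csv is read with sep=";" and must
# provide at least the columns used above, e.g.:
#
#     datetime;temperature
#     2021-04-14 02:00:00;6.2
#     2021-04-14 03:00:00;5.8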
| 31.210526
| 82
| 0.717257
|
d0657c8c7475f2b5d7ccd2206cb4843366bd0d82
| 260
|
py
|
Python
|
arc/arc006/arc006a.py
|
c-yan/atcoder
|
940e49d576e6a2d734288fadaf368e486480a948
|
[
"MIT"
] | 1
|
2019-08-21T00:49:34.000Z
|
2019-08-21T00:49:34.000Z
|
arc/arc006/arc006a.py
|
c-yan/atcoder
|
940e49d576e6a2d734288fadaf368e486480a948
|
[
"MIT"
] | null | null | null |
arc/arc006/arc006a.py
|
c-yan/atcoder
|
940e49d576e6a2d734288fadaf368e486480a948
|
[
"MIT"
] | null | null | null |
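# ARC006 A: compare the six numbers on the first input line (E) with the six numbers
# on the third line (L), using the bonus number B from the second line, and print the
# prize rank: 6 matches -> 1, 5 matches plus bonus -> 2, 5 matches -> 3, 4 matches -> 4,
# 3 matches -> 5, otherwise 0.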
E = set(input().split())
B = input()
L = set(input().split())
t = E & L
if len(t) == 6:
print(1)
elif len(t) == 5:
if B in L:
print(2)
else:
print(3)
elif len(t) == 4:
print(4)
elif len(t) == 3:
print(5)
else:
print(0)
| 13.684211
| 24
| 0.465385
|
e6362bc8dca85d6c909c76274c9275f3ace0d201
| 6,141
|
py
|
Python
|
buddy/migrations/0001_initial.py
|
gc-13/studybuddy
|
4488475eea7844adecb955d79d17687c4e0accda
|
[
"HPND",
"MIT"
] | null | null | null |
buddy/migrations/0001_initial.py
|
gc-13/studybuddy
|
4488475eea7844adecb955d79d17687c4e0accda
|
[
"HPND",
"MIT"
] | null | null | null |
buddy/migrations/0001_initial.py
|
gc-13/studybuddy
|
4488475eea7844adecb955d79d17687c4e0accda
|
[
"HPND",
"MIT"
] | null | null | null |
# Generated by Django 3.1 on 2020-11-13 16:55
from django.conf import settings
import django.contrib.auth.models
import django.contrib.auth.validators
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
class Migration(migrations.Migration):
initial = True
dependencies = [
('auth', '0012_alter_user_first_name_max_length'),
]
operations = [
migrations.CreateModel(
name='User',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('password', models.CharField(max_length=128, verbose_name='password')),
('last_login', models.DateTimeField(blank=True, null=True, verbose_name='last login')),
('is_superuser', models.BooleanField(default=False, help_text='Designates that this user has all permissions without explicitly assigning them.', verbose_name='superuser status')),
('username', models.CharField(error_messages={'unique': 'A user with that username already exists.'}, help_text='Required. 150 characters or fewer. Letters, digits and @/./+/-/_ only.', max_length=150, unique=True, validators=[django.contrib.auth.validators.UnicodeUsernameValidator()], verbose_name='username')),
('first_name', models.CharField(blank=True, max_length=150, verbose_name='first name')),
('last_name', models.CharField(blank=True, max_length=150, verbose_name='last name')),
('email', models.EmailField(blank=True, max_length=254, verbose_name='email address')),
('is_staff', models.BooleanField(default=False, help_text='Designates whether the user can log into this admin site.', verbose_name='staff status')),
('is_active', models.BooleanField(default=True, help_text='Designates whether this user should be treated as active. Unselect this instead of deleting accounts.', verbose_name='active')),
('date_joined', models.DateTimeField(default=django.utils.timezone.now, verbose_name='date joined')),
('major', models.CharField(blank=True, max_length=60)),
('description', models.CharField(blank=True, max_length=140)),
('year', models.CharField(blank=True, max_length=10)),
('first', models.CharField(blank=True, max_length=100)),
('second', models.CharField(blank=True, max_length=100)),
('third', models.CharField(blank=True, max_length=100)),
('image', models.ImageField(blank=True, upload_to='profile_image')),
],
options={
'verbose_name': 'user',
'verbose_name_plural': 'users',
'abstract': False,
},
managers=[
('objects', django.contrib.auth.models.UserManager()),
],
),
migrations.CreateModel(
name='Course',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('subject', models.CharField(max_length=4)),
('catalog_number', models.CharField(max_length=4)),
('class_title', models.CharField(max_length=100)),
('instructor', models.CharField(max_length=40)),
],
),
migrations.CreateModel(
name='StudyRequest',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=100)),
('description', models.CharField(max_length=500)),
('assignment', models.CharField(blank=True, max_length=140)),
('current_size', models.PositiveIntegerField(default=1)),
('sizeOfGroup', models.PositiveIntegerField()),
('accepted', models.BooleanField(default=False)),
('hidden', models.BooleanField(default=False)),
('course', models.ForeignKey(default=20485, on_delete=django.db.models.deletion.CASCADE, to='buddy.course')),
('users', models.ManyToManyField(to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='StudyGroup',
fields=[
('groupID', models.AutoField(primary_key=True, serialize=False)),
('name', models.CharField(max_length=100)),
('groupme_id', models.PositiveIntegerField(blank=True, default=0)),
('groupme_shareurl', models.URLField(blank=True, default='')),
('current_size', models.PositiveIntegerField(default=2)),
('sizeOfGroup', models.PositiveIntegerField(default=2)),
('hidden', models.BooleanField(default=False)),
('course', models.ForeignKey(default=20485, on_delete=django.db.models.deletion.CASCADE, to='buddy.course')),
('studyrequest', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='buddy.studyrequest')),
('users', models.ManyToManyField(to=settings.AUTH_USER_MODEL)),
],
),
migrations.AddField(
model_name='user',
name='courses',
field=models.ManyToManyField(to='buddy.Course'),
),
migrations.AddField(
model_name='user',
name='groups',
field=models.ManyToManyField(blank=True, help_text='The groups this user belongs to. A user will get all permissions granted to each of their groups.', related_name='user_set', related_query_name='user', to='auth.Group', verbose_name='groups'),
),
migrations.AddField(
model_name='user',
name='user_permissions',
field=models.ManyToManyField(blank=True, help_text='Specific permissions for this user.', related_name='user_set', related_query_name='user', to='auth.Permission', verbose_name='user permissions'),
),
]
| 57.392523
| 329
| 0.617978
|
6208deefd49012e284c73519bc7ffe6ffbad0104
| 652
|
py
|
Python
|
contrib/python/pypodman/pypodman/lib/__init__.py
|
stevekuznetsov/libpod
|
af791f340cfc3f8134e1fe0e3b0a6d3597706277
|
[
"Apache-2.0"
] | null | null | null |
contrib/python/pypodman/pypodman/lib/__init__.py
|
stevekuznetsov/libpod
|
af791f340cfc3f8134e1fe0e3b0a6d3597706277
|
[
"Apache-2.0"
] | null | null | null |
contrib/python/pypodman/pypodman/lib/__init__.py
|
stevekuznetsov/libpod
|
af791f340cfc3f8134e1fe0e3b0a6d3597706277
|
[
"Apache-2.0"
] | null | null | null |
"""Remote podman client support library."""
from pypodman.lib.action_base import AbstractActionBase
from pypodman.lib.parser_actions import (BooleanAction, BooleanValidate,
PathAction, PositiveIntAction,
UnitAction)
from pypodman.lib.podman_parser import PodmanArgumentParser
from pypodman.lib.report import Report, ReportColumn
# Silence pylint overlording...
assert BooleanAction
assert BooleanValidate
assert PathAction
assert PositiveIntAction
assert UnitAction
__all__ = [
'AbstractActionBase',
'PodmanArgumentParser',
'Report',
'ReportColumn',
]
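# Usage sketch: downstream pypodman modules import the re-exported helpers from this
# package, e.g.:
#
#     from pypodman.lib import AbstractActionBase, PodmanArgumentParser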
| 29.636364
| 72
| 0.710123
|
e999071b638aee46a8642b223ab112f55905a720
| 3,818
|
py
|
Python
|
linear_projection.py
|
evrimozmermer/self_supervised_learning_linear_projection
|
2cad8362c1d3be37e4ce5e94abf7f5e5db227cb9
|
[
"Apache-2.0"
] | 1
|
2021-11-26T14:27:09.000Z
|
2021-11-26T14:27:09.000Z
|
linear_projection.py
|
evrimozmermer/self_supervised_learning_linear_projection
|
2cad8362c1d3be37e4ce5e94abf7f5e5db227cb9
|
[
"Apache-2.0"
] | null | null | null |
linear_projection.py
|
evrimozmermer/self_supervised_learning_linear_projection
|
2cad8362c1d3be37e4ce5e94abf7f5e5db227cb9
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Created on Sat Oct 16 20:16:44 2021
@author: tekin.evrim.ozmermer
"""
import torchvision
import torch
from torch import nn
from classifiers.MLP_ExactSolution import Model as mlpes
class LinearProjection(nn.Module):
def __init__(self, cfg):
super().__init__()
self.cfg = cfg
if cfg.model == "resnet18":
self.model = torchvision.models.resnet18(zero_init_residual=True)#pretrained=True)#
elif cfg.model == "resnet34":
self.model = torchvision.models.resnet34(zero_init_residual=True)#pretrained=True)#
elif cfg.model == "resnet50":
self.model = torchvision.models.resnet50(zero_init_residual=True)#pretrained=True)#
elif cfg.model == "resnet101":
self.model = torchvision.models.resnet101(zero_init_residual=True)#pretrained=True)#
else:
print("Model architecture is given wrong, default is being used\n DEFAULT: RESNET50")
self.model = torchvision.models.resnet50(pretrained=True)
self.model.gap = nn.AdaptiveAvgPool2d(1)
self.model.gmp = nn.AdaptiveMaxPool2d(1)
self.model.embedding = nn.Sequential(nn.Linear(self.model.fc.in_features,
cfg.embedding_size, bias = False))
self.linear_projection = mlpes(cfg)
def forward_conv_layers(self, x):
x = self.model.conv1(x)
x = self.model.bn1(x)
x = self.model.relu(x)
x = self.model.maxpool(x)
x = self.model.layer1(x)
x = self.model.layer2(x)
x = self.model.layer3(x)
x = self.model.layer4(x)
return x
def forward_pooling(self, x):
avg_x = self.model.gap(x)
max_x = self.model.gmp(x)
return avg_x+max_x
def flatten(self, x):
return x.view(x.size(0), -1)
def l2_norm(self, x):
input_size = x.size()
buffer = torch.pow(x, 2)
normp = torch.sum(buffer, 1).add_(1e-12)
norm = torch.sqrt(normp).detach()
_output = torch.div(x, norm.view(-1, 1).expand_as(x))
output = _output.view(input_size)
return output
def criterion_negative(self, sims, alpha, mrg):
shape = sims.shape[0]
neg_exp_sum = torch.exp(alpha * (sims + mrg))
neg_term = torch.log(1 + neg_exp_sum).sum()/shape
return neg_term
def criterion_positive(self, sims, alpha, mrg):
shape = sims.shape[0]
pos_exp_sum = torch.exp(-alpha * (sims - mrg))
pos_term = torch.log(1 + pos_exp_sum).sum()/shape
return pos_term
def forward(self, x):
if type(x) == tuple:
x0 = self.forward_conv_layers(x[0])
x0 = self.forward_pooling(x0)
x0 = self.flatten(x0)
z0 = self.model.embedding(x0)
x1 = self.forward_conv_layers(x[1])
x1 = self.forward_pooling(x1)
x1 = self.flatten(x1)
z1 = self.model.embedding(x1)
# calculate loss
self.linear_projection.create_collection(backbone = None,
dl_coll = None, input_batch = z0)
self.linear_projection.solve_exact()
loss = self.linear_projection.calculate_loss(z1)
return loss
else:
x = self.forward_conv_layers(x)
x = self.forward_pooling(x)
x = self.flatten(x)
z = self.model.embedding(x)
return z
def off_diagonal(x):
# return a flattened view of the off-diagonal elements of a square matrix
n, m = x.shape
assert n == m
return x.flatten()[:-1].view(n - 1, n + 1)[:, 1:].flatten()
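# Usage sketch (illustrative; assumes a config object exposing at least the attributes
# read above -- model and embedding_size -- plus whatever MLP_ExactSolution expects):
#
#     from types import SimpleNamespace
#     cfg = SimpleNamespace(model="resnet50", embedding_size=128)
#     net = LinearProjection(cfg)
#     z = net(images)                # a single batch of images -> embeddings
#     loss = net((view_a, view_b))   # a tuple of two augmented views -> loss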
| 35.351852
| 97
| 0.57098
|
3b39bbfe59b4a940b81dac1981613e6c028361e3
| 2,876
|
py
|
Python
|
alipay/aop/api/domain/KoubeiRetailWmsDeliverypackageQueryModel.py
|
articuly/alipay-sdk-python-all
|
0259cd28eca0f219b97dac7f41c2458441d5e7a6
|
[
"Apache-2.0"
] | null | null | null |
alipay/aop/api/domain/KoubeiRetailWmsDeliverypackageQueryModel.py
|
articuly/alipay-sdk-python-all
|
0259cd28eca0f219b97dac7f41c2458441d5e7a6
|
[
"Apache-2.0"
] | null | null | null |
alipay/aop/api/domain/KoubeiRetailWmsDeliverypackageQueryModel.py
|
articuly/alipay-sdk-python-all
|
0259cd28eca0f219b97dac7f41c2458441d5e7a6
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import simplejson as json
from alipay.aop.api.constant.ParamConstants import *
from alipay.aop.api.domain.OperateContext import OperateContext
class KoubeiRetailWmsDeliverypackageQueryModel(object):
def __init__(self):
self._express_code = None
self._notice_order_id = None
self._operate_context = None
self._work_order_id = None
@property
def express_code(self):
return self._express_code
@express_code.setter
def express_code(self, value):
self._express_code = value
@property
def notice_order_id(self):
return self._notice_order_id
@notice_order_id.setter
def notice_order_id(self, value):
self._notice_order_id = value
@property
def operate_context(self):
return self._operate_context
@operate_context.setter
def operate_context(self, value):
if isinstance(value, OperateContext):
self._operate_context = value
else:
self._operate_context = OperateContext.from_alipay_dict(value)
@property
def work_order_id(self):
return self._work_order_id
@work_order_id.setter
def work_order_id(self, value):
self._work_order_id = value
def to_alipay_dict(self):
params = dict()
if self.express_code:
if hasattr(self.express_code, 'to_alipay_dict'):
params['express_code'] = self.express_code.to_alipay_dict()
else:
params['express_code'] = self.express_code
if self.notice_order_id:
if hasattr(self.notice_order_id, 'to_alipay_dict'):
params['notice_order_id'] = self.notice_order_id.to_alipay_dict()
else:
params['notice_order_id'] = self.notice_order_id
if self.operate_context:
if hasattr(self.operate_context, 'to_alipay_dict'):
params['operate_context'] = self.operate_context.to_alipay_dict()
else:
params['operate_context'] = self.operate_context
if self.work_order_id:
if hasattr(self.work_order_id, 'to_alipay_dict'):
params['work_order_id'] = self.work_order_id.to_alipay_dict()
else:
params['work_order_id'] = self.work_order_id
return params
@staticmethod
def from_alipay_dict(d):
if not d:
return None
o = KoubeiRetailWmsDeliverypackageQueryModel()
if 'express_code' in d:
o.express_code = d['express_code']
if 'notice_order_id' in d:
o.notice_order_id = d['notice_order_id']
if 'operate_context' in d:
o.operate_context = d['operate_context']
if 'work_order_id' in d:
o.work_order_id = d['work_order_id']
return o
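# Usage sketch (illustrative field values): the model round-trips between the plain
# dict representation used by the gateway and typed attributes:
#
#     model = KoubeiRetailWmsDeliverypackageQueryModel.from_alipay_dict(
#         {'express_code': 'SF', 'work_order_id': '12345'})
#     params = model.to_alipay_dict()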
| 31.955556
| 81
| 0.636648
|
b844f8a46b62ebdf685fb854ec92baa2163e6f77
| 6,358
|
py
|
Python
|
padmal/DataLogs/POC_Setup/2022-02-21:03-21-19/plot.py
|
CloudyPadmal/Phantom-Contiki
|
c1f0e53cb1aaaa946c36f2d1dc72499875c35f5b
|
[
"BSD-3-Clause"
] | 1
|
2022-03-07T18:50:41.000Z
|
2022-03-07T18:50:41.000Z
|
padmal/DataLogs/POC_Setup/2022-02-21:03-21-19/plot.py
|
CloudyPadmal/Phantom-Contiki
|
c1f0e53cb1aaaa946c36f2d1dc72499875c35f5b
|
[
"BSD-3-Clause"
] | null | null | null |
padmal/DataLogs/POC_Setup/2022-02-21:03-21-19/plot.py
|
CloudyPadmal/Phantom-Contiki
|
c1f0e53cb1aaaa946c36f2d1dc72499875c35f5b
|
[
"BSD-3-Clause"
] | null | null | null |
import matplotlib.pyplot as plt
import numpy as np
plt.rcParams["figure.figsize"] = (20, 8)
POWER_L = 0
RATE_P = 50
PACKETS = 2500
MIN_RSSI = -100
MAX_RSSI = -10
BINS = [i * (PACKETS / 10) for i in range(11)]
SCATTER = 5
L_WIDTH = 0.5
props = dict(boxstyle='round', facecolor='#cccccc', alpha=0.5)
TX_NODE = '5a7a.b713.0074.1200'
def extract_packet_data(filename):
"""
This method will take a file of packet readings as input and go through each line.
If any line has the IP address defined above, it will fill in the points array with
the corresponding RSSI value and another array with sequence number
There will be four arrays returned at last, two with RSSI readings and two with seq.
[INFO: EavesDr ] Received 0 from 0f2a.7d13.0074.1200 [RSSI: -60 | LQI: 107]
"""
file_node_lines = filename.readlines()
node_points = []
node_seq = []
for line in file_node_lines:
if TX_NODE in line:
try:
line_as_list = line.split(' ')
rssi = int(line_as_list[-4])
seq = int(line_as_list[-8])
node_points.append(rssi)
node_seq.append(seq)
except:
continue
print("Parsing", filename.name)
return node_points, node_seq
#######################################################################################################################
# Files #
#######################################################################################################################
Ev1 = open('Eaves-1.txt', 'r')
Ev2 = open('Eaves-2.txt', 'r')
Ev3 = open('Eaves-3.txt', 'r')
Ev4 = open('Eaves-4.txt', 'r')
Ph2 = open('Receive.txt', 'r')
#######################################################################################################################
# Plots #
#######################################################################################################################
f, ((ev1, ev2, ev3, ev4, pha), (fr1, fr2, fr3, fr4, fr5)) = plt.subplots(2, 5)
ttl = 'RSSI Measurements {Packets: ' + str(PACKETS) + '; Power: ' + str(POWER_L) + ' dBm; Rate: ' + str(RATE_P) + \
' ms; Bin: ' + str(int(PACKETS / 10)) + ' packets}'
f.suptitle(ttl, fontweight='bold')
(P1_E1, S_P1_E1) = extract_packet_data(Ev1)
prr1 = 'PRR:' + str(round((len(P1_E1) / PACKETS), 3) * 100)[:4] + '%'
ev1.scatter(S_P1_E1, P1_E1, s=SCATTER, label='from node 1')
ev1.plot([np.mean(P1_E1) for _ in range(PACKETS)], label='node 1 mean', linewidth=L_WIDTH)
ev1.set_xlim(0, PACKETS)
ev1.set_ylim(MIN_RSSI, MAX_RSSI)
ev1.set_title('Eaves 01')
ev1.set_xlabel('Sequence number')
ev1.set_ylabel('RSSI (dBm)')
ev1.text(0.3, 0.95, prr1, transform=ev1.transAxes, fontsize=8,
verticalalignment='center', bbox=props)
print("EV 1 Ready")
(P1_E2, S_P1_E2) = extract_packet_data(Ev2)
prr2 = 'PRR:' + str(round((len(P1_E2) / PACKETS), 3) * 100)[:4] + '%'
ev2.scatter(S_P1_E2, P1_E2, s=SCATTER, label='from node 1')
ev2.plot([np.mean(P1_E2) for _ in range(PACKETS)], label='node 1 mean', linewidth=L_WIDTH)
ev2.set_xlim(0, PACKETS)
ev2.set_ylim(MIN_RSSI, MAX_RSSI)
ev2.set_title('Eaves 02')
ev2.set_xlabel('Sequence number')
ev2.text(0.4, 0.95, prr2, transform=ev2.transAxes, fontsize=8,
verticalalignment='center', bbox=props)
print("EV 2 Ready")
(P1_E3, S_P1_E3) = extract_packet_data(Ev3)
prr3 = 'PRR:' + str(round((len(P1_E3) / PACKETS), 3) * 100)[:4] + '%'
ev3.scatter(S_P1_E3, P1_E3, s=SCATTER, label='from node 1')
ev3.plot([np.mean(P1_E3) for _ in range(PACKETS)], label='node 1 mean', linewidth=L_WIDTH)
ev3.set_xlim(0, PACKETS)
ev3.set_ylim(MIN_RSSI, MAX_RSSI)
ev3.set_title('Eaves 03')
ev3.set_xlabel('Sequence number')
ev3.text(0.4, 0.95, prr3, transform=ev3.transAxes, fontsize=8,
verticalalignment='center', bbox=props)
print("EV 3 Ready")
(P1_E4, S_P1_E4) = extract_packet_data(Ev4)
prr4 = 'PRR:' + str(round((len(P1_E4) / PACKETS), 3) * 100)[:4] + '%'
ev4.scatter(S_P1_E4, P1_E4, s=SCATTER, label='from node 1')
ev4.plot([np.mean(P1_E4) for _ in range(PACKETS)], label='node 1 mean', linewidth=L_WIDTH)
ev4.set_xlim(0, PACKETS)
ev4.set_ylim(MIN_RSSI, MAX_RSSI)
ev4.set_title('Eaves 04')
ev4.set_xlabel('Sequence number')
ev4.text(0.4, 0.95, prr4, transform=ev4.transAxes, fontsize=8,
verticalalignment='center', bbox=props)
print("EV 4 Ready")
(P1_P2, S_P1_P2) = extract_packet_data(Ph2)
prr5 = 'PRR:' + str(round((len(P1_P2) / PACKETS), 3) * 100)[:4] + '%'
pha.scatter(S_P1_P2, P1_P2, s=SCATTER, label='from node 1')
pha.plot([np.mean(P1_P2) for _ in range(PACKETS)], label='node 1 mean', linewidth=L_WIDTH)
pha.set_xlim(0, PACKETS)
pha.set_ylim(MIN_RSSI, MAX_RSSI)
pha.set_title('In-body')
pha.set_xlabel('Sequence number')
pha.text(0.4, 0.95, prr5, transform=pha.transAxes, fontsize=8,
verticalalignment='center', bbox=props)
print("RX Ready")
#######################################################################################################################
# Fast RSSI Sampling #
#######################################################################################################################
fr1.hist(S_P1_E1, BINS, label='count', alpha=0.7, rwidth=0.9)
fr1.set_xlabel('Sequence Range')
fr1.set_ylabel('Count')
fr2.hist(S_P1_E2, BINS, label='count', alpha=0.7, rwidth=0.9)
fr2.set_xlabel('Reading instance')
fr3.hist(S_P1_E3, BINS, label='count', alpha=0.7, rwidth=0.9)
fr3.set_xlabel('Reading instance')
fr4.hist(S_P1_E4, BINS, label='count', alpha=0.7, rwidth=0.9)
fr4.set_xlabel('Reading instance')
fr5.hist(S_P1_P2, BINS, label='count', alpha=0.7, rwidth=0.9)
fr5.set_xlabel('Reading instance')
ev1.grid(True, axis='y', alpha=0.35)
ev2.grid(True, axis='y', alpha=0.35)
ev3.grid(True, axis='y', alpha=0.35)
ev4.grid(True, axis='y', alpha=0.35)
pha.grid(True, axis='y', alpha=0.35)
fr1.grid(True, axis='y', alpha=0.35)
fr2.grid(True, axis='y', alpha=0.35)
fr3.grid(True, axis='y', alpha=0.35)
fr4.grid(True, axis='y', alpha=0.35)
fr5.grid(True, axis='y', alpha=0.35)
plt.savefig('results.png', dpi=300)
plt.show()
| 40.496815
| 119
| 0.56134
|
ae76db1762cd23ecd5394f05cf6db126f6dbc993
| 4,411
|
py
|
Python
|
posts/migrations/0004_auto_20211108_0210.py
|
GrimJ0/yatube
|
7c6919bdfd25130d853d0d3ffdb9a63f32660c73
|
[
"BSD-3-Clause"
] | 1
|
2021-11-09T21:29:16.000Z
|
2021-11-09T21:29:16.000Z
|
posts/migrations/0004_auto_20211108_0210.py
|
GrimJ0/yatube
|
7c6919bdfd25130d853d0d3ffdb9a63f32660c73
|
[
"BSD-3-Clause"
] | null | null | null |
posts/migrations/0004_auto_20211108_0210.py
|
GrimJ0/yatube
|
7c6919bdfd25130d853d0d3ffdb9a63f32660c73
|
[
"BSD-3-Clause"
] | null | null | null |
# Generated by Django 2.2.9 on 2021-11-07 23:10
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('posts', '0003_auto_20211107_2325'),
]
operations = [
migrations.AlterModelOptions(
name='comment',
options={'ordering': ('-created',), 'verbose_name': 'Комментарий', 'verbose_name_plural': 'Комментарии'},
),
migrations.AlterModelOptions(
name='follow',
options={'verbose_name': 'Подписчик', 'verbose_name_plural': 'Подписчики'},
),
migrations.AlterModelOptions(
name='group',
options={'verbose_name': 'Группа', 'verbose_name_plural': 'Группы'},
),
migrations.AlterModelOptions(
name='post',
options={'ordering': ('-pub_date',), 'verbose_name': 'Пост', 'verbose_name_plural': 'Посты'},
),
migrations.AlterField(
model_name='comment',
name='author',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='comments', to=settings.AUTH_USER_MODEL, verbose_name='Автор'),
),
migrations.AlterField(
model_name='comment',
name='created',
field=models.DateTimeField(auto_now_add=True, verbose_name='Дата публикации'),
),
migrations.AlterField(
model_name='comment',
name='post',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='comments', to='posts.Post', verbose_name='Пост'),
),
migrations.AlterField(
model_name='comment',
name='text',
field=models.TextField(help_text='Введите ваш комментарий', verbose_name='Комментария'),
),
migrations.AlterField(
model_name='follow',
name='author',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='following', to=settings.AUTH_USER_MODEL, verbose_name='Автор'),
),
migrations.AlterField(
model_name='follow',
name='user',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='follower', to=settings.AUTH_USER_MODEL, verbose_name='Подписчик'),
),
migrations.AlterField(
model_name='group',
name='description',
field=models.TextField(help_text='Опишите группу', verbose_name='Описание'),
),
migrations.AlterField(
model_name='group',
name='slug',
field=models.SlugField(help_text='Задайте уникальный адрес сообщества', unique=True, verbose_name='Адрес'),
),
migrations.AlterField(
model_name='group',
name='title',
field=models.CharField(max_length=100, verbose_name='Название сообщества'),
),
migrations.AlterField(
model_name='post',
name='author',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='posts', to=settings.AUTH_USER_MODEL, verbose_name='Автор'),
),
migrations.AlterField(
model_name='post',
name='group',
field=models.ForeignKey(blank=True, help_text='Выберите группу', null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='posts', to='posts.Group', verbose_name='Сообщество'),
),
migrations.AlterField(
model_name='post',
name='image',
field=models.ImageField(blank=True, help_text='Добавьте изображение к посту', null=True, upload_to='posts/', verbose_name='Изображение'),
),
migrations.AlterField(
model_name='post',
name='pub_date',
field=models.DateTimeField(auto_now_add=True, verbose_name='Дата публикации'),
),
migrations.AlterField(
model_name='post',
name='text',
field=models.TextField(help_text='Введите текст вашего поста', verbose_name='Текст поста'),
),
migrations.AlterField(
model_name='post',
name='title',
field=models.CharField(max_length=100, unique=True, verbose_name='Заголовок'),
),
]
| 41.224299
| 201
| 0.607345
|
3d3ac9456e88a1e83999d907de2204c66088cd8a
| 1,080
|
py
|
Python
|
exercise_2020_05/test_data_wrapper.py
|
yorkshire-geek/advent_of_code_2020
|
6868f89849ab7347689136b010020f8cd90f2d93
|
[
"Apache-2.0"
] | null | null | null |
exercise_2020_05/test_data_wrapper.py
|
yorkshire-geek/advent_of_code_2020
|
6868f89849ab7347689136b010020f8cd90f2d93
|
[
"Apache-2.0"
] | null | null | null |
exercise_2020_05/test_data_wrapper.py
|
yorkshire-geek/advent_of_code_2020
|
6868f89849ab7347689136b010020f8cd90f2d93
|
[
"Apache-2.0"
] | null | null | null |
import unittest
from .exercise_5 import DataWrapper
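# DataWrapper parses a boarding pass such as "FBFBBFFRLR", as exercised by the tests
# below: the first seven characters binary-partition the row (F=0, B=1 -> "0101100" -> 44),
# the last three the column (L=0, R=1 -> "101" -> 5), and the seat id is row * 8 + column = 357.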
class MyTestCase(unittest.TestCase):
data_wrapper = DataWrapper("FBFBBFFRLR")
def test_row_data(self):
self.assertEqual("FBFBBFF", self.data_wrapper.get_row_str())
def test_seat_data(self):
self.assertEqual("RLR", self.data_wrapper.get_column_str())
def test_row_binary(self):
self.assertEqual("0101100", self.data_wrapper.get_row_binary())
def test_column_binary(self):
self.assertEqual("0101100", self.data_wrapper.get_row_binary())
def test_row(self):
self.assertEqual(44, self.data_wrapper.get_row())
def test_column(self):
self.assertEqual(5, self.data_wrapper.get_column())
def test_id(self):
self.assertEqual(357, self.data_wrapper.get_id())
def test_one_two_three(self):
self.assertEqual(567, DataWrapper("BFFFBBFRRR").get_id())
self.assertEqual(119, DataWrapper("FFFBBBFRRR").get_id())
self.assertEqual(820, DataWrapper("BBFFBBFRLL").get_id())
if __name__ == '__main__':
unittest.main()
| 29.189189
| 71
| 0.700926
|
23dfa9774dd013e4933c2bc360661be0a8433e5d
| 1,527
|
py
|
Python
|
openstack/tests/functional/network/v2/test_quota.py
|
NeCTAR-RC/openstacksdk
|
60a24f6c4717a1f9a0e545c9a07e68afaedc5a27
|
[
"Apache-2.0"
] | 99
|
2018-03-28T15:41:45.000Z
|
2022-01-23T17:22:13.000Z
|
openstack/tests/functional/network/v2/test_quota.py
|
NeCTAR-RC/openstacksdk
|
60a24f6c4717a1f9a0e545c9a07e68afaedc5a27
|
[
"Apache-2.0"
] | 5
|
2018-05-25T16:54:23.000Z
|
2021-11-21T02:27:16.000Z
|
openstack/tests/functional/network/v2/test_quota.py
|
NeCTAR-RC/openstacksdk
|
60a24f6c4717a1f9a0e545c9a07e68afaedc5a27
|
[
"Apache-2.0"
] | 104
|
2018-04-06T14:33:54.000Z
|
2022-03-01T01:58:09.000Z
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from openstack.tests.functional import base
class TestQuota(base.BaseFunctionalTest):
def test_list(self):
for qot in self.conn.network.quotas():
self.assertIsNotNone(qot.project_id)
self.assertIsNotNone(qot.networks)
def test_list_details(self):
expected_keys = ['limit', 'used', 'reserved']
project_id = self.conn.session.get_project_id()
quota_details = self.conn.network.get_quota(project_id, details=True)
for details in quota_details._body.attributes.values():
for expected_key in expected_keys:
self.assertTrue(expected_key in details.keys())
def test_set(self):
attrs = {'networks': 123456789}
for project_quota in self.conn.network.quotas():
self.conn.network.update_quota(project_quota, **attrs)
new_quota = self.conn.network.get_quota(project_quota.project_id)
self.assertEqual(123456789, new_quota.networks)
| 41.27027
| 77
| 0.707924
|
147b9a4a68b6d401d59161005d7af4109ea5f30d
| 6,204
|
py
|
Python
|
vendor-local/lib/python/celery/worker/bootsteps.py
|
Mozilla-GitHub-Standards/6f0d85288b5b0ef8beecb60345173dc14c98e40f48e1307a444ab1e08231e695
|
bf6a382913901ad193d907f022086931df0de8c4
|
[
"BSD-3-Clause"
] | 1
|
2015-07-13T03:29:04.000Z
|
2015-07-13T03:29:04.000Z
|
vendor-local/lib/python/celery/worker/bootsteps.py
|
Mozilla-GitHub-Standards/6f0d85288b5b0ef8beecb60345173dc14c98e40f48e1307a444ab1e08231e695
|
bf6a382913901ad193d907f022086931df0de8c4
|
[
"BSD-3-Clause"
] | 2
|
2015-03-03T23:02:19.000Z
|
2019-03-30T04:45:51.000Z
|
vendor-local/lib/python/celery/worker/bootsteps.py
|
Mozilla-GitHub-Standards/6f0d85288b5b0ef8beecb60345173dc14c98e40f48e1307a444ab1e08231e695
|
bf6a382913901ad193d907f022086931df0de8c4
|
[
"BSD-3-Clause"
] | 2
|
2016-04-15T11:43:05.000Z
|
2016-04-15T11:43:15.000Z
|
# -*- coding: utf-8 -*-
"""
celery.worker.bootsteps
~~~~~~~~~~~~~~~~~~~~~~~
The boot-step components.
"""
from __future__ import absolute_import
from collections import defaultdict
from importlib import import_module
from celery.datastructures import DependencyGraph
from celery.utils.imports import instantiate
from celery.utils.log import get_logger
logger = get_logger(__name__)
class Namespace(object):
"""A namespace containing components.
Every component must belong to a namespace.
When component classes are created they are added to the
mapping of unclaimed components. The components will be
claimed when the namespace they belong to is created.
:keyword name: Set the name of this namespace.
:keyword app: Set the Celery app for this namespace.
"""
name = None
_unclaimed = defaultdict(dict)
_started_count = 0
def __init__(self, name=None, app=None):
self.app = app
self.name = name or self.name
self.services = []
def modules(self):
"""Subclasses can override this to return a
list of modules to import before components are claimed."""
return []
def load_modules(self):
"""Will load the component modules this namespace depends on."""
for m in self.modules():
self.import_module(m)
def apply(self, parent, **kwargs):
"""Apply the components in this namespace to an object.
This will apply the ``__init__`` and ``include`` methods
        of each component with the object as argument.
        For ``StartStopComponents`` the services created
        will also be added to the object's ``components`` attribute.
"""
self._debug('Loading modules.')
self.load_modules()
self._debug('Claiming components.')
self.components = self._claim()
self._debug('Building boot step graph.')
self.boot_steps = [self.bind_component(name, parent, **kwargs)
for name in self._finalize_boot_steps()]
self._debug(
'New boot order: {%s}', ', '.join(c.name for c in self.boot_steps),
)
for component in self.boot_steps:
component.include(parent)
return self
def bind_component(self, name, parent, **kwargs):
"""Bind component to parent object and this namespace."""
comp = self[name](parent, **kwargs)
comp.namespace = self
return comp
def import_module(self, module):
return import_module(module)
def __getitem__(self, name):
return self.components[name]
def _find_last(self):
for C in self.components.itervalues():
if C.last:
return C
def _finalize_boot_steps(self):
G = self.graph = DependencyGraph(
(C.name, C.requires) for C in self.components.itervalues())
last = self._find_last()
if last:
for obj in G:
if obj != last.name:
G.add_edge(last.name, obj)
return G.topsort()
def _claim(self):
return self._unclaimed[self.name]
def _debug(self, msg, *args):
return logger.debug('[%s] ' + msg,
*(self.name.capitalize(), ) + args)
class ComponentType(type):
"""Metaclass for components."""
def __new__(cls, name, bases, attrs):
abstract = attrs.pop('abstract', False)
if not abstract:
try:
cname = attrs['name']
except KeyError:
raise NotImplementedError('Components must be named')
namespace = attrs.get('namespace', None)
if not namespace:
attrs['namespace'], _, attrs['name'] = cname.partition('.')
cls = super(ComponentType, cls).__new__(cls, name, bases, attrs)
if not abstract:
Namespace._unclaimed[cls.namespace][cls.name] = cls
return cls
class Component(object):
"""A component.
The :meth:`__init__` method is called when the component
is bound to a parent object, and can as such be used
to initialize attributes in the parent object at
parent instantiation-time.
"""
__metaclass__ = ComponentType
#: The name of the component, or the namespace
#: and the name of the component separated by dot.
name = None
#: List of component names this component depends on.
#: Note that the dependencies must be in the same namespace.
requires = ()
#: can be used to specify the namespace,
#: if the name does not include it.
namespace = None
#: if set the component will not be registered,
#: but can be used as a component base class.
abstract = True
#: Optional obj created by the :meth:`create` method.
#: This is used by StartStopComponents to keep the
#: original service object.
obj = None
#: This flag is reserved for the workers Consumer,
#: since it is required to always be started last.
    #: There can only be one object marked as last
#: in every namespace.
last = False
#: This provides the default for :meth:`include_if`.
enabled = True
def __init__(self, parent, **kwargs):
pass
def create(self, parent):
"""Create the component."""
pass
def include_if(self, parent):
"""An optional predicate that decided whether this
component should be created."""
return self.enabled
def instantiate(self, qualname, *args, **kwargs):
return instantiate(qualname, *args, **kwargs)
def include(self, parent):
if self.include_if(parent):
self.obj = self.create(parent)
return True
class StartStopComponent(Component):
abstract = True
terminable = False
def start(self):
return self.obj.start()
def stop(self):
return self.obj.stop()
def terminate(self):
if self.terminable:
return self.obj.terminate()
return self.obj.stop()
def include(self, parent):
if super(StartStopComponent, self).include(parent):
parent.components.append(self.obj)
| 29.264151
| 79
| 0.620729
|
9a477ec2dfb5268c91fe53bc2f477afa12b61727
| 1,543
|
py
|
Python
|
src/helper/paths_generator.py
|
amillert/pic2story
|
efa0c7c966392b4efb88ae370d8eb78cd2ab062b
|
[
"MIT"
] | 3
|
2020-09-27T21:25:01.000Z
|
2021-02-14T14:12:04.000Z
|
src/helper/paths_generator.py
|
amillert/pic2story
|
efa0c7c966392b4efb88ae370d8eb78cd2ab062b
|
[
"MIT"
] | 15
|
2020-09-26T10:52:56.000Z
|
2020-11-15T08:19:12.000Z
|
src/helper/paths_generator.py
|
amillert/pic2story
|
efa0c7c966392b4efb88ae370d8eb78cd2ab062b
|
[
"MIT"
] | null | null | null |
"""
Module providing some helper functions
"""
import os
# TODO: file needs general refactor
def extract(path):
"""
    Function for extracting a path, or all nested paths if the input is a directory
:param path: str
:return: str / list[str] # TODO: fix
"""
if os.path.isdir(path):
res = []
contents = os.listdir(path)
for cont in contents:
res.append(extract(os.path.join(path, cont)))
return res
return path
def generate_abs_for_dir(dir_path, abs_paths):
"""
Function for generating absolute paths in the directory
:param dir_path: str
:param abs_paths: list[str]
    :return: list[str]
"""
for path in os.listdir(dir_path):
if "-" not in path:
joined = os.path.join(dir_path, path)
if os.path.isdir(joined):
abs_paths.extend(extract(joined))
else:
abs_paths.append(extract(joined))
return abs_paths
def generate_absolute_paths(paths):
"""
    Function for generating absolute paths from the given input paths
:param paths: list[str]
:return: list[list[str]] ?
"""
abs_paths = []
for path in paths:
abs_path = os.path.abspath(path)
if os.path.isdir(abs_path):
abs_paths = generate_abs_for_dir(abs_path, abs_paths)
else:
abs_paths.append(abs_path)
return abs_paths
def parent_path(path):
"""
Function for easier access to the parent directory
:param path: str
:return: str
"""
return os.path.abspath(os.path.join(path, os.pardir))
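# Usage sketch (illustrative paths): expand a mix of files and directories into a list
# of absolute file paths (entries whose names contain "-" inside a directory are skipped):
#
#     paths = generate_absolute_paths(["./pictures", "./extra/cover.jpg"])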
| 22.691176
| 65
| 0.605314
|
51c817e9b4cb7e38af78f4d52835eda6f1b496cf
| 12,028
|
py
|
Python
|
mars/dataframe/base/astype.py
|
humaohai/mars
|
11373f64c3039d424f9276e610ae5ad108ea0eb1
|
[
"Apache-2.0"
] | 1
|
2020-06-25T13:51:16.000Z
|
2020-06-25T13:51:16.000Z
|
mars/dataframe/base/astype.py
|
humaohai/mars
|
11373f64c3039d424f9276e610ae5ad108ea0eb1
|
[
"Apache-2.0"
] | null | null | null |
mars/dataframe/base/astype.py
|
humaohai/mars
|
11373f64c3039d424f9276e610ae5ad108ea0eb1
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 1999-2020 Alibaba Group Holding Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import numpy as np
import pandas as pd
from pandas.api.types import CategoricalDtype
from ... import opcodes as OperandDef
from ...serialize import AnyField, StringField, ListField
from ...utils import recursive_tile
from ...tensor.base import sort
from ..utils import build_empty_df, build_empty_series
from ..core import SERIES_TYPE
from ..operands import DataFrameOperand, DataFrameOperandMixin, ObjectType
class DataFrameAstype(DataFrameOperand, DataFrameOperandMixin):
_op_type_ = OperandDef.ASTYPE
_dtype_values = AnyField('dtype_values')
_errors = StringField('errors')
_category_cols = ListField('category_cols')
def __init__(self, dtype_values=None, copy=None, errors=None,
category_cols=None, object_type=None, **kw):
super().__init__(_dtype_values=dtype_values,
_errors=errors, _category_cols=category_cols,
_object_type=object_type, **kw)
@property
def dtype_values(self):
return self._dtype_values
@property
def errors(self):
return self._errors
@property
def category_cols(self):
return self._category_cols
@classmethod
def _tile_one_chunk(cls, op):
c = op.inputs[0].chunks[0]
chunk_op = op.copy().reset_key()
chunk_params = op.outputs[0].params.copy()
chunk_params['index'] = c.index
out_chunks = [chunk_op.new_chunk([c], **chunk_params)]
new_op = op.copy()
return new_op.new_tileables(op.inputs, nsplits=op.inputs[0].nsplits,
chunks=out_chunks, **op.outputs[0].params.copy())
@classmethod
def _tile_series(cls, op):
in_series = op.inputs[0]
out = op.outputs[0]
unique_chunk = None
if op.dtype_values == 'category' and isinstance(op.dtype_values, str):
unique_chunk = recursive_tile(sort(in_series.unique())).chunks[0]
chunks = []
for c in in_series.chunks:
chunk_op = op.copy().reset_key()
params = c.params.copy()
params['dtype'] = out.dtype
if unique_chunk is not None:
chunk_op._category_cols = [in_series.name]
new_chunk = chunk_op.new_chunk([c, unique_chunk], **params)
else:
new_chunk = chunk_op.new_chunk([c], **params)
chunks.append(new_chunk)
new_op = op.copy()
return new_op.new_seriess(op.inputs, nsplits=in_series.nsplits,
chunks=chunks, **out.params.copy())
@classmethod
def _tile_dataframe(cls, op):
in_df = op.inputs[0]
out = op.outputs[0]
cum_nsplits = np.cumsum((0,) + in_df.nsplits[1])
out_chunks = []
if op.dtype_values == 'category':
# all columns need unique values
for c in in_df.chunks:
chunk_op = op.copy().reset_key()
params = c.params.copy()
dtypes = out.dtypes[cum_nsplits[c.index[1]]: cum_nsplits[c.index[1] + 1]]
params['dtypes'] = dtypes
chunk_op._category_cols = list(c.columns_value.to_pandas())
unique_chunks = []
for col in c.columns_value.to_pandas():
unique_chunks.append(recursive_tile(sort(in_df[col].unique())).chunks[0])
new_chunk = chunk_op.new_chunk([c] + unique_chunks, **params)
out_chunks.append(new_chunk)
elif isinstance(op.dtype_values, dict) and 'category' in op.dtype_values.values():
# some columns' types are category
category_cols = [c for c, v in op.dtype_values.items()
if isinstance(v, str) and v == 'category']
unique_chunks = dict((col, recursive_tile(sort(in_df[col].unique())).chunks[0])
for col in category_cols)
for c in in_df.chunks:
chunk_op = op.copy().reset_key()
params = c.params.copy()
dtypes = out.dtypes[cum_nsplits[c.index[1]]: cum_nsplits[c.index[1] + 1]]
params['dtypes'] = dtypes
chunk_category_cols = []
chunk_unique_chunks = []
for col in c.columns_value.to_pandas():
if col in category_cols:
chunk_category_cols.append(col)
chunk_unique_chunks.append(unique_chunks[col])
chunk_op._category_cols = chunk_category_cols
new_chunk = chunk_op.new_chunk([c] + chunk_unique_chunks, **params)
out_chunks.append(new_chunk)
else:
for c in in_df.chunks:
chunk_op = op.copy().reset_key()
params = c.params.copy()
dtypes = out.dtypes[cum_nsplits[c.index[1]]: cum_nsplits[c.index[1] + 1]]
params['dtypes'] = dtypes
new_chunk = chunk_op.new_chunk([c], **params)
out_chunks.append(new_chunk)
new_op = op.copy()
return new_op.new_dataframes(op.inputs, nsplits=in_df.nsplits,
chunks=out_chunks, **out.params.copy())
@classmethod
def tile(cls, op):
if len(op.inputs[0].chunks) == 1:
return cls._tile_one_chunk(op)
elif isinstance(op.inputs[0], SERIES_TYPE):
return cls._tile_series(op)
else:
return cls._tile_dataframe(op)
@classmethod
def execute(cls, ctx, op):
in_data = ctx[op.inputs[0].key]
if not isinstance(op.dtype_values, dict):
if op.category_cols is not None:
uniques = [ctx[c.key] for c in op.inputs[1:]]
dtype = dict((col, CategoricalDtype(unique_values)) for
col, unique_values in zip(op.category_cols, uniques))
ctx[op.outputs[0].key] = in_data.astype(dtype, errors=op.errors)
else:
ctx[op.outputs[0].key] = in_data.astype(op.dtype_values, errors=op.errors)
else:
selected_dtype = dict((k, v) for k, v in op.dtype_values.items()
if k in in_data.columns)
if op.category_cols is not None:
uniques = [ctx[c.key] for c in op.inputs[1:]]
for col, unique_values in zip(op.category_cols, uniques):
selected_dtype[col] = CategoricalDtype(unique_values)
ctx[op.outputs[0].key] = in_data.astype(selected_dtype, errors=op.errors)
def __call__(self, df):
if isinstance(df, SERIES_TYPE):
self._object_type = ObjectType.series
empty_series = build_empty_series(df.dtype)
new_series = empty_series.astype(self.dtype_values, errors=self.errors)
if new_series.dtype != df.dtype:
dtype = CategoricalDtype() if isinstance(
new_series.dtype, CategoricalDtype) else new_series.dtype
else: # pragma: no cover
dtype = df.dtype
return self.new_series([df], shape=df.shape, dtype=dtype,
name=df.name, index_value=df.index_value)
else:
self._object_type = ObjectType.dataframe
empty_df = build_empty_df(df.dtypes)
new_df = empty_df.astype(self.dtype_values, errors=self.errors)
dtypes = []
for dt, new_dt in zip(df.dtypes, new_df.dtypes):
if new_dt != dt and isinstance(new_dt, CategoricalDtype):
dtypes.append(CategoricalDtype())
else:
dtypes.append(new_dt)
dtypes = pd.Series(dtypes, index=new_df.dtypes.index)
return self.new_dataframe([df], shape=df.shape, dtypes=dtypes,
index_value=df.index_value,
columns_value=df.columns_value)
def astype(df, dtype, copy=True, errors='raise'):
"""
Cast a pandas object to a specified dtype ``dtype``.
Parameters
----------
dtype : data type, or dict of column name -> data type
Use a numpy.dtype or Python type to cast entire pandas object to
the same type. Alternatively, use {col: dtype, ...}, where col is a
column label and dtype is a numpy.dtype or Python type to cast one
or more of the DataFrame's columns to column-specific types.
copy : bool, default True
Return a copy when ``copy=True`` (be very careful setting
``copy=False`` as changes to values then may propagate to other
pandas objects).
errors : {'raise', 'ignore'}, default 'raise'
Control raising of exceptions on invalid data for provided dtype.
- ``raise`` : allow exceptions to be raised
- ``ignore`` : suppress exceptions. On error return original object.
Returns
-------
casted : same type as caller
See Also
--------
to_datetime : Convert argument to datetime.
to_timedelta : Convert argument to timedelta.
to_numeric : Convert argument to a numeric type.
numpy.ndarray.astype : Cast a numpy array to a specified type.
Examples
--------
Create a DataFrame:
>>> import mars.dataframe as md
>>> df = md.DataFrame(pd.DataFrame({'col1': [1, 2], 'col2': [3, 4]}))
>>> df.dtypes
col1 int64
col2 int64
dtype: object
Cast all columns to int32:
>>> df.astype('int32').dtypes
col1 int32
col2 int32
dtype: object
Cast col1 to int32 using a dictionary:
>>> df.astype({'col1': 'int32'}).dtypes
col1 int32
col2 int64
dtype: object
Create a series:
>>> ser = md.Series(pd.Series([1, 2], dtype='int32'))
>>> ser.execute()
0 1
1 2
dtype: int32
>>> ser.astype('int64').execute()
0 1
1 2
dtype: int64
Convert to categorical type:
>>> ser.astype('category').execute()
0 1
1 2
dtype: category
Categories (2, int64): [1, 2]
Convert to ordered categorical type with custom ordering:
>>> cat_dtype = pd.api.types.CategoricalDtype(
... categories=[2, 1], ordered=True)
>>> ser.astype(cat_dtype).execute()
0 1
1 2
dtype: category
Categories (2, int64): [2 < 1]
Note that using ``copy=False`` and changing data on a new
pandas object may propagate changes:
>>> s1 = md.Series(pd.Series([1, 2]))
>>> s2 = s1.astype('int64', copy=False)
    >>> s1.execute() # note that s1's dtype has changed too
0 1
1 2
dtype: int64
"""
if isinstance(dtype, dict):
keys = list(dtype.keys())
if isinstance(df, SERIES_TYPE):
if len(keys) != 1 or keys[0] != df.name:
raise KeyError('Only the Series name can be used for the key in Series dtype mappings.')
else:
dtype = list(dtype.values())[0]
else:
for k in keys:
columns = df.columns_value.to_pandas()
if k not in columns:
raise KeyError('Only a column name can be used for the key in a dtype mappings argument.')
op = DataFrameAstype(dtype_values=dtype, errors=errors)
r = op(df)
if not copy:
df.data = r.data
return df
else:
return r
| 37.943218
| 110
| 0.587629
|
125499d1b492f52653e97ff9af0a87245a597744
| 26,887
|
py
|
Python
|
sdk/network/azure-mgmt-network/azure/mgmt/network/v2016_09_01/operations/_virtual_networks_operations.py
|
beltr0n/azure-sdk-for-python
|
2f7fb8bee881b0fc0386a0ad5385755ceedd0453
|
[
"MIT"
] | 2
|
2021-03-24T06:26:11.000Z
|
2021-04-18T15:55:59.000Z
|
sdk/network/azure-mgmt-network/azure/mgmt/network/v2016_09_01/operations/_virtual_networks_operations.py
|
beltr0n/azure-sdk-for-python
|
2f7fb8bee881b0fc0386a0ad5385755ceedd0453
|
[
"MIT"
] | 4
|
2019-04-17T17:57:49.000Z
|
2020-04-24T21:11:22.000Z
|
sdk/network/azure-mgmt-network/azure/mgmt/network/v2016_09_01/operations/_virtual_networks_operations.py
|
beltr0n/azure-sdk-for-python
|
2f7fb8bee881b0fc0386a0ad5385755ceedd0453
|
[
"MIT"
] | 2
|
2021-05-23T16:46:31.000Z
|
2021-05-26T23:51:09.000Z
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.arm_polling import ARMPolling
from .. import models as _models
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class VirtualNetworksOperations(object):
"""VirtualNetworksOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2016_09_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def _delete_initial(
self,
resource_group_name, # type: str
virtual_network_name, # type: str
**kwargs # type: Any
):
# type: (...) -> None
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2016-09-01"
# Construct URL
url = self._delete_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkName': self._serialize.url("virtual_network_name", virtual_network_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}'} # type: ignore
def begin_delete(
self,
resource_group_name, # type: str
virtual_network_name, # type: str
**kwargs # type: Any
):
# type: (...) -> LROPoller[None]
"""Deletes the specified virtual network.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_name: The name of the virtual network.
:type virtual_network_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._delete_initial(
resource_group_name=resource_group_name,
virtual_network_name=virtual_network_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkName': self._serialize.url("virtual_network_name", virtual_network_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}'} # type: ignore
def get(
self,
resource_group_name, # type: str
virtual_network_name, # type: str
expand=None, # type: Optional[str]
**kwargs # type: Any
):
# type: (...) -> "_models.VirtualNetwork"
"""Gets the specified virtual network by resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_name: The name of the virtual network.
:type virtual_network_name: str
:param expand: Expands referenced resources.
:type expand: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: VirtualNetwork, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2016_09_01.models.VirtualNetwork
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualNetwork"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2016-09-01"
accept = "application/json, text/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkName': self._serialize.url("virtual_network_name", virtual_network_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
if expand is not None:
query_parameters['$expand'] = self._serialize.query("expand", expand, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('VirtualNetwork', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}'} # type: ignore
def _create_or_update_initial(
self,
resource_group_name, # type: str
virtual_network_name, # type: str
parameters, # type: "_models.VirtualNetwork"
**kwargs # type: Any
):
# type: (...) -> "_models.VirtualNetwork"
cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualNetwork"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2016-09-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json, text/json"
# Construct URL
url = self._create_or_update_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkName': self._serialize.url("virtual_network_name", virtual_network_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'VirtualNetwork')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('VirtualNetwork', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('VirtualNetwork', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}'} # type: ignore
def begin_create_or_update(
self,
resource_group_name, # type: str
virtual_network_name, # type: str
parameters, # type: "_models.VirtualNetwork"
**kwargs # type: Any
):
# type: (...) -> LROPoller["_models.VirtualNetwork"]
"""Creates or updates a virtual network in the specified resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_name: The name of the virtual network.
:type virtual_network_name: str
:param parameters: Parameters supplied to the create or update virtual network operation.
:type parameters: ~azure.mgmt.network.v2016_09_01.models.VirtualNetwork
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either VirtualNetwork or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2016_09_01.models.VirtualNetwork]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualNetwork"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._create_or_update_initial(
resource_group_name=resource_group_name,
virtual_network_name=virtual_network_name,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('VirtualNetwork', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkName': self._serialize.url("virtual_network_name", virtual_network_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}'} # type: ignore
def list_all(
self,
**kwargs # type: Any
):
# type: (...) -> Iterable["_models.VirtualNetworkListResult"]
"""Gets all virtual networks in a subscription.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either VirtualNetworkListResult or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2016_09_01.models.VirtualNetworkListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualNetworkListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2016-09-01"
accept = "application/json, text/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_all.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('VirtualNetworkListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list_all.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/virtualNetworks'} # type: ignore
def list(
self,
resource_group_name, # type: str
**kwargs # type: Any
):
# type: (...) -> Iterable["_models.VirtualNetworkListResult"]
"""Gets all virtual networks in a resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either VirtualNetworkListResult or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2016_09_01.models.VirtualNetworkListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualNetworkListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2016-09-01"
accept = "application/json, text/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('VirtualNetworkListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks'} # type: ignore
def check_ip_address_availability(
self,
resource_group_name, # type: str
virtual_network_name, # type: str
ip_address=None, # type: Optional[str]
**kwargs # type: Any
):
# type: (...) -> "_models.IPAddressAvailabilityResult"
"""Checks whether a private IP address is available for use.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_name: The name of the virtual network.
:type virtual_network_name: str
:param ip_address: The private IP address to be verified.
:type ip_address: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: IPAddressAvailabilityResult, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2016_09_01.models.IPAddressAvailabilityResult
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.IPAddressAvailabilityResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2016-09-01"
accept = "application/json, text/json"
# Construct URL
url = self.check_ip_address_availability.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkName': self._serialize.url("virtual_network_name", virtual_network_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
if ip_address is not None:
query_parameters['ipAddress'] = self._serialize.query("ip_address", ip_address, 'str')
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('IPAddressAvailabilityResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
check_ip_address_availability.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}/CheckIPAddressAvailability'} # type: ignore
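In practice this operations class is not instantiated directly; it is reached through the generated management client as its virtual_networks attribute. A minimal usage sketch, assuming a recent track-2 azure-mgmt-network release plus azure-identity, with placeholder subscription and resource-group values:

from azure.identity import DefaultAzureCredential
from azure.mgmt.network import NetworkManagementClient

network_client = NetworkManagementClient(DefaultAzureCredential(), "<subscription-id>")

# begin_create_or_update returns an LROPoller; result() blocks until the
# long-running operation completes and yields the deserialized VirtualNetwork.
poller = network_client.virtual_networks.begin_create_or_update(
    "<resource-group>",
    "example-vnet",
    {
        "location": "eastus",
        "address_space": {"address_prefixes": ["10.0.0.0/16"]},
    },
)
vnet = poller.result()
print(vnet.name, vnet.provisioning_state)

# Deletion follows the same poller pattern.
network_client.virtual_networks.begin_delete("<resource-group>", "example-vnet").result()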
| 48.620253
| 230
| 0.662774
|
4f58841e546993f965038dec2306af4284050f45
| 901
|
py
|
Python
|
examples/centerize.py
|
penguinflys/imgviz
|
3deadced1fcce8ca51716c705d07a058b1839514
|
[
"MIT"
] | 171
|
2018-12-28T23:40:01.000Z
|
2022-03-29T14:55:27.000Z
|
examples/centerize.py
|
penguinflys/imgviz
|
3deadced1fcce8ca51716c705d07a058b1839514
|
[
"MIT"
] | 16
|
2018-12-29T16:21:15.000Z
|
2022-03-09T15:36:06.000Z
|
examples/centerize.py
|
penguinflys/imgviz
|
3deadced1fcce8ca51716c705d07a058b1839514
|
[
"MIT"
] | 23
|
2018-12-29T13:11:18.000Z
|
2022-02-06T15:18:42.000Z
|
#!/usr/bin/env python
import matplotlib.pyplot as plt
import imgviz
def centerize():
data = imgviz.data.arc2017()
rgb = data["rgb"]
H, W = rgb.shape[:2]
centerized1 = imgviz.centerize(rgb, shape=(H, H))
rgb_T = rgb.transpose(1, 0, 2)
centerized2 = imgviz.centerize(rgb_T, shape=(H, H))
# -------------------------------------------------------------------------
plt.figure(dpi=200)
plt.subplot(131)
plt.title("original")
plt.axis("off")
plt.imshow(rgb)
plt.subplot(132)
plt.title("centerized1:\n{}".format(centerized1.shape))
plt.imshow(centerized1)
plt.axis("off")
plt.subplot(133)
plt.title("centerized2:\n{}".format(centerized2.shape))
plt.imshow(centerized2)
plt.axis("off")
return imgviz.io.pyplot_to_numpy()
if __name__ == "__main__":
from base import run_example
run_example(centerize)
| 20.022222
| 79
| 0.588235
|
0bc6164cf6aa4cdcc0bdd2265556a07b34572ac5
| 9,195
|
py
|
Python
|
userbot/__init__.py
|
Rizkipratama183/OpenUserBot
|
896c19686dd8ced2ec2e0faaad4d5dd41a53d707
|
[
"Naumen",
"Condor-1.1",
"MS-PL"
] | 1
|
2020-05-18T00:17:28.000Z
|
2020-05-18T00:17:28.000Z
|
userbot/__init__.py
|
Rizkipratama183/OpenUserBot
|
896c19686dd8ced2ec2e0faaad4d5dd41a53d707
|
[
"Naumen",
"Condor-1.1",
"MS-PL"
] | 2
|
2020-05-19T13:01:21.000Z
|
2020-05-19T20:46:27.000Z
|
userbot/__init__.py
|
Rizkipratama183/OpenUserBot
|
896c19686dd8ced2ec2e0faaad4d5dd41a53d707
|
[
"Naumen",
"Condor-1.1",
"MS-PL"
] | null | null | null |
# Copyright (C) 2019 The Raphielscape Company LLC.
#
# Licensed under the Raphielscape Public License, Version 1.c (the "License");
# you may not use this file except in compliance with the License.
#
# thanks to penn5 for bug fixing
""" Userbot initialization. """
import os
from sys import version_info
from logging import basicConfig, getLogger, INFO, DEBUG
from distutils.util import strtobool as sb
from pymongo import MongoClient
from redis import StrictRedis
from pylast import LastFMNetwork, md5
from pySmartDL import SmartDL
from dotenv import load_dotenv
from requests import get
from telethon import TelegramClient
from telethon.sessions import StringSession
load_dotenv("config.env")
# Bot Logs setup:
CONSOLE_LOGGER_VERBOSE = sb(os.environ.get("CONSOLE_LOGGER_VERBOSE", "False"))
if CONSOLE_LOGGER_VERBOSE:
basicConfig(
format="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
level=DEBUG,
)
else:
basicConfig(format="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
level=INFO)
LOGS = getLogger(__name__)
if version_info[:2] < (3, 8):
    LOGS.info("You MUST have a python version of at least 3.8. "
              "Multiple features depend on this. Bot quitting.")
quit(1)
# Check if the config was edited by using the already used variable.
# Basically, it's the 'virginity check' for the config file ;)
CONFIG_CHECK = os.environ.get(
"", None)
if CONFIG_CHECK:
LOGS.info(
"Please remove the line mentioned in the first hashtag from the config.env file"
)
quit(1)
# Quotes API Token
QUOTES_API_TOKEN = os.environ.get("QOUTES_API_TOKEN", "21958215-520f-4460-9b05-5751920f67a5")
# Telegram App KEY and HASH
API_KEY = os.environ.get("API_KEY", "1287613")
API_HASH = os.environ.get("API_HASH", "b31151a1dd663e9538b79a5afdf5da07")
# Photo Chat - Get this value from http://antiddos.systems
API_TOKEN = os.environ.get("API_TOKEN", "21958215-520f-4460-9b05-5751920f67a5")
API_URL = os.environ.get("API_URL", "http://antiddos.systems")
# Userbot Session String
STRING_SESSION = os.environ.get("STRING_SESSION", "1BVtsOHgBu24AAhATiCBpTWK9hgPK11Mu4Hbdrdd66SPrp3R50yebePiUc0f246sMrUJg-q8fP_I7sGFGC85ShJgU0ojhjb6-0WSFMP84hpF6iSSMZx5axU9DvOMjtyz6CT4JyOAbfZ-nLRxJOg57EVXZazhvt7L59CZZU4hqLThQvNK0XnrAhcEZUgdtsg7Iph99-UAhdy5p98Cm4f8P94e6GNXCbKJ9wefEodrXzza76PwRu3fbB0hm_1fk-P85YKXFXOhNPiPvGFKQsq9a7tauOu5YulXHnxRi2k9vqinsXQCcs0vomLRxma3aZk6cHyBcxinV0odMtVRgjbjGVtf7ZkHdXDk=")
# Logging channel/group ID configuration.
BOTLOG_CHATID = int(os.environ.get("BOTLOG_CHATID", "-492505987"))
# Userbot logging feature switch.
BOTLOG = sb(os.environ.get("BOTLOG", "True"))
LOGSPAMMER = sb(os.environ.get("LOGSPAMMER", "False"))
# Bleep Blop, this is a bot ;)
PM_AUTO_BAN = sb(os.environ.get("PM_AUTO_BAN", "False"))
# Heroku Credentials for updater.
HEROKU_MEMEZ = sb(os.environ.get("HEROKU_MEMEZ", "False"))
HEROKU_APP_NAME = os.environ.get("HEROKU_APP_NAME", None)
HEROKU_API_KEY = os.environ.get("HEROKU_API_KEY", "7ca04981-b77a-407f-a580-627af0282177")
# Github Credentials for updater and Gitupload.
GIT_REPO_NAME = os.environ.get("GIT_REPO_NAME", "OpenUserBot")
GITHUB_ACCESS_TOKEN = os.environ.get("GITHUB_ACCESS_TOKEN", "ee3478dffc9ea26e91db89799962ef6ed4de1eeb")
# Custom (forked) repo URL for updater.
UPSTREAM_REPO_URL = os.environ.get("UPSTREAM_REPO_URL", "https://github.com/Rizkipratama183/OpenUserBot.git")
UPSTREAM_REPO_BRANCH = os.environ.get("UPSTREAM_REPO_BRANCH", "https://github.com/mkaraniya/OpenUserBot.git")
# Console verbose logging
CONSOLE_LOGGER_VERBOSE = sb(os.environ.get("CONSOLE_LOGGER_VERBOSE", "False"))
# SQL Database URI
DB_URI = os.environ.get("DATABASE_URL", None)
# For MONGO based DataBase
MONGO_URI = os.environ.get("MONGO_URI", None)
# OCR API key
OCR_SPACE_API_KEY = os.environ.get("OCR_SPACE_API_KEY", "69a15874ea88957")
# remove.bg API key
REM_BG_API_KEY = os.environ.get("REM_BG_API_KEY", None)
# Chrome Driver and Headless Google Chrome Binaries
CHROME_DRIVER = os.environ.get("CHROME_DRIVER", None)
GOOGLE_CHROME_BIN = os.environ.get("GOOGLE_CHROME_BIN", None)
# OpenWeatherMap API Key
OPEN_WEATHER_MAP_APPID = os.environ.get("OPEN_WEATHER_MAP_APPID", "9857d9956ac29abdd218b58359655143")
WEATHER_DEFCITY = os.environ.get("WEATHER_DEFCITY", "Indonesia/Jakarta")
# Lydia API
LYDIA_API_KEY = os.environ.get("LYDIA_API_KEY", None)
# set blacklist_chats where you do not want userbot's features
UB_BLACK_LIST_CHAT = os.environ.get("UB_BLACK_LIST_CHAT", "")
# Telegraph
TELEGRAPH_SHORT_NAME = os.environ.get("TELEGRAPH_SHORT_NAME", "♤■Clown_Cyber■♧")
# Anti Spambot Config
ANTI_SPAMBOT = sb(os.environ.get("ANTI_SPAMBOT", "True"))
ANTI_SPAMBOT_SHOUT = sb(os.environ.get("ANTI_SPAMBOT_SHOUT", "True"))
# Youtube API key
YOUTUBE_API_KEY = os.environ.get("YOUTUBE_API_KEY", None)
# Default .alive name
ALIVE_NAME = os.environ.get("ALIVE_NAME", "Clown Cyber")
# Time & Date - Country and Time Zone
COUNTRY = str(os.environ.get("COUNTRY", "Indonesia"))
TZ_NUMBER = int(os.environ.get("TZ_NUMBER", 1))
TERM_ALIAS = os.environ.get("TERM_ALIAS", "OUB")
# Clean Welcome
CLEAN_WELCOME = sb(os.environ.get("CLEAN_WELCOME", "True"))
# Last.fm Module
BIO_PREFIX = os.environ.get("BIO_PREFIX", None)
DEFAULT_BIO = os.environ.get("DEFAULT_BIO", None)
LASTFM_API = os.environ.get("LASTFM_API", None)
LASTFM_SECRET = os.environ.get("LASTFM_SECRET", None)
LASTFM_USERNAME = os.environ.get("LASTFM_USERNAME", None)
LASTFM_PASSWORD_PLAIN = os.environ.get("LASTFM_PASSWORD", None)
# Hash the Last.fm password only when one was provided; md5(None) would raise.
LASTFM_PASS = md5(LASTFM_PASSWORD_PLAIN) if LASTFM_PASSWORD_PLAIN else None
if LASTFM_API and LASTFM_SECRET and LASTFM_USERNAME and LASTFM_PASS:
lastfm = LastFMNetwork(api_key=LASTFM_API,
api_secret=LASTFM_SECRET,
username=LASTFM_USERNAME,
password_hash=LASTFM_PASS)
else:
lastfm = None
# Google Drive Module
G_DRIVE_CLIENT_ID = os.environ.get("G_DRIVE_CLIENT_ID", None)
G_DRIVE_CLIENT_SECRET = os.environ.get("G_DRIVE_CLIENT_SECRET", None)
G_DRIVE_AUTH_TOKEN_DATA = os.environ.get("G_DRIVE_AUTH_TOKEN_DATA", None)
GDRIVE_FOLDER_ID = os.environ.get("GDRIVE_FOLDER_ID", None)
TEMP_DOWNLOAD_DIRECTORY = os.environ.get("TMP_DOWNLOAD_DIRECTORY",
"./downloads")
# Genius lyrics token - get this value from https://genius.com/developers
# (GENIUS and GENIUS_API_TOKEN are expected to carry the same value).
GENIUS_API_TOKEN = os.environ.get("GENIUS", None)
GENIUS = os.environ.get("GENIUS_API_TOKEN", "LV3R5X9024-uD3UGzw_7Z090QUx43EPa_dEUpUj26F5oEw7598stRAD6GvSO2z2U")
# Init Mongo
MONGOCLIENT = MongoClient(MONGO_URI, 27017, serverSelectionTimeoutMS=1)
MONGO = MONGOCLIENT.userbot
def is_mongo_alive():
try:
MONGOCLIENT.server_info()
except BaseException:
return False
return True
# Init Redis
# Redis will be hosted inside the docker container that hosts the bot
# We need redis for just caching, so we just leave it to non-persistent
REDIS = StrictRedis(host='localhost', port=6379, db=0)
def is_redis_alive():
try:
REDIS.ping()
return True
except BaseException:
return False
# Setting Up CloudMail.ru and MEGA.nz extractor binaries,
# and giving them correct perms to work properly.
if not os.path.exists('bin'):
os.mkdir('bin')
binaries = {
"https://raw.githubusercontent.com/adekmaulana/megadown/master/megadown":
"bin/megadown",
"https://raw.githubusercontent.com/yshalsager/cmrudl.py/master/cmrudl.py":
"bin/cmrudl"
}
for binary, path in binaries.items():
downloader = SmartDL(binary, path, progress_bar=False)
downloader.start()
os.chmod(path, 0o755)
# 'bot' variable
if STRING_SESSION:
# pylint: disable=invalid-name
bot = TelegramClient(StringSession(STRING_SESSION), API_KEY, API_HASH)
else:
# pylint: disable=invalid-name
bot = TelegramClient("userbot", API_KEY, API_HASH)
async def check_botlog_chatid():
if not BOTLOG_CHATID and LOGSPAMMER:
LOGS.info(
"You must set up the BOTLOG_CHATID variable in the config.env or environment variables, for the private error log storage to work."
)
quit(1)
elif not BOTLOG_CHATID and BOTLOG:
LOGS.info(
"You must set up the BOTLOG_CHATID variable in the config.env or environment variables, for the userbot logging feature to work."
)
quit(1)
elif not BOTLOG or not LOGSPAMMER:
return
entity = await bot.get_entity(BOTLOG_CHATID)
if entity.default_banned_rights.send_messages:
LOGS.info(
"Your account doesn't have rights to send messages to BOTLOG_CHATID "
"group. Check if you typed the Chat ID correctly.")
quit(1)
with bot:
try:
bot.loop.run_until_complete(check_botlog_chatid())
    except BaseException:
LOGS.info(
"BOTLOG_CHATID environment variable isn't a "
"valid entity. Check your environment variables/config.env file.")
quit(1)
# Global Variables
COUNT_MSG = 0
USERS = {}
COUNT_PM = {}
LASTMSG = {}
ENABLE_KILLME = True
CMD_HELP = {}
ISAFK = False
AFKREASON = None
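A hypothetical plugin module built on the objects initialized above would import the shared bot client and register handlers on it; the real project wraps this in its own helpers, so the sketch below (module name and command are made up) is only an illustration:

from telethon import events

from userbot import CMD_HELP, bot


@bot.on(events.NewMessage(outgoing=True, pattern=r"\.ping$"))
async def ping(event):
    # Reply to our own ".ping" command as a simple liveness check.
    await event.edit("pong")


CMD_HELP.update({"ping": ".ping\nUsage: simple liveness check."})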
| 34.567669
| 406
| 0.736378
|
0a7922b9cf2a8d5632f3e4f6933a9a68f500bdb0
| 3,907
|
py
|
Python
|
dataset/datasets.py
|
ruiming46zrm/CurricularFace
|
d853f3d28659f0929469029ec80e29e91e7b24c1
|
[
"MIT"
] | 433
|
2020-04-02T04:24:50.000Z
|
2022-03-21T12:57:53.000Z
|
dataset/datasets.py
|
clscy/CurricularFace
|
d853f3d28659f0929469029ec80e29e91e7b24c1
|
[
"MIT"
] | 35
|
2020-04-09T02:13:52.000Z
|
2022-03-07T07:48:10.000Z
|
dataset/datasets.py
|
clscy/CurricularFace
|
d853f3d28659f0929469029ec80e29e91e7b24c1
|
[
"MIT"
] | 72
|
2020-04-02T21:57:37.000Z
|
2022-01-10T02:50:33.000Z
|
import numpy as np
import torch
from PIL import Image
from torch.utils.data import Dataset
import os
from collections import defaultdict
class ImageDataset(Dataset):
def __init__(self, root_dir, transform):
super(ImageDataset, self).__init__()
self.transform = transform
self.root_dir = root_dir
classes, class_to_idx = self._find_classes(self.root_dir)
samples, label_to_indexes = self._make_dataset(self.root_dir, class_to_idx)
print('samples num', len(samples))
self.samples = samples
self.class_to_idx = class_to_idx
self.label_to_indexes = label_to_indexes
self.classes = sorted(self.label_to_indexes.keys())
print('class num', len(self.classes))
def _find_classes(self, dir):
classes = [d.name for d in os.scandir(dir) if d.is_dir()]
classes.sort()
class_to_idx = {classes[i]: i for i in range(len(classes))}
return classes, class_to_idx
def _make_dataset(self, root_dir, class_to_idx):
root_dir = os.path.expanduser(root_dir)
images = []
label2index = defaultdict(list)
image_index = 0
for target in sorted(class_to_idx.keys()):
d = os.path.join(root_dir, target)
if not os.path.isdir(d):
continue
for root, _, fnames in sorted(os.walk(d)):
for fname in sorted(fnames):
path = os.path.join(root, fname)
item = (path, class_to_idx[target])
images.append(item)
label2index[class_to_idx[target]].append(image_index)
image_index += 1
return images, label2index
def __getitem__(self, index):
path, target = self.samples[index]
sample = Image.open(path)
if self.transform is not None:
sample = self.transform(sample)
return sample, target
def __len__(self):
return len(self.samples)
def read_samples_from_record(root_dir, record_dir, Train):
samples = []
classes = set()
names = []
label2index = defaultdict(list)
with open(record_dir, "r") as f:
for index, line in enumerate(f):
line = line.split()
if Train and len(line) < 2:
print('Error, Label is missing')
exit()
elif len(line) == 1:
image_dir = line[0]
label = 0
else:
image_dir, label = line[0], line[1]
label = int(label)
names.append(image_dir)
image_dir = os.path.join(root_dir, image_dir)
samples.append((image_dir, label))
classes.add(label)
label2index[label].append(index)
return samples, classes, names, label2index
class FaceDataset(Dataset):
def __init__(self, root_dir, record_dir, transform, Train=True):
super(FaceDataset, self).__init__()
self.transform = transform
self.root_dir = root_dir
self.train = Train
self.imgs, self.classes, self.names, self.label_to_indexes = read_samples_from_record(root_dir, record_dir, Train=Train)
        print("Number of Samples: {} Number of Classes: {}".format(len(self.imgs), len(self.classes)))
def __getitem__(self, index):
path, target = self.imgs[index]
sample = Image.open(path)
sample = sample.convert("RGB")
if self.transform is not None:
sample = self.transform(sample)
if self.train:
return sample, target
else:
return sample, target, self.names[index]
def __len__(self):
return len(self.imgs)
def get_sample_num_of_each_class(self):
sample_num = []
for label in self.classes:
sample_num.append(len(self.label_to_indexes[label]))
return sample_num
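A usage sketch for ImageDataset with a standard PyTorch loader, assuming torchvision is available and that ./data/train holds one sub-directory per identity (both the path and the image size are placeholders):

from torch.utils.data import DataLoader
from torchvision import transforms

transform = transforms.Compose([
    transforms.Resize((112, 112)),
    transforms.ToTensor(),
    transforms.Normalize(mean=[0.5, 0.5, 0.5], std=[0.5, 0.5, 0.5]),
])

dataset = ImageDataset(root_dir="./data/train", transform=transform)
loader = DataLoader(dataset, batch_size=64, shuffle=True, num_workers=4)

for images, labels in loader:
    # images: float tensor of shape (batch, C, 112, 112); labels: int64 class indices
    break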
| 35.198198
| 128
| 0.602252
|
4343906b42accb20dabd6fbbcacdfe128c509e37
| 4,472
|
py
|
Python
|
naolution/utils/cnn.py
|
kostelansky17/NAOlution
|
60101858c233e4f0e83d38c4f8733c71187a1639
|
[
"MIT"
] | null | null | null |
naolution/utils/cnn.py
|
kostelansky17/NAOlution
|
60101858c233e4f0e83d38c4f8733c71187a1639
|
[
"MIT"
] | null | null | null |
naolution/utils/cnn.py
|
kostelansky17/NAOlution
|
60101858c233e4f0e83d38c4f8733c71187a1639
|
[
"MIT"
] | null | null | null |
import numpy as np
from keras import backend as K
from keras import layers
from keras.models import Sequential
from keras.preprocessing import image as keras_image
from keras.activations import relu, tanh
from keras.initializers import random_normal
from keras.layers.core import Dense, Flatten, Dropout
from keras.layers.convolutional import Conv2D, MaxPooling2D
from sklearn.utils import shuffle
"""
Creates Convolutional Neural Network with randomly initialized weights
@return model: keras.model.Sequential
"""
def create_cnn():
#Input shape - image 128x128 pixels in grayscale
input_shape = (128,128,1)
model = Sequential()
model.add(MaxPooling2D((2, 2), input_shape = input_shape))
model.add(Conv2D(4, (4, 4), kernel_initializer= 'random_normal', bias_initializer='random_normal'))
model.add(Conv2D(4, (4, 4), kernel_initializer= 'random_normal', bias_initializer='random_normal'))
model.add(MaxPooling2D((2, 2)))
model.add(Conv2D(8, (4, 4), kernel_initializer= 'random_normal', bias_initializer='random_normal'))
model.add(Conv2D(8, (4, 4), kernel_initializer= 'random_normal', bias_initializer='random_normal'))
model.add(MaxPooling2D((2, 2)))
model.add(Flatten())
model.add(Dropout(0.25))
model.add(Dense(16, activation = relu, kernel_initializer= 'random_normal', bias_initializer='random_normal'))
model.add(Dense(16, activation = relu, kernel_initializer= 'random_normal', bias_initializer='random_normal'))
model.add(Dense(3, activation = tanh, kernel_initializer= 'random_normal', bias_initializer='random_normal'))
return model
"""
Creates list of CNNs generated by create_cnn().
@param size: int (number of CNNs)
@return cnn_list: list (list of CNNs)
"""
def create_list_cnn(number):
cnn_list = []
for i in range(number):
cnn_list.append(create_cnn())
return cnn_list
"""
Loads and preprocess image to input shape for CNN created by cnn.py
@param img_path: String (path to image)
@return img: image in desired shape
"""
def preprocess_img_from_path(img_path):
loaded_img = keras_image.load_img(img_path, color_mode = 'grayscale', target_size=(128, 128))
array_img = keras_image.img_to_array(loaded_img)
img = np.expand_dims(array_img, axis=0)
return img
"""
Creates dummmy Convolutional Neural Network with randomly initialized weights
@return model: keras.model.Sequential
"""
def create_dummy_model():
input_shape = (128,128,1)
model = Sequential()
model.add(MaxPooling2D((2, 2), input_shape = input_shape))
model.add(Conv2D(1, (2, 2), kernel_initializer= 'random_normal', bias_initializer='random_normal'))
model.add(Flatten())
model.add(Dense(2, activation = relu, kernel_initializer= 'random_normal', bias_initializer='random_normal'))
return model
"""
Crossing two Keras sequential models
@param model_A: First model
@param model_B: Second model
@return img: image in desired shape
"""
def mix_two_models(model_A, model_B):
model_C = create_cnn()
for layer_A, layer_B, layer_C in zip(model_A.layers, model_B.layers, model_C.layers):
new_layer = list()
for array_A, array_B in zip(layer_A.get_weights(), layer_B.get_weights()):
choice = np.random.randint(2, size = array_A.size).reshape(array_A.shape).astype(bool)
array_C = np.where(choice, array_A, array_B)
new_layer.append(array_C)
layer_C.set_weights(new_layer)
return model_C
"""
Functionality testing created during development
"""
if __name__ == "__main__":
individual_A = create_cnn()
individual_B = create_cnn()
individual_C = create_cnn()
for layer_A, layer_B, layer_C in zip(individual_A.layers, individual_B.layers, individual_C.layers):
print("LAYER:")
print(layer_A.get_weights())
print(type(layer_A.get_weights()))
ml = list()
for array_A, array_B in zip(layer_A.get_weights(), layer_B.get_weights()):
print("ARR_A")
print(array_A)
print(array_A.shape)
print("ARR_B")
print(array_B)
print(array_B.shape)
choice = np.random.randint(2, size = array_A.size).reshape(array_A.shape).astype(bool)
res = np.where(choice, array_A, array_B)
print("SSSS")
print(res)
print(res.shape)
ml.append(res)
layer_C.set_weights(ml)
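A usage sketch tying the helpers above together: evolve a child network from two random parents and run it on one preprocessed frame ("camera.png" is a placeholder path):

parent_a = create_cnn()
parent_b = create_cnn()
child = mix_two_models(parent_a, parent_b)      # uniform crossover of the weights

frame = preprocess_img_from_path("camera.png")  # shape (1, 128, 128, 1)
actions = child.predict(frame)                  # three tanh outputs in [-1, 1]
print(actions)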
| 32.642336
| 114
| 0.695886
|
7a567e3282ff44c354f9f0236598d852bef6000b
| 373
|
py
|
Python
|
lec2.py
|
Caleb0929/IA241
|
2fed3e8d0f12bb8180a3e53beed036949cd9eaa0
|
[
"MIT"
] | null | null | null |
lec2.py
|
Caleb0929/IA241
|
2fed3e8d0f12bb8180a3e53beed036949cd9eaa0
|
[
"MIT"
] | null | null | null |
lec2.py
|
Caleb0929/IA241
|
2fed3e8d0f12bb8180a3e53beed036949cd9eaa0
|
[
"MIT"
] | null | null | null |
"""
this is a regional comment
"""
#print(" hello world ") # this is a single line comment
#print( type("123.") )
#print("It's our second python class")
#print("Hello" + " World")
my_str = 'hello world'
print(my_str)
my_str = 'second str'
print(my_str)
my_int = 2
my_float = 2.0
print(my_int + 3)
print(my_int * 3)
print(my_int ** 3)
print(my_int + my_float)
| 12.032258
| 55
| 0.646113
|
3734437b152ba33d8b52edec0724174e259d179d
| 3,584
|
py
|
Python
|
imperative/python/megengine/tools/accuracy_shake_var_tree.py
|
Olalaye/MegEngine
|
695d24f24517536e6544b07936d189dbc031bbce
|
[
"Apache-2.0"
] | 5,168
|
2020-03-19T06:10:04.000Z
|
2022-03-31T11:11:54.000Z
|
imperative/python/megengine/tools/accuracy_shake_var_tree.py
|
Olalaye/MegEngine
|
695d24f24517536e6544b07936d189dbc031bbce
|
[
"Apache-2.0"
] | 286
|
2020-03-25T01:36:23.000Z
|
2022-03-31T10:26:33.000Z
|
imperative/python/megengine/tools/accuracy_shake_var_tree.py
|
Olalaye/MegEngine
|
695d24f24517536e6544b07936d189dbc031bbce
|
[
"Apache-2.0"
] | 515
|
2020-03-19T06:10:05.000Z
|
2022-03-30T09:15:59.000Z
|
#!/usr/bin/env python3
# -*-coding=utf-8-*-
# This tool is used to analyze the file generated by compare_binary_iodump.py.
# parse() can build a dependency tree with those varnodes
# where accuracy shake occurs and show the root varnodes.
# get_varNode()/get_dependence_list()/get_reference_list()/show_src_info()
# are some functions which are used to query dependencies between varnodes.
import argparse
import os
class varNode:
var_node_dict = {}
var_node_root_dict = {}
def __init__(self, id, dependence_list, src_info):
self.src_info = src_info
if not id in varNode.var_node_dict.keys():
self.id = id
self.dependence_list = []
self.reference_list = []
else:
self = varNode.var_node_dict[id]
if dependence_list:
self.vitrual = False
self.is_root = True
else:
self.vitrual = True
self.is_root = False
for i in dependence_list:
if not i in varNode.var_node_dict.keys():
varNode.var_node_dict[i] = varNode(i, [], "")
dv = varNode.var_node_dict[i]
self.dependence_list.append(dv)
if not dv.vitrual:
self.is_root = False
dv.reference_list.append(self)
for i in self.reference_list:
i.is_root = False
            varNode.var_node_root_dict.pop(i.id, None)
if self.is_root:
varNode.var_node_root_dict[id] = self
varNode.var_node_dict[id] = self
@staticmethod
def get_varNode(id):
return varNode.var_node_dict[id]
def get_dependence_list(self):
return self.dependence_list
def get_reference_list(self):
return self.reference_list
def show_src_info(self):
print(self.src_info)
def get_dependence(string, src_info):
start1 = "id:"
end1 = ","
e = 0
count = string.count(start1)
dependence_list = []
for x in range(0, count):
s = string.find(start1, e)
e = string.find(end1, s)
sub_str = string[s:e]
if x == 0:
var = sub_str
else:
dependence_list.append(sub_str)
varNode(var, dependence_list, src_info)
def parse(filename):
with open(filename) as f:
varNode.var_node_dict.clear()
varNode.var_node_root_dict.clear()
line = f.readline()
s = ["", "", ""]
idx = 1
while line:
if line.find("not equal: ") != -1:
s[2] = line
src_info = s[0] + "\n" + s[1] + "\n" + s[2]
get_dependence(s[0], src_info)
else:
if line.find("var={id:") != -1:
idx = idx ^ 1
s[idx] = ""
s[idx] = s[idx] + line.strip()
line = f.readline()
return varNode.var_node_root_dict
def main():
parser = argparse.ArgumentParser(
description=(
"Analyze the outputs of compare_binary_iodump.py"
"Should save the outputs of compare_binary_iodump.py"
"as a file"
),
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
)
parser.add_argument(
"filename", help="file which save the outputs of compare_binary_iodump.py"
)
args = parser.parse_args()
parse(args.filename)
print("varnode root:")
for key, value in varNode.var_node_root_dict.items():
print(key)
print("detail info:")
value.show_src_info()
if __name__ == "__main__":
main()
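Besides the CLI entry point, the tree can be inspected programmatically; a usage sketch ("dump.txt" is a placeholder for a file holding saved compare_binary_iodump.py output):

roots = parse("dump.txt")
for var_id, node in roots.items():
    node.show_src_info()                        # where the shake was first observed
    dependents = node.get_reference_list()      # varnodes that depend on this root
    print(var_id, "is referenced by", [n.id for n in dependents])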
| 27.358779
| 82
| 0.577288
|
672808a32edbecea6f4841e38b145440d7c0ecd4
| 1,197
|
py
|
Python
|
dlhub_sdk/utils/schemas.py
|
DLHub-Argonne/dlhub_sdk
|
9449f120490cba40fa43d4ccb06a0d1d7e78f1fd
|
[
"Apache-2.0"
] | 24
|
2018-11-01T12:48:21.000Z
|
2021-12-30T21:19:16.000Z
|
dlhub_sdk/utils/schemas.py
|
DLHub-Argonne/dlhub_sdk
|
9449f120490cba40fa43d4ccb06a0d1d7e78f1fd
|
[
"Apache-2.0"
] | 79
|
2018-11-27T16:41:29.000Z
|
2022-03-25T17:32:09.000Z
|
dlhub_sdk/utils/schemas.py
|
DLHub-Argonne/dlhub_toolbox
|
d8e06ba4247ebd3a955782099a4a9fa68890bea4
|
[
"Apache-2.0"
] | 4
|
2019-02-27T16:23:19.000Z
|
2020-09-19T01:25:30.000Z
|
"""Utilities for validating against DLHub schemas"""
from typing import Union
from jsonschema import Draft7Validator, RefResolver
import requests
from dlhub_sdk.models import BaseMetadataModel
_schema_repo = "https://raw.githubusercontent.com/DLHub-Argonne/dlhub_schemas/master/schemas/"
def validate_against_dlhub_schema(document: Union[dict, BaseMetadataModel], schema_name: str):
"""Validate a metadata document against one of the DLHub schemas
Note: Requires an internet connection
Args:
document: Document instance to be validated
schema_name (string): Name of schema (e.g., "dataset" for validating datasets).
For full list, see: https://github.com/DLHub-Argonne/dlhub_schemas
Raises:
        (jsonschema.exceptions.ValidationError) If the document fails to validate against the schema
"""
# Convert to dictionary, if needed
if isinstance(document, BaseMetadataModel):
document = document.to_dict()
# Make the schema validator
schema = requests.get("{}/{}.json".format(_schema_repo, schema_name)).json()
validator = Draft7Validator(schema, resolver=RefResolver(_schema_repo, schema))
# Test the document
validator.validate(document)
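A usage sketch; the metadata dictionary below is a deliberately tiny, hypothetical stand-in (a real document must contain every field required by the chosen schema), and the call needs an internet connection to fetch the schema:

document = {"datacite": {}, "dlhub": {}, "dataset": {}}  # placeholder, almost certainly incomplete
try:
    validate_against_dlhub_schema(document, "dataset")
except Exception as exc:  # jsonschema raises ValidationError for invalid documents
    print("document rejected:", exc)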
| 34.2
| 94
| 0.736842
|
85724628b3792ced75fd57e833bd34daef742c58
| 475
|
py
|
Python
|
full-stack-angular-ngrx/backend/src/core/services/pg_data_session.py
|
t4d-classes/angular_02212022
|
152dfa4b14ee84c1c34cef0b852349b250103e3b
|
[
"MIT"
] | null | null | null |
full-stack-angular-ngrx/backend/src/core/services/pg_data_session.py
|
t4d-classes/angular_02212022
|
152dfa4b14ee84c1c34cef0b852349b250103e3b
|
[
"MIT"
] | null | null | null |
full-stack-angular-ngrx/backend/src/core/services/pg_data_session.py
|
t4d-classes/angular_02212022
|
152dfa4b14ee84c1c34cef0b852349b250103e3b
|
[
"MIT"
] | null | null | null |
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from src.core.interfaces.data_session import DataSessionInterface
class PgDataSession(DataSessionInterface):
def __init__(self, url: str, name: str, user: str,
password: str):
engine = create_engine(
f'postgresql://{user}:{password}@{url}/{name}')
self.Session = sessionmaker(bind=engine)
def get_session(self):
return self.Session
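A usage sketch with placeholder connection details, written in SQLAlchemy 1.4+ style:

from sqlalchemy import text

data_session = PgDataSession(url="localhost:5432", name="appdb",
                             user="appuser", password="secret")
Session = data_session.get_session()   # a sessionmaker bound to the engine

with Session() as session:
    session.execute(text("SELECT 1"))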
| 27.941176
| 65
| 0.690526
|
0dd8978b9c29cc5df77b21474df7b16fd0eca5de
| 1,686
|
py
|
Python
|
eng/utils.py
|
Dalloriam/popeui
|
f3477cd546e885bc53e755b3eb1452ce43ef5697
|
[
"MIT"
] | 30
|
2016-08-25T14:47:49.000Z
|
2017-12-20T23:01:03.000Z
|
eng/utils.py
|
dalloriam/engel
|
f3477cd546e885bc53e755b3eb1452ce43ef5697
|
[
"MIT"
] | 25
|
2016-07-18T01:57:07.000Z
|
2016-08-24T18:33:54.000Z
|
eng/utils.py
|
dalloriam/engel
|
f3477cd546e885bc53e755b3eb1452ce43ef5697
|
[
"MIT"
] | 5
|
2016-08-26T12:54:42.000Z
|
2017-09-17T00:08:26.000Z
|
import os
from jinja2 import Template
from eng.logging import error, success
def create_folder(folder_path):
if os.path.isdir(folder_path):
error("Directory {dirName} already exists.".format(dirName=os.path.abspath(folder_path)))
os.mkdir(folder_path)
success(os.path.abspath(folder_path) + "/")
def write_file(file_path, content):
    if os.path.isfile(file_path):
        error("File {filePath} already exists.".format(filePath=os.path.abspath(file_path)))
with open(file_path, "a") as outfile:
outfile.write(content)
success(os.path.abspath(file_path))
def read_template(template):
template_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), "templates")
filename = template + ".template"
template_file = os.path.join(template_dir, filename)
if not os.path.isfile(template_file):
error("Unknown template file ({tmpl}).".format(tmpl=filename))
data = None
    with open(template_file, "r") as infile:
data = infile.read()
return Template(data)
def render_app(app_name):
template = read_template("app")
ccName = app_name.replace('_', ' ').title().replace(' ', '')
return template.render(appCamelCase=ccName)
def render_view(view_name):
template = read_template("view")
view_title = view_name.replace('_', ' ').title().replace(' ', '')
view_camel = view_title + 'View'
return template.render(viewCamelCase=view_camel, viewTitle=view_title)
def render_service(service_name):
template = read_template("service")
svc_camel = service_name.replace('_', ' ').title().replace(' ', '') + 'Service'
return template.render(serviceCamelCase=svc_camel)
| 30.654545
| 97
| 0.69395
|
cc71595b8bbf92207b50b99a748848dac230bf4e
| 500
|
py
|
Python
|
plotly/validators/histogram/marker/colorbar/_tick0.py
|
faezs/plotly.py
|
6009b5b9c746e5d2a2849ad255a4eb234b551ed7
|
[
"MIT"
] | 2
|
2020-03-24T11:41:14.000Z
|
2021-01-14T07:59:43.000Z
|
plotly/validators/histogram/marker/colorbar/_tick0.py
|
faezs/plotly.py
|
6009b5b9c746e5d2a2849ad255a4eb234b551ed7
|
[
"MIT"
] | null | null | null |
plotly/validators/histogram/marker/colorbar/_tick0.py
|
faezs/plotly.py
|
6009b5b9c746e5d2a2849ad255a4eb234b551ed7
|
[
"MIT"
] | 4
|
2019-06-03T14:49:12.000Z
|
2022-01-06T01:05:12.000Z
|
import _plotly_utils.basevalidators
class Tick0Validator(_plotly_utils.basevalidators.AnyValidator):
def __init__(
self,
plotly_name='tick0',
parent_name='histogram.marker.colorbar',
**kwargs
):
super(Tick0Validator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type='colorbars',
implied_edits={'tickmode': 'linear'},
role='style',
**kwargs
)
| 25
| 64
| 0.594
|
dc6d551743c6ad44bc972f07050291cf490441bb
| 374
|
py
|
Python
|
Catkin_PKG_Car/build/race_robot/catkin_generated/pkg.installspace.context.pc.py
|
jessecha/OPCAS
|
2b51543b4ad1ee37dba2e45a0c7d0b872309d418
|
[
"MIT"
] | 1
|
2021-02-28T05:58:50.000Z
|
2021-02-28T05:58:50.000Z
|
Catkin_PKG_Car/build/race_robot/catkin_generated/pkg.installspace.context.pc.py
|
jessecha/OPCAS
|
2b51543b4ad1ee37dba2e45a0c7d0b872309d418
|
[
"MIT"
] | null | null | null |
Catkin_PKG_Car/build/race_robot/catkin_generated/pkg.installspace.context.pc.py
|
jessecha/OPCAS
|
2b51543b4ad1ee37dba2e45a0c7d0b872309d418
|
[
"MIT"
] | null | null | null |
# generated from catkin/cmake/template/pkg.context.pc.in
CATKIN_PACKAGE_PREFIX = ""
PROJECT_PKG_CONFIG_INCLUDE_DIRS = "".split(';') if "" != "" else []
PROJECT_CATKIN_DEPENDS = "".replace(';', ' ')
PKG_CONFIG_LIBRARIES_WITH_PREFIX = "".split(';') if "" != "" else []
PROJECT_NAME = "race_robot"
PROJECT_SPACE_DIR = "/home/nvidia/catkin_ws/install"
PROJECT_VERSION = "0.0.0"
| 41.555556
| 68
| 0.705882
|
3fd63e1d10600a20b76c274d31c46bbb2f7fff83
| 631
|
py
|
Python
|
src/global_get_latent.py
|
P2Oileen/oh-my-face
|
b73cb8ea713205bbf2bc1408145fa668c715359b
|
[
"MIT"
] | 45
|
2021-12-20T07:49:17.000Z
|
2022-03-18T17:08:30.000Z
|
src/global_get_latent.py
|
P2Oileen/oh-my-face
|
b73cb8ea713205bbf2bc1408145fa668c715359b
|
[
"MIT"
] | null | null | null |
src/global_get_latent.py
|
P2Oileen/oh-my-face
|
b73cb8ea713205bbf2bc1408145fa668c715359b
|
[
"MIT"
] | null | null | null |
from cog_predict import get_latent_code
import cv2
import argparse
import torch
parser = argparse.ArgumentParser(description='Process Options.')
parser.add_argument('--input_dir', default='input.jpg', type=str)
parser.add_argument('--data_type', default='face', type=str) #[face, cat]
parser.add_argument('--weight_dir', default='./weights', type=str)
args = parser.parse_args()
img = cv2.imread(args.input_dir)
latent,img = get_latent_code(img, args.data_type, args.weight_dir)
latent = latent.unsqueeze(0)
print(latent.shape)
print("print aligned image:",cv2.imwrite("input_aligned.jpg",img))
torch.save(latent,"tmp_latent.pt")
| 37.117647
| 73
| 0.773376
|
613a67a11ccf95894141c3d36685b8bd82d00fd0
| 58
|
py
|
Python
|
flavio/physics/edms/__init__.py
|
jasonaebischerGIT/flavio
|
b8d833c8380c619112ed75175cb7db52b788b1cd
|
[
"MIT"
] | null | null | null |
flavio/physics/edms/__init__.py
|
jasonaebischerGIT/flavio
|
b8d833c8380c619112ed75175cb7db52b788b1cd
|
[
"MIT"
] | null | null | null |
flavio/physics/edms/__init__.py
|
jasonaebischerGIT/flavio
|
b8d833c8380c619112ed75175cb7db52b788b1cd
|
[
"MIT"
] | null | null | null |
r"""Electric dipole moments."""
from . import neutronedm
| 14.5
| 31
| 0.724138
|
21ab7aff8e27cf2dfd4fd785c56d30b37a749332
| 516
|
py
|
Python
|
configurator/missile/classes/PersistenceStrategy.py
|
ClockworkOrigins/m2etis
|
3b9c0f98c172f48889e75fe0b80a61a0e47670f5
|
[
"Apache-2.0"
] | 2
|
2016-01-24T22:08:27.000Z
|
2017-01-01T13:06:44.000Z
|
configurator/missile/classes/PersistenceStrategy.py
|
ClockworkOrigins/m2etis
|
3b9c0f98c172f48889e75fe0b80a61a0e47670f5
|
[
"Apache-2.0"
] | null | null | null |
configurator/missile/classes/PersistenceStrategy.py
|
ClockworkOrigins/m2etis
|
3b9c0f98c172f48889e75fe0b80a61a0e47670f5
|
[
"Apache-2.0"
] | null | null | null |
import copy
import logging
from strategy import Strategy
logging.basicConfig(level=logging.INFO)
LOG = logging.getLogger(__name__)
class PersistenceStrategy(Strategy):
def __init__(self, strategy):
self.__dict__ = copy.deepcopy(strategy.__dict__)
def compatible(self, event):
if event is None:
return True
if event.dimensions["persistency"]["enabled"] == self.classification["description"]["enabled"]:
return True
else:
return False
| 23.454545
| 103
| 0.670543
|
9d67d91f6844786c8bf5b84f3c6a90ee733b9574
| 210
|
py
|
Python
|
chartpy/__init__.py
|
Joukahainen/chartpy
|
410f9e4553cb07be7d11823cad404f10da079ada
|
[
"Apache-2.0"
] | 519
|
2016-08-17T10:38:58.000Z
|
2022-03-30T19:30:15.000Z
|
chartpy/__init__.py
|
distagon/chartpy
|
39282158cdb6bbddba1ea1d5faa9ff182c3ceb39
|
[
"Apache-2.0"
] | 5
|
2016-08-21T22:16:17.000Z
|
2019-12-06T06:17:13.000Z
|
chartpy/__init__.py
|
distagon/chartpy
|
39282158cdb6bbddba1ea1d5faa9ff182c3ceb39
|
[
"Apache-2.0"
] | 108
|
2016-08-21T12:01:10.000Z
|
2022-03-25T06:38:58.000Z
|
__author__ = 'saeedamen'
from chartpy.chart import Chart
from chartpy.style import Style
from chartpy.canvas import Canvas
from chartpy.chartconstants import ChartConstants
from chartpy.twitter import Twitter
| 26.25
| 49
| 0.847619
|
09cc0ee75bb333141c9a82af2ffabac227365dda
| 11,780
|
py
|
Python
|
app/DataCollection/models.py
|
RohitKochhar/FTU-Django-Dashboard
|
4015969058184ed9f11c48915ed1515b4524f46a
|
[
"Apache-2.0"
] | null | null | null |
app/DataCollection/models.py
|
RohitKochhar/FTU-Django-Dashboard
|
4015969058184ed9f11c48915ed1515b4524f46a
|
[
"Apache-2.0"
] | null | null | null |
app/DataCollection/models.py
|
RohitKochhar/FTU-Django-Dashboard
|
4015969058184ed9f11c48915ed1515b4524f46a
|
[
"Apache-2.0"
] | null | null | null |
################################################################################
# File Name: models.py
#
# File Author: Rohit Singh
#
# File Description:
# This file defines models used
# in our database
#
# File History:
# 2020-11-05: Result model added by Rohit
# 2020-11-02: Created by Rohit
#
################################################################################
# Imports ---------------------------------------------------------------------
# Django imports
from django.db import models
from django.conf import settings
# Python imports
import numpy as np
import os
import json
import paho.mqtt.client as mqtt
# Class Definitions-------------------------------------------------------------
class TestConfiguration(models.Model):
# Constants
i_MinimumTemperature = 0
i_MaximumTemperature = 125
i_MinimumVoltage = -50
i_MaximumVoltage = 50
i_MinimumField = 0
i_MaximumField = 50
i_MinimumTestTime = 0
i_MaximumTestTime = 1000
# Variables
i_TestId = models.IntegerField(default=0)
s_TestDesc = models.CharField(max_length=200, default="Default Test")
i_DesiredTemp = models.IntegerField(default=0)
i_DesiredVoltage = models.IntegerField(default=0)
i_DesiredField = models.IntegerField(default=0)
i_DesiredTestTime = models.IntegerField(default=0)
i_DesiredSerialRate = models.IntegerField(default=9600)
############################################################################
# Function Name: save
# Function Description: Checks inputs before saving
# Inputs: (self) | Output: either ValueError or a saved object
# Function History:
# 2020-11-08: Created by Rohit
############################################################################
def save(self, *args, **kwargs):
# Check i_TestId uniqueness
try:
b_TestIdIsUnique = False
tc = TestConfiguration.objects.get(i_TestId = self.i_TestId)
except Exception as e:
if type(e) == self.DoesNotExist:
# No object was found with this unique
b_TestIdIsUnique = True
if b_TestIdIsUnique == False:
raise ValueError(f"Test ID: {self.i_TestId} is already in use")
if self.i_TestId < 0:
raise ValueError("Test ID must be a positive integer")
if self.i_DesiredTemp < self.i_MinimumTemperature or self.i_DesiredTemp > self.i_MaximumTemperature:
raise ValueError(f"Temperature must be between {self.i_MinimumTemperature} and {self.i_MaximumTemperature}")
if self.i_DesiredVoltage < self.i_MinimumVoltage or self.i_DesiredVoltage > self.i_MaximumVoltage:
raise ValueError(f"Voltage must be between {self.i_MinimumVoltage} and {self.i_MaximumVoltage}")
if self.i_DesiredField < self.i_MinimumField or self.i_DesiredField > self.i_MaximumField:
            raise ValueError(f"Magnetic Field must be between {self.i_MinimumField} and {self.i_MaximumField}")
if self.i_DesiredTestTime < self.i_MinimumTestTime or self.i_DesiredTestTime > self.i_MaximumTestTime:
raise ValueError(f"Test time must be between {self.i_MinimumTestTime} and {self.i_MaximumTestTime}")
# TODO: Fix this thang
if self.i_DesiredSerialRate != 9600:
raise ValueError("Serial Rate must be 9600")
super().save(*args, **kwargs)
############################################################################
# Function Name: GetJSONInstructions
# Function Description: Returns the JSON object to be sent to board
# Inputs: (self) | Output: JSON instructions to be sent
# Function History:
# 2020-11-05: Created by Rohit
############################################################################
def GetJSONInstructions(self):
test_values = {
'temperature': self.i_DesiredTemp,
'v_stress': self.i_DesiredVoltage,
'test_time': self.i_DesiredTestTime,
'magnetic_field': self.i_DesiredField,
'Test_start': 1,
'Test_stop': 0,
'serial_rate': self.i_DesiredSerialRate,
}
measurement_params = {
'temperature': {"unit": "C"},
'v_stress': {'unit': 'mV'},
'test_time': {'unit': 'seconds'},
'magnetic_field': {'unit': "mT"},
'serial_rate': {'unit':'milliseconds'}
}
instructions = {
'id': self.i_TestId,
'description': self.s_TestDesc,
'test_values': test_values,
'measurement_params': measurement_params,
}
js_instructions = json.dumps(instructions)
return js_instructions
############################################################################
# Function Name: SendJsonInstructions
# Function Description: Sends the MQTT packet to broker
# Inputs: (self) | Output: Sent object
# Function History:
# 2020-11-12: Created by Rohit
############################################################################
def SendJsonInstructions(self):
# Use Built-in method to retreive JSON instructions
s_Inst = self.GetJSONInstructions()
# Create MQTT client
client = mqtt.Client()
# Connect to the wireless broker
client.connect("35.173.190.207", 1883, 60)
# Publish the message to topic
s_Topic = "test"
client.publish(s_Topic, payload=s_Inst, qos=0, retain = False)
# Save some memory by deleting the client
del client
return
############################################################################
# Function Name: ___str___
# Function Description: Returns the objects identity string
# Inputs: (self) | Output: "ID: 0, Description: Vibe Check"
# Function History:
# 2020-11-02: Created by Rohit
############################################################################
def __str__(self):
return f"ID: {self.i_TestId}, Description: {self.s_TestDesc}"
class Experiment(models.Model):
i_ExperimentId = models.IntegerField(default=0)
s_ExperimentName = models.CharField(max_length=200, default="Default Experiment")
i_IterationNo = models.IntegerField(default=0)
d_Date = models.DateTimeField('Trial Date')
m_TestConfiguration = models.ForeignKey(TestConfiguration, on_delete=models.CASCADE)
s_ResultsFile = models.CharField(max_length=100, default="SampleTest.csv")
s_EmailAddress = models.CharField(max_length=100, default='IvanovFTU2020@gmail.com')
############################################################################
# Function Name: save
# Function Description: Checks inputs before saving
    # Inputs: (self) | Output: the saved object (raises ValueError on invalid input)
# Function History:
# 2020-11-09: Created by Rohit
############################################################################
    def save(self, *args, **kwargs):
        # Check i_ExperimentId uniqueness
        try:
            Experiment.objects.get(i_ExperimentId=self.i_ExperimentId)
            b_ExperimentIdIsUnique = False
        except Experiment.DoesNotExist:
            # No object was found with this ID, so it is unique
            b_ExperimentIdIsUnique = True
        if not b_ExperimentIdIsUnique:
            raise ValueError(f"Experiment ID: {self.i_ExperimentId} is already in use")
if self.i_ExperimentId < 0:
raise ValueError(f"Experiment ID: {self.i_ExperimentId} is invalid. (ID must be a positive integer)")
super().save(*args, **kwargs)
############################################################################
    # Function Name: __str__
    # Function Description: Returns the object's identity string
# Inputs: (self) | Output: "ID: 0, (04/19/1999) Name: Vibe Check"
# Function History:
# 2020-11-02: Created by Rohit
############################################################################
def __str__(self):
return f"ID: {self.i_ExperimentId}, ({str(self.d_Date.month)}/{str(self.d_Date.day)}/{str(self.d_Date.year)}) Name: {self.s_ExperimentName}"
class Result(models.Model):
# Pre-defined variables
i_ColumnIdx = 0
# User defined variables
s_FileName = models.CharField(max_length=200, default="SampleTest.csv")
############################################################################
# Function Name: save
# Function Description: Checks inputs before saving
    # Inputs: (self) | Output: the saved object (raises ValueError on invalid input)
# Function History:
# 2020-11-08: Created by Rohit
############################################################################
def save(self, *args, **kwargs):
        # Reset the column index whenever we save
        self.i_ColumnIdx = 0
# TODO: Add a uniqueness check here to ensure you can't create a duplicate
# model (blocked until SampleTest simulations are resolved)
super().save(*args, **kwargs)
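    # Illustrative sketch (not part of the original module) of the uniqueness
    # check the TODO above refers to, mirroring the pattern used by
    # TestConfiguration.save() and Experiment.save():
    #
    #     try:
    #         Result.objects.get(s_FileName=self.s_FileName)
    #         raise ValueError(f"Results file: {self.s_FileName} is already in use")
    #     except Result.DoesNotExist:
    #         pass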
############################################################################
# Function Name: LoadResultsFilepath
# Function Description: Returns the associated csv file's path
# Inputs: (self) | Output: './DataCollection/TestResults/SampleTest.csv'
# Function History:
# 2020-11-05: Created by Rohit
############################################################################
def LoadResultsFilepath(self):
s_FilePath = os.path.join(settings.MEDIA_ROOT, './DataCollection/TestResults/' + self.s_FileName)
if os.path.exists(s_FilePath):
return s_FilePath
else:
return -1
############################################################################
# Function Name: LoadResultsAsMatrix
# Function Description: Returns a matrix of the experiments findings
# Inputs: (self) | Output: M_data
# Function History:
# 2020-11-05: Created by Rohit
############################################################################
    def LoadResultsAsMatrix(self):
        s_csvFilePath = self.LoadResultsFilepath()
        if s_csvFilePath == -1:
            raise FileNotFoundError(f"Results file not found: {self.s_FileName}")
        M_data = np.genfromtxt(s_csvFilePath, delimiter=',', dtype=None, encoding='utf8')
        return M_data
############################################################################
# Function Name: GetColumnByIndex
# Function Description: Returns the nth column of the matrix
# The nth column is assigned by m_Result.i_ColumnIdx
    # Inputs: (self) | Output: all rows of the ith column
# Function History:
# 2020-11-05: Created by Rohit
############################################################################
def GetColumnByIndex(self):
M_data = self.LoadResultsAsMatrix()
return M_data[:,self.i_ColumnIdx]
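    # Illustrative sketch (not part of the original module): reading one column of
    # a results file, assuming "SampleTest.csv" exists under
    # MEDIA_ROOT/DataCollection/TestResults/:
    #
    #     res = Result(s_FileName="SampleTest.csv")
    #     res.i_ColumnIdx = 2            # select the third column
    #     col = res.GetColumnByIndex()   # all rows of that column as a numpy array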
############################################################################
    # Function Name: __str__
    # Function Description: Returns the object's identity string
# Inputs: (self) | Output: "SampleTest.csv"
# Function History:
# 2020-11-05: Created by Rohit
############################################################################
def __str__(self):
return f"{self.s_FileName}"
| 45.836576
| 148
| 0.521902
|
f14097d8b129653ff56adc6c32ccac5f0fccc066
| 92,214
|
py
|
Python
|
test/unit/common/test_db.py
|
CiscoSystems/swift
|
d5067017f0509129d8d3e41aeff5d7c2a634643e
|
[
"Apache-2.0"
] | null | null | null |
test/unit/common/test_db.py
|
CiscoSystems/swift
|
d5067017f0509129d8d3e41aeff5d7c2a634643e
|
[
"Apache-2.0"
] | null | null | null |
test/unit/common/test_db.py
|
CiscoSystems/swift
|
d5067017f0509129d8d3e41aeff5d7c2a634643e
|
[
"Apache-2.0"
] | null | null | null |
# Copyright (c) 2010-2012 OpenStack, LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" Tests for swift.common.db """
from __future__ import with_statement
import hashlib
import os
import unittest
from shutil import rmtree, copy
from StringIO import StringIO
from time import sleep, time
from uuid import uuid4
import simplejson
import sqlite3
import swift.common.db
from swift.common.db import AccountBroker, chexor, ContainerBroker, \
DatabaseBroker, DatabaseConnectionError, dict_factory, get_db_connection
from swift.common.utils import normalize_timestamp
from swift.common.exceptions import LockTimeout
class TestDatabaseConnectionError(unittest.TestCase):
def test_str(self):
err = \
DatabaseConnectionError(':memory:', 'No valid database connection')
self.assert_(':memory:' in str(err))
self.assert_('No valid database connection' in str(err))
err = DatabaseConnectionError(':memory:',
'No valid database connection', timeout=1357)
self.assert_(':memory:' in str(err))
self.assert_('No valid database connection' in str(err))
self.assert_('1357' in str(err))
class TestDictFactory(unittest.TestCase):
def test_normal_case(self):
conn = sqlite3.connect(':memory:')
conn.execute('CREATE TABLE test (one TEXT, two INTEGER)')
conn.execute('INSERT INTO test (one, two) VALUES ("abc", 123)')
conn.execute('INSERT INTO test (one, two) VALUES ("def", 456)')
conn.commit()
curs = conn.execute('SELECT one, two FROM test')
self.assertEquals(dict_factory(curs, curs.next()),
{'one': 'abc', 'two': 123})
self.assertEquals(dict_factory(curs, curs.next()),
{'one': 'def', 'two': 456})
class TestChexor(unittest.TestCase):
def test_normal_case(self):
self.assertEquals(chexor('d41d8cd98f00b204e9800998ecf8427e',
'new name', normalize_timestamp(1)),
'4f2ea31ac14d4273fe32ba08062b21de')
def test_invalid_old_hash(self):
self.assertRaises(TypeError, chexor, 'oldhash', 'name',
normalize_timestamp(1))
def test_no_name(self):
self.assertRaises(Exception, chexor,
'd41d8cd98f00b204e9800998ecf8427e', None, normalize_timestamp(1))
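# Editorial note (not part of the original test module): as the ContainerBroker
# chexor test further below spells out, chexor(old_hash, name, timestamp) folds
# md5('<name>-<timestamp>') into the running hash with a byte-wise XOR, which is
# what lets the insert/delete triggers keep the container hash current incrementally.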
class TestGetDBConnection(unittest.TestCase):
def test_normal_case(self):
conn = get_db_connection(':memory:')
self.assert_(hasattr(conn, 'execute'))
def test_invalid_path(self):
self.assertRaises(DatabaseConnectionError, get_db_connection,
'invalid database path / name')
class TestDatabaseBroker(unittest.TestCase):
def setUp(self):
self.testdir = os.path.join(os.path.dirname(__file__), 'db')
rmtree(self.testdir, ignore_errors=1)
os.mkdir(self.testdir)
def test_DB_PREALLOCATION_setting(self):
u = uuid4().hex
b = DatabaseBroker(u)
self.assertRaises(OSError, b._preallocate)
swift.common.db.DB_PREALLOCATION = False
b._preallocate()
def tearDown(self):
rmtree(self.testdir, ignore_errors=1)
def test_memory_db_init(self):
broker = DatabaseBroker(':memory:')
self.assertEqual(broker.db_file, ':memory:')
self.assertRaises(AttributeError, broker.initialize,
normalize_timestamp('0'))
def test_disk_db_init(self):
db_file = os.path.join(self.testdir, '1.db')
broker = DatabaseBroker(db_file)
self.assertEqual(broker.db_file, db_file)
self.assert_(broker.conn is None)
def test_initialize(self):
self.assertRaises(AttributeError,
DatabaseBroker(':memory:').initialize,
normalize_timestamp('1'))
stub_dict = {}
def stub(*args, **kwargs):
for key in stub_dict.keys():
del stub_dict[key]
stub_dict['args'] = args
for key, value in kwargs.items():
stub_dict[key] = value
broker = DatabaseBroker(':memory:')
broker._initialize = stub
broker.initialize(normalize_timestamp('1'))
self.assert_(hasattr(stub_dict['args'][0], 'execute'))
self.assertEquals(stub_dict['args'][1], '0000000001.00000')
with broker.get() as conn:
conn.execute('SELECT * FROM outgoing_sync')
conn.execute('SELECT * FROM incoming_sync')
broker = DatabaseBroker(os.path.join(self.testdir, '1.db'))
broker._initialize = stub
broker.initialize(normalize_timestamp('1'))
self.assert_(hasattr(stub_dict['args'][0], 'execute'))
self.assertEquals(stub_dict['args'][1], '0000000001.00000')
with broker.get() as conn:
conn.execute('SELECT * FROM outgoing_sync')
conn.execute('SELECT * FROM incoming_sync')
def test_delete_db(self):
def init_stub(conn, put_timestamp):
conn.execute('CREATE TABLE test (one TEXT)')
conn.execute('CREATE TABLE test_stat (id TEXT)')
conn.execute('INSERT INTO test_stat (id) VALUES (?)',
(str(uuid4),))
conn.execute('INSERT INTO test (one) VALUES ("1")')
conn.commit()
stub_called = [False]
def delete_stub(*a, **kw):
stub_called[0] = True
broker = DatabaseBroker(':memory:')
broker.db_type = 'test'
broker._initialize = init_stub
# Initializes a good broker for us
broker.initialize(normalize_timestamp('1'))
self.assert_(broker.conn is not None)
broker._delete_db = delete_stub
stub_called[0] = False
broker.delete_db('2')
self.assert_(stub_called[0])
broker = DatabaseBroker(os.path.join(self.testdir, '1.db'))
broker.db_type = 'test'
broker._initialize = init_stub
broker.initialize(normalize_timestamp('1'))
broker._delete_db = delete_stub
stub_called[0] = False
broker.delete_db('2')
self.assert_(stub_called[0])
# ensure that metadata was cleared
m2 = broker.metadata
self.assert_(not any(v[0] for v in m2.itervalues()))
self.assert_(all(v[1] == normalize_timestamp('2')
for v in m2.itervalues()))
def test_get(self):
broker = DatabaseBroker(':memory:')
got_exc = False
try:
with broker.get() as conn:
conn.execute('SELECT 1')
except Exception:
got_exc = True
broker = DatabaseBroker(os.path.join(self.testdir, '1.db'))
got_exc = False
try:
with broker.get() as conn:
conn.execute('SELECT 1')
except Exception:
got_exc = True
self.assert_(got_exc)
def stub(*args, **kwargs):
pass
broker._initialize = stub
broker.initialize(normalize_timestamp('1'))
with broker.get() as conn:
conn.execute('CREATE TABLE test (one TEXT)')
try:
with broker.get() as conn:
conn.execute('INSERT INTO test (one) VALUES ("1")')
raise Exception('test')
conn.commit()
except Exception:
pass
broker = DatabaseBroker(os.path.join(self.testdir, '1.db'))
with broker.get() as conn:
self.assertEquals(
[r[0] for r in conn.execute('SELECT * FROM test')], [])
with broker.get() as conn:
conn.execute('INSERT INTO test (one) VALUES ("1")')
conn.commit()
broker = DatabaseBroker(os.path.join(self.testdir, '1.db'))
with broker.get() as conn:
self.assertEquals(
[r[0] for r in conn.execute('SELECT * FROM test')], ['1'])
orig_renamer = swift.common.db.renamer
try:
swift.common.db.renamer = lambda a, b: b
qpath = os.path.dirname(os.path.dirname(os.path.dirname(
os.path.dirname(self.testdir))))
if qpath:
qpath += '/quarantined/tests/db'
else:
qpath = 'quarantined/tests/db'
# Test malformed database
copy(os.path.join(os.path.dirname(__file__),
'malformed_example.db'),
os.path.join(self.testdir, '1.db'))
broker = DatabaseBroker(os.path.join(self.testdir, '1.db'))
broker.db_type = 'test'
exc = None
try:
with broker.get() as conn:
conn.execute('SELECT * FROM test')
except Exception, err:
exc = err
self.assertEquals(str(exc),
'Quarantined %s to %s due to malformed database' %
(self.testdir, qpath))
# Test corrupted database
copy(os.path.join(os.path.dirname(__file__),
'corrupted_example.db'),
os.path.join(self.testdir, '1.db'))
broker = DatabaseBroker(os.path.join(self.testdir, '1.db'))
broker.db_type = 'test'
exc = None
try:
with broker.get() as conn:
conn.execute('SELECT * FROM test')
except Exception, err:
exc = err
self.assertEquals(str(exc),
'Quarantined %s to %s due to corrupted database' %
(self.testdir, qpath))
finally:
swift.common.db.renamer = orig_renamer
def test_lock(self):
broker = DatabaseBroker(os.path.join(self.testdir, '1.db'), timeout=.1)
got_exc = False
try:
with broker.lock():
pass
except Exception:
got_exc = True
self.assert_(got_exc)
def stub(*args, **kwargs):
pass
broker._initialize = stub
broker.initialize(normalize_timestamp('1'))
with broker.lock():
pass
with broker.lock():
pass
broker2 = DatabaseBroker(os.path.join(self.testdir, '1.db'), timeout=.1)
broker2._initialize = stub
with broker.lock():
got_exc = False
try:
with broker2.lock():
pass
except LockTimeout:
got_exc = True
self.assert_(got_exc)
try:
with broker.lock():
raise Exception('test')
except Exception:
pass
with broker.lock():
pass
def test_newid(self):
broker = DatabaseBroker(':memory:')
broker.db_type = 'test'
broker.db_contains_type = 'test'
uuid1 = str(uuid4())
def _initialize(conn, timestamp):
conn.execute('CREATE TABLE test (one TEXT)')
conn.execute('CREATE TABLE test_stat (id TEXT)')
conn.execute('INSERT INTO test_stat (id) VALUES (?)', (uuid1,))
conn.commit()
broker._initialize = _initialize
broker.initialize(normalize_timestamp('1'))
uuid2 = str(uuid4())
broker.newid(uuid2)
with broker.get() as conn:
uuids = [r[0] for r in conn.execute('SELECT * FROM test_stat')]
self.assertEquals(len(uuids), 1)
self.assertNotEquals(uuids[0], uuid1)
uuid1 = uuids[0]
points = [(r[0], r[1]) for r in conn.execute('SELECT sync_point, '
'remote_id FROM incoming_sync WHERE remote_id = ?', (uuid2,))]
self.assertEquals(len(points), 1)
self.assertEquals(points[0][0], -1)
self.assertEquals(points[0][1], uuid2)
conn.execute('INSERT INTO test (one) VALUES ("1")')
conn.commit()
uuid3 = str(uuid4())
broker.newid(uuid3)
with broker.get() as conn:
uuids = [r[0] for r in conn.execute('SELECT * FROM test_stat')]
self.assertEquals(len(uuids), 1)
self.assertNotEquals(uuids[0], uuid1)
uuid1 = uuids[0]
points = [(r[0], r[1]) for r in conn.execute('SELECT sync_point, '
'remote_id FROM incoming_sync WHERE remote_id = ?', (uuid3,))]
self.assertEquals(len(points), 1)
self.assertEquals(points[0][1], uuid3)
broker.newid(uuid2)
with broker.get() as conn:
uuids = [r[0] for r in conn.execute('SELECT * FROM test_stat')]
self.assertEquals(len(uuids), 1)
self.assertNotEquals(uuids[0], uuid1)
points = [(r[0], r[1]) for r in conn.execute('SELECT sync_point, '
'remote_id FROM incoming_sync WHERE remote_id = ?', (uuid2,))]
self.assertEquals(len(points), 1)
self.assertEquals(points[0][1], uuid2)
def test_get_items_since(self):
broker = DatabaseBroker(':memory:')
broker.db_type = 'test'
broker.db_contains_type = 'test'
def _initialize(conn, timestamp):
conn.execute('CREATE TABLE test (one TEXT)')
conn.execute('INSERT INTO test (one) VALUES ("1")')
conn.execute('INSERT INTO test (one) VALUES ("2")')
conn.execute('INSERT INTO test (one) VALUES ("3")')
conn.commit()
broker._initialize = _initialize
broker.initialize(normalize_timestamp('1'))
self.assertEquals(broker.get_items_since(-1, 10),
[{'one': '1'}, {'one': '2'}, {'one': '3'}])
self.assertEquals(broker.get_items_since(-1, 2),
[{'one': '1'}, {'one': '2'}])
self.assertEquals(broker.get_items_since(1, 2),
[{'one': '2'}, {'one': '3'}])
self.assertEquals(broker.get_items_since(3, 2), [])
self.assertEquals(broker.get_items_since(999, 2), [])
def test_get_sync(self):
broker = DatabaseBroker(':memory:')
broker.db_type = 'test'
broker.db_contains_type = 'test'
uuid1 = str(uuid4())
def _initialize(conn, timestamp):
conn.execute('CREATE TABLE test (one TEXT)')
conn.execute('CREATE TABLE test_stat (id TEXT)')
conn.execute('INSERT INTO test_stat (id) VALUES (?)', (uuid1,))
conn.execute('INSERT INTO test (one) VALUES ("1")')
conn.commit()
pass
broker._initialize = _initialize
broker.initialize(normalize_timestamp('1'))
uuid2 = str(uuid4())
self.assertEquals(broker.get_sync(uuid2), -1)
broker.newid(uuid2)
self.assertEquals(broker.get_sync(uuid2), 1)
uuid3 = str(uuid4())
self.assertEquals(broker.get_sync(uuid3), -1)
with broker.get() as conn:
conn.execute('INSERT INTO test (one) VALUES ("2")')
conn.commit()
broker.newid(uuid3)
self.assertEquals(broker.get_sync(uuid2), 1)
self.assertEquals(broker.get_sync(uuid3), 2)
self.assertEquals(broker.get_sync(uuid2, incoming=False), -1)
self.assertEquals(broker.get_sync(uuid3, incoming=False), -1)
broker.merge_syncs([{'sync_point': 1, 'remote_id': uuid2}],
incoming=False)
self.assertEquals(broker.get_sync(uuid2), 1)
self.assertEquals(broker.get_sync(uuid3), 2)
self.assertEquals(broker.get_sync(uuid2, incoming=False), 1)
self.assertEquals(broker.get_sync(uuid3, incoming=False), -1)
broker.merge_syncs([{'sync_point': 2, 'remote_id': uuid3}],
incoming=False)
self.assertEquals(broker.get_sync(uuid2, incoming=False), 1)
self.assertEquals(broker.get_sync(uuid3, incoming=False), 2)
def test_merge_syncs(self):
broker = DatabaseBroker(':memory:')
def stub(*args, **kwargs):
pass
broker._initialize = stub
broker.initialize(normalize_timestamp('1'))
uuid2 = str(uuid4())
broker.merge_syncs([{'sync_point': 1, 'remote_id': uuid2}])
self.assertEquals(broker.get_sync(uuid2), 1)
uuid3 = str(uuid4())
broker.merge_syncs([{'sync_point': 2, 'remote_id': uuid3}])
self.assertEquals(broker.get_sync(uuid2), 1)
self.assertEquals(broker.get_sync(uuid3), 2)
self.assertEquals(broker.get_sync(uuid2, incoming=False), -1)
self.assertEquals(broker.get_sync(uuid3, incoming=False), -1)
broker.merge_syncs([{'sync_point': 3, 'remote_id': uuid2},
{'sync_point': 4, 'remote_id': uuid3}],
incoming=False)
self.assertEquals(broker.get_sync(uuid2, incoming=False), 3)
self.assertEquals(broker.get_sync(uuid3, incoming=False), 4)
self.assertEquals(broker.get_sync(uuid2), 1)
self.assertEquals(broker.get_sync(uuid3), 2)
broker.merge_syncs([{'sync_point': 5, 'remote_id': uuid2}])
self.assertEquals(broker.get_sync(uuid2), 5)
def test_get_replication_info(self):
self.get_replication_info_tester(metadata=False)
def test_get_replication_info_with_metadata(self):
self.get_replication_info_tester(metadata=True)
def get_replication_info_tester(self, metadata=False):
broker = DatabaseBroker(':memory:', account='a')
broker.db_type = 'test'
broker.db_contains_type = 'test'
broker_creation = normalize_timestamp(1)
broker_uuid = str(uuid4())
broker_metadata = metadata and simplejson.dumps(
{'Test': ('Value', normalize_timestamp(1))}) or ''
def _initialize(conn, put_timestamp):
if put_timestamp is None:
put_timestamp = normalize_timestamp(0)
conn.executescript('''
CREATE TABLE test (
ROWID INTEGER PRIMARY KEY AUTOINCREMENT,
name TEXT UNIQUE,
created_at TEXT
);
CREATE TRIGGER test_insert AFTER INSERT ON test
BEGIN
UPDATE test_stat
SET test_count = test_count + 1,
hash = chexor(hash, new.name, new.created_at);
END;
CREATE TRIGGER test_update BEFORE UPDATE ON test
BEGIN
SELECT RAISE(FAIL,
'UPDATE not allowed; DELETE and INSERT');
END;
CREATE TRIGGER test_delete AFTER DELETE ON test
BEGIN
UPDATE test_stat
SET test_count = test_count - 1,
hash = chexor(hash, old.name, old.created_at);
END;
CREATE TABLE test_stat (
account TEXT,
created_at TEXT,
put_timestamp TEXT DEFAULT '0',
delete_timestamp TEXT DEFAULT '0',
test_count INTEGER,
hash TEXT default '00000000000000000000000000000000',
id TEXT
%s
);
INSERT INTO test_stat (test_count) VALUES (0);
''' % (metadata and ", metadata TEXT DEFAULT ''" or ""))
conn.execute('''
UPDATE test_stat
SET account = ?, created_at = ?, id = ?, put_timestamp = ?
''', (broker.account, broker_creation, broker_uuid, put_timestamp))
if metadata:
conn.execute('UPDATE test_stat SET metadata = ?',
(broker_metadata,))
conn.commit()
broker._initialize = _initialize
put_timestamp = normalize_timestamp(2)
broker.initialize(put_timestamp)
info = broker.get_replication_info()
self.assertEquals(info, {'count': 0,
'hash': '00000000000000000000000000000000',
'created_at': broker_creation, 'put_timestamp': put_timestamp,
'delete_timestamp': '0', 'max_row': -1, 'id': broker_uuid,
'metadata': broker_metadata})
insert_timestamp = normalize_timestamp(3)
with broker.get() as conn:
conn.execute('''
INSERT INTO test (name, created_at) VALUES ('test', ?)
''', (insert_timestamp,))
conn.commit()
info = broker.get_replication_info()
self.assertEquals(info, {'count': 1,
'hash': 'bdc4c93f574b0d8c2911a27ce9dd38ba',
'created_at': broker_creation, 'put_timestamp': put_timestamp,
'delete_timestamp': '0', 'max_row': 1, 'id': broker_uuid,
'metadata': broker_metadata})
with broker.get() as conn:
conn.execute('DELETE FROM test')
conn.commit()
info = broker.get_replication_info()
self.assertEquals(info, {'count': 0,
'hash': '00000000000000000000000000000000',
'created_at': broker_creation, 'put_timestamp': put_timestamp,
'delete_timestamp': '0', 'max_row': 1, 'id': broker_uuid,
'metadata': broker_metadata})
return broker
def test_metadata(self):
# Initializes a good broker for us
broker = self.get_replication_info_tester(metadata=True)
# Add our first item
first_timestamp = normalize_timestamp(1)
first_value = '1'
broker.update_metadata({'First': [first_value, first_timestamp]})
self.assert_('First' in broker.metadata)
self.assertEquals(broker.metadata['First'],
[first_value, first_timestamp])
# Add our second item
second_timestamp = normalize_timestamp(2)
second_value = '2'
broker.update_metadata({'Second': [second_value, second_timestamp]})
self.assert_('First' in broker.metadata)
self.assertEquals(broker.metadata['First'],
[first_value, first_timestamp])
self.assert_('Second' in broker.metadata)
self.assertEquals(broker.metadata['Second'],
[second_value, second_timestamp])
# Update our first item
first_timestamp = normalize_timestamp(3)
first_value = '1b'
broker.update_metadata({'First': [first_value, first_timestamp]})
self.assert_('First' in broker.metadata)
self.assertEquals(broker.metadata['First'],
[first_value, first_timestamp])
self.assert_('Second' in broker.metadata)
self.assertEquals(broker.metadata['Second'],
[second_value, second_timestamp])
# Delete our second item (by setting to empty string)
second_timestamp = normalize_timestamp(4)
second_value = ''
broker.update_metadata({'Second': [second_value, second_timestamp]})
self.assert_('First' in broker.metadata)
self.assertEquals(broker.metadata['First'],
[first_value, first_timestamp])
self.assert_('Second' in broker.metadata)
self.assertEquals(broker.metadata['Second'],
[second_value, second_timestamp])
# Reclaim at point before second item was deleted
broker.reclaim(normalize_timestamp(3))
self.assert_('First' in broker.metadata)
self.assertEquals(broker.metadata['First'],
[first_value, first_timestamp])
self.assert_('Second' in broker.metadata)
self.assertEquals(broker.metadata['Second'],
[second_value, second_timestamp])
# Reclaim at point second item was deleted
broker.reclaim(normalize_timestamp(4))
self.assert_('First' in broker.metadata)
self.assertEquals(broker.metadata['First'],
[first_value, first_timestamp])
self.assert_('Second' in broker.metadata)
self.assertEquals(broker.metadata['Second'],
[second_value, second_timestamp])
# Reclaim after point second item was deleted
broker.reclaim(normalize_timestamp(5))
self.assert_('First' in broker.metadata)
self.assertEquals(broker.metadata['First'],
[first_value, first_timestamp])
self.assert_('Second' not in broker.metadata)
class TestContainerBroker(unittest.TestCase):
""" Tests for swift.common.db.ContainerBroker """
def test_creation(self):
""" Test swift.common.db.ContainerBroker.__init__ """
broker = ContainerBroker(':memory:', account='a', container='c')
self.assertEqual(broker.db_file, ':memory:')
broker.initialize(normalize_timestamp('1'))
with broker.get() as conn:
curs = conn.cursor()
curs.execute('SELECT 1')
self.assertEqual(curs.fetchall()[0][0], 1)
def test_exception(self):
""" Test swift.common.db.ContainerBroker throwing a conn away after
unhandled exception """
first_conn = None
broker = ContainerBroker(':memory:', account='a', container='c')
broker.initialize(normalize_timestamp('1'))
with broker.get() as conn:
first_conn = conn
try:
with broker.get() as conn:
self.assertEquals(first_conn, conn)
raise Exception('OMG')
except Exception:
pass
self.assert_(broker.conn is None)
def test_empty(self):
""" Test swift.common.db.ContainerBroker.empty """
broker = ContainerBroker(':memory:', account='a', container='c')
broker.initialize(normalize_timestamp('1'))
self.assert_(broker.empty())
broker.put_object('o', normalize_timestamp(time()), 0, 'text/plain',
'd41d8cd98f00b204e9800998ecf8427e')
self.assert_(not broker.empty())
sleep(.00001)
broker.delete_object('o', normalize_timestamp(time()))
self.assert_(broker.empty())
def test_reclaim(self):
broker = ContainerBroker(':memory:', account='test_account',
container='test_container')
broker.initialize(normalize_timestamp('1'))
broker.put_object('o', normalize_timestamp(time()), 0, 'text/plain',
'd41d8cd98f00b204e9800998ecf8427e')
with broker.get() as conn:
self.assertEquals(conn.execute(
"SELECT count(*) FROM object "
"WHERE deleted = 0").fetchone()[0], 1)
self.assertEquals(conn.execute(
"SELECT count(*) FROM object "
"WHERE deleted = 1").fetchone()[0], 0)
broker.reclaim(normalize_timestamp(time() - 999), time())
with broker.get() as conn:
self.assertEquals(conn.execute(
"SELECT count(*) FROM object "
"WHERE deleted = 0").fetchone()[0], 1)
self.assertEquals(conn.execute(
"SELECT count(*) FROM object "
"WHERE deleted = 1").fetchone()[0], 0)
sleep(.00001)
broker.delete_object('o', normalize_timestamp(time()))
with broker.get() as conn:
self.assertEquals(conn.execute(
"SELECT count(*) FROM object "
"WHERE deleted = 0").fetchone()[0], 0)
self.assertEquals(conn.execute(
"SELECT count(*) FROM object "
"WHERE deleted = 1").fetchone()[0], 1)
broker.reclaim(normalize_timestamp(time() - 999), time())
with broker.get() as conn:
self.assertEquals(conn.execute(
"SELECT count(*) FROM object "
"WHERE deleted = 0").fetchone()[0], 0)
self.assertEquals(conn.execute(
"SELECT count(*) FROM object "
"WHERE deleted = 1").fetchone()[0], 1)
sleep(.00001)
broker.reclaim(normalize_timestamp(time()), time())
with broker.get() as conn:
self.assertEquals(conn.execute(
"SELECT count(*) FROM object "
"WHERE deleted = 0").fetchone()[0], 0)
self.assertEquals(conn.execute(
"SELECT count(*) FROM object "
"WHERE deleted = 1").fetchone()[0], 0)
# Test the return values of reclaim()
broker.put_object('w', normalize_timestamp(time()), 0, 'text/plain',
'd41d8cd98f00b204e9800998ecf8427e')
broker.put_object('x', normalize_timestamp(time()), 0, 'text/plain',
'd41d8cd98f00b204e9800998ecf8427e')
broker.put_object('y', normalize_timestamp(time()), 0, 'text/plain',
'd41d8cd98f00b204e9800998ecf8427e')
broker.put_object('z', normalize_timestamp(time()), 0, 'text/plain',
'd41d8cd98f00b204e9800998ecf8427e')
# Test before deletion
res = broker.reclaim(normalize_timestamp(time()), time())
broker.delete_db(normalize_timestamp(time()))
def test_delete_object(self):
""" Test swift.common.db.ContainerBroker.delete_object """
broker = ContainerBroker(':memory:', account='a', container='c')
broker.initialize(normalize_timestamp('1'))
broker.put_object('o', normalize_timestamp(time()), 0, 'text/plain',
'd41d8cd98f00b204e9800998ecf8427e')
with broker.get() as conn:
self.assertEquals(conn.execute(
"SELECT count(*) FROM object "
"WHERE deleted = 0").fetchone()[0], 1)
self.assertEquals(conn.execute(
"SELECT count(*) FROM object "
"WHERE deleted = 1").fetchone()[0], 0)
sleep(.00001)
broker.delete_object('o', normalize_timestamp(time()))
with broker.get() as conn:
self.assertEquals(conn.execute(
"SELECT count(*) FROM object "
"WHERE deleted = 0").fetchone()[0], 0)
self.assertEquals(conn.execute(
"SELECT count(*) FROM object "
"WHERE deleted = 1").fetchone()[0], 1)
def test_put_object(self):
""" Test swift.common.db.ContainerBroker.put_object """
broker = ContainerBroker(':memory:', account='a', container='c')
broker.initialize(normalize_timestamp('1'))
# Create initial object
timestamp = normalize_timestamp(time())
broker.put_object('"{<object \'&\' name>}"', timestamp, 123,
'application/x-test',
'5af83e3196bf99f440f31f2e1a6c9afe')
with broker.get() as conn:
self.assertEquals(conn.execute(
"SELECT name FROM object").fetchone()[0],
'"{<object \'&\' name>}"')
self.assertEquals(conn.execute(
"SELECT created_at FROM object").fetchone()[0], timestamp)
self.assertEquals(conn.execute(
"SELECT size FROM object").fetchone()[0], 123)
self.assertEquals(conn.execute(
"SELECT content_type FROM object").fetchone()[0],
'application/x-test')
self.assertEquals(conn.execute(
"SELECT etag FROM object").fetchone()[0],
'5af83e3196bf99f440f31f2e1a6c9afe')
self.assertEquals(conn.execute(
"SELECT deleted FROM object").fetchone()[0], 0)
# Reput same event
broker.put_object('"{<object \'&\' name>}"', timestamp, 123,
'application/x-test',
'5af83e3196bf99f440f31f2e1a6c9afe')
with broker.get() as conn:
self.assertEquals(conn.execute(
"SELECT name FROM object").fetchone()[0],
'"{<object \'&\' name>}"')
self.assertEquals(conn.execute(
"SELECT created_at FROM object").fetchone()[0], timestamp)
self.assertEquals(conn.execute(
"SELECT size FROM object").fetchone()[0], 123)
self.assertEquals(conn.execute(
"SELECT content_type FROM object").fetchone()[0],
'application/x-test')
self.assertEquals(conn.execute(
"SELECT etag FROM object").fetchone()[0],
'5af83e3196bf99f440f31f2e1a6c9afe')
self.assertEquals(conn.execute(
"SELECT deleted FROM object").fetchone()[0], 0)
# Put new event
sleep(.00001)
timestamp = normalize_timestamp(time())
broker.put_object('"{<object \'&\' name>}"', timestamp, 124,
'application/x-test',
'aa0749bacbc79ec65fe206943d8fe449')
with broker.get() as conn:
self.assertEquals(conn.execute(
"SELECT name FROM object").fetchone()[0],
'"{<object \'&\' name>}"')
self.assertEquals(conn.execute(
"SELECT created_at FROM object").fetchone()[0], timestamp)
self.assertEquals(conn.execute(
"SELECT size FROM object").fetchone()[0], 124)
self.assertEquals(conn.execute(
"SELECT content_type FROM object").fetchone()[0],
'application/x-test')
self.assertEquals(conn.execute(
"SELECT etag FROM object").fetchone()[0],
'aa0749bacbc79ec65fe206943d8fe449')
self.assertEquals(conn.execute(
"SELECT deleted FROM object").fetchone()[0], 0)
# Put old event
otimestamp = normalize_timestamp(float(timestamp) - 1)
broker.put_object('"{<object \'&\' name>}"', otimestamp, 124,
'application/x-test',
'aa0749bacbc79ec65fe206943d8fe449')
with broker.get() as conn:
self.assertEquals(conn.execute(
"SELECT name FROM object").fetchone()[0],
'"{<object \'&\' name>}"')
self.assertEquals(conn.execute(
"SELECT created_at FROM object").fetchone()[0], timestamp)
self.assertEquals(conn.execute(
"SELECT size FROM object").fetchone()[0], 124)
self.assertEquals(conn.execute(
"SELECT content_type FROM object").fetchone()[0],
'application/x-test')
self.assertEquals(conn.execute(
"SELECT etag FROM object").fetchone()[0],
'aa0749bacbc79ec65fe206943d8fe449')
self.assertEquals(conn.execute(
"SELECT deleted FROM object").fetchone()[0], 0)
# Put old delete event
dtimestamp = normalize_timestamp(float(timestamp) - 1)
broker.put_object('"{<object \'&\' name>}"', dtimestamp, 0, '', '',
deleted=1)
with broker.get() as conn:
self.assertEquals(conn.execute(
"SELECT name FROM object").fetchone()[0],
'"{<object \'&\' name>}"')
self.assertEquals(conn.execute(
"SELECT created_at FROM object").fetchone()[0], timestamp)
self.assertEquals(conn.execute(
"SELECT size FROM object").fetchone()[0], 124)
self.assertEquals(conn.execute(
"SELECT content_type FROM object").fetchone()[0],
'application/x-test')
self.assertEquals(conn.execute(
"SELECT etag FROM object").fetchone()[0],
'aa0749bacbc79ec65fe206943d8fe449')
self.assertEquals(conn.execute(
"SELECT deleted FROM object").fetchone()[0], 0)
# Put new delete event
sleep(.00001)
timestamp = normalize_timestamp(time())
broker.put_object('"{<object \'&\' name>}"', timestamp, 0, '', '',
deleted=1)
with broker.get() as conn:
self.assertEquals(conn.execute(
"SELECT name FROM object").fetchone()[0],
'"{<object \'&\' name>}"')
self.assertEquals(conn.execute(
"SELECT created_at FROM object").fetchone()[0], timestamp)
self.assertEquals(conn.execute(
"SELECT deleted FROM object").fetchone()[0], 1)
# Put new event
sleep(.00001)
timestamp = normalize_timestamp(time())
broker.put_object('"{<object \'&\' name>}"', timestamp, 123,
'application/x-test',
'5af83e3196bf99f440f31f2e1a6c9afe')
with broker.get() as conn:
self.assertEquals(conn.execute(
"SELECT name FROM object").fetchone()[0],
'"{<object \'&\' name>}"')
self.assertEquals(conn.execute(
"SELECT created_at FROM object").fetchone()[0], timestamp)
self.assertEquals(conn.execute(
"SELECT size FROM object").fetchone()[0], 123)
self.assertEquals(conn.execute(
"SELECT content_type FROM object").fetchone()[0],
'application/x-test')
self.assertEquals(conn.execute(
"SELECT etag FROM object").fetchone()[0],
'5af83e3196bf99f440f31f2e1a6c9afe')
self.assertEquals(conn.execute(
"SELECT deleted FROM object").fetchone()[0], 0)
# We'll use this later
sleep(.0001)
in_between_timestamp = normalize_timestamp(time())
# New post event
sleep(.0001)
previous_timestamp = timestamp
timestamp = normalize_timestamp(time())
with broker.get() as conn:
self.assertEquals(conn.execute(
"SELECT name FROM object").fetchone()[0],
'"{<object \'&\' name>}"')
self.assertEquals(conn.execute(
"SELECT created_at FROM object").fetchone()[0],
previous_timestamp)
self.assertEquals(conn.execute(
"SELECT size FROM object").fetchone()[0], 123)
self.assertEquals(conn.execute(
"SELECT content_type FROM object").fetchone()[0],
'application/x-test')
self.assertEquals(conn.execute(
"SELECT etag FROM object").fetchone()[0],
'5af83e3196bf99f440f31f2e1a6c9afe')
self.assertEquals(conn.execute(
"SELECT deleted FROM object").fetchone()[0], 0)
# Put event from after last put but before last post
timestamp = in_between_timestamp
broker.put_object('"{<object \'&\' name>}"', timestamp, 456,
'application/x-test3',
'6af83e3196bf99f440f31f2e1a6c9afe')
with broker.get() as conn:
self.assertEquals(conn.execute(
"SELECT name FROM object").fetchone()[0],
'"{<object \'&\' name>}"')
self.assertEquals(conn.execute(
"SELECT created_at FROM object").fetchone()[0], timestamp)
self.assertEquals(conn.execute(
"SELECT size FROM object").fetchone()[0], 456)
self.assertEquals(conn.execute(
"SELECT content_type FROM object").fetchone()[0],
'application/x-test3')
self.assertEquals(conn.execute(
"SELECT etag FROM object").fetchone()[0],
'6af83e3196bf99f440f31f2e1a6c9afe')
self.assertEquals(conn.execute(
"SELECT deleted FROM object").fetchone()[0], 0)
def test_get_info(self):
""" Test swift.common.db.ContainerBroker.get_info """
broker = ContainerBroker(':memory:', account='test1', container='test2')
broker.initialize(normalize_timestamp('1'))
info = broker.get_info()
self.assertEquals(info['account'], 'test1')
self.assertEquals(info['container'], 'test2')
self.assertEquals(info['hash'], '00000000000000000000000000000000')
info = broker.get_info()
self.assertEquals(info['object_count'], 0)
self.assertEquals(info['bytes_used'], 0)
broker.put_object('o1', normalize_timestamp(time()), 123, 'text/plain',
'5af83e3196bf99f440f31f2e1a6c9afe')
info = broker.get_info()
self.assertEquals(info['object_count'], 1)
self.assertEquals(info['bytes_used'], 123)
sleep(.00001)
broker.put_object('o2', normalize_timestamp(time()), 123, 'text/plain',
'5af83e3196bf99f440f31f2e1a6c9afe')
info = broker.get_info()
self.assertEquals(info['object_count'], 2)
self.assertEquals(info['bytes_used'], 246)
sleep(.00001)
broker.put_object('o2', normalize_timestamp(time()), 1000,
'text/plain', '5af83e3196bf99f440f31f2e1a6c9afe')
info = broker.get_info()
self.assertEquals(info['object_count'], 2)
self.assertEquals(info['bytes_used'], 1123)
sleep(.00001)
broker.delete_object('o1', normalize_timestamp(time()))
info = broker.get_info()
self.assertEquals(info['object_count'], 1)
self.assertEquals(info['bytes_used'], 1000)
sleep(.00001)
broker.delete_object('o2', normalize_timestamp(time()))
info = broker.get_info()
self.assertEquals(info['object_count'], 0)
self.assertEquals(info['bytes_used'], 0)
info = broker.get_info()
self.assertEquals(info['x_container_sync_point1'], -1)
self.assertEquals(info['x_container_sync_point2'], -1)
def test_set_x_syncs(self):
broker = ContainerBroker(':memory:', account='test1', container='test2')
broker.initialize(normalize_timestamp('1'))
info = broker.get_info()
self.assertEquals(info['x_container_sync_point1'], -1)
self.assertEquals(info['x_container_sync_point2'], -1)
broker.set_x_container_sync_points(1, 2)
info = broker.get_info()
self.assertEquals(info['x_container_sync_point1'], 1)
self.assertEquals(info['x_container_sync_point2'], 2)
def test_get_report_info(self):
broker = ContainerBroker(':memory:', account='test1', container='test2')
broker.initialize(normalize_timestamp('1'))
info = broker.get_info()
self.assertEquals(info['account'], 'test1')
self.assertEquals(info['container'], 'test2')
self.assertEquals(info['object_count'], 0)
self.assertEquals(info['bytes_used'], 0)
self.assertEquals(info['reported_object_count'], 0)
self.assertEquals(info['reported_bytes_used'], 0)
broker.put_object('o1', normalize_timestamp(time()), 123, 'text/plain',
'5af83e3196bf99f440f31f2e1a6c9afe')
info = broker.get_info()
self.assertEquals(info['object_count'], 1)
self.assertEquals(info['bytes_used'], 123)
self.assertEquals(info['reported_object_count'], 0)
self.assertEquals(info['reported_bytes_used'], 0)
sleep(.00001)
broker.put_object('o2', normalize_timestamp(time()), 123, 'text/plain',
'5af83e3196bf99f440f31f2e1a6c9afe')
info = broker.get_info()
self.assertEquals(info['object_count'], 2)
self.assertEquals(info['bytes_used'], 246)
self.assertEquals(info['reported_object_count'], 0)
self.assertEquals(info['reported_bytes_used'], 0)
sleep(.00001)
broker.put_object('o2', normalize_timestamp(time()), 1000,
'text/plain', '5af83e3196bf99f440f31f2e1a6c9afe')
info = broker.get_info()
self.assertEquals(info['object_count'], 2)
self.assertEquals(info['bytes_used'], 1123)
self.assertEquals(info['reported_object_count'], 0)
self.assertEquals(info['reported_bytes_used'], 0)
put_timestamp = normalize_timestamp(time())
sleep(.001)
delete_timestamp = normalize_timestamp(time())
broker.reported(put_timestamp, delete_timestamp, 2, 1123)
info = broker.get_info()
self.assertEquals(info['object_count'], 2)
self.assertEquals(info['bytes_used'], 1123)
self.assertEquals(info['reported_put_timestamp'], put_timestamp)
self.assertEquals(info['reported_delete_timestamp'], delete_timestamp)
self.assertEquals(info['reported_object_count'], 2)
self.assertEquals(info['reported_bytes_used'], 1123)
sleep(.00001)
broker.delete_object('o1', normalize_timestamp(time()))
info = broker.get_info()
self.assertEquals(info['object_count'], 1)
self.assertEquals(info['bytes_used'], 1000)
self.assertEquals(info['reported_object_count'], 2)
self.assertEquals(info['reported_bytes_used'], 1123)
sleep(.00001)
broker.delete_object('o2', normalize_timestamp(time()))
info = broker.get_info()
self.assertEquals(info['object_count'], 0)
self.assertEquals(info['bytes_used'], 0)
self.assertEquals(info['reported_object_count'], 2)
self.assertEquals(info['reported_bytes_used'], 1123)
def test_list_objects_iter(self):
""" Test swift.common.db.ContainerBroker.list_objects_iter """
broker = ContainerBroker(':memory:', account='a', container='c')
broker.initialize(normalize_timestamp('1'))
for obj1 in xrange(4):
for obj2 in xrange(125):
broker.put_object('%d/%04d' % (obj1, obj2),
normalize_timestamp(time()), 0, 'text/plain',
'd41d8cd98f00b204e9800998ecf8427e')
for obj in xrange(125):
broker.put_object('2/0051/%04d' % obj,
normalize_timestamp(time()), 0, 'text/plain',
'd41d8cd98f00b204e9800998ecf8427e')
for obj in xrange(125):
broker.put_object('3/%04d/0049' % obj,
normalize_timestamp(time()), 0, 'text/plain',
'd41d8cd98f00b204e9800998ecf8427e')
listing = broker.list_objects_iter(100, '', None, None, '')
self.assertEquals(len(listing), 100)
self.assertEquals(listing[0][0], '0/0000')
self.assertEquals(listing[-1][0], '0/0099')
listing = broker.list_objects_iter(100, '', '0/0050', None, '')
self.assertEquals(len(listing), 50)
self.assertEquals(listing[0][0], '0/0000')
self.assertEquals(listing[-1][0], '0/0049')
listing = broker.list_objects_iter(100, '0/0099', None, None, '')
self.assertEquals(len(listing), 100)
self.assertEquals(listing[0][0], '0/0100')
self.assertEquals(listing[-1][0], '1/0074')
listing = broker.list_objects_iter(55, '1/0074', None, None, '')
self.assertEquals(len(listing), 55)
self.assertEquals(listing[0][0], '1/0075')
self.assertEquals(listing[-1][0], '2/0004')
listing = broker.list_objects_iter(10, '', None, '0/01', '')
self.assertEquals(len(listing), 10)
self.assertEquals(listing[0][0], '0/0100')
self.assertEquals(listing[-1][0], '0/0109')
listing = broker.list_objects_iter(10, '', None, '0/', '/')
self.assertEquals(len(listing), 10)
self.assertEquals(listing[0][0], '0/0000')
self.assertEquals(listing[-1][0], '0/0009')
listing = broker.list_objects_iter(10, '', None, '', '/')
self.assertEquals(len(listing), 4)
self.assertEquals([row[0] for row in listing],
['0/', '1/', '2/', '3/'])
listing = broker.list_objects_iter(10, '2', None, None, '/')
self.assertEquals(len(listing), 2)
self.assertEquals([row[0] for row in listing], ['2/', '3/'])
listing = broker.list_objects_iter(10, '2/',None, None, '/')
self.assertEquals(len(listing), 1)
self.assertEquals([row[0] for row in listing], ['3/'])
listing = broker.list_objects_iter(10, '2/0050', None, '2/', '/')
self.assertEquals(len(listing), 10)
self.assertEquals(listing[0][0], '2/0051')
self.assertEquals(listing[1][0], '2/0051/')
self.assertEquals(listing[2][0], '2/0052')
self.assertEquals(listing[-1][0], '2/0059')
listing = broker.list_objects_iter(10, '3/0045', None, '3/', '/')
self.assertEquals(len(listing), 10)
self.assertEquals([row[0] for row in listing],
['3/0045/', '3/0046', '3/0046/', '3/0047',
'3/0047/', '3/0048', '3/0048/', '3/0049',
'3/0049/', '3/0050'])
broker.put_object('3/0049/', normalize_timestamp(time()), 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
listing = broker.list_objects_iter(10, '3/0048', None, None, None)
self.assertEquals(len(listing), 10)
self.assertEquals([row[0] for row in listing],
['3/0048/0049', '3/0049', '3/0049/',
'3/0049/0049', '3/0050', '3/0050/0049', '3/0051', '3/0051/0049',
'3/0052', '3/0052/0049'])
listing = broker.list_objects_iter(10, '3/0048', None, '3/', '/')
self.assertEquals(len(listing), 10)
self.assertEquals([row[0] for row in listing],
['3/0048/', '3/0049', '3/0049/', '3/0050',
'3/0050/', '3/0051', '3/0051/', '3/0052', '3/0052/', '3/0053'])
listing = broker.list_objects_iter(10, None, None, '3/0049/', '/')
self.assertEquals(len(listing), 2)
self.assertEquals([row[0] for row in listing],
['3/0049/', '3/0049/0049'])
listing = broker.list_objects_iter(10, None, None, None, None,
'3/0049')
self.assertEquals(len(listing), 1)
self.assertEquals([row[0] for row in listing], ['3/0049/0049'])
listing = broker.list_objects_iter(2, None, None, '3/', '/')
self.assertEquals(len(listing), 2)
self.assertEquals([row[0] for row in listing], ['3/0000', '3/0000/'])
listing = broker.list_objects_iter(2, None, None, None, None, '3')
self.assertEquals(len(listing), 2)
self.assertEquals([row[0] for row in listing], ['3/0000', '3/0001'])
def test_list_objects_iter_prefix_delim(self):
""" Test swift.common.db.ContainerBroker.list_objects_iter """
broker = ContainerBroker(':memory:', account='a', container='c')
broker.initialize(normalize_timestamp('1'))
broker.put_object('/pets/dogs/1', normalize_timestamp(0), 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
broker.put_object('/pets/dogs/2', normalize_timestamp(0), 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
broker.put_object('/pets/fish/a', normalize_timestamp(0), 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
broker.put_object('/pets/fish/b', normalize_timestamp(0), 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
broker.put_object('/pets/fish_info.txt', normalize_timestamp(0), 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
broker.put_object('/snakes', normalize_timestamp(0), 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
#def list_objects_iter(self, limit, marker, prefix, delimiter, path=None,
# format=None):
listing = broker.list_objects_iter(100, None, None, '/pets/f', '/')
self.assertEquals([row[0] for row in listing], ['/pets/fish/', '/pets/fish_info.txt'])
listing = broker.list_objects_iter(100, None, None, '/pets/fish', '/')
self.assertEquals([row[0] for row in listing], ['/pets/fish/', '/pets/fish_info.txt'])
listing = broker.list_objects_iter(100, None, None, '/pets/fish/', '/')
self.assertEquals([row[0] for row in listing], ['/pets/fish/a', '/pets/fish/b'])
def test_double_check_trailing_delimiter(self):
""" Test swift.common.db.ContainerBroker.list_objects_iter for a
container that has an odd file with a trailing delimiter """
broker = ContainerBroker(':memory:', account='a', container='c')
broker.initialize(normalize_timestamp('1'))
broker.put_object('a', normalize_timestamp(time()), 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
broker.put_object('a/', normalize_timestamp(time()), 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
broker.put_object('a/a', normalize_timestamp(time()), 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
broker.put_object('a/a/a', normalize_timestamp(time()), 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
broker.put_object('a/a/b', normalize_timestamp(time()), 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
broker.put_object('a/b', normalize_timestamp(time()), 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
broker.put_object('b', normalize_timestamp(time()), 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
broker.put_object('b/a', normalize_timestamp(time()), 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
broker.put_object('b/b', normalize_timestamp(time()), 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
broker.put_object('c', normalize_timestamp(time()), 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
listing = broker.list_objects_iter(15, None, None, None, None)
self.assertEquals(len(listing), 10)
self.assertEquals([row[0] for row in listing],
['a', 'a/', 'a/a', 'a/a/a', 'a/a/b', 'a/b', 'b', 'b/a', 'b/b', 'c'])
listing = broker.list_objects_iter(15, None, None, '', '/')
self.assertEquals(len(listing), 5)
self.assertEquals([row[0] for row in listing],
['a', 'a/', 'b', 'b/', 'c'])
listing = broker.list_objects_iter(15, None, None, 'a/', '/')
self.assertEquals(len(listing), 4)
self.assertEquals([row[0] for row in listing],
['a/', 'a/a', 'a/a/', 'a/b'])
listing = broker.list_objects_iter(15, None, None, 'b/', '/')
self.assertEquals(len(listing), 2)
self.assertEquals([row[0] for row in listing], ['b/a', 'b/b'])
def test_chexor(self):
broker = ContainerBroker(':memory:', account='a', container='c')
broker.initialize(normalize_timestamp('1'))
broker.put_object('a', normalize_timestamp(1), 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
broker.put_object('b', normalize_timestamp(2), 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
hasha = hashlib.md5('%s-%s' % ('a', '0000000001.00000')).digest()
hashb = hashlib.md5('%s-%s' % ('b', '0000000002.00000')).digest()
hashc = ''.join(('%2x' % (ord(a)^ord(b)) for a, b in zip(hasha, hashb)))
self.assertEquals(broker.get_info()['hash'], hashc)
broker.put_object('b', normalize_timestamp(3), 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
hashb = hashlib.md5('%s-%s' % ('b', '0000000003.00000')).digest()
hashc = ''.join(('%02x' % (ord(a)^ord(b)) for a, b in zip(hasha, hashb)))
self.assertEquals(broker.get_info()['hash'], hashc)
def test_newid(self):
"""test DatabaseBroker.newid"""
broker = ContainerBroker(':memory:', account='a', container='c')
broker.initialize(normalize_timestamp('1'))
id = broker.get_info()['id']
broker.newid('someid')
self.assertNotEquals(id, broker.get_info()['id'])
def test_get_items_since(self):
"""test DatabaseBroker.get_items_since"""
broker = ContainerBroker(':memory:', account='a', container='c')
broker.initialize(normalize_timestamp('1'))
broker.put_object('a', normalize_timestamp(1), 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
max_row = broker.get_replication_info()['max_row']
broker.put_object('b', normalize_timestamp(2), 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
items = broker.get_items_since(max_row, 1000)
self.assertEquals(len(items), 1)
self.assertEquals(items[0]['name'], 'b')
def test_sync_merging(self):
""" exercise the DatabaseBroker sync functions a bit """
broker1 = ContainerBroker(':memory:', account='a', container='c')
broker1.initialize(normalize_timestamp('1'))
broker2 = ContainerBroker(':memory:', account='a', container='c')
broker2.initialize(normalize_timestamp('1'))
self.assertEquals(broker2.get_sync('12345'), -1)
broker1.merge_syncs([{'sync_point': 3, 'remote_id': '12345'}])
broker2.merge_syncs(broker1.get_syncs())
self.assertEquals(broker2.get_sync('12345'), 3)
def test_merge_items(self):
broker1 = ContainerBroker(':memory:', account='a', container='c')
broker1.initialize(normalize_timestamp('1'))
broker2 = ContainerBroker(':memory:', account='a', container='c')
broker2.initialize(normalize_timestamp('1'))
broker1.put_object('a', normalize_timestamp(1), 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
broker1.put_object('b', normalize_timestamp(2), 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
id = broker1.get_info()['id']
broker2.merge_items(broker1.get_items_since(
broker2.get_sync(id), 1000), id)
items = broker2.get_items_since(-1, 1000)
self.assertEquals(len(items), 2)
self.assertEquals(['a', 'b'], sorted([rec['name'] for rec in items]))
broker1.put_object('c', normalize_timestamp(3), 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
broker2.merge_items(broker1.get_items_since(
broker2.get_sync(id), 1000), id)
items = broker2.get_items_since(-1, 1000)
self.assertEquals(len(items), 3)
self.assertEquals(['a', 'b', 'c'],
sorted([rec['name'] for rec in items]))
def test_merge_items_overwrite(self):
"""test DatabaseBroker.merge_items"""
broker1 = ContainerBroker(':memory:', account='a', container='c')
broker1.initialize(normalize_timestamp('1'))
id = broker1.get_info()['id']
broker2 = ContainerBroker(':memory:', account='a', container='c')
broker2.initialize(normalize_timestamp('1'))
broker1.put_object('a', normalize_timestamp(2), 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
broker1.put_object('b', normalize_timestamp(3), 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
broker2.merge_items(broker1.get_items_since(
broker2.get_sync(id), 1000), id)
broker1.put_object('a', normalize_timestamp(4), 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
broker2.merge_items(broker1.get_items_since(
broker2.get_sync(id), 1000), id)
items = broker2.get_items_since(-1, 1000)
self.assertEquals(['a', 'b'], sorted([rec['name'] for rec in items]))
for rec in items:
if rec['name'] == 'a':
self.assertEquals(rec['created_at'], normalize_timestamp(4))
if rec['name'] == 'b':
self.assertEquals(rec['created_at'], normalize_timestamp(3))
def test_merge_items_post_overwrite_out_of_order(self):
"""test DatabaseBroker.merge_items"""
broker1 = ContainerBroker(':memory:', account='a', container='c')
broker1.initialize(normalize_timestamp('1'))
id = broker1.get_info()['id']
broker2 = ContainerBroker(':memory:', account='a', container='c')
broker2.initialize(normalize_timestamp('1'))
broker1.put_object('a', normalize_timestamp(2), 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
broker1.put_object('b', normalize_timestamp(3), 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
broker2.merge_items(broker1.get_items_since(
broker2.get_sync(id), 1000), id)
broker1.put_object('a', normalize_timestamp(4), 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
broker2.merge_items(broker1.get_items_since(
broker2.get_sync(id), 1000), id)
items = broker2.get_items_since(-1, 1000)
self.assertEquals(['a', 'b'], sorted([rec['name'] for rec in items]))
for rec in items:
if rec['name'] == 'a':
self.assertEquals(rec['created_at'], normalize_timestamp(4))
if rec['name'] == 'b':
self.assertEquals(rec['created_at'], normalize_timestamp(3))
self.assertEquals(rec['content_type'], 'text/plain')
items = broker2.get_items_since(-1, 1000)
self.assertEquals(['a', 'b'], sorted([rec['name'] for rec in items]))
for rec in items:
if rec['name'] == 'a':
self.assertEquals(rec['created_at'], normalize_timestamp(4))
if rec['name'] == 'b':
self.assertEquals(rec['created_at'], normalize_timestamp(3))
broker1.put_object('b', normalize_timestamp(5), 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
broker2.merge_items(broker1.get_items_since(
broker2.get_sync(id), 1000), id)
items = broker2.get_items_since(-1, 1000)
self.assertEquals(['a', 'b'], sorted([rec['name'] for rec in items]))
for rec in items:
if rec['name'] == 'a':
self.assertEquals(rec['created_at'], normalize_timestamp(4))
if rec['name'] == 'b':
self.assertEquals(rec['created_at'], normalize_timestamp(5))
self.assertEquals(rec['content_type'], 'text/plain')
def premetadata_create_container_stat_table(self, conn, put_timestamp=None):
"""
Copied from swift.common.db.ContainerBroker before the metadata column was
added; used for testing with TestContainerBrokerBeforeMetadata.
    Create the container_stat table which is specific to the container DB.
:param conn: DB connection object
:param put_timestamp: put timestamp
"""
if put_timestamp is None:
put_timestamp = normalize_timestamp(0)
conn.executescript("""
CREATE TABLE container_stat (
account TEXT,
container TEXT,
created_at TEXT,
put_timestamp TEXT DEFAULT '0',
delete_timestamp TEXT DEFAULT '0',
object_count INTEGER,
bytes_used INTEGER,
reported_put_timestamp TEXT DEFAULT '0',
reported_delete_timestamp TEXT DEFAULT '0',
reported_object_count INTEGER DEFAULT 0,
reported_bytes_used INTEGER DEFAULT 0,
hash TEXT default '00000000000000000000000000000000',
id TEXT,
status TEXT DEFAULT '',
status_changed_at TEXT DEFAULT '0'
);
INSERT INTO container_stat (object_count, bytes_used)
VALUES (0, 0);
""")
conn.execute('''
UPDATE container_stat
SET account = ?, container = ?, created_at = ?, id = ?,
put_timestamp = ?
''', (self.account, self.container, normalize_timestamp(time()),
str(uuid4()), put_timestamp))
class TestContainerBrokerBeforeMetadata(TestContainerBroker):
"""
Tests for swift.common.db.ContainerBroker against databases created before
the metadata column was added.
"""
def setUp(self):
self._imported_create_container_stat_table = \
ContainerBroker.create_container_stat_table
ContainerBroker.create_container_stat_table = \
premetadata_create_container_stat_table
broker = ContainerBroker(':memory:', account='a', container='c')
broker.initialize(normalize_timestamp('1'))
exc = None
with broker.get() as conn:
try:
conn.execute('SELECT metadata FROM container_stat')
except BaseException, err:
exc = err
self.assert_('no such column: metadata' in str(exc))
def tearDown(self):
ContainerBroker.create_container_stat_table = \
self._imported_create_container_stat_table
broker = ContainerBroker(':memory:', account='a', container='c')
broker.initialize(normalize_timestamp('1'))
with broker.get() as conn:
conn.execute('SELECT metadata FROM container_stat')
def prexsync_create_container_stat_table(self, conn, put_timestamp=None):
"""
Copied from swift.common.db.ContainerBroker before the
x_container_sync_point[12] columns were added; used for testing with
TestContainerBrokerBeforeXSync.
    Create the container_stat table which is specific to the container DB.
:param conn: DB connection object
:param put_timestamp: put timestamp
"""
if put_timestamp is None:
put_timestamp = normalize_timestamp(0)
conn.executescript("""
CREATE TABLE container_stat (
account TEXT,
container TEXT,
created_at TEXT,
put_timestamp TEXT DEFAULT '0',
delete_timestamp TEXT DEFAULT '0',
object_count INTEGER,
bytes_used INTEGER,
reported_put_timestamp TEXT DEFAULT '0',
reported_delete_timestamp TEXT DEFAULT '0',
reported_object_count INTEGER DEFAULT 0,
reported_bytes_used INTEGER DEFAULT 0,
hash TEXT default '00000000000000000000000000000000',
id TEXT,
status TEXT DEFAULT '',
status_changed_at TEXT DEFAULT '0',
metadata TEXT DEFAULT ''
);
INSERT INTO container_stat (object_count, bytes_used)
VALUES (0, 0);
""")
conn.execute('''
UPDATE container_stat
SET account = ?, container = ?, created_at = ?, id = ?,
put_timestamp = ?
''', (self.account, self.container, normalize_timestamp(time()),
str(uuid4()), put_timestamp))
class TestContainerBrokerBeforeXSync(TestContainerBroker):
"""
Tests for swift.common.db.ContainerBroker against databases created before
the x_container_sync_point[12] columns were added.
"""
def setUp(self):
self._imported_create_container_stat_table = \
ContainerBroker.create_container_stat_table
ContainerBroker.create_container_stat_table = \
prexsync_create_container_stat_table
broker = ContainerBroker(':memory:', account='a', container='c')
broker.initialize(normalize_timestamp('1'))
exc = None
with broker.get() as conn:
try:
conn.execute('''SELECT x_container_sync_point1
FROM container_stat''')
except BaseException, err:
exc = err
self.assert_('no such column: x_container_sync_point1' in str(exc))
def tearDown(self):
ContainerBroker.create_container_stat_table = \
self._imported_create_container_stat_table
broker = ContainerBroker(':memory:', account='a', container='c')
broker.initialize(normalize_timestamp('1'))
with broker.get() as conn:
conn.execute('SELECT x_container_sync_point1 FROM container_stat')
class TestAccountBroker(unittest.TestCase):
""" Tests for swift.common.db.AccountBroker """
def test_creation(self):
""" Test swift.common.db.AccountBroker.__init__ """
broker = AccountBroker(':memory:', account='a')
self.assertEqual(broker.db_file, ':memory:')
got_exc = False
try:
with broker.get() as conn:
pass
except Exception:
got_exc = True
self.assert_(got_exc)
broker.initialize(normalize_timestamp('1'))
with broker.get() as conn:
curs = conn.cursor()
curs.execute('SELECT 1')
self.assertEqual(curs.fetchall()[0][0], 1)
def test_exception(self):
""" Test swift.common.db.AccountBroker throwing a conn away after
exception """
first_conn = None
broker = AccountBroker(':memory:', account='a')
broker.initialize(normalize_timestamp('1'))
with broker.get() as conn:
first_conn = conn
try:
with broker.get() as conn:
self.assertEquals(first_conn, conn)
raise Exception('OMG')
except Exception:
pass
self.assert_(broker.conn is None)
def test_empty(self):
""" Test swift.common.db.AccountBroker.empty """
broker = AccountBroker(':memory:', account='a')
broker.initialize(normalize_timestamp('1'))
self.assert_(broker.empty())
broker.put_container('o', normalize_timestamp(time()), 0, 0, 0)
self.assert_(not broker.empty())
sleep(.00001)
broker.put_container('o', 0, normalize_timestamp(time()), 0, 0)
self.assert_(broker.empty())
def test_reclaim(self):
broker = AccountBroker(':memory:', account='test_account')
broker.initialize(normalize_timestamp('1'))
broker.put_container('c', normalize_timestamp(time()), 0, 0, 0)
with broker.get() as conn:
self.assertEquals(conn.execute(
"SELECT count(*) FROM container "
"WHERE deleted = 0").fetchone()[0], 1)
self.assertEquals(conn.execute(
"SELECT count(*) FROM container "
"WHERE deleted = 1").fetchone()[0], 0)
broker.reclaim(normalize_timestamp(time() - 999), time())
with broker.get() as conn:
self.assertEquals(conn.execute(
"SELECT count(*) FROM container "
"WHERE deleted = 0").fetchone()[0], 1)
self.assertEquals(conn.execute(
"SELECT count(*) FROM container "
"WHERE deleted = 1").fetchone()[0], 0)
sleep(.00001)
broker.put_container('c', 0, normalize_timestamp(time()), 0, 0)
with broker.get() as conn:
self.assertEquals(conn.execute(
"SELECT count(*) FROM container "
"WHERE deleted = 0").fetchone()[0], 0)
self.assertEquals(conn.execute(
"SELECT count(*) FROM container "
"WHERE deleted = 1").fetchone()[0], 1)
broker.reclaim(normalize_timestamp(time() - 999), time())
with broker.get() as conn:
self.assertEquals(conn.execute(
"SELECT count(*) FROM container "
"WHERE deleted = 0").fetchone()[0], 0)
self.assertEquals(conn.execute(
"SELECT count(*) FROM container "
"WHERE deleted = 1").fetchone()[0], 1)
sleep(.00001)
broker.reclaim(normalize_timestamp(time()), time())
with broker.get() as conn:
self.assertEquals(conn.execute(
"SELECT count(*) FROM container "
"WHERE deleted = 0").fetchone()[0], 0)
self.assertEquals(conn.execute(
"SELECT count(*) FROM container "
"WHERE deleted = 1").fetchone()[0], 0)
# Test reclaim after deletion. Create 3 test containers
broker.put_container('x', 0, 0, 0, 0)
broker.put_container('y', 0, 0, 0, 0)
broker.put_container('z', 0, 0, 0, 0)
res = broker.reclaim(normalize_timestamp(time()), time())
# self.assertEquals(len(res), 2)
# self.assert_(isinstance(res, tuple))
# containers, account_name = res
# self.assert_(containers is None)
# self.assert_(account_name is None)
# Now delete the account
broker.delete_db(normalize_timestamp(time()))
res = broker.reclaim(normalize_timestamp(time()), time())
# self.assertEquals(len(res), 2)
# self.assert_(isinstance(res, tuple))
# containers, account_name = res
# self.assertEquals(account_name, 'test_account')
# self.assertEquals(len(containers), 3)
# self.assert_('x' in containers)
# self.assert_('y' in containers)
# self.assert_('z' in containers)
# self.assert_('a' not in containers)
def test_delete_container(self):
""" Test swift.common.db.AccountBroker.delete_container """
broker = AccountBroker(':memory:', account='a')
broker.initialize(normalize_timestamp('1'))
broker.put_container('o', normalize_timestamp(time()), 0, 0, 0)
with broker.get() as conn:
self.assertEquals(conn.execute(
"SELECT count(*) FROM container "
"WHERE deleted = 0").fetchone()[0], 1)
self.assertEquals(conn.execute(
"SELECT count(*) FROM container "
"WHERE deleted = 1").fetchone()[0], 0)
sleep(.00001)
broker.put_container('o', 0, normalize_timestamp(time()), 0, 0)
with broker.get() as conn:
self.assertEquals(conn.execute(
"SELECT count(*) FROM container "
"WHERE deleted = 0").fetchone()[0], 0)
self.assertEquals(conn.execute(
"SELECT count(*) FROM container "
"WHERE deleted = 1").fetchone()[0], 1)
def test_get_container_timestamp(self):
""" Test swift.common.db.AccountBroker.get_container_timestamp """
broker = AccountBroker(':memory:', account='a')
broker.initialize(normalize_timestamp('1'))
# Create initial container
timestamp = normalize_timestamp(time())
broker.put_container('container_name', timestamp, 0, 0, 0)
        # test existing container
ts = broker.get_container_timestamp('container_name')
self.assertEquals(ts, timestamp)
        # test missing container
ts = broker.get_container_timestamp('something else')
self.assertEquals(ts, None)
def test_put_container(self):
""" Test swift.common.db.AccountBroker.put_container """
broker = AccountBroker(':memory:', account='a')
broker.initialize(normalize_timestamp('1'))
# Create initial container
timestamp = normalize_timestamp(time())
broker.put_container('"{<container \'&\' name>}"', timestamp, 0, 0, 0)
with broker.get() as conn:
self.assertEquals(conn.execute(
"SELECT name FROM container").fetchone()[0],
'"{<container \'&\' name>}"')
self.assertEquals(conn.execute(
"SELECT put_timestamp FROM container").fetchone()[0], timestamp)
self.assertEquals(conn.execute(
"SELECT deleted FROM container").fetchone()[0], 0)
# Reput same event
broker.put_container('"{<container \'&\' name>}"', timestamp, 0, 0, 0)
with broker.get() as conn:
self.assertEquals(conn.execute(
"SELECT name FROM container").fetchone()[0],
'"{<container \'&\' name>}"')
self.assertEquals(conn.execute(
"SELECT put_timestamp FROM container").fetchone()[0], timestamp)
self.assertEquals(conn.execute(
"SELECT deleted FROM container").fetchone()[0], 0)
# Put new event
sleep(.00001)
timestamp = normalize_timestamp(time())
broker.put_container('"{<container \'&\' name>}"', timestamp, 0, 0, 0)
with broker.get() as conn:
self.assertEquals(conn.execute(
"SELECT name FROM container").fetchone()[0],
'"{<container \'&\' name>}"')
self.assertEquals(conn.execute(
"SELECT put_timestamp FROM container").fetchone()[0], timestamp)
self.assertEquals(conn.execute(
"SELECT deleted FROM container").fetchone()[0], 0)
# Put old event
otimestamp = normalize_timestamp(float(timestamp) - 1)
broker.put_container('"{<container \'&\' name>}"', otimestamp, 0, 0, 0)
with broker.get() as conn:
self.assertEquals(conn.execute(
"SELECT name FROM container").fetchone()[0],
'"{<container \'&\' name>}"')
self.assertEquals(conn.execute(
"SELECT put_timestamp FROM container").fetchone()[0], timestamp)
self.assertEquals(conn.execute(
"SELECT deleted FROM container").fetchone()[0], 0)
# Put old delete event
dtimestamp = normalize_timestamp(float(timestamp) - 1)
broker.put_container('"{<container \'&\' name>}"', 0, dtimestamp, 0, 0)
with broker.get() as conn:
self.assertEquals(conn.execute(
"SELECT name FROM container").fetchone()[0],
'"{<container \'&\' name>}"')
self.assertEquals(conn.execute(
"SELECT put_timestamp FROM container").fetchone()[0], timestamp)
self.assertEquals(conn.execute(
"SELECT delete_timestamp FROM container").fetchone()[0],
dtimestamp)
self.assertEquals(conn.execute(
"SELECT deleted FROM container").fetchone()[0], 0)
# Put new delete event
sleep(.00001)
timestamp = normalize_timestamp(time())
broker.put_container('"{<container \'&\' name>}"', 0, timestamp, 0, 0)
with broker.get() as conn:
self.assertEquals(conn.execute(
"SELECT name FROM container").fetchone()[0],
'"{<container \'&\' name>}"')
self.assertEquals(conn.execute(
"SELECT delete_timestamp FROM container").fetchone()[0],
timestamp)
self.assertEquals(conn.execute(
"SELECT deleted FROM container").fetchone()[0], 1)
# Put new event
sleep(.00001)
timestamp = normalize_timestamp(time())
broker.put_container('"{<container \'&\' name>}"', timestamp, 0, 0, 0)
with broker.get() as conn:
self.assertEquals(conn.execute(
"SELECT name FROM container").fetchone()[0],
'"{<container \'&\' name>}"')
self.assertEquals(conn.execute(
"SELECT put_timestamp FROM container").fetchone()[0], timestamp)
self.assertEquals(conn.execute(
"SELECT deleted FROM container").fetchone()[0], 0)
def test_get_info(self):
""" Test swift.common.db.AccountBroker.get_info """
broker = AccountBroker(':memory:', account='test1')
broker.initialize(normalize_timestamp('1'))
info = broker.get_info()
self.assertEquals(info['account'], 'test1')
self.assertEquals(info['hash'], '00000000000000000000000000000000')
info = broker.get_info()
self.assertEquals(info['container_count'], 0)
broker.put_container('c1', normalize_timestamp(time()), 0, 0, 0)
info = broker.get_info()
self.assertEquals(info['container_count'], 1)
sleep(.00001)
broker.put_container('c2', normalize_timestamp(time()), 0, 0, 0)
info = broker.get_info()
self.assertEquals(info['container_count'], 2)
sleep(.00001)
broker.put_container('c2', normalize_timestamp(time()), 0, 0, 0)
info = broker.get_info()
self.assertEquals(info['container_count'], 2)
sleep(.00001)
broker.put_container('c1', 0, normalize_timestamp(time()), 0, 0)
info = broker.get_info()
self.assertEquals(info['container_count'], 1)
sleep(.00001)
broker.put_container('c2', 0, normalize_timestamp(time()), 0, 0)
info = broker.get_info()
self.assertEquals(info['container_count'], 0)
def test_list_containers_iter(self):
""" Test swift.common.db.AccountBroker.list_containers_iter """
broker = AccountBroker(':memory:', account='a')
broker.initialize(normalize_timestamp('1'))
for cont1 in xrange(4):
for cont2 in xrange(125):
broker.put_container('%d/%04d' % (cont1, cont2),
normalize_timestamp(time()), 0, 0, 0)
for cont in xrange(125):
broker.put_container('2/0051/%04d' % cont,
normalize_timestamp(time()), 0, 0, 0)
for cont in xrange(125):
broker.put_container('3/%04d/0049' % cont,
normalize_timestamp(time()), 0, 0, 0)
listing = broker.list_containers_iter(100, '', None, None, '')
self.assertEquals(len(listing), 100)
self.assertEquals(listing[0][0], '0/0000')
self.assertEquals(listing[-1][0], '0/0099')
listing = broker.list_containers_iter(100, '', '0/0050', None, '')
self.assertEquals(len(listing), 51)
self.assertEquals(listing[0][0], '0/0000')
self.assertEquals(listing[-1][0], '0/0050')
listing = broker.list_containers_iter(100, '0/0099', None, None, '')
self.assertEquals(len(listing), 100)
self.assertEquals(listing[0][0], '0/0100')
self.assertEquals(listing[-1][0], '1/0074')
listing = broker.list_containers_iter(55, '1/0074', None, None, '')
self.assertEquals(len(listing), 55)
self.assertEquals(listing[0][0], '1/0075')
self.assertEquals(listing[-1][0], '2/0004')
listing = broker.list_containers_iter(10, '', None, '0/01', '')
self.assertEquals(len(listing), 10)
self.assertEquals(listing[0][0], '0/0100')
self.assertEquals(listing[-1][0], '0/0109')
listing = broker.list_containers_iter(10, '', None, '0/01', '/')
self.assertEquals(len(listing), 10)
self.assertEquals(listing[0][0], '0/0100')
self.assertEquals(listing[-1][0], '0/0109')
listing = broker.list_containers_iter(10, '', None, '0/', '/')
self.assertEquals(len(listing), 10)
self.assertEquals(listing[0][0], '0/0000')
self.assertEquals(listing[-1][0], '0/0009')
listing = broker.list_containers_iter(10, '', None, '', '/')
self.assertEquals(len(listing), 4)
self.assertEquals([row[0] for row in listing],
['0/', '1/', '2/', '3/'])
listing = broker.list_containers_iter(10, '2/', None, None, '/')
self.assertEquals(len(listing), 1)
self.assertEquals([row[0] for row in listing], ['3/'])
listing = broker.list_containers_iter(10, '', None, '2', '/')
self.assertEquals(len(listing), 1)
self.assertEquals([row[0] for row in listing], ['2/'])
listing = broker.list_containers_iter(10, '2/0050', None, '2/', '/')
self.assertEquals(len(listing), 10)
self.assertEquals(listing[0][0], '2/0051')
self.assertEquals(listing[1][0], '2/0051/')
self.assertEquals(listing[2][0], '2/0052')
self.assertEquals(listing[-1][0], '2/0059')
listing = broker.list_containers_iter(10, '3/0045', None, '3/', '/')
self.assertEquals(len(listing), 10)
self.assertEquals([row[0] for row in listing],
['3/0045/', '3/0046', '3/0046/', '3/0047',
'3/0047/', '3/0048', '3/0048/', '3/0049',
'3/0049/', '3/0050'])
broker.put_container('3/0049/', normalize_timestamp(time()), 0, 0, 0)
listing = broker.list_containers_iter(10, '3/0048', None, None, None)
self.assertEquals(len(listing), 10)
self.assertEquals([row[0] for row in listing],
['3/0048/0049', '3/0049', '3/0049/', '3/0049/0049',
'3/0050', '3/0050/0049', '3/0051', '3/0051/0049',
'3/0052', '3/0052/0049'])
listing = broker.list_containers_iter(10, '3/0048', None, '3/', '/')
self.assertEquals(len(listing), 10)
self.assertEquals([row[0] for row in listing],
['3/0048/', '3/0049', '3/0049/', '3/0050',
'3/0050/', '3/0051', '3/0051/', '3/0052',
'3/0052/', '3/0053'])
listing = broker.list_containers_iter(10, None, None, '3/0049/', '/')
self.assertEquals(len(listing), 2)
self.assertEquals([row[0] for row in listing],
['3/0049/', '3/0049/0049'])
def test_double_check_trailing_delimiter(self):
""" Test swift.common.db.AccountBroker.list_containers_iter for an
account that has an odd file with a trailing delimiter """
broker = AccountBroker(':memory:', account='a')
broker.initialize(normalize_timestamp('1'))
broker.put_container('a', normalize_timestamp(time()), 0, 0, 0)
broker.put_container('a/', normalize_timestamp(time()), 0, 0, 0)
broker.put_container('a/a', normalize_timestamp(time()), 0, 0, 0)
broker.put_container('a/a/a', normalize_timestamp(time()), 0, 0, 0)
broker.put_container('a/a/b', normalize_timestamp(time()), 0, 0, 0)
broker.put_container('a/b', normalize_timestamp(time()), 0, 0, 0)
broker.put_container('b', normalize_timestamp(time()), 0, 0, 0)
broker.put_container('b/a', normalize_timestamp(time()), 0, 0, 0)
broker.put_container('b/b', normalize_timestamp(time()), 0, 0, 0)
broker.put_container('c', normalize_timestamp(time()), 0, 0, 0)
listing = broker.list_containers_iter(15, None, None, None, None)
self.assertEquals(len(listing), 10)
self.assertEquals([row[0] for row in listing],
['a', 'a/', 'a/a', 'a/a/a', 'a/a/b', 'a/b', 'b',
'b/a', 'b/b', 'c'])
listing = broker.list_containers_iter(15, None, None, '', '/')
self.assertEquals(len(listing), 5)
self.assertEquals([row[0] for row in listing],
['a', 'a/', 'b', 'b/', 'c'])
listing = broker.list_containers_iter(15, None, None, 'a/', '/')
self.assertEquals(len(listing), 4)
self.assertEquals([row[0] for row in listing],
['a/', 'a/a', 'a/a/', 'a/b'])
listing = broker.list_containers_iter(15, None, None, 'b/', '/')
self.assertEquals(len(listing), 2)
self.assertEquals([row[0] for row in listing], ['b/a', 'b/b'])
def test_chexor(self):
broker = AccountBroker(':memory:', account='a')
broker.initialize(normalize_timestamp('1'))
broker.put_container('a', normalize_timestamp(1),
normalize_timestamp(0), 0, 0)
broker.put_container('b', normalize_timestamp(2),
normalize_timestamp(0), 0, 0)
hasha = hashlib.md5('%s-%s' %
('a', '0000000001.00000-0000000000.00000-0-0')
).digest()
hashb = hashlib.md5('%s-%s' %
('b', '0000000002.00000-0000000000.00000-0-0')
).digest()
hashc = \
''.join(('%02x' % (ord(a)^ord(b)) for a, b in zip(hasha, hashb)))
self.assertEquals(broker.get_info()['hash'], hashc)
broker.put_container('b', normalize_timestamp(3),
normalize_timestamp(0), 0, 0)
hashb = hashlib.md5('%s-%s' %
('b', '0000000003.00000-0000000000.00000-0-0')
).digest()
hashc = \
''.join(('%02x' % (ord(a)^ord(b)) for a, b in zip(hasha, hashb)))
self.assertEquals(broker.get_info()['hash'], hashc)
def test_merge_items(self):
broker1 = AccountBroker(':memory:', account='a')
broker1.initialize(normalize_timestamp('1'))
broker2 = AccountBroker(':memory:', account='a')
broker2.initialize(normalize_timestamp('1'))
broker1.put_container('a', normalize_timestamp(1), 0, 0, 0)
broker1.put_container('b', normalize_timestamp(2), 0, 0, 0)
id = broker1.get_info()['id']
broker2.merge_items(broker1.get_items_since(
broker2.get_sync(id), 1000), id)
items = broker2.get_items_since(-1, 1000)
self.assertEquals(len(items), 2)
self.assertEquals(['a', 'b'], sorted([rec['name'] for rec in items]))
broker1.put_container('c', normalize_timestamp(3), 0, 0, 0)
broker2.merge_items(broker1.get_items_since(
broker2.get_sync(id), 1000), id)
items = broker2.get_items_since(-1, 1000)
self.assertEquals(len(items), 3)
self.assertEquals(['a', 'b', 'c'],
sorted([rec['name'] for rec in items]))
def premetadata_create_account_stat_table(self, conn, put_timestamp):
"""
Copied from swift.common.db.AccountBroker before the metadata column was
added; used for testing with TestAccountBrokerBeforeMetadata.
Create account_stat table which is specific to the account DB.
:param conn: DB connection object
:param put_timestamp: put timestamp
"""
conn.executescript("""
CREATE TABLE account_stat (
account TEXT,
created_at TEXT,
put_timestamp TEXT DEFAULT '0',
delete_timestamp TEXT DEFAULT '0',
container_count INTEGER,
object_count INTEGER DEFAULT 0,
bytes_used INTEGER DEFAULT 0,
hash TEXT default '00000000000000000000000000000000',
id TEXT,
status TEXT DEFAULT '',
status_changed_at TEXT DEFAULT '0'
);
INSERT INTO account_stat (container_count) VALUES (0);
""")
conn.execute('''
UPDATE account_stat SET account = ?, created_at = ?, id = ?,
put_timestamp = ?
''', (self.account, normalize_timestamp(time()), str(uuid4()),
put_timestamp))
class TestAccountBrokerBeforeMetadata(TestAccountBroker):
"""
Tests for swift.common.db.AccountBroker against databases created before
the metadata column was added.
"""
def setUp(self):
self._imported_create_account_stat_table = \
AccountBroker.create_account_stat_table
AccountBroker.create_account_stat_table = \
premetadata_create_account_stat_table
broker = AccountBroker(':memory:', account='a')
broker.initialize(normalize_timestamp('1'))
exc = None
with broker.get() as conn:
try:
conn.execute('SELECT metadata FROM account_stat')
except BaseException, err:
exc = err
self.assert_('no such column: metadata' in str(exc))
def tearDown(self):
AccountBroker.create_account_stat_table = \
self._imported_create_account_stat_table
broker = AccountBroker(':memory:', account='a')
broker.initialize(normalize_timestamp('1'))
with broker.get() as conn:
conn.execute('SELECT metadata FROM account_stat')
if __name__ == '__main__':
unittest.main()
| 45.22511
| 94
| 0.58245
|
35d14ef7657d4272430c1b7767d4d43612636748
| 9,851
|
py
|
Python
|
python/paddle/fluid/layers/metric_op.py
|
LWhite027/PaddleBox
|
b14bcdf285dd8829e11ab12cc815ac1b1ab62694
|
[
"Apache-2.0"
] | 10
|
2021-05-12T07:20:32.000Z
|
2022-03-04T08:21:56.000Z
|
python/paddle/fluid/layers/metric_op.py
|
AFLee/Paddle
|
311b3b44fc7d51d4d66d90ab8a3fc0d42231afda
|
[
"Apache-2.0"
] | 1
|
2021-01-25T09:40:19.000Z
|
2021-01-25T09:40:19.000Z
|
python/paddle/fluid/layers/metric_op.py
|
AFLee/Paddle
|
311b3b44fc7d51d4d66d90ab8a3fc0d42231afda
|
[
"Apache-2.0"
] | 18
|
2021-05-19T08:01:49.000Z
|
2022-02-11T03:11:32.000Z
|
# Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
All layers just related to metric.
"""
from __future__ import print_function
import warnings
from ..layer_helper import LayerHelper
from ..initializer import Normal, Constant
from ..framework import Variable, in_dygraph_mode, _varbase_creator
from .. import core
from ..param_attr import ParamAttr
from . import nn
from ..data_feeder import check_variable_and_dtype
__all__ = ['accuracy', 'auc']
def accuracy(input, label, k=1, correct=None, total=None):
"""
accuracy layer.
    Refer to https://en.wikipedia.org/wiki/Precision_and_recall
    This function computes the accuracy using the input and label.
    If the correct label occurs in the top k predictions, correct is incremented by one.
    Note: the dtype of accuracy is determined by input. The input and label dtype can be different.
Args:
input(Variable): The input of accuracy layer, which is the predictions of network. A LoDTensor or Tensor with type float32,float64.
The shape is ``[sample_number, class_dim]`` .
label(Variable): The label of dataset. LoDTensor or Tensor with type int32,int64. The shape is ``[sample_number, 1]`` .
k(int): The top k predictions for each class will be checked. Data type is int64 or int32.
correct(Variable): The correct predictions count. A Tensor with type int64 or int32.
total(Variable): The total entries count. A tensor with type int64 or int32.
Returns:
Variable: The correct rate. A Tensor with type float32.
Examples:
.. code-block:: python
import paddle.fluid as fluid
import numpy as np
data = fluid.data(name="input", shape=[-1, 32, 32], dtype="float32")
label = fluid.data(name="label", shape=[-1,1], dtype="int")
fc_out = fluid.layers.fc(input=data, size=10)
predict = fluid.layers.softmax(input=fc_out)
result = fluid.layers.accuracy(input=predict, label=label, k=5)
place = fluid.CPUPlace()
exe = fluid.Executor(place)
exe.run(fluid.default_startup_program())
x = np.random.rand(3, 32, 32).astype("float32")
y = np.array([[1],[0],[1]])
output= exe.run(feed={"input": x,"label": y},
fetch_list=[result[0]])
print(output)
#[array([0.6666667], dtype=float32)]
"""
if in_dygraph_mode():
if correct is None:
correct = _varbase_creator(dtype="int32")
if total is None:
total = _varbase_creator(dtype="int32")
topk_out, topk_indices = nn.topk(input, k=k)
_acc, _, _ = core.ops.accuracy(topk_out, topk_indices, label, correct,
total)
return _acc
helper = LayerHelper("accuracy", **locals())
check_variable_and_dtype(input, 'input', ['float16', 'float32', 'float64'],
'accuracy')
topk_out, topk_indices = nn.topk(input, k=k)
acc_out = helper.create_variable_for_type_inference(dtype="float32")
if correct is None:
correct = helper.create_variable_for_type_inference(dtype="int32")
if total is None:
total = helper.create_variable_for_type_inference(dtype="int32")
helper.append_op(
type="accuracy",
inputs={
"Out": [topk_out],
"Indices": [topk_indices],
"Label": [label]
},
outputs={
"Accuracy": [acc_out],
"Correct": [correct],
"Total": [total],
})
return acc_out
def auc(input,
label,
curve='ROC',
num_thresholds=2**12 - 1,
topk=1,
slide_steps=1):
r"""
**Area Under the Curve (AUC) Layer**
This implementation computes the AUC according to forward output and label.
It is used very widely in binary classification evaluation.
Note: If input label contains values other than 0 and 1, it will be cast
to `bool`. Find the relevant definitions `here <https://en.wikipedia.org\
/wiki/Receiver_operating_characteristic#Area_under_the_curve>`_.
There are two types of possible curves:
1. ROC: Receiver operating characteristic;
2. PR: Precision Recall
Args:
input(Variable): A floating-point 2D Variable, values are in the range
[0, 1]. Each row is sorted in descending order. This
input should be the output of topk. Typically, this
Variable indicates the probability of each label.
A LoDTensor or Tensor with type float32,float64.
label(Variable): A 2D int Variable indicating the label of the training
data. The height is batch size and width is always 1.
A LoDTensor or Tensor with type int32,int64.
curve(str): Curve type, can be 'ROC' or 'PR'. Default 'ROC'.
        num_thresholds(int): The number of thresholds to use when discretizing
                             the ROC curve. Default 2**12 - 1 (4095).
topk(int): only topk number of prediction output will be used for auc.
        slide_steps(int): When computing the batch AUC, a sliding window of previous steps can be used in addition to the current step. slide_steps=1 means use only the current step, slide_steps=3 means use the current step and the previous two steps, and slide_steps=0 means use all steps.
Returns:
Variable: A tuple representing the current AUC.
The return tuple is auc_out, batch_auc_out, [
batch_stat_pos, batch_stat_neg, stat_pos, stat_neg ]
Data type is Tensor, supporting float32, float64.
Examples:
.. code-block:: python
import paddle.fluid as fluid
import numpy as np
data = fluid.data(name="input", shape=[-1, 32,32], dtype="float32")
label = fluid.data(name="label", shape=[-1], dtype="int")
fc_out = fluid.layers.fc(input=data, size=2)
predict = fluid.layers.softmax(input=fc_out)
result=fluid.layers.auc(input=predict, label=label)
place = fluid.CPUPlace()
exe = fluid.Executor(place)
exe.run(fluid.default_startup_program())
x = np.random.rand(3,32,32).astype("float32")
y = np.array([1,0,1])
output= exe.run(feed={"input": x,"label": y},
fetch_list=[result[0]])
print(output)
#[array([0.5])]
"""
helper = LayerHelper("auc", **locals())
check_variable_and_dtype(input, 'input', ['float32', 'float64'], 'auc')
check_variable_and_dtype(label, 'label', ['int32', 'int64'], 'auc')
auc_out = helper.create_variable_for_type_inference(dtype="float64")
batch_auc_out = helper.create_variable_for_type_inference(dtype="float64")
    # make tp, tn, fp, fn persistable so that they accumulate across all batches.
    # for batch auc
    # we create slide_steps + 1 buckets: the first slide_steps buckets store
    # historical batch-level values, and the last bucket stores the sum of the
    # previous slide_steps buckets.
    # The bucket used by the newest batch is determined by batch_id mod slide_steps,
    # and batch_id is stored in the last position of the following variables.
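    # Illustrative layout (added comment, not from the original source): with
    # slide_steps=3 and B = num_thresholds + 1 bins per bucket, each stat
    # variable below holds [bucket0 | bucket1 | bucket2 | sum bucket | batch_id],
    # i.e. (1 + slide_steps) * B + 1 elements, which matches the shapes used here.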
batch_stat_pos = helper.create_global_variable(
persistable=True,
dtype='int64',
shape=[(1 + slide_steps) * (num_thresholds + 1) + 1])
batch_stat_neg = helper.create_global_variable(
persistable=True,
dtype='int64',
shape=[(1 + slide_steps) * (num_thresholds + 1) + 1])
# for global auc
    # No need to maintain the batch id
stat_pos = helper.create_global_variable(
persistable=True, dtype='int64', shape=[1, num_thresholds + 1])
stat_neg = helper.create_global_variable(
persistable=True, dtype='int64', shape=[1, num_thresholds + 1])
for var in [batch_stat_pos, batch_stat_neg, stat_pos, stat_neg]:
helper.set_variable_initializer(
var, Constant(
value=0.0, force_cpu=False))
# Batch AUC
helper.append_op(
type="auc",
inputs={
"Predict": [input],
"Label": [label],
"StatPos": [batch_stat_pos],
"StatNeg": [batch_stat_neg]
},
attrs={
"curve": curve,
"num_thresholds": num_thresholds,
"slide_steps": slide_steps
},
outputs={
"AUC": [batch_auc_out],
"StatPosOut": [batch_stat_pos],
"StatNegOut": [batch_stat_neg]
})
# Global AUC
helper.append_op(
type="auc",
inputs={
"Predict": [input],
"Label": [label],
"StatPos": [stat_pos],
"StatNeg": [stat_neg]
},
attrs={
"curve": curve,
"num_thresholds": num_thresholds,
"slide_steps": 0
},
outputs={
"AUC": [auc_out],
"StatPosOut": [stat_pos],
"StatNegOut": [stat_neg]
})
return auc_out, batch_auc_out, [
batch_stat_pos, batch_stat_neg, stat_pos, stat_neg
]
| 39.09127
| 258
| 0.617501
|
3fad4eec46ea2d6b02d515d89bff8163657fb518
| 739
|
py
|
Python
|
configs/liteflownet2/liteflownet2_pre_M4S4R4_8x1_flyingchairs_320x448.py
|
hologerry/mmflow
|
40caf064851bd95317424e31cc137c0007a2bece
|
[
"Apache-2.0"
] | 481
|
2021-11-16T07:04:23.000Z
|
2022-03-31T22:21:21.000Z
|
configs/liteflownet2/liteflownet2_pre_M4S4R4_8x1_flyingchairs_320x448.py
|
hologerry/mmflow
|
40caf064851bd95317424e31cc137c0007a2bece
|
[
"Apache-2.0"
] | 72
|
2021-11-16T12:25:55.000Z
|
2022-03-28T13:10:45.000Z
|
configs/liteflownet2/liteflownet2_pre_M4S4R4_8x1_flyingchairs_320x448.py
|
hologerry/mmflow
|
40caf064851bd95317424e31cc137c0007a2bece
|
[
"Apache-2.0"
] | 48
|
2021-11-16T06:48:46.000Z
|
2022-03-30T12:46:40.000Z
|
_base_ = [
'../_base_/models/liteflownet2/liteflownet2_pre_M4S4R4.py',
'../_base_/datasets/flyingchairs_320x448.py',
'../_base_/default_runtime.py'
]
optimizer = dict(type='Adam', lr=1e-4, weight_decay=0.0004, betas=(0.9, 0.999))
optimizer_config = dict(grad_clip=None)
# learning policy
lr_config = dict(
policy='step', by_epoch=False, gamma=0.5, step=[120000, 160000, 200000])
runner = dict(type='IterBasedRunner', max_iters=240000)
checkpoint_config = dict(by_epoch=False, interval=40000)
evaluation = dict(interval=40000, metric='EPE')
# Weights are initialized from model of previous stage
load_from = 'https://download.openmmlab.com/mmflow/liteflownet2/liteflownet2_pre_M5S5R5_8x1_flyingchairs_320x448.pth' # noqa
| 41.055556
| 125
| 0.756428
|
76984a93b2c732b02e1fefb6db23bdb08a9ef297
| 884
|
py
|
Python
|
tfxc/bigquery.py
|
sfujiwara/tfxc
|
5469862e7c6bdac89edb0bd7cbc1808b8c7e7665
|
[
"MIT"
] | null | null | null |
tfxc/bigquery.py
|
sfujiwara/tfxc
|
5469862e7c6bdac89edb0bd7cbc1808b8c7e7665
|
[
"MIT"
] | null | null | null |
tfxc/bigquery.py
|
sfujiwara/tfxc
|
5469862e7c6bdac89edb0bd7cbc1808b8c7e7665
|
[
"MIT"
] | null | null | null |
from google.cloud import bigquery
from tfx.types.experimental.simple_artifacts import Dataset
from tfx import v1 as tfx
# TODO(sfujiwara): Automatically create dataset if it does not exist.
@tfx.dsl.components.component
def BigQueryTableGen(
project: tfx.dsl.components.Parameter[str],
query: tfx.dsl.components.Parameter[str],
destination: tfx.dsl.components.Parameter[str],
table: tfx.dsl.components.OutputArtifact[Dataset],
):
"""
A custom component for TFX Pipelines.
Executes query and saves the result to destination table.
"""
table.set_string_custom_property(key="table", value=destination)
client = bigquery.Client(project=project)
job_config = bigquery.job.QueryJobConfig(
destination=destination, write_disposition=bigquery.job.WriteDisposition.WRITE_TRUNCATE
)
_ = client.query(query, job_config=job_config)
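# Hypothetical usage sketch (added; not part of the original module). Project,
# query and table names below are placeholders:
#
#   table_gen = BigQueryTableGen(
#       project="my-gcp-project",
#       query="SELECT * FROM `my-gcp-project.raw.events`",
#       destination="my-gcp-project.staging.events_snapshot",
#   )
#
# The resulting table_gen.outputs["table"] artifact then carries the destination
# table name as a custom property for downstream components.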
| 34
| 95
| 0.757919
|
93b98023e6d9551444c949db1eb1b44b1863a6d9
| 19,641
|
py
|
Python
|
indico/util/mdx_latex.py
|
EdverCompany/indico
|
c4b5e7b2e3a47355d850a342ed527c09334ef336
|
[
"MIT"
] | null | null | null |
indico/util/mdx_latex.py
|
EdverCompany/indico
|
c4b5e7b2e3a47355d850a342ed527c09334ef336
|
[
"MIT"
] | 5
|
2021-04-08T19:26:47.000Z
|
2022-01-24T16:30:18.000Z
|
indico/util/mdx_latex.py
|
EdverCompany/indico
|
c4b5e7b2e3a47355d850a342ed527c09334ef336
|
[
"MIT"
] | 2
|
2019-02-24T17:29:10.000Z
|
2021-04-08T19:23:27.000Z
|
# This file is part of Indico.
# Copyright (C) 2002 - 2021 CERN
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see the
# LICENSE file for more details.
"""Extension to python-markdown to support LaTeX (rather than html) output.
Authored by Rufus Pollock: <http://www.rufuspollock.org/>
Reworked by Julian Wulfheide (ju.wulfheide@gmail.com) and
Indico Project (indico-team@cern.ch)
Usage:
======
1. Command Line. A script entitled markdown2latex.py is automatically
installed. For details of usage see help::
$ markdown2latex.py -h
2. As a python-markdown extension::
>>> import markdown
>>> md = markdown.Markdown(None, extensions=['latex'])
>>> # text is input string ...
>>> latex_out = md.convert(text)
3. Directly as a module (slight inversion of std markdown extension setup)::
>>> import markdown
>>> import mdx_latex
>>> md = markdown.Markdown()
>>> latex_mdx = mdx_latex.LaTeXExtension()
>>> latex_mdx.extendMarkdown(md, markdown.__dict__)
>>> out = md.convert(text)
History
=======
Version: 1.0 (November 15, 2006)
* First working version (compatible with markdown 1.5)
* Includes support for tables
Version: 1.1 (January 17, 2007)
* Support for verbatim and images
Version: 1.2 (June 2008)
* Refactor as an extension.
* Make into a proper python/setuptools package.
* Tested with markdown 1.7 but should work with 1.6 and (possibly) 1.5
(though pre/post processor stuff not as worked out there)
Version 1.3: (July 2008)
* Improvements to image output (width)
Version 1.3.1: (August 2009)
* Tiny bugfix to remove duplicate keyword argument and set zip_safe=False
* Add [width=\textwidth] by default for included images
Version 2.0: (June 2011)
* PEP8 cleanup
* Major rework since this was broken by new Python-Markdown releases
Version 2.1: (August 2013)
* Add handler for non locally referenced images, hyperlinks and horizontal rules
* Update math delimiters
"""
import os
import re
import textwrap
import uuid
from io import BytesIO
from mimetypes import guess_extension
from tempfile import NamedTemporaryFile
from urllib.parse import urlparse
from xml.etree import ElementTree as etree
import markdown
import requests
from lxml.html import html5parser
from PIL import Image
from requests.exceptions import ConnectionError, InvalidURL
__version__ = '2.1'
start_single_quote_re = re.compile(r"""(^|\s|")'""")
start_double_quote_re = re.compile(r'''(^|\s|'|`)"''')
end_double_quote_re = re.compile(r'"(,|\.|\s|$)')
Image.init()
IMAGE_FORMAT_EXTENSIONS = {format: ext for (ext, format) in Image.EXTENSION.items()}
safe_mathmode_commands = {
'above', 'abovewithdelims', 'acute', 'aleph', 'alpha', 'amalg', 'And', 'angle', 'approx', 'arccos', 'arcsin',
'arctan', 'arg', 'array', 'Arrowvert', 'arrowvert', 'ast', 'asymp', 'atop', 'atopwithdelims', 'backslash',
'backslash', 'bar', 'Bbb', 'begin', 'beta', 'bf', 'Big', 'big', 'bigcap', 'bigcirc', 'bigcup', 'Bigg', 'bigg',
'Biggl', 'biggl', 'Biggm', 'biggm', 'Biggr', 'biggr', 'Bigl', 'bigl', 'Bigm', 'bigm', 'bigodot', 'bigoplus',
'bigotimes', 'Bigr', 'bigr', 'bigsqcup', 'bigtriangledown', 'bigtriangleup', 'biguplus', 'bigvee', 'bigwedge',
'bmod', 'bot', 'bowtie', 'brace', 'bracevert', 'brack', 'breve', 'buildrel', 'bullet', 'cap', 'cases', 'cdot',
'cdotp', 'cdots', 'check', 'chi', 'choose', 'circ', 'clubsuit', 'colon', 'cong', 'coprod', 'cos', 'cosh', 'cot',
'coth', 'cr', 'csc', 'cup', 'dagger', 'dashv', 'ddagger', 'ddot', 'ddots', 'deg', 'Delta', 'delta', 'det',
'diamond', 'diamondsuit', 'dim', 'displaylines', 'displaystyle', 'div', 'dot', 'doteq', 'dots', 'dotsb', 'dotsc',
'dotsi', 'dotsm', 'dotso', 'Downarrow', 'downarrow', 'ell', 'emptyset', 'end', 'enspace', 'epsilon', 'eqalign',
'eqalignno', 'equiv', 'eta', 'exists', 'exp', 'fbox', 'flat', 'forall', 'frac', 'frak', 'frown', 'Gamma', 'gamma',
'gcd', 'ge', 'geq', 'gets', 'gg', 'grave', 'gt', 'gt', 'hat', 'hbar', 'hbox', 'hdashline', 'heartsuit', 'hline',
'hom', 'hookleftarrow', 'hookrightarrow', 'hphantom', 'hskip', 'hspace', 'Huge', 'huge', 'iff', 'iiint', 'iint',
'Im', 'imath', 'in', 'inf', 'infty', 'int', 'intop', 'iota', 'it', 'jmath', 'kappa', 'ker', 'kern', 'Lambda',
'lambda', 'land', 'langle', 'LARGE', 'Large', 'large', 'LaTeX', 'lbrace', 'lbrack', 'lceil', 'ldotp', 'ldots', 'le',
'left', 'Leftarrow', 'leftarrow', 'leftharpoondown', 'leftharpoonup', 'Leftrightarrow', 'leftrightarrow',
'leftroot', 'leq', 'leqalignno', 'lfloor', 'lg', 'lgroup', 'lim', 'liminf', 'limits', 'limsup', 'll', 'llap',
'lmoustache', 'ln', 'lnot', 'log', 'Longleftarrow', 'longleftarrow', 'Longleftrightarrow', 'longleftrightarrow',
'longmapsto', 'Longrightarrow', 'longrightarrow', 'lor', 'lower', 'lt', 'lt', 'mapsto', 'mathbb', 'mathbf',
'mathbin', 'mathcal', 'mathclose', 'mathfrak', 'mathinner', 'mathit', 'mathop', 'mathopen', 'mathord', 'mathpunct',
'mathrel', 'mathrm', 'mathscr', 'mathsf', 'mathstrut', 'mathtt', 'matrix', 'max', 'mbox', 'mid', 'middle', 'min',
'mit', 'mkern', 'mod', 'models', 'moveleft', 'moveright', 'mp', 'mskip', 'mspace', 'mu', 'nabla', 'natural', 'ne',
'nearrow', 'neg', 'negthinspace', 'neq', 'newline', 'ni', 'nolimits', 'normalsize', 'not', 'notin', 'nu', 'nwarrow',
'odot', 'oint', 'oldstyle', 'Omega', 'omega', 'omicron', 'ominus', 'oplus', 'oslash', 'otimes', 'over', 'overbrace',
'overleftarrow', 'overleftrightarrow', 'overline', 'overrightarrow', 'overset', 'overwithdelims', 'owns',
'parallel', 'partial', 'perp', 'phantom', 'Phi', 'phi', 'Pi', 'pi', 'pm', 'pmatrix', 'pmb', 'pmod', 'pod', 'Pr',
'prec', 'preceq', 'prime', 'prod', 'propto', 'Psi', 'psi', 'qquad', 'quad', 'raise', 'rangle', 'rbrace', 'rbrack',
'rceil', 'Re', 'rfloor', 'rgroup', 'rho', 'right', 'Rightarrow', 'rightarrow', 'rightharpoondown', 'rightharpoonup',
'rightleftharpoons', 'rlap', 'rm', 'rmoustache', 'root', 'S', 'scr', 'scriptscriptstyle', 'scriptsize',
'scriptstyle', 'searrow', 'sec', 'setminus', 'sf', 'sharp', 'Sigma', 'sigma', 'sim', 'simeq', 'sin', 'sinh', 'skew',
'small', 'smallint', 'smash', 'smile', 'Space', 'space', 'spadesuit', 'sqcap', 'sqcup', 'sqrt', 'sqsubseteq',
'sqsupseteq', 'stackrel', 'star', 'strut', 'subset', 'subseteq', 'succ', 'succeq', 'sum', 'sup', 'supset',
'supseteq', 'surd', 'swarrow', 'tan', 'tanh', 'tau', 'TeX', 'text', 'textbf', 'textit', 'textrm', 'textsf',
'textstyle', 'texttt', 'Theta', 'theta', 'thinspace', 'tilde', 'times', 'tiny', 'to', 'top', 'triangle',
'triangleleft', 'triangleright', 'tt', 'underbrace', 'underleftarrow', 'underleftrightarrow', 'underline',
'underrightarrow', 'underset', 'Uparrow', 'uparrow', 'Updownarrow', 'updownarrow', 'uplus', 'uproot', 'Upsilon',
'upsilon', 'varepsilon', 'varphi', 'varpi', 'varrho', 'varsigma', 'vartheta', 'vcenter', 'vdash', 'vdots', 'vec',
'vee', 'Vert', 'vert', 'vphantom', 'wedge', 'widehat', 'widetilde', 'wp', 'wr', 'Xi', 'xi', 'zeta', '\\'
}
class ImageURLException(Exception):
pass
def unescape_html_entities(text):
out = text.replace('&', '&')
out = out.replace('<', '<')
out = out.replace('>', '>')
out = out.replace('"', '"')
return out
def latex_escape(text, ignore_math=True, ignore_braces=False):
if text is None:
return ''
chars = {
'#': r'\#',
'$': r'\$',
'%': r'\%',
'&': r'\&',
'~': r'\~{}',
'_': r'\_',
'^': r'\^{}',
'\\': r'\textbackslash{}',
'\x0c': '',
'\x0b': ''
}
if not ignore_braces:
chars.update({
'{': r'\{',
'}': r'\}'})
math_segments = []
def substitute(x):
return chars[x.group()]
math_placeholder = f'[*LaTeXmath-{str(uuid.uuid4())}*]'
def math_replace(m):
math_segments.append(m.group(0))
return math_placeholder
if ignore_math:
# Extract math-mode segments and replace with placeholder
text = re.sub(r'\$[^\$]+\$|\$\$(^\$)\$\$', math_replace, text)
pattern = re.compile('|'.join(re.escape(k) for k in chars.keys()))
res = pattern.sub(substitute, text)
if ignore_math:
# Sanitize math-mode segments and put them back in place
math_segments = list(map(sanitize_mathmode, math_segments))
res = re.sub(re.escape(math_placeholder), lambda _: '\\protect ' + math_segments.pop(0), res)
return res
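# Illustrative example (added): with ignore_math=True the math-mode segment is
# pulled out before escaping and restored afterwards, so e.g.
#   latex_escape('50% of $x_i$')  ->  '50\% of \protect $x_i$'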
def sanitize_mathmode(text):
def _escape_unsafe_command(m):
command = m.group(1)
return m.group(0) if command in safe_mathmode_commands else r'\\' + command
return re.sub(r'\\([a-zA-Z]+|\\)', _escape_unsafe_command, text)
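# Illustrative example (added): commands on the safe list are kept, anything
# else has its backslash doubled so it renders as literal text, e.g.
#   sanitize_mathmode(r'\alpha + \input{x}')  ->  r'\alpha + \\input{x}'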
def escape_latex_entities(text):
"""Escape latex reserved characters."""
out = text
out = unescape_html_entities(out)
out = start_single_quote_re.sub(r'\g<1>`', out)
out = start_double_quote_re.sub(r'\g<1>``', out)
out = end_double_quote_re.sub(r"''\g<1>", out)
out = latex_escape(out)
return out
def unescape_latex_entities(text):
"""Limit ourselves as this is only used for maths stuff."""
out = text
out = out.replace('\\&', '&')
return out
def latex_render_error(message):
"""Generate nice error box in LaTeX document.
:param message: The error message
:returns: LaTeX code for error box
"""
return textwrap.dedent(r'''
\begin{tcolorbox}[width=\textwidth,colback=red!5!white,colframe=red!75!black,title={Indico rendering error}]
\begin{verbatim}%s\end{verbatim}
\end{tcolorbox}''' % latex_escape(message))
def latex_render_image(src, alt, tmpdir, strict=False):
"""Generate LaTeX code that includes an arbitrary image from a URL.
This involves fetching the image from a web server and figuring out its
MIME type. A temporary file will be created, which is not immediately
deleted since it has to be included in the LaTeX code. It should be handled
by the enclosing code.
:param src: source URL of the image
:param alt: text to use as ``alt="..."``
:param tmpdir: the directory where to put any temporary files
:param strict: whether a faulty URL should break the whole process
:returns: a ``(latex_code, file_path)`` tuple, containing the LaTeX code
and path to the temporary image file.
"""
try:
if urlparse(src).scheme not in ('http', 'https'):
raise ImageURLException(f'URL scheme not supported: {src}')
else:
try:
resp = requests.get(src, verify=False, timeout=5)
except InvalidURL:
raise ImageURLException(f"Cannot understand URL '{src}'")
except (requests.Timeout, ConnectionError):
raise ImageURLException(f'Problem downloading image ({src})')
except requests.TooManyRedirects:
raise ImageURLException(f'Too many redirects downloading image ({src})')
extension = None
if resp.status_code != 200:
raise ImageURLException(f'[{resp.status_code}] Error fetching image')
if resp.headers.get('content-type'):
extension = guess_extension(resp.headers['content-type'])
# as incredible as it might seem, '.jpe' will be the answer in some Python environments
if extension == '.jpe':
extension = '.jpg'
if not extension:
try:
# Try to use PIL to get file type
image = Image.open(BytesIO(resp.content))
# Worst case scenario, assume it's PNG
extension = IMAGE_FORMAT_EXTENSIONS.get(image.format, '.png')
except OSError:
raise ImageURLException('Cannot read image data. Maybe not an image file?')
with NamedTemporaryFile(prefix='indico-latex-', suffix=extension, dir=tmpdir, delete=False) as tempfile:
tempfile.write(resp.content)
except ImageURLException as exc:
if strict:
raise
else:
return latex_render_error(f'Could not include image: {exc}'), None
    # Using the graphicx and adjustbox packages for *max width*
return (textwrap.dedent(r'''
\begin{figure}[H]
\centering
\includegraphics[max width=\linewidth]{%s}
\caption{%s}
\end{figure}
''' % (os.path.basename(tempfile.name), latex_escape(alt))), tempfile.name)
def makeExtension(configs=None):
return LaTeXExtension(configs=configs)
class LaTeXExtension(markdown.Extension):
def __init__(self, configs=None):
self.configs = configs
self.reset()
def extendMarkdown(self, md, md_globals):
self.md = md
# remove escape pattern -- \\(.*) -- as this messes up any embedded
# math and we don't need to escape stuff any more for html
self.md.inlinePatterns.deregister('escape')
latex_tp = LaTeXTreeProcessor(self.configs)
math_pp = MathTextPostProcessor()
link_pp = LinkTextPostProcessor()
unescape_html_pp = UnescapeHtmlTextPostProcessor()
md.treeprocessors.register(latex_tp, 'latex', md.treeprocessors._priority[-1].priority - 1)
md.postprocessors.register(unescape_html_pp, 'unescape_html', md.postprocessors._priority[-1].priority - 1)
md.postprocessors.register(math_pp, 'math', md.postprocessors._priority[-1].priority - 1)
md.postprocessors.register(link_pp, 'link', md.postprocessors._priority[-1].priority - 1)
# Needed for LaTeX postprocessors not to choke on URL-encoded urls
md.inlinePatterns.register(NonEncodedAutoMailPattern(markdown.inlinepatterns.AUTOMAIL_RE, md), 'automail', 110)
def reset(self):
pass
class NonEncodedAutoMailPattern(markdown.inlinepatterns.Pattern):
"""Reimplementation of AutoMailPattern to avoid URL-encoded links."""
def handleMatch(self, m):
el = etree.Element('a')
email = self.unescape(m.group(2))
        email = email.removeprefix('mailto:')
el.text = markdown.util.AtomicString(''.join(email))
el.set('href', f'mailto:{email}')
return el
class LaTeXTreeProcessor(markdown.treeprocessors.Treeprocessor):
def __init__(self, configs):
self.configs = configs
def run(self, doc):
"""
Walk the dom converting relevant nodes to text nodes with relevant
content.
"""
latex_text = self.tolatex(doc)
doc.clear()
doc.text = latex_text
def tolatex(self, ournode):
buffer = ''
subcontent = ''
if ournode.text:
subcontent += escape_latex_entities(ournode.text)
for child in ournode:
subcontent += self.tolatex(child)
if ournode.tag == 'h1':
buffer += '\n\n\\section{%s}\n' % subcontent
elif ournode.tag == 'h2':
buffer += '\n\n\\subsection{%s}\n' % subcontent
elif ournode.tag == 'h3':
buffer += '\n\\subsubsection{%s}\n' % subcontent
elif ournode.tag == 'h4':
buffer += '\n\\paragraph{%s}\n' % subcontent
elif ournode.tag == 'hr':
buffer += r'\noindent\makebox[\linewidth]{\rule{\paperwidth}{0.4pt}}'
elif ournode.tag == 'ul':
# no need for leading \n as one will be provided by li
buffer += '''
\\begin{itemize}%s
\\end{itemize}
''' % subcontent
elif ournode.tag == 'ol':
# no need for leading \n as one will be provided by li
buffer += '''
\\begin{enumerate}%s
\\end{enumerate}
''' % subcontent
elif ournode.tag == 'li':
buffer += '''
\\item %s''' % subcontent.strip()
elif ournode.tag == 'blockquote':
# use quotation rather than quote as quotation can support multiple
# paragraphs
buffer += '''
\\begin{quotation}
%s
\\end{quotation}
''' % subcontent.strip()
# ignore 'code' when inside pre tags
# (mkdn produces <pre><code></code></pre>)
elif (ournode.tag == 'pre' or (ournode.tag == 'pre' and ournode.parentNode.tag != 'pre')):
buffer += '''
\\begin{verbatim}
%s
\\end{verbatim}
''' % subcontent.strip()
elif ournode.tag == 'q':
buffer += "`%s'" % subcontent.strip()
elif ournode.tag == 'p':
if self.configs.get('apply_br'):
subcontent = subcontent.replace('\n', '\\\\\\relax\n')
buffer += '\n%s\n' % subcontent.strip()
elif ournode.tag == 'strong':
buffer += '\\textbf{%s}' % subcontent.strip()
elif ournode.tag == 'em':
buffer += '\\emph{%s}' % subcontent.strip()
elif ournode.tag in ('table', 'thead', 'tbody', 'tr', 'th', 'td'):
raise RuntimeError('Unexpected table in markdown data for LaTeX')
elif ournode.tag == 'img':
buffer += latex_render_image(ournode.get('src'), ournode.get('alt'), tmpdir=self.configs.get('tmpdir'))[0]
elif ournode.tag == 'a':
# this one gets escaped in convert_link_to_latex
buffer += '<a href="{}">{}</a>'.format(ournode.get('href'), subcontent)
else:
buffer = subcontent
if ournode.tail:
buffer += escape_latex_entities(ournode.tail)
return buffer
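# Illustrative examples (added) of the mapping performed by tolatex above:
#   <h2>Results</h2>    -> \subsection{Results}
#   <em>emphasis</em>   -> \emph{emphasis}
#   <ul><li>x</li></ul> -> \begin{itemize} \item x \end{itemize}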
class UnescapeHtmlTextPostProcessor(markdown.postprocessors.Postprocessor):
def run(self, text):
return unescape_html_entities(text)
# ========================= MATH =================================
class MathTextPostProcessor(markdown.postprocessors.Postprocessor):
def run(self, instr):
"""
Convert all math sections in {text} whether latex, asciimathml or
latexmathml formatted to latex.
This assumes you are using $$ as your mathematics delimiter (*not* the
standard asciimathml or latexmathml delimiter).
"""
def repl_1(matchobj):
text = unescape_latex_entities(matchobj.group(1))
tmp = text.strip()
if tmp.startswith('\\[') or tmp.startswith('\\begin'):
return text
else:
return '\\[%s\\]\n' % text
def repl_2(matchobj):
text = unescape_latex_entities(matchobj.group(1))
return f'${text}${matchobj.group(2)}'
# $$ ..... $$
pat = re.compile(r'^\$\$([^$]*)\$\$\s*$', re.MULTILINE)
out = pat.sub(repl_1, instr)
# Jones, $x=3$, is ...
pat3 = re.compile(r'\$([^$]+)\$(\s|$)')
out = pat3.sub(repl_2, out)
# # $100 million
# pat2 = re.compile('([^\$])\$([^\$])')
# out = pat2.sub('\g<1>\\$\g<2>', out)
# some extras due to asciimathml
# out = out.replace('\\lt', '<')
# out = out.replace(' * ', ' \\cdot ')
# out = out.replace('\\del', '\\partial')
return out
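# Illustrative examples (added) of the math conversion above:
#   a standalone "$$x^2 + y$$" line becomes "\[x^2 + y\]"
#   inline math such as "$x=3$ " is left as inline math "$x=3$ "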
# ========================== LINKS =================================
class LinkTextPostProcessor(markdown.postprocessors.Postprocessor):
def run(self, instr):
new_blocks = [re.sub(r'<a[^>]*>([^<]+)</a>', lambda m: convert_link_to_latex(m.group(0)).strip(), block)
for block in instr.split('\n\n')]
return '\n\n'.join(new_blocks)
def convert_link_to_latex(instr):
dom = html5parser.fragment_fromstring(instr)
return '\\href{%s}{%s}' % (latex_escape(dom.get('href'), ignore_math=True), dom.text)
| 39.360721
| 120
| 0.600071
|
be23dabcf4cf24016633887ccb3fc264b5227895
| 14,011
|
py
|
Python
|
linkfinder.py
|
storenth/LinkFinder
|
f23e221aed9ed9733a9e667a0a5ec1fbd881c93d
|
[
"MIT"
] | null | null | null |
linkfinder.py
|
storenth/LinkFinder
|
f23e221aed9ed9733a9e667a0a5ec1fbd881c93d
|
[
"MIT"
] | null | null | null |
linkfinder.py
|
storenth/LinkFinder
|
f23e221aed9ed9733a9e667a0a5ec1fbd881c93d
|
[
"MIT"
] | null | null | null |
#! /usr/bin/python3
# Python 3
# LinkFinder
# By Gerben_Javado
# Powered by storenth
# Fix webbrowser bug for MacOS
import os
os.environ["BROWSER"] = "open"
# Import libraries
import re, sys, glob, html, argparse, jsbeautifier, webbrowser, subprocess, base64, ssl, xml.etree.ElementTree
from gzip import GzipFile
from string import Template
try:
from StringIO import StringIO
readBytesCustom = StringIO
except ImportError:
from io import BytesIO
readBytesCustom = BytesIO
try:
from urllib.request import Request, urlopen
except ImportError:
from urllib2 import Request, urlopen
# Regex used
regex_str = r"""
  (?:"|')                 # Start quote delimiter
(
((?:[a-zA-Z]{1,10}://|//) # Match a scheme [a-Z]*1-10 or //
[^\"'/]{1,}\. # Match a domainname (any character + dot)
[a-zA-Z]{2,}[^\"']{0,}) # The domainextension and/or path
|
((?:/|\.\./|\./) # Start with /,../,./
[^"'><,;| *()(%%$^/\\\[\]] # Next character can't be...
[^"'><,;|()]{1,}) # Rest of the characters can't be
|
([a-zA-Z0-9_\-/]{1,}/ # Relative endpoint with /
[a-zA-Z0-9_\-/]{1,} # Resource name
\.(?:[a-zA-Z]{1,4}|action) # Rest + extension (length 1-4 or action)
(?:[\?|#][^\"|\']{0,}|)) # ? or # mark with parameters
|
([a-zA-Z0-9_\-/]{1,}/ # REST API (no extension) with /
[a-zA-Z0-9_\-/]{3,} # Proper REST endpoints usually have 3+ chars
(?:[\?|#][^\"|\']{0,}|)) # ? or # mark with parameters
|
([a-zA-Z0-9_\-]{1,} # filename
\.(?:php|asp|aspx|jsp|json|
action|html|js|txt|xml) # . + extension
(?:[\?|#][^\"|\']{0,}|)) # ? or # mark with parameters
)
  (?:"|')                 # End quote delimiter
"""
context_delimiter_str = "\n"
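# Illustrative examples (added) of quoted strings each branch above can match:
#   "https://cdn.example.com/app.js"  full or protocol-relative URLs
#   "/api/v2/users"                   absolute or dot-relative paths
#   "static/js/app.js"                relative endpoints with an extension
#   "api/v2/users/list"               extension-less REST-style endpoints
#   "config.json"                     bare filenames with a known extension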
def parser_error(errmsg):
'''
Error Messages
'''
# print("Usage: python %s [Options] use -h for help" % sys.argv[0])
print("Error: {}".format(errmsg), file=sys.stderr)
sys.exit()
def parser_input(input):
'''
Parse Input
'''
# Method 1 - URL
if input.startswith(('http://', 'https://',
'file://', 'ftp://', 'ftps://')):
return [input]
# Method 2 - URL Inspector Firefox
if input.startswith('view-source:'):
return [input[12:]]
# Method 3 - Burp file
if args.burp:
jsfiles = []
items = xml.etree.ElementTree.fromstring(open(args.input, "r").read())
for item in items:
jsfiles.append({"js":base64.b64decode(item.find('response').text).decode('utf-8',"replace"), "url":item.find('url').text})
return jsfiles
# Method 4 - Folder with a wildcard
if "*" in input:
paths = glob.glob(os.path.abspath(input))
for index, path in enumerate(paths):
paths[index] = "file://%s" % path
return (paths if len(paths) > 0 else parser_error('Input with wildcard does \
not match any files.'))
# Method 5 - Local file
path = "file://%s" % os.path.abspath(input)
return [path if os.path.exists(input) else parser_error("file could not \
be found (maybe you forgot to add http/https).")]
def send_request(url):
'''
    Send a request with urllib
'''
q = Request(url)
q.add_header('User-Agent', 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) \
AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.36')
q.add_header('Accept', 'text/html,\
application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8')
q.add_header('Accept-Language', 'en-US,en;q=0.8')
q.add_header('Accept-Encoding', 'gzip')
q.add_header('Cookie', args.cookies)
try:
sslcontext = ssl._create_unverified_context()
response = urlopen(q, timeout=args.timeout, context=sslcontext)
except Exception as err:
raise Exception(err)
if response.info().get('Content-Encoding') == 'gzip':
data = GzipFile(fileobj=readBytesCustom(response.read())).read()
elif response.info().get('Content-Encoding') == 'deflate':
        # The request only advertises gzip, so a deflate response is not
        # expected; read the body directly (calling .read() twice would fail).
        data = response.read()
else:
data = response.read()
return data.decode('utf-8', 'replace')
def getContext(list_matches, content, include_delimiter=0, context_delimiter_str="\n"):
'''
    Get the surrounding context for each match.
    list_matches: list of tuples (link, start_index, end_index)
    content: content to search for the context
    include_delimiter: set to 1 to include the delimiter in the context
'''
items = []
for m in list_matches:
match_str = m[0]
match_start = m[1]
match_end = m[2]
context_start_index = match_start
context_end_index = match_end
delimiter_len = len(context_delimiter_str)
content_max_index = len(content) - 1
while content[context_start_index] != context_delimiter_str and context_start_index > 0:
context_start_index = context_start_index - 1
while content[context_end_index] != context_delimiter_str and context_end_index < content_max_index:
context_end_index = context_end_index + 1
if include_delimiter:
context = content[context_start_index: context_end_index]
else:
context = content[context_start_index + delimiter_len: context_end_index]
item = {
"link": match_str,
"context": context
}
items.append(item)
return items
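# Illustrative example (added): for content 'foo\nvar u = "/api/x";\nbar' and a
# match ('/api/x', 12, 20), getContext returns
#   [{"link": "/api/x", "context": 'var u = "/api/x";'}]
# i.e. the whole line containing the match, without the newline delimiters.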
def parser_file(content, regex_str, mode=1, more_regex=None, no_dup=1):
'''
    Parse the content for endpoints matching regex_str.
content: string of content to be searched
regex_str: string of regex (The link should be in the group(1))
mode: mode of parsing. Set 1 to include surrounding contexts in the result
more_regex: string of regex to filter the result
no_dup: remove duplicated link (context is NOT counted)
    Return a list of dicts: {"link": link, "context": context}
    The context is included only when mode=1.
'''
global context_delimiter_str
if mode == 1:
# Beautify
if len(content) > 1000000:
content = content.replace(";",";\r\n").replace(",",",\r\n")
else:
content = jsbeautifier.beautify(content)
regex = re.compile(regex_str, re.VERBOSE)
if mode == 1:
all_matches = [(m.group(1), m.start(0), m.end(0)) for m in re.finditer(regex, content)]
items = getContext(all_matches, content, context_delimiter_str=context_delimiter_str)
else:
items = [{"link": m.group(1)} for m in re.finditer(regex, content)]
if no_dup:
# Remove duplication
all_links = set()
no_dup_items = []
for item in items:
if item["link"] not in all_links:
all_links.add(item["link"])
no_dup_items.append(item)
items = no_dup_items
# Match Regex
filtered_items = []
for item in items:
# Remove other capture groups from regex results
if more_regex:
if re.search(more_regex, item["link"]):
filtered_items.append(item)
else:
filtered_items.append(item)
return filtered_items
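# Illustrative example (added), using the module-level regex_str:
#   parser_file('var u = "/api/v1/users";', regex_str, mode=0)
#   -> [{"link": "/api/v1/users"}]
# With mode=1 the content is beautified first and each item also carries a
# "context" key holding the surrounding line.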
def cli_output(endpoints, url=None):
'''
Output to CLI
'''
for endpoint in endpoints:
if url:
print("[{}]".format(url), end=' ')
print(html.escape(endpoint["link"]).encode(
'ascii', 'ignore').decode('utf8'))
def html_save(html):
'''
Save as HTML file and open in the browser
'''
hide = os.dup(1)
os.close(1)
os.open(os.devnull, os.O_RDWR)
try:
s = Template(open('%s/template.html' % sys.path[0], 'r').read())
text_file = open(args.output, "wb")
text_file.write(s.substitute(content=html).encode('utf8'))
text_file.close()
print("URL to access output: file://%s" % os.path.abspath(args.output))
file = "file:///%s" % os.path.abspath(args.output)
if sys.platform == 'linux' or sys.platform == 'linux2':
subprocess.call(["xdg-open", file])
else:
webbrowser.open(file)
except Exception as e:
print("Output can't be saved in %s \
due to exception: %s" % (args.output, e))
finally:
os.dup2(hide, 1)
def check_url(url):
nopelist = ["node_modules", "jquery.js"]
if url[-3:] == ".js":
words = url.split("/")
for word in words:
if word in nopelist:
return False
if url[:2] == "//":
url = "https:" + url
if url[:4] != "http":
if url[:1] == "/":
url = args.input + url
else:
url = args.input + "/" + url
return url
else:
return False
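# Hedged examples for check_url (assuming args.input is "https://example.com"):
#   check_url("//cdn.example.com/app.js")  -> "https://cdn.example.com/app.js"
#   check_url("/static/app.js")            -> "https://example.com/static/app.js"
#   check_url("node_modules/jquery.js")    -> False  (blocked by the nopelist)
#   check_url("style.css")                 -> False  (not a .js file)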
if __name__ == "__main__":
# Parse command line
parser = argparse.ArgumentParser()
parser.add_argument("-d", "--domain",
help="Input a domain to recursively parse all javascript located in a page",
action="store_true")
parser.add_argument("-i", "--input",
help="Input a: URL, file or folder. \
For folders a wildcard can be used (e.g. '/*.js').",
required="True", action="store")
parser.add_argument("-o", "--output",
help="Where to save the file, \
including file name. Default: output.html",
action="store", default="output.html")
parser.add_argument("-r", "--regex",
help="RegEx for filtering purposes \
against found endpoint (e.g. ^/api/)",
action="store")
parser.add_argument("-b", "--burp",
help="",
action="store_true")
parser.add_argument("-c", "--cookies",
help="Add cookies for authenticated JS files",
action="store", default="")
default_timeout = 10
parser.add_argument("-t", "--timeout",
help="How many seconds to wait for the server to send data before giving up (default: " + str(default_timeout) + " seconds)",
default=default_timeout, type=int, metavar="<seconds>")
args = parser.parse_args()
if args.input[-1:] == "/":
args.input = args.input[:-1]
mode = 1
if args.output == "cli":
mode = 0
# Convert input to URLs or JS files
urls = parser_input(args.input)
# Convert URLs to JS
output = ''
for url in urls:
if not args.burp:
try:
file = send_request(url)
except Exception as err:
errmsg = " ".join([url, str(err)])
parser_error(errmsg)
else:
file = url['js']
url = url['url']
endpoints = parser_file(file, regex_str, mode, args.regex)
if args.domain:
for endpoint in endpoints:
endpoint = html.escape(endpoint["link"]).encode('ascii', 'ignore').decode('utf8')
endpoint = check_url(endpoint)
if endpoint is False:
continue
print("Running against: " + endpoint)
print("")
try:
file = send_request(endpoint)
new_endpoints = parser_file(file, regex_str, mode, args.regex)
if args.output == 'cli':
cli_output(new_endpoints)
else:
output += '''
<h1>File: <a href="%s" target="_blank" rel="nofollow noopener noreferrer">%s</a></h1>
''' % (html.escape(endpoint), html.escape(endpoint))
for endpoint2 in new_endpoints:
url = html.escape(endpoint2["link"])
header = "<div><a href='%s' class='text'>%s" % (
html.escape(url),
html.escape(url)
)
body = "</a><div class='container'>%s</div></div>" % html.escape(
endpoint2["context"]
)
body = body.replace(
html.escape(endpoint2["link"]),
"<span style='background-color:yellow'>%s</span>" %
html.escape(endpoint2["link"])
)
output += header + body
except Exception as e:
print("Invalid input defined or SSL error for: " + endpoint)
continue
if args.output == 'cli':
cli_output(endpoints, url)
else:
output += '''
<h1>File: <a href="%s" target="_blank" rel="nofollow noopener noreferrer">%s</a></h1>
''' % (html.escape(url), html.escape(url))
for endpoint in endpoints:
url = html.escape(endpoint["link"])
header = "<div><a href='%s' class='text'>%s" % (
html.escape(url),
html.escape(url)
)
body = "</a><div class='container'>%s</div></div>" % html.escape(
endpoint["context"]
)
body = body.replace(
html.escape(endpoint["link"]),
"<span style='background-color:yellow'>%s</span>" %
html.escape(endpoint["link"])
)
output += header + body
if args.output != 'cli':
html_save(output)
| 34.509852
| 149
| 0.523374
|
3fc5288cdbf71fe08846bd47f7b688da3b7df09e
| 4,741
|
py
|
Python
|
PySpectrograph/Spectrograph/Spectrograph.py
|
crawfordsm/pyspectrograph
|
4237ba4b4fe08a69e1d6487924d959f089ecca46
|
[
"BSD-3-Clause"
] | 18
|
2015-01-11T21:04:59.000Z
|
2021-08-06T18:30:47.000Z
|
PySpectrograph/Spectrograph/Spectrograph.py
|
crawfordsm/pyspectrograph
|
4237ba4b4fe08a69e1d6487924d959f089ecca46
|
[
"BSD-3-Clause"
] | 14
|
2015-04-23T09:39:16.000Z
|
2017-12-03T12:49:05.000Z
|
PySpectrograph/Spectrograph/Spectrograph.py
|
crawfordsm/pyspectrograph
|
4237ba4b4fe08a69e1d6487924d959f089ecca46
|
[
"BSD-3-Clause"
] | 5
|
2015-04-23T08:17:37.000Z
|
2019-06-22T13:36:47.000Z
|
"""Spectrograph is a class that general describes a spectrograph. This includes
describing the telescope, slit, collimator, grating, camera, and detector.
HISTORY
20090912 SMC First written by SM Crawford
Limitations:
-Still need to verify how alpha, grating angle, beta, and camera angle
to see if I can hardwire some of the tasks
"""
import math
from .SpectrographEquations import *
from .Grating import Grating
from .Optics import Optics
from .Slit import Slit
from .Detector import Detector
class Spectrograph(Grating, Optics, Slit, Detector):
"""A class describing a spectrograph and functions
related to a spectrograph. All angles are in degrees.
"""
def __init__(self, camang=45, gratang=45, grating=Grating(), camera=Optics(),
collimator=Optics(), telescope=Optics(), slit=Slit(),
detector=Detector()):
# initiate the grating
self.grating = grating
# initiate the telescope
self.telescope = telescope
# initiate the collimator
self.collimator = collimator
# initiate the camera
self.camera = camera
# initiate the slit
self.slit = slit
# initiate the detector
self.detector = detector
# set up the angles in the system
self.gratang = gratang
self.camang = camang
return
def alpha(self):
return self.gratang
def beta(self):
return self.camang - self.gratang
def gamma(self):
return self.gamma
def calc_wavelength(self, alpha, beta, gamma=0.0, nd=n_index):
"""Apply the grating equation to determine the wavelength
returns wavelength in mm
"""
w = gratingequation(self.grating.sigma, self.grating.order, self.grating.sign, alpha, beta, gamma=gamma, nd=nd)
return w
def calc_angdisp(self, beta):
"""Calculate the angular dispersion according to m/sigma/cos beta
returns angular dispersion in 1/mm
"""
A = calc_angdisp(self.grating.sigma, self.grating.order, beta)
return A
def calc_lindisp(self, beta):
"""Calculate the linear dispersion according to f_cam * A
return linear dispersion in mm/mm
"""
return calc_lindisp(self.camera.focallength, self.grating.sigma, self.grating.order, beta)
def calc_demagspatial(self):
"""Calculate the spatial demagnification
returns the spatial demagnification
"""
return calc_demagspatial(self.collimator.focallength, self.camera.focallength)
def calc_demagspectral(self, alpha, beta):
"""Calculate the spectral demagnification
returns the spectral demagnification
"""
        return self.calc_demagspatial() / calc_anamorph(alpha, beta)
def calc_spatslitimage(self):
"""Calculate the spatial extant of the slit image
return in mm
"""
return self.slit.width / self.calc_demagspatial()
def calc_specslitimage(self, beta):
"""Calculate the spectral extant of the slit image
return in mm
"""
return self.slit.width * self.calc_lindisp(beta)
def calc_resolelement(self, alpha, beta):
"""Calculate the resolution of a single element for a filled slit
return the wavelength resolution in mm
"""
dw = calc_resolelement(self.slit.width, self.collimator.focallength,
self.grating.sigma, self.grating.order,
alpha, beta)
return dw
def calc_resolution(self, w, alpha, beta):
"""Calculate the resolution at a given wavelength. w/dw
returns resolution
"""
return w / self.calc_resolelement(alpha, beta)
def calc_centralwavelength(self):
"""Calculate the central wavlength
return waveleng in mm
"""
return self.calc_wavelength(self.alpha(), -self.beta())
def calc_redwavelength(self):
"""For the detector, calculate the maximum red wavelength
Assume just the width of the detector
        return wavelength in mm
"""
dbeta = math.degrees(math.atan(0.5 * self.detector.width / self.camera.focallength))
return self.calc_wavelength(self.alpha(), -self.beta() - dbeta)
def calc_bluewavelength(self):
"""For the detector, calculate the maximum blue wavelength
Assume just the width of the detector
        return wavelength in mm
"""
dbeta = math.degrees(math.atan(0.5 * self.detector.width / self.camera.focallength))
return self.calc_wavelength(self.alpha(), -self.beta() + dbeta)
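# Hedged usage sketch (not part of the original module): with the default
# component objects, the central wavelength follows from the grating equation
# evaluated at alpha = gratang and beta = camang - gratang, e.g.
#   spec = Spectrograph(camang=45, gratang=30)
#   w_cen = spec.calc_centralwavelength()                       # wavelength in mm
#   R = spec.calc_resolution(w_cen, spec.alpha(), -spec.beta())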
| 30.006329
| 119
| 0.64016
|
3ddf31e510a4c981fb6715ad9a61470902d8cc8b
| 3,751
|
py
|
Python
|
basis_set_exchange/tests/test_unused.py
|
ltalirz/basis_set_exchange
|
0e9601d7b37ae7672a78a335e34ac5591dd509f0
|
[
"BSD-3-Clause"
] | 108
|
2018-07-09T14:23:49.000Z
|
2022-03-30T08:26:15.000Z
|
basis_set_exchange/tests/test_unused.py
|
susilehtola/basis_set_exchange
|
0185cecc56a67ad561167290fd56ac86c0c76ce7
|
[
"BSD-3-Clause"
] | 230
|
2018-06-01T15:15:49.000Z
|
2022-03-30T12:02:11.000Z
|
basis_set_exchange/tests/test_unused.py
|
susilehtola/basis_set_exchange
|
0185cecc56a67ad561167290fd56ac86c0c76ce7
|
[
"BSD-3-Clause"
] | 38
|
2018-07-20T15:16:47.000Z
|
2022-03-30T08:32:45.000Z
|
'''
Test for unused data
'''
import os
import basis_set_exchange as bse
from .common_testvars import data_dir, all_component_paths, all_element_paths, all_table_paths, all_metadata_files, all_families
def test_unused_data():
'''
Test for any unused data in the data directory
'''
# All elements contained in all component files
all_component_elements = {}
for component_path in all_component_paths:
component_data = bse.fileio.read_json_basis(component_path)
all_component_elements[component_path] = list(component_data['elements'].keys())
# All elements contained in all element files
# And all element data as read from the file
all_element_elements = {}
all_element_data = {}
for element_path in all_element_paths:
element_data = bse.fileio.read_json_basis(element_path)
all_element_elements[element_path] = list(element_data['elements'].keys())
all_element_data[element_path] = element_data['elements']
# Now go through what is reachable through a table file
for table_path in all_table_paths:
table_data = bse.fileio.read_json_basis(table_path)
# What element files are linked to this table file
el_files = list(table_data['elements'].items())
# Loop over the element files, and remove the corresponding entry
# from all_component_elements
for el, el_file in el_files:
# Normalize the paths (since we will be removing them later)
el_file = os.path.normpath(el_file)
el_file_path = os.path.join(data_dir, el_file)
el_file_data = all_element_data[el_file_path]
for cfile in el_file_data[el]['components']:
cfile = os.path.normpath(cfile)
cfile_path = os.path.join(data_dir, cfile)
if el in all_component_elements[cfile_path]:
all_component_elements[cfile_path].remove(el)
# Now remove the corresponding entry from all_element_elements
if el in all_element_elements[el_file_path]:
all_element_elements[el_file_path].remove(el)
# See which ones were unused
found_unused = False
# Merge into one big dictionary
remaining = all_component_elements
remaining.update(all_element_elements)
for k, v in remaining.items():
if not v:
continue
found_unused = True
for el in v:
print("Element {:3} in {} not used".format(el, k))
if found_unused:
raise RuntimeError("Found unused data")
def test_unused_notes():
'''
Test for orphan basis and family notes files
'''
all_basis_notes = []
all_family_notes = []
for root, dirs, files in os.walk(data_dir):
for basename in files:
fpath = os.path.join(root, basename)
fpath = os.path.relpath(fpath, data_dir)
if basename.endswith('.notes'):
all_basis_notes.append(fpath)
elif basename.startswith('NOTES.'):
all_family_notes.append(fpath)
found_unused = False
for bs_notes in all_basis_notes:
base = os.path.splitext(bs_notes)[0]
metafile = base + '.metadata.json'
if metafile not in all_metadata_files:
print("File {} does not have a corresponding metadata file".format(bs_notes))
found_unused = True
for fam_notes in all_family_notes:
fam = os.path.splitext(fam_notes)[1][1:] # Removes period
if fam not in all_families:
print("File {} does not have a corresponding family".format(fam_notes))
found_unused = True
if found_unused:
raise RuntimeError("Found unused notes files")
| 34.731481
| 128
| 0.659024
|
e167e1cd7853cce0477d7a7d211b887237ff0b03
| 3,562
|
py
|
Python
|
bindings/python/ensmallen/datasets/string/tetragenococcussolitariusnbrc100494.py
|
AnacletoLAB/ensmallen
|
b2c1b18fb1e5801712852bcc239f239e03076f09
|
[
"MIT"
] | 5
|
2021-09-10T18:31:58.000Z
|
2022-03-24T04:28:04.000Z
|
bindings/python/ensmallen/datasets/string/tetragenococcussolitariusnbrc100494.py
|
AnacletoLAB/ensmallen_graph
|
b2c1b18fb1e5801712852bcc239f239e03076f09
|
[
"MIT"
] | 18
|
2021-01-07T16:47:39.000Z
|
2021-08-12T21:51:32.000Z
|
bindings/python/ensmallen/datasets/string/tetragenococcussolitariusnbrc100494.py
|
AnacletoLAB/ensmallen
|
b2c1b18fb1e5801712852bcc239f239e03076f09
|
[
"MIT"
] | 3
|
2021-01-14T02:20:59.000Z
|
2021-08-04T19:09:52.000Z
|
"""
This file offers the methods to automatically retrieve the graph Tetragenococcus solitarius NBRC 100494.
The graph is automatically retrieved from the STRING repository.
References
---------------------
Please cite the following if you use the data:
```bib
@article{szklarczyk2019string,
title={STRING v11: protein--protein association networks with increased coverage, supporting functional discovery in genome-wide experimental datasets},
author={Szklarczyk, Damian and Gable, Annika L and Lyon, David and Junge, Alexander and Wyder, Stefan and Huerta-Cepas, Jaime and Simonovic, Milan and Doncheva, Nadezhda T and Morris, John H and Bork, Peer and others},
journal={Nucleic acids research},
volume={47},
number={D1},
pages={D607--D613},
year={2019},
publisher={Oxford University Press}
}
```
"""
from typing import Dict
from ..automatic_graph_retrieval import AutomaticallyRetrievedGraph
from ...ensmallen import Graph # pylint: disable=import-error
def TetragenococcusSolitariusNbrc100494(
directed: bool = False,
preprocess: bool = True,
load_nodes: bool = True,
verbose: int = 2,
cache: bool = True,
cache_path: str = "graphs/string",
version: str = "links.v11.5",
**additional_graph_kwargs: Dict
) -> Graph:
"""Return new instance of the Tetragenococcus solitarius NBRC 100494 graph.
The graph is automatically retrieved from the STRING repository.
Parameters
-------------------
directed: bool = False
        Whether to load the graph as directed or undirected.
By default false.
preprocess: bool = True
Whether to preprocess the graph to be loaded in
optimal time and memory.
load_nodes: bool = True,
Whether to load the nodes vocabulary or treat the nodes
simply as a numeric range.
verbose: int = 2,
        Whether to show loading bars during the retrieval and building
of the graph.
cache: bool = True
Whether to use cache, i.e. download files only once
and preprocess them only once.
cache_path: str = "graphs"
Where to store the downloaded graphs.
version: str = "links.v11.5"
The version of the graph to retrieve.
The available versions are:
- homology.v11.5
- physical.links.v11.5
- links.v11.5
additional_graph_kwargs: Dict
Additional graph kwargs.
Returns
-----------------------
    Instance of Tetragenococcus solitarius NBRC 100494 graph.
References
---------------------
Please cite the following if you use the data:
```bib
@article{szklarczyk2019string,
title={STRING v11: protein--protein association networks with increased coverage, supporting functional discovery in genome-wide experimental datasets},
author={Szklarczyk, Damian and Gable, Annika L and Lyon, David and Junge, Alexander and Wyder, Stefan and Huerta-Cepas, Jaime and Simonovic, Milan and Doncheva, Nadezhda T and Morris, John H and Bork, Peer and others},
journal={Nucleic acids research},
volume={47},
number={D1},
pages={D607--D613},
year={2019},
publisher={Oxford University Press}
}
```
"""
return AutomaticallyRetrievedGraph(
graph_name="TetragenococcusSolitariusNbrc100494",
repository="string",
version=version,
directed=directed,
preprocess=preprocess,
load_nodes=load_nodes,
verbose=verbose,
cache=cache,
cache_path=cache_path,
additional_graph_kwargs=additional_graph_kwargs
)()
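# Hedged usage sketch (not part of the generated module), assuming the
# datasets.string package re-exports this retrieval function:
#   from ensmallen.datasets.string import TetragenococcusSolitariusNbrc100494
#   graph = TetragenococcusSolitariusNbrc100494(directed=False, version="links.v11.5")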
| 33.92381
| 223
| 0.684166
|
8644d3c9d57f0468b133f626a088f6f5531ebd02
| 988
|
py
|
Python
|
server/gdserver/protocol.py
|
daimin/AiwanGobang
|
26ed7488403986f061a2144a14a8e870bccf226c
|
[
"MIT"
] | null | null | null |
server/gdserver/protocol.py
|
daimin/AiwanGobang
|
26ed7488403986f061a2144a14a8e870bccf226c
|
[
"MIT"
] | null | null | null |
server/gdserver/protocol.py
|
daimin/AiwanGobang
|
26ed7488403986f061a2144a14a8e870bccf226c
|
[
"MIT"
] | null | null | null |
# coding:utf-8
from __future__ import absolute_import, division, print_function, \
with_statement
__author__ = 'daimin'
from message import Message
DEFAULT = Message(0x0000)
VERSION = Message(0x0001)
HEARTBEAT = Message(0x0002)
LOGIN = Message(0x0003)
RANDOM_CHAT = Message(0x0004)
FIND_CHAT = Message(0x0005)
SEND_CONT = Message(0x0006)
RECV_CONT = Message(0x0007)
OVER_CHAT = Message(0x0008)
LOGOUT = Message(0x0009)
OK = DEFAULT
# codes >= 0x8000 indicate errors
ERR_NONE = Message(0x8000)
ERR_VERSION = Message(0x8001, data=u'错误的版本')      # "wrong version"
ERR_NOT_LOGIN = Message(0x8002, data=u'你还没有登录')   # "you are not logged in"
ERR_LOGIN_FAIL = Message(0x8003, data=u'登录失败')      # "login failed"
ERR_RANDOM_FIND = Message(0x8004)
ERR_FIND_CHAT = Message(0x8005)
ERR_NO_SUPPORT = Message(0x8006, data=u'不支持的协议')  # "unsupported protocol"
ERR_SEND_CONT = Message(0x8007, data=u'发送消息失败')   # "failed to send message"
def get_S2C_proto(tid):
return Message(int(tid) + 0x1000)
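# Hedged example: client-to-server ids are mapped to their server-to-client
# counterparts by adding 0x1000, e.g. get_S2C_proto(0x0003) -> Message(0x1003)
# (the reply to LOGIN).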
| 27.444444
| 67
| 0.665992
|
3f63a75dc4b20882fd1556b89394d7d97e8f7ca2
| 39,710
|
py
|
Python
|
discord/http.py
|
Rayster4/discord.py-1.7.3
|
4a4c60a8fab7bf00eac2e9ffbb5621f68a4c6b6f
|
[
"MIT"
] | 21
|
2021-03-29T05:49:35.000Z
|
2022-03-18T09:02:34.000Z
|
discord/http.py
|
Rayster4/discord.py-1.7.3
|
4a4c60a8fab7bf00eac2e9ffbb5621f68a4c6b6f
|
[
"MIT"
] | 15
|
2021-04-10T11:08:09.000Z
|
2022-03-22T07:48:58.000Z
|
discord/http.py
|
Rayster4/discord.py-1.7.3
|
4a4c60a8fab7bf00eac2e9ffbb5621f68a4c6b6f
|
[
"MIT"
] | 31
|
2021-03-29T05:54:57.000Z
|
2022-03-22T16:58:57.000Z
|
# -*- coding: utf-8 -*-
"""
The MIT License (MIT)
Copyright (c) 2015-present Rapptz
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
"""
import asyncio
import json
import logging
import sys
from urllib.parse import quote as _uriquote
import weakref
import aiohttp
from .errors import HTTPException, Forbidden, NotFound, LoginFailure, DiscordServerError, GatewayNotFound
from .gateway import DiscordClientWebSocketResponse
from . import __version__, utils
log = logging.getLogger(__name__)
async def json_or_text(response):
text = await response.text(encoding='utf-8')
try:
if response.headers['content-type'] == 'application/json':
return json.loads(text)
except KeyError:
# Thanks Cloudflare
pass
return text
class Route:
BASE = 'https://discord.com/api/v7'
def __init__(self, method, path, **parameters):
self.path = path
self.method = method
url = (self.BASE + self.path)
if parameters:
self.url = url.format(**{k: _uriquote(v) if isinstance(v, str) else v for k, v in parameters.items()})
else:
self.url = url
# major parameters:
self.channel_id = parameters.get('channel_id')
self.guild_id = parameters.get('guild_id')
@property
def bucket(self):
# the bucket is just method + path w/ major parameters
return '{0.channel_id}:{0.guild_id}:{0.path}'.format(self)
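    # Hedged example: Route('GET', '/channels/{channel_id}/messages', channel_id=1234)
    # formats the URL to '.../channels/1234/messages' and has
    # bucket == '1234:None:/channels/{channel_id}/messages'.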
class MaybeUnlock:
def __init__(self, lock):
self.lock = lock
self._unlock = True
def __enter__(self):
return self
def defer(self):
self._unlock = False
def __exit__(self, type, value, traceback):
if self._unlock:
self.lock.release()
# For some reason, the Discord voice websocket expects this header to be
# completely lowercase while aiohttp respects spec and does it as case-insensitive
aiohttp.hdrs.WEBSOCKET = 'websocket'
class HTTPClient:
"""Represents an HTTP client sending HTTP requests to the Discord API."""
SUCCESS_LOG = '{method} {url} has received {text}'
REQUEST_LOG = '{method} {url} with {json} has returned {status}'
def __init__(self, connector=None, *, proxy=None, proxy_auth=None, loop=None, unsync_clock=True):
self.loop = asyncio.get_event_loop() if loop is None else loop
self.connector = connector
self.__session = None # filled in static_login
self._locks = weakref.WeakValueDictionary()
self._global_over = asyncio.Event()
self._global_over.set()
self.token = None
self.bot_token = False
self.proxy = proxy
self.proxy_auth = proxy_auth
self.use_clock = not unsync_clock
user_agent = 'DiscordBot (https://github.com/Rapptz/discord.py {0}) Python/{1[0]}.{1[1]} aiohttp/{2}'
self.user_agent = user_agent.format(__version__, sys.version_info, aiohttp.__version__)
def recreate(self):
if self.__session.closed:
self.__session = aiohttp.ClientSession(connector=self.connector, ws_response_class=DiscordClientWebSocketResponse)
async def ws_connect(self, url, *, compress=0):
kwargs = {
'proxy_auth': self.proxy_auth,
'proxy': self.proxy,
'max_msg_size': 0,
'timeout': 30.0,
'autoclose': False,
'headers': {
'User-Agent': self.user_agent,
},
'compress': compress
}
return await self.__session.ws_connect(url, **kwargs)
async def request(self, route, *, files=None, form=None, **kwargs):
bucket = route.bucket
method = route.method
url = route.url
lock = self._locks.get(bucket)
if lock is None:
lock = asyncio.Lock()
if bucket is not None:
self._locks[bucket] = lock
# header creation
headers = {
'User-Agent': self.user_agent,
'X-Ratelimit-Precision': 'millisecond',
}
if self.token is not None:
headers['Authorization'] = 'Bot ' + self.token if self.bot_token else self.token
# some checking if it's a JSON request
if 'json' in kwargs:
headers['Content-Type'] = 'application/json'
kwargs['data'] = utils.to_json(kwargs.pop('json'))
try:
reason = kwargs.pop('reason')
except KeyError:
pass
else:
if reason:
headers['X-Audit-Log-Reason'] = _uriquote(reason, safe='/ ')
kwargs['headers'] = headers
# Proxy support
if self.proxy is not None:
kwargs['proxy'] = self.proxy
if self.proxy_auth is not None:
kwargs['proxy_auth'] = self.proxy_auth
if not self._global_over.is_set():
# wait until the global lock is complete
await self._global_over.wait()
await lock.acquire()
with MaybeUnlock(lock) as maybe_lock:
for tries in range(5):
if files:
for f in files:
f.reset(seek=tries)
if form:
form_data = aiohttp.FormData()
for params in form:
form_data.add_field(**params)
kwargs['data'] = form_data
try:
async with self.__session.request(method, url, **kwargs) as r:
log.debug('%s %s with %s has returned %s', method, url, kwargs.get('data'), r.status)
# even errors have text involved in them so this is safe to call
data = await json_or_text(r)
# check if we have rate limit header information
remaining = r.headers.get('X-Ratelimit-Remaining')
if remaining == '0' and r.status != 429:
# we've depleted our current bucket
delta = utils._parse_ratelimit_header(r, use_clock=self.use_clock)
log.debug('A rate limit bucket has been exhausted (bucket: %s, retry: %s).', bucket, delta)
maybe_lock.defer()
self.loop.call_later(delta, lock.release)
# the request was successful so just return the text/json
if 300 > r.status >= 200:
log.debug('%s %s has received %s', method, url, data)
return data
# we are being rate limited
if r.status == 429:
if not r.headers.get('Via'):
# Banned by Cloudflare more than likely.
raise HTTPException(r, data)
fmt = 'We are being rate limited. Retrying in %.2f seconds. Handled under the bucket "%s"'
# sleep a bit
retry_after = data['retry_after'] / 1000.0
log.warning(fmt, retry_after, bucket)
# check if it's a global rate limit
is_global = data.get('global', False)
if is_global:
log.warning('Global rate limit has been hit. Retrying in %.2f seconds.', retry_after)
self._global_over.clear()
await asyncio.sleep(retry_after)
log.debug('Done sleeping for the rate limit. Retrying...')
# release the global lock now that the
# global rate limit has passed
if is_global:
self._global_over.set()
log.debug('Global rate limit is now over.')
continue
# we've received a 500 or 502, unconditional retry
if r.status in {500, 502}:
await asyncio.sleep(1 + tries * 2)
continue
# the usual error cases
if r.status == 403:
raise Forbidden(r, data)
elif r.status == 404:
raise NotFound(r, data)
elif r.status == 503:
raise DiscordServerError(r, data)
else:
raise HTTPException(r, data)
# This is handling exceptions from the request
except OSError as e:
# Connection reset by peer
if tries < 4 and e.errno in (54, 10054):
continue
raise
# We've run out of retries, raise.
if r.status >= 500:
raise DiscordServerError(r, data)
raise HTTPException(r, data)
async def get_from_cdn(self, url):
async with self.__session.get(url) as resp:
if resp.status == 200:
return await resp.read()
elif resp.status == 404:
raise NotFound(resp, 'asset not found')
elif resp.status == 403:
raise Forbidden(resp, 'cannot retrieve asset')
else:
raise HTTPException(resp, 'failed to get asset')
# state management
async def close(self):
if self.__session:
await self.__session.close()
def _token(self, token, *, bot=True):
self.token = token
self.bot_token = bot
self._ack_token = None
# login management
async def static_login(self, token, *, bot):
# Necessary to get aiohttp to stop complaining about session creation
self.__session = aiohttp.ClientSession(connector=self.connector, ws_response_class=DiscordClientWebSocketResponse)
old_token, old_bot = self.token, self.bot_token
self._token(token, bot=bot)
try:
data = await self.request(Route('GET', '/users/@me'))
except HTTPException as exc:
self._token(old_token, bot=old_bot)
if exc.response.status == 401:
raise LoginFailure('Improper token has been passed.') from exc
raise
return data
def logout(self):
return self.request(Route('POST', '/auth/logout'))
# Group functionality
def start_group(self, user_id, recipients):
payload = {
'recipients': recipients
}
return self.request(Route('POST', '/users/{user_id}/channels', user_id=user_id), json=payload)
def leave_group(self, channel_id):
return self.request(Route('DELETE', '/channels/{channel_id}', channel_id=channel_id))
def add_group_recipient(self, channel_id, user_id):
r = Route('PUT', '/channels/{channel_id}/recipients/{user_id}', channel_id=channel_id, user_id=user_id)
return self.request(r)
def remove_group_recipient(self, channel_id, user_id):
r = Route('DELETE', '/channels/{channel_id}/recipients/{user_id}', channel_id=channel_id, user_id=user_id)
return self.request(r)
def edit_group(self, channel_id, **options):
valid_keys = ('name', 'icon')
payload = {
k: v for k, v in options.items() if k in valid_keys
}
return self.request(Route('PATCH', '/channels/{channel_id}', channel_id=channel_id), json=payload)
def convert_group(self, channel_id):
return self.request(Route('POST', '/channels/{channel_id}/convert', channel_id=channel_id))
# Message management
def start_private_message(self, user_id):
payload = {
'recipient_id': user_id
}
return self.request(Route('POST', '/users/@me/channels'), json=payload)
def send_message(self, channel_id, content, *, tts=False, embed=None, nonce=None, allowed_mentions=None, message_reference=None):
r = Route('POST', '/channels/{channel_id}/messages', channel_id=channel_id)
payload = {}
if content:
payload['content'] = content
if tts:
payload['tts'] = True
if embed:
payload['embed'] = embed
if nonce:
payload['nonce'] = nonce
if allowed_mentions:
payload['allowed_mentions'] = allowed_mentions
if message_reference:
payload['message_reference'] = message_reference
return self.request(r, json=payload)
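    # Hedged example: awaiting send_message(channel_id, "hello") POSTs
    # {'content': 'hello'} to /channels/{channel_id}/messages and resolves to
    # the created message payload returned by request().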
def send_typing(self, channel_id):
return self.request(Route('POST', '/channels/{channel_id}/typing', channel_id=channel_id))
def send_files(self, channel_id, *, files, content=None, tts=False, embed=None, nonce=None, allowed_mentions=None, message_reference=None):
r = Route('POST', '/channels/{channel_id}/messages', channel_id=channel_id)
form = []
payload = {'tts': tts}
if content:
payload['content'] = content
if embed:
payload['embed'] = embed
if nonce:
payload['nonce'] = nonce
if allowed_mentions:
payload['allowed_mentions'] = allowed_mentions
if message_reference:
payload['message_reference'] = message_reference
form.append({'name': 'payload_json', 'value': utils.to_json(payload)})
if len(files) == 1:
file = files[0]
form.append({
'name': 'file',
'value': file.fp,
'filename': file.filename,
'content_type': 'application/octet-stream'
})
else:
for index, file in enumerate(files):
form.append({
'name': 'file%s' % index,
'value': file.fp,
'filename': file.filename,
'content_type': 'application/octet-stream'
})
return self.request(r, form=form, files=files)
async def ack_message(self, channel_id, message_id):
r = Route('POST', '/channels/{channel_id}/messages/{message_id}/ack', channel_id=channel_id, message_id=message_id)
data = await self.request(r, json={'token': self._ack_token})
self._ack_token = data['token']
def ack_guild(self, guild_id):
return self.request(Route('POST', '/guilds/{guild_id}/ack', guild_id=guild_id))
def delete_message(self, channel_id, message_id, *, reason=None):
r = Route('DELETE', '/channels/{channel_id}/messages/{message_id}', channel_id=channel_id, message_id=message_id)
return self.request(r, reason=reason)
def delete_messages(self, channel_id, message_ids, *, reason=None):
r = Route('POST', '/channels/{channel_id}/messages/bulk_delete', channel_id=channel_id)
payload = {
'messages': message_ids
}
return self.request(r, json=payload, reason=reason)
def edit_message(self, channel_id, message_id, **fields):
r = Route('PATCH', '/channels/{channel_id}/messages/{message_id}', channel_id=channel_id, message_id=message_id)
return self.request(r, json=fields)
def add_reaction(self, channel_id, message_id, emoji):
r = Route('PUT', '/channels/{channel_id}/messages/{message_id}/reactions/{emoji}/@me',
channel_id=channel_id, message_id=message_id, emoji=emoji)
return self.request(r)
def remove_reaction(self, channel_id, message_id, emoji, member_id):
r = Route('DELETE', '/channels/{channel_id}/messages/{message_id}/reactions/{emoji}/{member_id}',
channel_id=channel_id, message_id=message_id, member_id=member_id, emoji=emoji)
return self.request(r)
def remove_own_reaction(self, channel_id, message_id, emoji):
r = Route('DELETE', '/channels/{channel_id}/messages/{message_id}/reactions/{emoji}/@me',
channel_id=channel_id, message_id=message_id, emoji=emoji)
return self.request(r)
def get_reaction_users(self, channel_id, message_id, emoji, limit, after=None):
r = Route('GET', '/channels/{channel_id}/messages/{message_id}/reactions/{emoji}',
channel_id=channel_id, message_id=message_id, emoji=emoji)
params = {'limit': limit}
if after:
params['after'] = after
return self.request(r, params=params)
def clear_reactions(self, channel_id, message_id):
r = Route('DELETE', '/channels/{channel_id}/messages/{message_id}/reactions',
channel_id=channel_id, message_id=message_id)
return self.request(r)
def clear_single_reaction(self, channel_id, message_id, emoji):
r = Route('DELETE', '/channels/{channel_id}/messages/{message_id}/reactions/{emoji}',
channel_id=channel_id, message_id=message_id, emoji=emoji)
return self.request(r)
def get_message(self, channel_id, message_id):
r = Route('GET', '/channels/{channel_id}/messages/{message_id}', channel_id=channel_id, message_id=message_id)
return self.request(r)
def get_channel(self, channel_id):
r = Route('GET', '/channels/{channel_id}', channel_id=channel_id)
return self.request(r)
def logs_from(self, channel_id, limit, before=None, after=None, around=None):
params = {
'limit': limit
}
if before is not None:
params['before'] = before
if after is not None:
params['after'] = after
if around is not None:
params['around'] = around
return self.request(Route('GET', '/channels/{channel_id}/messages', channel_id=channel_id), params=params)
def publish_message(self, channel_id, message_id):
return self.request(Route('POST', '/channels/{channel_id}/messages/{message_id}/crosspost',
channel_id=channel_id, message_id=message_id))
def pin_message(self, channel_id, message_id, reason=None):
return self.request(Route('PUT', '/channels/{channel_id}/pins/{message_id}',
channel_id=channel_id, message_id=message_id), reason=reason)
def unpin_message(self, channel_id, message_id, reason=None):
return self.request(Route('DELETE', '/channels/{channel_id}/pins/{message_id}',
channel_id=channel_id, message_id=message_id), reason=reason)
def pins_from(self, channel_id):
return self.request(Route('GET', '/channels/{channel_id}/pins', channel_id=channel_id))
# Member management
def kick(self, user_id, guild_id, reason=None):
r = Route('DELETE', '/guilds/{guild_id}/members/{user_id}', guild_id=guild_id, user_id=user_id)
if reason:
# thanks aiohttp
r.url = '{0.url}?reason={1}'.format(r, _uriquote(reason))
return self.request(r)
def ban(self, user_id, guild_id, delete_message_days=1, reason=None):
r = Route('PUT', '/guilds/{guild_id}/bans/{user_id}', guild_id=guild_id, user_id=user_id)
params = {
'delete_message_days': delete_message_days,
}
if reason:
# thanks aiohttp
r.url = '{0.url}?reason={1}'.format(r, _uriquote(reason))
return self.request(r, params=params)
def unban(self, user_id, guild_id, *, reason=None):
r = Route('DELETE', '/guilds/{guild_id}/bans/{user_id}', guild_id=guild_id, user_id=user_id)
return self.request(r, reason=reason)
def guild_voice_state(self, user_id, guild_id, *, mute=None, deafen=None, reason=None):
r = Route('PATCH', '/guilds/{guild_id}/members/{user_id}', guild_id=guild_id, user_id=user_id)
payload = {}
if mute is not None:
payload['mute'] = mute
if deafen is not None:
payload['deaf'] = deafen
return self.request(r, json=payload, reason=reason)
def edit_profile(self, password, username, avatar, **fields):
payload = {
'password': password,
'username': username,
'avatar': avatar
}
if 'email' in fields:
payload['email'] = fields['email']
if 'new_password' in fields:
payload['new_password'] = fields['new_password']
return self.request(Route('PATCH', '/users/@me'), json=payload)
def change_my_nickname(self, guild_id, nickname, *, reason=None):
r = Route('PATCH', '/guilds/{guild_id}/members/@me/nick', guild_id=guild_id)
payload = {
'nick': nickname
}
return self.request(r, json=payload, reason=reason)
def change_nickname(self, guild_id, user_id, nickname, *, reason=None):
r = Route('PATCH', '/guilds/{guild_id}/members/{user_id}', guild_id=guild_id, user_id=user_id)
payload = {
'nick': nickname
}
return self.request(r, json=payload, reason=reason)
def edit_my_voice_state(self, guild_id, payload):
r = Route('PATCH', '/guilds/{guild_id}/voice-states/@me', guild_id=guild_id)
return self.request(r, json=payload)
def edit_voice_state(self, guild_id, user_id, payload):
r = Route('PATCH', '/guilds/{guild_id}/voice-states/{user_id}', guild_id=guild_id, user_id=user_id)
return self.request(r, json=payload)
def edit_member(self, guild_id, user_id, *, reason=None, **fields):
r = Route('PATCH', '/guilds/{guild_id}/members/{user_id}', guild_id=guild_id, user_id=user_id)
return self.request(r, json=fields, reason=reason)
# Channel management
def edit_channel(self, channel_id, *, reason=None, **options):
r = Route('PATCH', '/channels/{channel_id}', channel_id=channel_id)
valid_keys = ('name', 'parent_id', 'topic', 'bitrate', 'nsfw',
'user_limit', 'position', 'permission_overwrites', 'rate_limit_per_user',
'type', 'rtc_region')
payload = {
k: v for k, v in options.items() if k in valid_keys
}
return self.request(r, reason=reason, json=payload)
def bulk_channel_update(self, guild_id, data, *, reason=None):
r = Route('PATCH', '/guilds/{guild_id}/channels', guild_id=guild_id)
return self.request(r, json=data, reason=reason)
def create_channel(self, guild_id, channel_type, *, reason=None, **options):
payload = {
'type': channel_type
}
valid_keys = ('name', 'parent_id', 'topic', 'bitrate', 'nsfw',
'user_limit', 'position', 'permission_overwrites', 'rate_limit_per_user',
'rtc_region')
payload.update({
k: v for k, v in options.items() if k in valid_keys and v is not None
})
return self.request(Route('POST', '/guilds/{guild_id}/channels', guild_id=guild_id), json=payload, reason=reason)
def delete_channel(self, channel_id, *, reason=None):
return self.request(Route('DELETE', '/channels/{channel_id}', channel_id=channel_id), reason=reason)
# Webhook management
def create_webhook(self, channel_id, *, name, avatar=None, reason=None):
payload = {
'name': name
}
if avatar is not None:
payload['avatar'] = avatar
r = Route('POST', '/channels/{channel_id}/webhooks', channel_id=channel_id)
return self.request(r, json=payload, reason=reason)
def channel_webhooks(self, channel_id):
return self.request(Route('GET', '/channels/{channel_id}/webhooks', channel_id=channel_id))
def guild_webhooks(self, guild_id):
return self.request(Route('GET', '/guilds/{guild_id}/webhooks', guild_id=guild_id))
def get_webhook(self, webhook_id):
return self.request(Route('GET', '/webhooks/{webhook_id}', webhook_id=webhook_id))
def follow_webhook(self, channel_id, webhook_channel_id, reason=None):
payload = {
'webhook_channel_id': str(webhook_channel_id)
}
return self.request(Route('POST', '/channels/{channel_id}/followers', channel_id=channel_id), json=payload, reason=reason)
# Guild management
def get_guilds(self, limit, before=None, after=None):
params = {
'limit': limit
}
if before:
params['before'] = before
if after:
params['after'] = after
return self.request(Route('GET', '/users/@me/guilds'), params=params)
def leave_guild(self, guild_id):
return self.request(Route('DELETE', '/users/@me/guilds/{guild_id}', guild_id=guild_id))
def get_guild(self, guild_id):
return self.request(Route('GET', '/guilds/{guild_id}', guild_id=guild_id))
def delete_guild(self, guild_id):
return self.request(Route('DELETE', '/guilds/{guild_id}', guild_id=guild_id))
def create_guild(self, name, region, icon):
payload = {
'name': name,
'icon': icon,
'region': region
}
return self.request(Route('POST', '/guilds'), json=payload)
def edit_guild(self, guild_id, *, reason=None, **fields):
valid_keys = ('name', 'region', 'icon', 'afk_timeout', 'owner_id',
'afk_channel_id', 'splash', 'verification_level',
'system_channel_id', 'default_message_notifications',
'description', 'explicit_content_filter', 'banner',
'system_channel_flags', 'rules_channel_id',
'public_updates_channel_id', 'preferred_locale',)
payload = {
k: v for k, v in fields.items() if k in valid_keys
}
return self.request(Route('PATCH', '/guilds/{guild_id}', guild_id=guild_id), json=payload, reason=reason)
def get_template(self, code):
return self.request(Route('GET', '/guilds/templates/{code}', code=code))
def guild_templates(self, guild_id):
return self.request(Route('GET', '/guilds/{guild_id}/templates', guild_id=guild_id))
def create_template(self, guild_id, payload):
return self.request(Route('POST', '/guilds/{guild_id}/templates', guild_id=guild_id), json=payload)
def sync_template(self, guild_id, code):
return self.request(Route('PUT', '/guilds/{guild_id}/templates/{code}', guild_id=guild_id, code=code))
def edit_template(self, guild_id, code, payload):
valid_keys = (
'name',
'description',
)
payload = {
k: v for k, v in payload.items() if k in valid_keys
}
return self.request(Route('PATCH', '/guilds/{guild_id}/templates/{code}', guild_id=guild_id, code=code), json=payload)
def delete_template(self, guild_id, code):
return self.request(Route('DELETE', '/guilds/{guild_id}/templates/{code}', guild_id=guild_id, code=code))
def create_from_template(self, code, name, region, icon):
payload = {
'name': name,
'icon': icon,
'region': region
}
return self.request(Route('POST', '/guilds/templates/{code}', code=code), json=payload)
def get_bans(self, guild_id):
return self.request(Route('GET', '/guilds/{guild_id}/bans', guild_id=guild_id))
def get_ban(self, user_id, guild_id):
return self.request(Route('GET', '/guilds/{guild_id}/bans/{user_id}', guild_id=guild_id, user_id=user_id))
def get_vanity_code(self, guild_id):
return self.request(Route('GET', '/guilds/{guild_id}/vanity-url', guild_id=guild_id))
def change_vanity_code(self, guild_id, code, *, reason=None):
payload = {'code': code}
return self.request(Route('PATCH', '/guilds/{guild_id}/vanity-url', guild_id=guild_id), json=payload, reason=reason)
def get_all_guild_channels(self, guild_id):
return self.request(Route('GET', '/guilds/{guild_id}/channels', guild_id=guild_id))
def get_members(self, guild_id, limit, after):
params = {
'limit': limit,
}
if after:
params['after'] = after
r = Route('GET', '/guilds/{guild_id}/members', guild_id=guild_id)
return self.request(r, params=params)
def get_member(self, guild_id, member_id):
return self.request(Route('GET', '/guilds/{guild_id}/members/{member_id}', guild_id=guild_id, member_id=member_id))
def prune_members(self, guild_id, days, compute_prune_count, roles, *, reason=None):
payload = {
'days': days,
'compute_prune_count': 'true' if compute_prune_count else 'false'
}
if roles:
payload['include_roles'] = ', '.join(roles)
return self.request(Route('POST', '/guilds/{guild_id}/prune', guild_id=guild_id), json=payload, reason=reason)
def estimate_pruned_members(self, guild_id, days, roles):
params = {
'days': days
}
if roles:
params['include_roles'] = ', '.join(roles)
return self.request(Route('GET', '/guilds/{guild_id}/prune', guild_id=guild_id), params=params)
def get_all_custom_emojis(self, guild_id):
return self.request(Route('GET', '/guilds/{guild_id}/emojis', guild_id=guild_id))
def get_custom_emoji(self, guild_id, emoji_id):
return self.request(Route('GET', '/guilds/{guild_id}/emojis/{emoji_id}', guild_id=guild_id, emoji_id=emoji_id))
def create_custom_emoji(self, guild_id, name, image, *, roles=None, reason=None):
payload = {
'name': name,
'image': image,
'roles': roles or []
}
r = Route('POST', '/guilds/{guild_id}/emojis', guild_id=guild_id)
return self.request(r, json=payload, reason=reason)
def delete_custom_emoji(self, guild_id, emoji_id, *, reason=None):
r = Route('DELETE', '/guilds/{guild_id}/emojis/{emoji_id}', guild_id=guild_id, emoji_id=emoji_id)
return self.request(r, reason=reason)
def edit_custom_emoji(self, guild_id, emoji_id, *, name, roles=None, reason=None):
payload = {
'name': name,
'roles': roles or []
}
r = Route('PATCH', '/guilds/{guild_id}/emojis/{emoji_id}', guild_id=guild_id, emoji_id=emoji_id)
return self.request(r, json=payload, reason=reason)
def get_all_integrations(self, guild_id):
r = Route('GET', '/guilds/{guild_id}/integrations', guild_id=guild_id)
return self.request(r)
def create_integration(self, guild_id, type, id):
payload = {
'type': type,
'id': id
}
r = Route('POST', '/guilds/{guild_id}/integrations', guild_id=guild_id)
return self.request(r, json=payload)
def edit_integration(self, guild_id, integration_id, **payload):
r = Route('PATCH', '/guilds/{guild_id}/integrations/{integration_id}', guild_id=guild_id,
integration_id=integration_id)
return self.request(r, json=payload)
def sync_integration(self, guild_id, integration_id):
r = Route('POST', '/guilds/{guild_id}/integrations/{integration_id}/sync', guild_id=guild_id,
integration_id=integration_id)
return self.request(r)
def delete_integration(self, guild_id, integration_id):
r = Route('DELETE', '/guilds/{guild_id}/integrations/{integration_id}', guild_id=guild_id,
integration_id=integration_id)
return self.request(r)
def get_audit_logs(self, guild_id, limit=100, before=None, after=None, user_id=None, action_type=None):
params = {'limit': limit}
if before:
params['before'] = before
if after:
params['after'] = after
if user_id:
params['user_id'] = user_id
if action_type:
params['action_type'] = action_type
r = Route('GET', '/guilds/{guild_id}/audit-logs', guild_id=guild_id)
return self.request(r, params=params)
def get_widget(self, guild_id):
return self.request(Route('GET', '/guilds/{guild_id}/widget.json', guild_id=guild_id))
# Invite management
def create_invite(self, channel_id, *, reason=None, **options):
r = Route('POST', '/channels/{channel_id}/invites', channel_id=channel_id)
payload = {
'max_age': options.get('max_age', 0),
'max_uses': options.get('max_uses', 0),
'temporary': options.get('temporary', False),
'unique': options.get('unique', True)
}
return self.request(r, reason=reason, json=payload)
def get_invite(self, invite_id, *, with_counts=True):
params = {
'with_counts': int(with_counts)
}
return self.request(Route('GET', '/invites/{invite_id}', invite_id=invite_id), params=params)
def invites_from(self, guild_id):
return self.request(Route('GET', '/guilds/{guild_id}/invites', guild_id=guild_id))
def invites_from_channel(self, channel_id):
return self.request(Route('GET', '/channels/{channel_id}/invites', channel_id=channel_id))
def delete_invite(self, invite_id, *, reason=None):
return self.request(Route('DELETE', '/invites/{invite_id}', invite_id=invite_id), reason=reason)
# Role management
def get_roles(self, guild_id):
return self.request(Route('GET', '/guilds/{guild_id}/roles', guild_id=guild_id))
def edit_role(self, guild_id, role_id, *, reason=None, **fields):
r = Route('PATCH', '/guilds/{guild_id}/roles/{role_id}', guild_id=guild_id, role_id=role_id)
valid_keys = ('name', 'permissions', 'color', 'hoist', 'mentionable')
payload = {
k: v for k, v in fields.items() if k in valid_keys
}
return self.request(r, json=payload, reason=reason)
def delete_role(self, guild_id, role_id, *, reason=None):
r = Route('DELETE', '/guilds/{guild_id}/roles/{role_id}', guild_id=guild_id, role_id=role_id)
return self.request(r, reason=reason)
def replace_roles(self, user_id, guild_id, role_ids, *, reason=None):
return self.edit_member(guild_id=guild_id, user_id=user_id, roles=role_ids, reason=reason)
def create_role(self, guild_id, *, reason=None, **fields):
r = Route('POST', '/guilds/{guild_id}/roles', guild_id=guild_id)
return self.request(r, json=fields, reason=reason)
def move_role_position(self, guild_id, positions, *, reason=None):
r = Route('PATCH', '/guilds/{guild_id}/roles', guild_id=guild_id)
return self.request(r, json=positions, reason=reason)
def add_role(self, guild_id, user_id, role_id, *, reason=None):
r = Route('PUT', '/guilds/{guild_id}/members/{user_id}/roles/{role_id}',
guild_id=guild_id, user_id=user_id, role_id=role_id)
return self.request(r, reason=reason)
def remove_role(self, guild_id, user_id, role_id, *, reason=None):
r = Route('DELETE', '/guilds/{guild_id}/members/{user_id}/roles/{role_id}',
guild_id=guild_id, user_id=user_id, role_id=role_id)
return self.request(r, reason=reason)
def edit_channel_permissions(self, channel_id, target, allow, deny, type, *, reason=None):
payload = {
'id': target,
'allow': allow,
'deny': deny,
'type': type
}
r = Route('PUT', '/channels/{channel_id}/permissions/{target}', channel_id=channel_id, target=target)
return self.request(r, json=payload, reason=reason)
def delete_channel_permissions(self, channel_id, target, *, reason=None):
r = Route('DELETE', '/channels/{channel_id}/permissions/{target}', channel_id=channel_id, target=target)
return self.request(r, reason=reason)
# Voice management
def move_member(self, user_id, guild_id, channel_id, *, reason=None):
return self.edit_member(guild_id=guild_id, user_id=user_id, channel_id=channel_id, reason=reason)
# Relationship related
def remove_relationship(self, user_id):
r = Route('DELETE', '/users/@me/relationships/{user_id}', user_id=user_id)
return self.request(r)
def add_relationship(self, user_id, type=None):
r = Route('PUT', '/users/@me/relationships/{user_id}', user_id=user_id)
payload = {}
if type is not None:
payload['type'] = type
return self.request(r, json=payload)
def send_friend_request(self, username, discriminator):
r = Route('POST', '/users/@me/relationships')
payload = {
'username': username,
'discriminator': int(discriminator)
}
return self.request(r, json=payload)
# Misc
def application_info(self):
return self.request(Route('GET', '/oauth2/applications/@me'))
async def get_gateway(self, *, encoding='json', v=6, zlib=True):
try:
data = await self.request(Route('GET', '/gateway'))
except HTTPException as exc:
raise GatewayNotFound() from exc
if zlib:
value = '{0}?encoding={1}&v={2}&compress=zlib-stream'
else:
value = '{0}?encoding={1}&v={2}'
return value.format(data['url'], encoding, v)
async def get_bot_gateway(self, *, encoding='json', v=6, zlib=True):
try:
data = await self.request(Route('GET', '/gateway/bot'))
except HTTPException as exc:
raise GatewayNotFound() from exc
if zlib:
value = '{0}?encoding={1}&v={2}&compress=zlib-stream'
else:
value = '{0}?encoding={1}&v={2}'
return data['shards'], value.format(data['url'], encoding, v)
def get_user(self, user_id):
return self.request(Route('GET', '/users/{user_id}', user_id=user_id))
def get_user_profile(self, user_id):
return self.request(Route('GET', '/users/{user_id}/profile', user_id=user_id))
def get_mutual_friends(self, user_id):
return self.request(Route('GET', '/users/{user_id}/relationships', user_id=user_id))
def change_hypesquad_house(self, house_id):
payload = {'house_id': house_id}
return self.request(Route('POST', '/hypesquad/online'), json=payload)
def leave_hypesquad_house(self):
return self.request(Route('DELETE', '/hypesquad/online'))
def edit_settings(self, **payload):
return self.request(Route('PATCH', '/users/@me/settings'), json=payload)
| 39.473161
| 143
| 0.604357
|