hexsha stringlengths 40 40 | size int64 5 2.06M | ext stringclasses 10 values | lang stringclasses 1 value | max_stars_repo_path stringlengths 3 248 | max_stars_repo_name stringlengths 5 125 | max_stars_repo_head_hexsha stringlengths 40 78 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 3 248 | max_issues_repo_name stringlengths 5 125 | max_issues_repo_head_hexsha stringlengths 40 78 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 67k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 3 248 | max_forks_repo_name stringlengths 5 125 | max_forks_repo_head_hexsha stringlengths 40 78 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 5 2.06M | avg_line_length float64 1 1.02M | max_line_length int64 3 1.03M | alphanum_fraction float64 0 1 | count_classes int64 0 1.6M | score_classes float64 0 1 | count_generators int64 0 651k | score_generators float64 0 1 | count_decorators int64 0 990k | score_decorators float64 0 1 | count_async_functions int64 0 235k | score_async_functions float64 0 1 | count_documentation int64 0 1.04M | score_documentation float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
5e919f500bdb61fdacf742cd30485e1fec67466a | 2,439 | py | Python | src/python/grpcio/grpc/experimental/aio/__init__.py | nondejus/grpc | e5e8d7dc70f97b1e52612facb3f57990b2240005 | [
"Apache-2.0"
] | 3 | 2020-10-12T15:47:01.000Z | 2022-01-14T19:51:26.000Z | src/python/grpcio/grpc/experimental/aio/__init__.py | nondejus/grpc | e5e8d7dc70f97b1e52612facb3f57990b2240005 | [
"Apache-2.0"
] | 11 | 2021-04-08T22:10:50.000Z | 2022-03-12T00:52:35.000Z | src/python/grpcio/grpc/experimental/aio/__init__.py | nondejus/grpc | e5e8d7dc70f97b1e52612facb3f57990b2240005 | [
"Apache-2.0"
] | 2 | 2019-11-13T05:27:48.000Z | 2020-01-21T06:35:19.000Z | # Copyright 2019 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""gRPC's Asynchronous Python API."""
import abc
import types
import six
import grpc
from grpc._cython import cygrpc
from grpc._cython.cygrpc import init_grpc_aio
from ._server import server
from ._channel import Channel
from ._channel import UnaryUnaryMultiCallable
def insecure_channel(target, options=None, compression=None):
    """Creates an insecure asynchronous Channel to a server.
    Args:
      target: The server address
      options: An optional list of key-value pairs (channel args
        in gRPC Core runtime) to configure the channel.
      compression: An optional value indicating the compression method to be
        used over the lifetime of the channel. This is an EXPERIMENTAL option.
    Returns:
      A Channel.
    """
    from grpc.experimental.aio import _channel  # pylint: disable=cyclic-import
    # Normalize the options before handing them to the channel: an absent
    # options list becomes an empty tuple.
    channel_options = () if options is None else options
    return _channel.Channel(target, channel_options, None, compression)
class _AioRpcError:
    """Private implementation of AioRpcError.

    Placeholder sentinel: ``AioRpcError.__new__`` compares against this class
    to decide whether the real (cygrpc-backed) class still needs to be built.
    """
class AioRpcError:
    """An RpcError to be used by the asynchronous API.
    Parent classes: (cygrpc._AioRpcError, RpcError)
    """
    # Dynamically registered as subclass of _AioRpcError and RpcError, because the former one is
    # only available after the cython code has been compiled.
    _class_built = _AioRpcError

    def __new__(cls, *args, **kwargs):
        """Lazily build the real class on first instantiation, then delegate.

        The first call replaces the ``_class_built`` sentinel with a class
        derived from the compiled ``cygrpc._AioRpcError`` and ``grpc.RpcError``;
        every call returns an instance of that built class, not of this one.
        """
        if cls._class_built is _AioRpcError:
            cls._class_built = types.new_class(
                "AioRpcError", (cygrpc._AioRpcError, grpc.RpcError))
            cls._class_built.__doc__ = cls.__doc__
        return cls._class_built(*args, **kwargs)
################################### __all__ #################################

# Public API surface of the aio package.
__all__ = (
    'init_grpc_aio',
    'Channel',
    'UnaryUnaryMultiCallable',
    'insecure_channel',
    'AioRpcError',
)
| 31.269231 | 96 | 0.693727 | 695 | 0.284953 | 0 | 0 | 0 | 0 | 0 | 0 | 1,523 | 0.624436 |
5e924c02bab981c33ede2b50dac4e2d8e48fe849 | 6,634 | py | Python | trainer.py | Ayush8120/PocketTanks | 9498b441e4fbac007879d8963da6edf3913d02e3 | [
"MIT"
] | null | null | null | trainer.py | Ayush8120/PocketTanks | 9498b441e4fbac007879d8963da6edf3913d02e3 | [
"MIT"
] | 1 | 2022-02-11T21:31:03.000Z | 2022-02-12T00:56:54.000Z | trainer.py | Ayush8120/PocketTanks | 9498b441e4fbac007879d8963da6edf3913d02e3 | [
"MIT"
] | 2 | 2019-04-25T22:24:52.000Z | 2022-02-11T21:22:27.000Z | import os
import threading
import time
from collections import deque
import numpy as np
from threading import Thread
from agents.dqn_agent import DqnAgent
from main import App
# Number of games to play
from utils.logger import DataLogger
n_episodes = 10000  # number of games to play in total
save_period = 50  # Saves off every n episodes' model
batch_size = 32  # multiples of 2
state_size = 10  # length of the observation vector fed to the agent
action_size = 5  # 7 if we want to move, not doing that for now
output_dir = 'models/'  # where weight checkpoints are written
class Handler:
    """Synchronizes the training loop with the game thread.

    The game thread reports each step's outcome through ``callback``; the
    training loop blocks in ``wait_for_callback`` until that happens.

    Improvement over the original: ``wait_for_callback`` used a busy-wait
    loop (lock + flag polled every 0.1 ms), burning CPU; it now blocks on a
    ``threading.Event``. The public attributes and return values are unchanged.
    """

    def __init__(self):
        self.lock = threading.Lock()
        self.callback_triggered = False  # kept for interface compatibility
        self._event = threading.Event()  # set when a new result is available
        self.next_state = None
        self.reward = None
        self.game_over = None

    def callback(self, next_state, reward, game_over):
        """Record a step result and wake any waiter (called by game thread)."""
        with self.lock:
            self.callback_triggered = True
            self.next_state = next_state
            self.reward = reward
            self.game_over = game_over
            self._event.set()

    def wait_for_callback(self,):
        """Block until callback() fires; return (next_state, reward, game_over)."""
        self._event.wait()
        with self.lock:
            self.callback_triggered = False
            self._event.clear()
            return self.next_state, self.reward, self.game_over
# Setup our output dir
if not os.path.exists(output_dir):
    os.makedirs(output_dir)
# Create a game environment.  Handler bridges the game thread (which reports
# step results via its callback) and this training loop.
handler = Handler()
game = App(training_mode=True, ml_step_callback=handler.callback)
thread = Thread(target=game.on_execute)
thread.start()
# Create the agent
agent = DqnAgent(state_size, action_size, force_continue=True)  # Set true to continue with low epsilon and loaded model
# Create a data logger
logger = DataLogger(
    n_episodes,
    save_period,
    batch_size,
    state_size,
    action_size
)
# Let the game start up
time.sleep(5)
# Track some times
last_play_time = 0
last_train_time = 0
# Sliding window so we can check the winning rate, and see if its increasing
winners_window = []
window_size = int(n_episodes*0.1)
p1_win_ratio = 0
p2_win_ratio = 0
# Track winner count
winners = {}
# Play n_episodes count games
for e in range(n_episodes):  # iterate over new episodes of the game
    try:
        # Reset the state of the game with a restart, wait for it to take
        print("Resetting game state...")
        game.queue_ml_action(-1)  # -1 restarts, -2 quits
        _ = handler.wait_for_callback()
        state = np.reshape(game.get_game_state(), [1, state_size])
        game_over = False
        print("Reset. Starting game " + str(e))
        time_start = time.time()
        # LPT/LTT = last play / last train time, in minutes
        msg = "Game " + str(e + 1) + " of " + str(n_episodes) + ", LPT: " + \
            str(last_play_time) + ", LTT: " + str(last_train_time) + ", epsilon: " + str(agent.get_epsilon())
        game.show_message(msg)
        print(msg)
        for winner in winners:
            print(winner + " has " + str(winners[winner]) + " wins so far.")
        while not game_over:
            # print("**********************************************")
            # print("****************** NEW ROUND *****************")
            # print("**********************************************")
            # Make our agent act
            action = agent.act(state)
            # print("queue action: " + str(action))
            game.queue_ml_action(action)  # Sends the 'step' commanad
            # Get the next state, etc from the action
            # print("wait for next state")
            next_state, reward, game_over = handler.wait_for_callback()
            # print("handle next state")
            # Remember the action
            next_state = np.reshape(next_state, [1, state_size])
            agent.remember(state, action, reward, next_state, game_over)
            # Save off this round
            #logger.add_step({
            #    "state": state,
            #    "action": action,
            #    "reward": reward,
            #    "next_state": next_state,
            #    "game_over": game_over
            #})
            # Save the state as next state
            state = next_state
            if game_over:
                print("GAME OVER: " + game.get_winner().get_name() + " wins!")
                if game.get_winner().get_name() not in winners:
                    winners[game.get_winner().get_name()] = 1
                else:
                    winners[game.get_winner().get_name()] += 1
                winners_window.append(game.get_winner().get_name())
                print("episode: {}/{}, e: {:.2}"  # print the episode's score and agent's epsilon
                      .format(e, n_episodes, agent.get_epsilon()))
        game_end = time.time()
        # Train the agent off the game we just played
        if len(agent.get_memory()) > batch_size:
            agent.replay(batch_size)
        train_end = time.time()
        # Durations are stored in minutes, truncated to 4 decimal places
        last_play_time = (int((game_end-time_start) / 60 * 10000)) / 10000
        last_train_time = (int((train_end-game_end) / 60 * 10000)) / 10000
        print("Playing took: " + str(last_play_time) + " minutes.")
        print("Training took: " + str(last_train_time) + " minutes.")
        if len(winners_window) == window_size:
            # Window full: recompute the win ratios and start a fresh window
            win_count_1 = winners_window.count(game.get_player_1().get_name())
            win_count_2 = winners_window.count(game.get_player_2().get_name())
            p1_win_ratio = win_count_1/window_size
            p2_win_ratio = win_count_2/window_size
            winners_window = []
        print("Player 1 win ratio: " + str(p1_win_ratio))
        print("Player 2 win ratio: " + str(p2_win_ratio))
        logger.add_game({
            "winner": "Player 1" if game.get_winner() == game.get_player_1() else "Player 2",
            "play_time": last_play_time,
            "train_time": last_train_time,
            "epsilon": agent.get_epsilon(),
            "player_1_health": game.get_player_1().get_health(),
            "player_2_health": game.get_player_2().get_health(),
            "p1_win_ratio": p1_win_ratio,
            "p2_win_ratio": p2_win_ratio
        })
        # Save off every 50 episodes
        if e % save_period == 0:
            agent.save(output_dir + "weights_" + '{:04d}'.format(e + agent.restart_file_number_offset) + ".hdf5")
            logger.write_object_to_file()
        logger.add_any('winners', winners)
    except KeyboardInterrupt:
        # Ctrl-C stops training cleanly and falls through to shutdown below
        break
# End game
print("Ending game...")
game.queue_ml_action(-2)
print("Ended.")
print("Writing out log file...")
logger.write_object_to_file()
print("Log written")
print("Showing win graphs...")
logger.show_graphs()
print("Graphs closed.")
| 30.431193 | 120 | 0.590142 | 823 | 0.124058 | 0 | 0 | 0 | 0 | 0 | 0 | 1,837 | 0.276907 |
5e924cb1859e402ab872813f9e3537dccb1f09db | 2,840 | py | Python | tests/splinter/repeating_area/test_areas.py | jsfehler/stere | 066c38b11636850ce11a0451a7b1efc8887b1015 | [
"MIT"
] | 17 | 2017-12-11T15:49:10.000Z | 2021-06-16T19:29:45.000Z | tests/splinter/repeating_area/test_areas.py | jsfehler/stere | 066c38b11636850ce11a0451a7b1efc8887b1015 | [
"MIT"
] | 328 | 2018-02-06T02:29:21.000Z | 2022-03-30T10:47:33.000Z | tests/splinter/repeating_area/test_areas.py | jsfehler/stere | 066c38b11636850ce11a0451a7b1efc8887b1015 | [
"MIT"
] | 3 | 2019-04-05T19:40:55.000Z | 2021-11-04T06:50:32.000Z | import logging
import pytest
from selenium.webdriver.remote.remote_connection import LOGGER
from stere.areas import Area, Areas
LOGGER.setLevel(logging.WARNING)
def test_areas_append_wrong_type():
    """Appending a non-Area object to an Areas must raise TypeError."""
    collection = Areas()

    with pytest.raises(TypeError) as exc_info:
        collection.append('1')

    expected = '1 is not an Area. Only Area objects can be inside Areas.'
    assert str(exc_info.value) == expected
def test_areas_append():
    """Area objects can be appended to an Areas collection."""
    collection = Areas()
    collection.append(Area())

    assert len(collection) == 1
def test_areas_remove():
    """Areas.remove() behaves like list.remove()."""
    collection = Areas()
    member = Area()
    collection.append(member)
    collection.remove(member)

    assert len(collection) == 0
def test_areas_len():
    """Areas reports its length correctly."""
    collection = Areas(['1', '2', '3'])

    assert len(collection) == 3
def test_areas_containing_type(test_page):
    """Areas.containing() returns an Areas object."""
    test_page.navigate()

    result = test_page.repeating_area.areas.containing('link', 'Repeating Link 2')

    assert isinstance(result, Areas)
def test_areas_containing(test_page):
    """Areas.containing() returns valid results."""
    test_page.navigate()

    result = test_page.repeating_area.areas.containing('link', 'Repeating Link 2')

    assert result[0].text.value == 'Repeating Area 2'
def test_areas_containing_nested_attr(test_page):
    """Areas.containing() handles dotted attribute paths."""
    test_page.navigate()

    result = test_page.repeating_area.areas.containing('nested.ax', 'AX1')

    assert result[0].nested.ax.value == 'AX1'
def test_areas_containing_invalid_field_name(test_page):
    """containing() raises AttributeError for an unknown field name."""
    test_page.navigate()

    with pytest.raises(AttributeError) as exc_info:
        test_page.repeating_area.areas.containing(
            'lunk', 'Repeating Link 2')

    assert str(exc_info.value) == "'Area' object has no attribute 'lunk'"
def test_areas_containing_nested_attr_invalid_field_name(test_page):
    """containing() raises AttributeError for an unknown nested attribute."""
    test_page.navigate()

    with pytest.raises(AttributeError) as exc_info:
        test_page.repeating_area.areas.containing(
            'nested.cx', 'CX1')

    assert str(exc_info.value) == "'Area' object has no attribute 'cx'"
def test_areas_contain(test_page):
    """Areas.contain() returns True when a matching field value exists."""
    test_page.navigate()

    areas = test_page.repeating_area.areas
    assert areas.contain("link", "Repeating Link 1")
def test_areas_contain_not_found(test_page):
    """Areas.contain() returns False when no matching field value exists."""
    test_page.navigate()

    areas = test_page.repeating_area.areas
    assert not areas.contain("link", "Repeating Link 666")
5e94ee09424c092167e8b387cf605930cd27a367 | 1,388 | py | Python | tests/sqlstore_tests.py | tistaharahap/oauth1-provider | c7059bce68734744d0aa3b83ecb218865a5c1341 | [
"MIT"
] | 1 | 2017-06-26T07:36:03.000Z | 2017-06-26T07:36:03.000Z | tests/sqlstore_tests.py | tistaharahap/oauth1-provider | c7059bce68734744d0aa3b83ecb218865a5c1341 | [
"MIT"
] | null | null | null | tests/sqlstore_tests.py | tistaharahap/oauth1-provider | c7059bce68734744d0aa3b83ecb218865a5c1341 | [
"MIT"
] | null | null | null | import with_sql as sqlprovider
import unittest
class SQLStoreTestCase(unittest.TestCase):
    """Shared base case: boots a Flask test client against the SQL-store
    provider app and supplies common assertion-message helpers."""

    def setUp(self):
        # Flask test client for the SQL-backed OAuth provider
        sqlprovider.app.config['TESTING'] = True
        self.app = sqlprovider.app.test_client()

    def error_mime_json(self):
        # Assertion-message helpers shared by the subclasses below
        return "Return payload data must be a JSON String"

    def error_none(self):
        return "Return must not be None"

    def error_string(self):
        return "Return must be a JSON String"

    def error_200(self):
        return "Not returning HTTP 200"

    def error_404(self):
        return "Not returning HTTP 404"

    def get_unixtime(self):
        """Return the current Unix time in whole seconds."""
        import time
        return int(time.time())
class XAuthTestCase(SQLStoreTestCase):
    """xAuth token endpoint: requests without OAuth consumer credentials fail."""

    def test_app_is_not_none(self):
        self.assertIsNotNone(self.app, msg=self.error_none())

    def test_failed_without_oauth(self):
        # POSTing bare credentials (no consumer token) must yield HTTP 400
        post = self.app.post('/oauth/access_token', data=dict(
            username='username',
            password='password'
        ), follow_redirects=True)
        self.assertEqual(post.status_code, 400, msg='400 not given for naked auth without consumer token key')
class ProtectedResourceTestCase(SQLStoreTestCase):
    """Protected resources must reject unauthenticated requests with 403."""

    def test_user_profile_without_auth(self):
        get = self.app.get('/user/tista', follow_redirects=True)
        self.assertEqual(get.status_code, 403, msg='403 not given for naked auth without consumer token key')
5e9585f59d070272ad95cdcbd7dd8d78e2818f13 | 765 | py | Python | saph/users/views/auth.py | smallproblem/saph | eec9755ad26d5515f70604ebaa708583185649bb | [
"MIT"
] | null | null | null | saph/users/views/auth.py | smallproblem/saph | eec9755ad26d5515f70604ebaa708583185649bb | [
"MIT"
] | 2 | 2020-09-28T03:09:06.000Z | 2020-10-02T08:55:34.000Z | saph/users/views/auth.py | smallproblem/saph | eec9755ad26d5515f70604ebaa708583185649bb | [
"MIT"
] | null | null | null | from django.contrib.auth.models import User
from django.contrib.auth.views import LoginView
from django.contrib.auth.forms import AuthenticationForm
from django.views.generic import CreateView
from django.shortcuts import reverse, redirect
from users.forms import JoinusForm
class LoginView(LoginView):
    """Login page using Django's stock AuthenticationForm.

    NOTE(review): this class shadows the imported ``LoginView`` it subclasses.
    A rename (e.g. ``UserLoginView``) would be clearer, but is left alone here
    since URLconfs/other modules may import this name.
    """
    template_name = 'users/login.html'
    authentication_form = AuthenticationForm
class SignupView(CreateView):
    """Sign-up page: creates a Django user from JoinusForm, then redirects."""
    form_class = JoinusForm
    template_name = 'users/joinus.html'

    def form_valid(self, form):
        """Create the user with a hashed password and redirect to login.

        Overrides CreateView.form_valid entirely (the default object-save
        path is bypassed in favor of ``User.objects.create_user``).
        """
        username = form.cleaned_data['username']
        password = form.cleaned_data['password']
        # NOTE(review): create_user() already persists the user, so the
        # chained .save() re-saves it — likely redundant; confirm before removing.
        User.objects.create_user(
            username=username,
            password=password
        ).save()
        return redirect('login')
5e962450aea8934c38b6cac2187895b4964bca2f | 688 | py | Python | workers/portscanner.py | wotschel/Forker | 06f3b4cdcc55df8cea0ce57f6f3fa0cd507e192f | [
"MIT"
] | null | null | null | workers/portscanner.py | wotschel/Forker | 06f3b4cdcc55df8cea0ce57f6f3fa0cd507e192f | [
"MIT"
] | null | null | null | workers/portscanner.py | wotschel/Forker | 06f3b4cdcc55df8cea0ce57f6f3fa0cd507e192f | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
import socket
#debugon = False
#forks = 3
worklist = ["localhost", "127.0.0.1"]  # candidate targets (not used by the main guard below)
def worker(var):
    """Scan a fixed set of common TCP ports on host ``var``.

    Prints one line per port: OPEN, ERRConnRefused or ERRConnTimeout.

    :param var: hostname or IP address to scan
    :returns: 0 once the scan completes

    Fix over the original: each opened socket is now closed in its own
    iteration. Previously ``sock.close()`` ran once after the loop, so only
    the last connection was closed and every earlier open socket leaked.
    """
    ports = [21, 22, 25, 80, 110, 443, 445, 3306]
    # for port in range(1, 65536):
    for port in ports:
        sock = None
        try:
            # 5-second connect timeout per port
            sock = socket.create_connection((var, port), 5)
            print("{} - {} - OPEN".format(var, port))
        except ConnectionRefusedError:
            print("{} - {} - ERRConnRefused".format(var, port))
        except socket.timeout:
            print("{} - {} - ERRConnTimeout".format(var, port))
        finally:
            # Always release the socket before probing the next port.
            if sock:
                sock.close()
    return(0)
if __name__ == "__main__":
    # Scan localhost when run as a script.
    worker("127.0.0.1")
| 19.657143 | 63 | 0.540698 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 189 | 0.274709 |
5e964e9a43d5e710febe1660966f2e83ba9afc1c | 2,940 | py | Python | bin/sm_mp_incd.py | pjmartel/consequent | fa0a41e056a6b240d41819a8e5c310a9057fa7e9 | [
"MIT"
] | null | null | null | bin/sm_mp_incd.py | pjmartel/consequent | fa0a41e056a6b240d41819a8e5c310a9057fa7e9 | [
"MIT"
] | null | null | null | bin/sm_mp_incd.py | pjmartel/consequent | fa0a41e056a6b240d41819a8e5c310a9057fa7e9 | [
"MIT"
] | 1 | 2019-10-24T18:25:59.000Z | 2019-10-24T18:25:59.000Z | import numpy as np
from random import sample, seed
#import matplotlib.pyplot as plt
from sys import argv, stdout
#from scipy.stats import gumbel_r
from score_matrix import readScoreMatrix, getMatrix
from seqali import smithWaterman, smithFast, plotMat, plotTraceMat
from multiprocessing import Process, Manager
def scrambler_aligner(pn, ssd, N, sa, sb, ms, go, ge):
    """Worker process: align N shuffled variants of ``sa`` against ``sb``.

    Each pass reshuffles the current sequence, runs Smith-Waterman (fast
    variant) with the given score matrix and gap penalties, and records the
    score. Results are stored in the shared dict ``ssd`` under key ``pn``.
    """
    seed()
    scramble_scores = []
    for _ in range(N):
        sa = "".join(sample(sa, len(sa)))
        score, alignment, align_mat, trace_mat = smithFast(
            sa, sb, ms, gapO=go, gapE=ge)
        scramble_scores.append(score)
    ssd[pn] = scramble_scores
# Test sequences (alternates kept for experimentation)
#seqB = "HEAGAWGHEE"
#seqA = "PAWHEAE"
# seqB = "GVTAH"
# seqA = "AVTLI"
seqB = "MVLSPADKTNVKAAWGKVGAHAGEYGAEALERMFLSFPTTKTYFPHFDLSHGSAQVKGHG"
seqA = "MVHLTPEEKSAVTALWGKVNVDEVGGEALGRLLVVYPWTQRFFESFGDLSTPDAVMGNPK"
#seqB = "MVLSPADKTNVKAAWGKVGAHAGEYG"
#seqA = "MVHLTPEEKSAVTALWGKVNVDEVGG"
# Affine gap penalties
gapOpen = -10
gapExtend = -1
#gapOpen = -8
#gapExtend = -8
matrix = "BLOSUM50"
# Number of scrambled alignments per worker process (argv[1], default 100;
# 0 means unscrambled alignment only)
if(len(argv) > 1):
    N = int(argv[1])
else:
    N = 100
# init score matrix
#matScore = np.zeros((26, 26), dtype=np.int8)
#readMat("blosum50.txt", matScore)
readScoreMatrix(matrix)
matScore = getMatrix()
# Calculate unscrambled aligment and score
s, a, ma, ta = smithWaterman(
    seqA, seqB, matScore, gapO=gapOpen, gapE=gapExtend)
ua = a
uscore = s
print("Scoring matrix: ", matrix)
print("Unscrambled score:", uscore)
print("Unscrambled identity: {:.2%}".format(sum([ua[0][i] == ua[1][i] and
                                                 ua[0][i] != '-' for i in range(len(ua[0]))])/len(ua[0])))
print("Unscrambled alignment:")
print("SeqA - ", ua[0])
print("SeqB - ", ua[1])
print()
if N == 0:
    exit(0)
print("Calculating distribution of scrambled alignment scores.")
# Fan out the scramble/align work over 4 processes, collecting the score
# lists through a Manager dict keyed by process number.
proc_count = 4
procs = []
sscores_dict = Manager().dict()
for i in range(proc_count):
    proc = Process(target=scrambler_aligner, args=(i, sscores_dict, N, seqA, seqB, matScore, gapOpen, gapExtend))
    procs.append(proc)
    proc.start()
for proc in procs:
    proc.join()
# Flatten the per-process score lists into one list
sscores = sum(sscores_dict.values(), [])
N = len(sscores)  # for 4 cores its 4 times the initial value
# Fit extreme value distribution to data.
# BUG FIX: `miu` and `beta` are printed below, but the fit that defines them
# had been commented out, raising NameError. Restore the Gumbel fit.
from scipy.stats import gumbel_r  # local import; the module-level one was commented out
miu, beta = gumbel_r.fit(sscores)
print("Length of sscores: ", len(sscores))
print("Calculed histogram for {} scramble scores".format(N))
print("Max scrambled score:", max(sscores))
print("Min scrambled score:", min(sscores))
print("Median of scrambled scores:", np.median(sscores))
print("Gumbel miu:", miu)
print("Gumbel beta:", beta)
print()
# print("Aligment matrix:")
# np.savetxt(sys.stdout, ma, fmt="%3d")
print("Saving data to", "'smith_{}_{}_{}_{:3.1f}_{:3.1f}.npy'".format(
    N, len(seqA), matrix, abs(gapOpen), abs(gapExtend)))
np.save("smith_{}_{}_{}_{:3.1f}_{:3.1f}".format(
    N, len(seqA), matrix, abs(gapOpen), abs(gapExtend)), sscores)
5e97096c2259e87531d4203f6d5a602384f504c8 | 883 | py | Python | Grayscale Image Denoising/noise.py | Yemen-Romanian/pattern-recognition | cb0ab31e590b071c57c2e89ec85f2383375860e3 | [
"MIT"
] | null | null | null | Grayscale Image Denoising/noise.py | Yemen-Romanian/pattern-recognition | cb0ab31e590b071c57c2e89ec85f2383375860e3 | [
"MIT"
] | null | null | null | Grayscale Image Denoising/noise.py | Yemen-Romanian/pattern-recognition | cb0ab31e590b071c57c2e89ec85f2383375860e3 | [
"MIT"
] | null | null | null | import numpy as np
def gaussian_noise(image, mean=0, var=1):
    """Add Gaussian noise to a 2-D grayscale image.

    :param image: 2-D array (grayscale); treated as uint8 intensities
    :param mean: mean of the noise distribution
    :param var: variance of the noise distribution
    :returns: uint8 array of the same shape with noise added, clipped to [0, 255]

    Fix over the original: values are clipped to [0, 255] before casting.
    The bare ``astype(np.uint8)`` wrapped out-of-range values (256 -> 0,
    -1 -> 255), turning Gaussian noise into salt-and-pepper artifacts —
    the commented-out clamping lines showed this was a known problem.
    """
    n_rows, n_cols = image.shape
    noise = np.random.normal(mean, var**0.5, (n_rows, n_cols))
    noise = noise.reshape((n_rows, n_cols))
    # Work in float so the sum cannot wrap, then clip and cast.
    noisy = image.astype(np.float64) + noise
    result = np.clip(noisy, 0, 255).astype(np.uint8)
    return result
def salt_and_pepper_noise(image, sp=0.5, p=0.04):
    """Add salt-and-pepper noise to an image.

    :param image: input array (any shape)
    :param sp: salt/pepper balance in [0, 1] (fraction of noisy pixels set white)
    :param p: overall fraction of pixels to corrupt
    :returns: uint8 copy of the image with random pixels forced to 255/0

    Fix over the original: the per-axis coordinate arrays are now passed as a
    *tuple* index. Indexing with a plain list of arrays was deprecated in
    NumPy 1.15 and in current NumPy is treated as a single axis-0 index,
    which would overwrite whole rows instead of individual pixels.
    """
    out = np.copy(image)
    # Salt mode: set num_salt random pixels to white.
    num_salt = np.ceil(p * image.size * sp)
    coords = tuple(np.random.randint(0, i - 1, int(num_salt))
                   for i in image.shape)
    out[coords] = 255
    # Pepper mode: set num_pepper random pixels to black.
    num_pepper = np.ceil(p * image.size * (1. - sp))
    coords = tuple(np.random.randint(0, i - 1, int(num_pepper))
                   for i in image.shape)
    out[coords] = 0
    return out.astype(np.uint8)
| 27.59375 | 62 | 0.602492 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 147 | 0.166478 |
5e9a150ff611f08ec820a625c4ec922654b7d1ab | 12,223 | py | Python | tweet.py | Mar199605/sentiment_visual | afee54ec4b2ed33f87927d1740cb8ad6cee3ad10 | [
"MIT"
] | null | null | null | tweet.py | Mar199605/sentiment_visual | afee54ec4b2ed33f87927d1740cb8ad6cee3ad10 | [
"MIT"
] | null | null | null | tweet.py | Mar199605/sentiment_visual | afee54ec4b2ed33f87927d1740cb8ad6cee3ad10 | [
"MIT"
] | null | null | null | import os
import time
import csv
import json
import re
import twint
from cleantext import clean
from textblob import TextBlob
from google.cloud import translate_v2
os.environ['GOOGLE_APPLICATION_CREDENTIALS'] = r"C:\\Users\\ht_ma\\env\\service-account-file.json" # Key needed
translate_client_0 = translate_v2.Client()
## Another translator
# from googletrans import Translator
# translate_client_1 = Translator()
def search(squares):
    """Run the full pipeline for a set of map squares: scrape tweets near
    each square, sentiment-score them, attach square metadata, and return
    per-location averaged results.

    :param squares: JSON string of square dicts (id/x/y/lon/lat/code)
    :returns: JSON string of the simplified, per-location sentiment rows

    Fix over the original: ``jsonString_2 = row_to_json(row_2)`` had been
    commented out while the line below still used ``jsonString_2``, raising
    NameError on every call. The assignment is restored.
    """
    start_time = time.time()
    print(f"\n--- {'{:.2f}'.format((start_time - start_time))} seconds ---")
    # Parse the squares payload and keep a raw copy on disk for debugging
    squares = json.loads(squares)
    jsonWriter(json.dumps(squares, indent=4), "data\\raw.json")
    # Build "lat,lon" geo strings for the twint search
    geos = geo_locations(squares)
    # Search tweets around each location
    limit = 20
    search_tweets(geos, limit, "data\\tweets_raw.csv", "600km", 1)
    # Clean and sentiment-score the raw tweets
    row_1 = process_tweets_row("data\\tweets_raw.csv", limit=10, translate=False, show=True)
    print("Process result complete")
    # jsonString_1 = row_to_json(row_1)
    # jsonWriter(jsonString_1, "data\\tweets_1.json")
    # Attach square coordinates/codes to each tweet row
    row_2 = add_info_row(row_1, squares)
    print("Add more info complete")
    jsonString_2 = row_to_json(row_2)
    jsonWriter(jsonString_2, "data\\tweets_2.json")
    # Collapse rows to running per-location sentiment averages
    row_3 = simplify_row(row_2, average=True)
    print("Simplify complete")
    jsonString_3 = row_to_json(row_3)
    jsonWriter(jsonString_3, "data\\tweets_end.json")
    # Output Json
    print("Output complete")
    # Record Time
    print(f"--- {'{:.2f}'.format((time.time() - start_time))} seconds ---")
    # with open('data\\tweets_end_0.json', 'r') as myfile:
    #     jsonString_3 =myfile.read()
    # time.sleep(2)
    return jsonString_3
def csvWriter(rows, outputCsvPath):
    """Write ``rows`` (an iterable of row sequences) to a UTF-8 CSV file."""
    with open(outputCsvPath, "w", newline="", encoding="utf-8") as handle:
        csv.writer(handle).writerows(rows)
    return
def remove_content(text):
    """Strip URLs, .com links, @mentions and question marks from *text*."""
    # Apply the removal patterns in a fixed order: bare URLs, .com links,
    # then @mentions.
    for pattern in (r"http\S+", r"\S+\.com\S+", r"\@\w+"):
        text = re.sub(pattern, "", text)
    return text.replace("?", "")
def text_clean(text):
    """Normalize tweet text with ``cleantext.clean`` before sentiment scoring.

    Lowercases, collapses line breaks, and strips URLs, emails, phone
    numbers, numbers, digits, currency symbols and punctuation (each
    replaced with the empty string). Non-ASCII characters are preserved
    (``to_ascii=False``) so the text can still be translated afterwards.
    """
    text = clean(text,
                 fix_unicode=True,  # fix various unicode errors
                 to_ascii=False,  # transliterate to closest ASCII representation
                 lower=True,  # lowercase text
                 # fully strip line breaks as opposed to only normalizing them
                 no_line_breaks=True,
                 no_urls=True,  # replace all URLs with a special token
                 no_emails=True,  # replace all email addresses with a special token
                 no_phone_numbers=True,  # replace all phone numbers with a special token
                 no_numbers=True,  # replace all numbers with a special token
                 no_digits=True,  # replace all digits with a special token
                 no_currency_symbols=True,  # replace all currency symbols with a special token
                 no_punct=True,  # remove punctuations
                 replace_with_punct="",  # instead of removing punctuations you may replace them
                 replace_with_url="",
                 replace_with_email="",
                 replace_with_phone_number="",
                 replace_with_number="",
                 replace_with_digit="",
                 replace_with_currency_symbol="",
                 lang="en"  # set to 'de' for German special handling
                 )
    return text
def row_to_json(rows):
    """Convert a header-plus-data row table into a pretty-printed JSON array.

    The first row supplies the keys; every following row becomes one object.
    An empty table serializes to "[]".
    """
    if not rows:
        return json.dumps([], indent=4)
    header = rows[0]
    records = [dict(zip(header, data_row)) for data_row in rows[1:]]
    return json.dumps(records, indent=4)
def jsonWriter(jsonString, jsonFilePath):
    """Write an already-serialized JSON string to ``jsonFilePath`` (UTF-8)."""
    with open(jsonFilePath, 'w', encoding='utf-8') as out_file:
        out_file.write(jsonString)
def read_squares(csvFilePath):
    """Read square definitions from a CSV file (header skipped).

    Each data row's first six columns map to the keys
    id, x, y, lon, lat, code; returns a list of those dicts.
    """
    with open(csvFilePath, "r") as csvfile:
        reader = csv.reader(csvfile)
        next(reader)  # skip the header row
        return [
            {
                "id": row[0],
                "x": row[1],
                "y": row[2],
                "lon": row[3],
                "lat": row[4],
                "code": row[5],
            }
            for row in reader
        ]
def geo_locations(list):
    """Build twint-style geo strings from square dicts.

    Returns one single-item list per square, each holding "lat,lon".
    """
    return [
        ["{},{}".format(str(square["lat"]), str(square["lon"]))]
        for square in list
    ]
def search_tweets(geos, limit, outputPath, radius, error_interval):
    """Run a twint search around each geo point, appending results to a CSV.

    :param geos: list of single-item lists, each "lat,lon" (see geo_locations)
    :param limit: twint result limit per location
    :param outputPath: CSV file twint stores rows into (id, geo, username, tweet)
    :param radius: search radius string, e.g. "600km"
    :param error_interval: seconds to sleep between quick retries
    """
    for geo in geos:
        c = twint.Config()
        c.Limit = limit
        c.Output = outputPath
        c.Custom["tweet"] = ["id", "geo", "username", "tweet"]
        c.Store_csv = True
        c.Geo = str(geo[0]) + "," + str(radius)
        success = False
        retries = 0
        # Retry until the search succeeds: quick retries (error_interval s)
        # for the first 20 attempts, then back off to 10 s waits.
        while not success:
            if retries < 20:
                try:
                    twint.run.Search(c)
                    success = True
                except:
                    print("retrying", retries)
                    time.sleep(error_interval)  # wait for token
                    retries += 1
            else:
                try:
                    twint.run.Search(c)
                    success = True
                except:
                    print("retrying_wait", retries)
                    time.sleep(10)  # wait for token
                    retries += 1
def sentiment_analyse(text, translate, show):
    """Clean (and optionally translate) a tweet, then score it with TextBlob.

    :param text: raw tweet text
    :param translate: when True, translate the cleaned text to English via
        the Google Translate client before scoring
    :param show: when True, print the original text, translation and sentiment
    :returns: list of [TextBlob sentiment namedtuple, character count of the
        cleaned text, translated text or None]
    """
    text_count = 0
    text_origin = text
    # Strip URLs/mentions, then fully normalize before scoring
    text = remove_content(text)
    text = text_clean(text)
    text_count += len(text)
    text_translated = None
    # google api translation
    if translate and text != '' and text != None:
        text_trans = translate_client_0.translate(
            text, "en")['translatedText']  # translation
        text = text_trans
        text_translated = text
    # if translate and text != '' and text != None:
    #     # translation and error detect
    #     success = False
    #     retries = 0
    #     while not success and retries <= 10:
    #         try:
    #             # translate_client_1= Translator(service_urls=['translate.google.com','translate.google.co.jp','translate.google.co.kr','translate.google.ca'])
    #             text_trans = translate_client_1.translate_1(text, "en") # translation
    #             if text_trans == text:
    #                 raise Exception("same result")
    #             text = text_trans.text
    #             text_translated = text
    #             success = True
    #         except:
    #             if retries < 3:
    #                 time.sleep(1)
    #             else:
    #                 time.sleep(10)
    #             retries += 1
    #             print(f"Error text = {text}")
    #             print(f"Retry {retries} times")
    # Score with TextBlob; sentiment is a (polarity, subjectivity) namedtuple
    blob = TextBlob(text)
    sent_result = blob.sentiment
    result = [sent_result, text_count, text_translated]
    if show:
        print(f"origin={text_origin}", f"\ntranslation = {result[2]}",
              f"\n{result[0]}", f"characters = {result[1]}", end='\n\n')
    return result
def process_tweets_row(inputCsvPath, limit, translate, show):
    """Read twint's raw CSV and append translation + sentiment columns.

    At most ``limit`` tweets are scored per geo location; rows beyond that
    limit are dropped. Returns the resulting rows, header first.

    :param inputCsvPath: CSV produced by search_tweets (id, geo, username, tweet)
    :param limit: max tweets kept per location
    :param translate: forwarded to sentiment_analyse
    :param show: forwarded to sentiment_analyse (prints per-tweet details)
    """
    row_0 = []
    with open(inputCsvPath, "r", encoding="unicode_escape") as read_obj:
        csv_reader = csv.reader(read_obj)
        limit = limit
        count = 0  # tweets kept for the current location
        geo_0 = ''  # geo value of the current row
        geo_1 = 'geo'  # geo value of the previous row ('geo' = header sentinel)
        text_count = 0  # total cleaned characters scored (translation budget)
        for row in csv_reader:
            if len(row) == 4:
                geo_0 = str(row[1])
                if geo_1 == 'geo' and count == 0:
                    # Header row: extend it with the two new column names
                    row.append("translation")
                    row.append("sentiment")
                    row_0.append(row)
                elif count < limit and geo_1 != 'geo':
                    analyse_result = sentiment_analyse(
                        row[3], translate, show)  # sentimental analyse text
                    text_count += analyse_result[1]
                    text_sent = analyse_result[0].polarity
                    text_trans = analyse_result[2]  # add translation
                    row.append(text_trans)  # add translation to row
                    row.append(text_sent)  # add sentiment to row
                    row_0.append(row)
                if geo_1 != geo_0:
                    # New location: reset the per-location counter
                    count = 0
                    geo_1 = geo_0
                else:
                    count += 1
    print(f"Charactors in total = {text_count}")
    return row_0
def add_info_row(rows, list):
    """Append x/y/code/raw_id columns to each tweet row by matching its
    geo "lat,lon" string against the squares in ``list``.

    Mutates ``rows`` in place and returns it. Rows whose lat/lon match no
    square are left unextended (an "error" line is printed).

    :param rows: header-first rows from process_tweets_row (geo in column 1)
    :param list: square dicts with lat/lon/x/y/code/id keys
    """
    squares = list
    count = 0
    for row in rows:
        if count == 0:
            # Header row: add the new column names
            row.append("x")  # add x column
            row.append("y")  # add y column
            row.append("code")  # add code column
            row.append("raw_id")  # add code column
            count += 1
        else:
            # Split the "lat,lon" geo string back into its parts
            geo = row[1]
            geo_lon = geo.split(",")[1]
            geo_lat = geo.split(",")[0]
            x = None
            y = None
            code = None
            raw_id = None
            success = False
            # Linear scan for the square with matching coordinates
            for item in squares:
                if str(item['lat']) == str(geo_lat) and str(item['lon']) == str(geo_lon):
                    x = item['x']
                    y = item['y']
                    code = item['code']
                    raw_id = item['id']
                    row.append(x)
                    row.append(y)
                    row.append(code)
                    row.append(raw_id)
                    success = True
                    break
            if not success:
                # print(row)
                print("error")
                continue
            count += 1
    return rows
def simplify_row(rows, average):
    """Append a running per-location average-sentiment column to each row.

    Rows are assumed grouped by location (column 1, "lat,lon[,radius]");
    the geo value is normalized to just "lat,lon". Mutates ``rows`` in
    place and returns it.

    NOTE(review): only the header respects the ``average`` flag — data rows
    always get the running average appended; confirm that is intended.
    """
    count = 0  # rows seen for the current location (0 also marks the header)
    geo_0 = ''  # geo of the current row
    geo_1 = 'geo'  # geo of the previous row ('geo' = header sentinel)
    t_sent = 0  # running sentiment total for the current location
    # delete = []
    # delete.append(rows[0].index('username')) # remove username
    # delete.append(rows[0].index('tweet')) # remove tweet
    # delete.append(rows[0].index('translation')) # remove translation
    # delete.sort(reverse=True)
    # move = []
    # move.append(rows[0].index('sentiment')-len(delete)) # move sentiment
    for row in rows:
        geo = None
        geo_0 = str(row[1])
        # for i in delete: # remove
        #     row.remove(row[i])
        # change sentiment location
        # for i in move:
        #     sen = row[i]
        #     row.remove(row[i])
        #     row.insert(len(row), sen)
        if geo_1 == 'geo' and count == 0:
            # Header row: just add the new column name (when requested)
            if average:
                row.append("ave_sent")
            count += 1
            continue
        # Normalize geo to "lat,lon" (drops any trailing radius component)
        geo = str(row[1]).split(',')
        geo = geo[0]+','+geo[1]
        row[1] = geo
        if geo_1 != geo_0:  # new location
            t_sent = 0  # sentimental counter
            t_sent += float(row[rows[0].index('sentiment')])  # calculate total
            count = 0  # same location counter
            ave_sent = t_sent/(count+1)  # calculate average
            row.append(ave_sent)  # add average
            geo_1 = geo_0
        else:  # resume location
            count += 1
            t_sent += float(row[rows[0].index('sentiment')])
            ave_sent = t_sent/(count+1)
            row.append(ave_sent)
    return rows
if __name__ == "__main__":
    # NOTE(review): search() requires a `squares` JSON string argument, so
    # this call raises TypeError as written — confirm the intended entry point.
    search()
| 31.421594 | 162 | 0.502659 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3,908 | 0.319725 |
5e9b0b809eefab660e738e3c900de340f11bd12d | 2,792 | py | Python | hydroserver/physical_interfaces/camera_controller.py | gfvandehei/hydroponics-rpi-server | e9c0cdbb2a72f908dae299f28d00dad23d7b5bb6 | [
"MIT"
] | null | null | null | hydroserver/physical_interfaces/camera_controller.py | gfvandehei/hydroponics-rpi-server | e9c0cdbb2a72f908dae299f28d00dad23d7b5bb6 | [
"MIT"
] | null | null | null | hydroserver/physical_interfaces/camera_controller.py | gfvandehei/hydroponics-rpi-server | e9c0cdbb2a72f908dae299f28d00dad23d7b5bb6 | [
"MIT"
] | null | null | null | import time
import cv2
from threading import Thread
import numpy as np
from hydroserver.physical_interfaces.camera_streamer import CameraStreamer
from hydroserver.physical_interfaces.camera_storage import CameraStore
import hydroserver.model.model as Model
class CameraController(Thread):
    """Controls a single camera connected to the system.

    Continuously reads frames from the camera (selected by its system index),
    publishes each frame to the live stream, and hands it to the storage
    backend (which decides whether to persist it, e.g. for a timelapse).

    :param camera_store: handles storage of images from the camera
    :type camera_store: CameraStore
    :param camera_stream: handles instantaneous camera data and its
        interactions with other objects
    :type camera_stream: CameraStreamer
    :param camera_db_object: database record for the camera; its `index`
        selects the video device on the system
    :type camera_db_object: Model.Camera
    """

    def __init__(
            self,
            camera_store: CameraStore,
            camera_stream: CameraStreamer,
            camera_db_object: Model.Camera):
        Thread.__init__(self)
        self.image_store = camera_store
        self.image_stream = camera_stream
        self.camera_index = camera_db_object.index
        self.camera_db_obj = camera_db_object
        #self.rawCapture = PiRGBArray(self.camera, size=(640, 480))
        self.camera = cv2.VideoCapture(self.camera_index)
        self.most_recent_image = None
        # Seconds between frame grabs; adjustable via update_refresh_rate().
        self._refresh_rate = 1
        # sleep for a 1/10 second to allow camera to start up
        time.sleep(.1)

    def run(self):
        """Thread body: read frames forever and fan them out.

        :extends Thread.run
        """
        while True:
            ret, frame = self.camera.read()
            # Fix: skip the iteration if the read failed — `frame` would be
            # None (or garbage) and np.flip(None) raises.
            if not ret:
                time.sleep(self._refresh_rate)
                continue
            frame: np.ndarray
            # Camera is mounted upside-down. NOTE(review): np.flip with no
            # axis flips *all* axes, including the color channels — confirm
            # this is intended rather than a pure 180-degree rotation.
            frame = np.flip(frame)
            # Publish to the live stream.
            self.image_stream.add_new_image(frame)
            # Let the store decide whether the frame should be persisted.
            self.image_store.save_image(frame)
            time.sleep(self._refresh_rate)

    def update_refresh_rate(self, new_refresh: float):
        """Changes the time between frame grabs.

        :param new_refresh: the new refresh rate in seconds
        :type new_refresh: float
        """
        self._refresh_rate = new_refresh

    def json(self):
        """Returns serializable information about the camera for web responses.

        :return: a serializable dict representing the camera
        :rtype: Dict[str, Any]
        """
        as_dict = self.camera_db_obj.__dict__.copy()
        # Strip SQLAlchemy bookkeeping state before serializing.
        del as_dict["_sa_instance_state"]
        as_dict['refresh_rate'] = self._refresh_rate
        return as_dict
| 35.794872 | 88 | 0.63861 | 2,522 | 0.903295 | 0 | 0 | 0 | 0 | 0 | 0 | 1,306 | 0.467765 |
5e9b711115f13a486c2ea830660e274a63129004 | 478 | py | Python | src/bpmn_python/graph/classes/events/intermediate_catch_event_type.py | ToJestKrzysio/ProcessVisualization | 9a359a31816bf1be65e3684a571509e3a2c2c0ac | [
"MIT"
] | null | null | null | src/bpmn_python/graph/classes/events/intermediate_catch_event_type.py | ToJestKrzysio/ProcessVisualization | 9a359a31816bf1be65e3684a571509e3a2c2c0ac | [
"MIT"
] | null | null | null | src/bpmn_python/graph/classes/events/intermediate_catch_event_type.py | ToJestKrzysio/ProcessVisualization | 9a359a31816bf1be65e3684a571509e3a2c2c0ac | [
"MIT"
] | null | null | null | # coding=utf-8
"""
Class used for representing tIntermediateCatchEvent of BPMN 2.0 graph
"""
import graph.classes.events.catch_event_type as catch_event
class IntermediateCatchEvent(catch_event.CatchEvent):
    """Represents the tIntermediateCatchEvent element of a BPMN 2.0 graph."""

    def __init__(self):
        """Initialize all fields with fresh instances via the base class."""
        super().__init__()
| 26.555556 | 74 | 0.715481 | 322 | 0.67364 | 0 | 0 | 0 | 0 | 0 | 0 | 266 | 0.556485 |
5e9c05834cf6ad1608c5d29f26bb72785dc3ceb3 | 45 | py | Python | pyIOS/exceptions.py | jtdub/pyIOS | 1842b92068e3b0a980d53e0719efd41dbbdaf082 | [
"Apache-2.0"
] | 12 | 2016-01-09T17:47:05.000Z | 2022-02-09T18:09:41.000Z | pyIOS/exceptions.py | jtdub/pyIOS | 1842b92068e3b0a980d53e0719efd41dbbdaf082 | [
"Apache-2.0"
] | 16 | 2016-01-05T15:49:31.000Z | 2016-08-04T20:59:15.000Z | pyIOS/exceptions.py | jtdub/pyIOS | 1842b92068e3b0a980d53e0719efd41dbbdaf082 | [
"Apache-2.0"
] | 1 | 2016-04-06T16:00:32.000Z | 2016-04-06T16:00:32.000Z | class InvalidInputError(Exception):
pass
| 15 | 35 | 0.777778 | 44 | 0.977778 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
5e9cfe480f48cebb3ee33cedf5c2da8409e69016 | 71 | py | Python | purestorage/__init__.py | sile16/rest-client | 01604e00e8a64157e056fca614d320c3afd0f2d1 | [
"BSD-2-Clause"
] | 20 | 2018-10-26T01:33:15.000Z | 2022-03-31T19:56:08.000Z | purestorage/__init__.py | sile16/rest-client | 01604e00e8a64157e056fca614d320c3afd0f2d1 | [
"BSD-2-Clause"
] | 15 | 2018-08-09T20:42:21.000Z | 2022-01-14T15:59:58.000Z | purestorage/__init__.py | sile16/rest-client | 01604e00e8a64157e056fca614d320c3afd0f2d1 | [
"BSD-2-Clause"
] | 16 | 2018-10-22T18:31:42.000Z | 2021-08-09T15:33:35.000Z | from .purestorage import FlashArray, PureError, PureHTTPError, VERSION
| 35.5 | 70 | 0.84507 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
5e9d51222054c4d9f5cea79d0046024a1b6003ba | 28,600 | py | Python | gerryopt/compile.py | pjrule/gerryopt | c9b5abca3b27caa39c9f86f904e1207eafc8751c | [
"MIT"
] | null | null | null | gerryopt/compile.py | pjrule/gerryopt | c9b5abca3b27caa39c9f86f904e1207eafc8751c | [
"MIT"
] | null | null | null | gerryopt/compile.py | pjrule/gerryopt | c9b5abca3b27caa39c9f86f904e1207eafc8751c | [
"MIT"
] | 1 | 2022-01-12T20:54:09.000Z | 2022-01-12T20:54:09.000Z | """Compiler/transpiler for the GerryOpt DSL."""
import ast
import json
import inspect
from copy import deepcopy
from textwrap import dedent
from dataclasses import dataclass, field, is_dataclass, asdict
from enum import Enum
from itertools import product
from typing import (Callable, Iterable, Sequence, Set, Dict, List, Union, Any,
Optional, Tuple, get_args, get_origin)
from gerrychain import Graph
from gerrychain.updaters import Tally
from gerryopt.vector import Vec
# Primitive value types supported by the DSL (also the only types that may be
# substituted from a closure).
PRIMITIVE_TYPES = [int, float, bool]
# Statement forms explicitly rejected by `DSLValidationVisitor`.
DSL_DISALLOWED_STATEMENTS = {
    ast.AsyncFunctionDef, ast.ClassDef, ast.Delete, ast.For, ast.AsyncFor,
    ast.While, ast.With, ast.AsyncWith, ast.Raise, ast.Try, ast.Assert,
    ast.Import, ast.ImportFrom, ast.Global, ast.Nonlocal, ast.Expr, ast.Pass,
    ast.Break, ast.Continue
}
# Expression forms explicitly rejected by `DSLValidationVisitor`.
DSL_DISALLOWED_EXPRESSIONS = {
    ast.Dict, ast.Set, ast.ListComp, ast.SetComp, ast.DictComp,
    ast.GeneratorExp, ast.Await, ast.Yield, ast.YieldFrom, ast.FormattedValue,
    ast.JoinedStr, ast.Starred, ast.List, ast.Tuple
}
# A primitive runtime value.
Primitive = Union[int, float, bool]
# Mapping of updater name -> updater callable (GerryChain tallies).
Updaters = Dict[str, Callable]
class CompileError(Exception):
    """Raised when a function cannot be lowered to a GerryOpt AST."""
class DSLValidationVisitor(ast.NodeVisitor):
    """AST visitor that rejects constructs outside the GerryOpt DSL.

    Walks the tree and raises `CompileError` at the first statement or
    expression form the DSL explicitly disallows.
    """

    def generic_visit(self, node):
        """Checks `node` against the disallow lists, then recurses."""
        node_type = type(node)
        if node_type in DSL_DISALLOWED_STATEMENTS:
            raise CompileError('Encountered statement outside of GerryOpt DSL '
                               f'(statement type {node_type}).')
        if node_type in DSL_DISALLOWED_EXPRESSIONS:
            raise CompileError(
                'Encountered expression outside of GerryOpt DSL '
                f'(expression type {node_type}).')
        ast.NodeVisitor.generic_visit(self, node)
class AssignmentNormalizer(ast.NodeTransformer):
    """AST transformer for normalizing augmented and annotated assignments.
    In general Python, augmented assignments are not *just* syntactic sugar for
    assignments. However, for the purposes of the GerryOpt DSL, we treat them
    as syntactic sugar. Type annotations are not relevant to the GerryOpt DSL,
    as the type system is quite simple, so we simply strip them without validating
    them. Multiple-target assignment (e.g. `x, y = y, x`) is not allowed.
    """
    def visit_Assign(self, node: ast.Assign) -> ast.Assign:
        # Reject tuple-unpacking targets; plain assignments pass through.
        if isinstance(node.targets[0], ast.Tuple):
            # TODO
            raise CompileError(
                'Multiple-target assignment not supported by the GerryChain DSL.'
            )
        return node
    def visit_AugAssign(self, node: ast.AugAssign) -> ast.Assign:
        # Rewrite `x <op>= v` as `x = x <op> v`.
        # NOTE(review): `node.target.id` assumes the target is a simple name;
        # attribute/subscript targets would raise AttributeError here —
        # confirm those forms are rejected elsewhere in the DSL.
        return ast.Assign(targets=[node.target],
                          value=ast.BinOp(left=ast.Name(id=node.target.id,
                                                        ctx=ast.Load()),
                                          op=node.op,
                                          right=node.value),
                          type_comment=None)
    def visit_AnnAssign(self, node: ast.AnnAssign) -> ast.Assign:
        # Drop the annotation: `x: T = v` becomes `x = v`.
        return ast.Assign(targets=[node.target],
                          value=node.value,
                          type_comment=None)
class LoadedNamesVisitor(ast.NodeVisitor):
    """AST visitor that collects the names read (loaded) in a tree."""

    def __init__(self, *args, **kwargs):
        self.loaded = set()
        super().__init__(*args, **kwargs)

    def visit_Name(self, node):
        """Records `node.id` when the name appears in a load context."""
        if not isinstance(node.ctx, ast.Load):
            return
        self.loaded.add(node.id)
class ClosureValuesTransformer(ast.NodeTransformer):
    """AST transformer that inlines captured values as constants.

    Replaces loads of names in `vals` with their literal values, performing
    basic type checks along the way (only primitives may be substituted).
    """

    def __init__(self, *args, vals: Dict[str, Primitive], **kwargs):
        self.vals = vals
        super().__init__(*args, **kwargs)

    def visit_Name(self, node):
        """Substitutes a `Constant` for a loaded name captured in `vals`."""
        if not (isinstance(node.ctx, ast.Load) and node.id in self.vals):
            return node
        value = self.vals[node.id]
        if type(value) in PRIMITIVE_TYPES:
            return ast.Constant(value=value, kind=None)
        raise CompileError(
            f'Cannot substitute non-primitive value (name "{node.id}" '
            f'has type {type(value)}).')
def merge_closure_vars(ctx: inspect.ClosureVars) -> Dict[str, Any]:
    """Merges nonlocals, globals, and builtins in `ctx`.

    Precedence (last writer wins): globals < nonlocals < builtins, matching
    ``{**ctx.globals, **ctx.nonlocals, **ctx.builtins}``.
    """
    merged = dict(ctx.globals)
    merged.update(ctx.nonlocals)
    merged.update(ctx.builtins)
    return merged
def find_names(fn_ast: ast.FunctionDef,
               ctx: inspect.ClosureVars) -> Tuple[Set[str], Set[str]]:
    """Determines the names of bound locals and closure variables in a compilable function.

    Returns:
        A `(bound_locals, closure_vars)` tuple of name sets. (The original
        annotation claimed `Set[str]`, but two sets are returned.)

    Raises:
        CompileError: If the function has unbound names, or (via
            `new_bindings`) on any unbound local in the body.
    """
    if ctx.unbound:
        raise CompileError(f'Function has unbound names {ctx.unbound}.')
    # TODO: filter closure variables to minimum necessary set.
    closure_vars = set(merge_closure_vars(ctx).keys())
    # Parameters shadow closure variables of the same name.
    params = set(a.arg for a in fn_ast.args.args)
    closure_vars -= params
    bound_locals, _ = new_bindings(fn_ast.body, params, set(), closure_vars)
    return bound_locals, closure_vars
def new_bindings(statements: List[ast.AST], bound_locals: Set[str],
                 loaded_names: Set[str], closure_vars: Set[str]):
    """Parses variable references in a list of statements.

    Args:
        statements: Statements to scan (assignments, `if` blocks, returns).
        bound_locals: Names already bound by earlier assignments/parameters.
        loaded_names: Names already loaded (read) by earlier statements.
        closure_vars: Names available from the enclosing closure context.

    Returns:
        A `(bound_locals, loaded_names)` tuple of updated copies; the caller's
        sets are not mutated.

    We say that a local is unbound if either:
        (a) Its name is neither in the closure variables nor was previously
            on the l.h.s. of any assignment statement.
        (b) Its name is in the closure context but is on the l.h.s. of some
            assignment statement *after* its value is loaded.

    Raises:
        CompileError: On any unbound local, or on a statement form outside
            the DSL.
    """
    # Work on copies so callers' sets are unaffected.
    bound_locals = bound_locals.copy()
    loaded_names = loaded_names.copy()
    def load_expr(expr):
        # Collect the names loaded in `expr`, failing on any unbound name.
        expr_visitor = LoadedNamesVisitor()
        expr_visitor.visit(expr)
        unbound = expr_visitor.loaded - bound_locals - closure_vars
        if unbound:
            raise CompileError(f'Unbound locals: cannot load names {unbound}.')
        return expr_visitor.loaded
    for statement in statements:
        if isinstance(statement, ast.If):
            loaded_names |= load_expr(statement.test)
            if_bindings, if_loaded = new_bindings(statement.body, bound_locals,
                                                  loaded_names, closure_vars)
            else_bindings, else_loaded = new_bindings(statement.orelse,
                                                      bound_locals,
                                                      loaded_names,
                                                      closure_vars)
            # Only names bound in *both* branches are definitely bound after.
            bound_locals |= (if_bindings & else_bindings)
            loaded_names |= (if_loaded | else_loaded)
        elif isinstance(statement, ast.Assign):
            statement_visitor = LoadedNamesVisitor()
            statement_visitor.visit(statement.value)
            loaded_names |= statement_visitor.loaded
            targets = set(t.id for t in statement.targets)
            # Case (b): assigning a name previously loaded from the closure.
            unbound_b = targets & loaded_names & closure_vars
            if unbound_b:
                raise CompileError(
                    f'Unbound locals: cannot assign names {unbound_b} '
                    'that were previously loaded as globals or nonlocals.')
            # Case (a): loading a name that was never bound anywhere.
            unbound_a = statement_visitor.loaded - bound_locals - closure_vars
            if unbound_a:
                raise CompileError(
                    f'Unbound locals: cannot load names {unbound_a}.')
            bound_locals |= targets
        elif isinstance(statement, ast.Return):
            loaded_names |= load_expr(statement.value)
        else:
            raise CompileError(
                f'Encountered invalid statement (type {type(statement)}).')
    return bound_locals, loaded_names
def type_graph_column(graph: Graph, column: str):
    """Determines the (unique) Python type of a node column in `graph`.

    Raises:
        TypeError: If the column's values do not all share one type.
    """
    observed = {type(value) for _, value in graph.nodes(column)}
    if len(observed) > 1:
        raise TypeError(
            f'Column "{column}" has multiple types: {observed}')
    return next(iter(observed))
def tally_columns(updaters: Updaters) -> Dict[str, str]:
    """Extracts the single graph column used by each Tally updater.

    Raises:
        ValueError: If a non-tally updater is encountered, or if
            a tally is multi-column.
    """
    def column_of(updater):
        # Only single-field GerryChain tallies are supported.
        if not isinstance(updater, Tally):
            raise ValueError(
                'Cannot extract tally column from non-Tally updater.')
        if len(updater.fields) != 1:
            raise ValueError('Multi-column tallies not supported.')
        return updater.fields[0]

    return {name: column_of(updater) for name, updater in updaters.items()}
def type_updater_columns(graph: Graph, updaters: Updaters) -> Dict:
    """Determines the types of graph columns used by Tally updaters.

    Raises:
        CompileError: If any tallied column has a non-primitive type.
    """
    columns = tally_columns(updaters)
    column_types = {}
    for column in columns.values():
        column_types[column] = type_graph_column(graph, column)
    if set(column_types.values()) - set(PRIMITIVE_TYPES):
        raise CompileError('Tallies with non-primitive types not supported.')
    return column_types
def always_returns(statements: List[ast.AST]) -> bool:
    """Determines if a list of statements is guaranteed to `return`.

    A statement list is guaranteed to return if it contains a top-level
    `return` statement, or an `if` statement whose branches are *both*
    (recursively) guaranteed to return. Statements after such a point are
    irrelevant.
    """
    for stmt in statements:
        if isinstance(stmt, ast.Return):
            return True
        if (isinstance(stmt, ast.If) and always_returns(stmt.body)
                and always_returns(stmt.orelse)):
            return True
    return False
def load_function_ast(fn: Callable) -> ast.FunctionDef:
    """Loads and sanity-checks the AST of a compilable function.

    The function must be a plain `def` taking a `partition` argument and,
    optionally, a `store` argument.

    Raises:
        CompileError: If `fn` is not a single plain function, or if its
            signature does not match the required argument names.
    """
    module = ast.parse(dedent(inspect.getsource(fn)))
    is_single_function = (isinstance(module, ast.Module)
                          and len(module.body) == 1
                          and isinstance(module.body[0], ast.FunctionDef))
    if not is_single_function:
        raise CompileError('Cannot compile a non-function.')
    fn_ast = module.body[0]
    arg_names = {arg.arg for arg in fn_ast.args.args}
    if arg_names not in ({'partition'}, {'partition', 'store'}):
        raise CompileError(
            'Compiled functions must take a `partition` argument '
            'and an optional `store` argument.')
    return fn_ast
def preprocess_ast(fn_ast: ast.FunctionDef,
                   ctx: inspect.ClosureVars) -> ast.FunctionDef:
    """Validates and transforms the AST of a compilable function.
    First, we validate that the AST represents a function within the GerryOpt
    DSL (this mostly involves verifying that no disallowed statement or
    expression forms are used). Then, we normalize assignment expressions and
    replace closed-over variable names with constants.
    Args:
        fn_ast: The raw function AST.
        ctx: The function's closure variables.
    Returns:
        The AST of the transformed function.
    Raises:
        CompileError: If validation or transformation fails---that is, the
            function is outside of the GerryOpt DSL, uses unbound locals, or
            closes over non-primitive variables.
    """
    # Reject statement/expression forms outside the DSL.
    DSLValidationVisitor().visit(fn_ast)
    # Desugar augmented/annotated assignments into plain assignments.
    fn_ast = AssignmentNormalizer().visit(fn_ast)
    # `bound_locals` is unused here; `find_names` also performs the
    # unbound-local checks as a side effect.
    bound_locals, closure_vars = find_names(fn_ast, ctx)
    all_closure_vals = merge_closure_vars(ctx)
    filtered_closure_vals = {k: all_closure_vals[k] for k in closure_vars}
    # Inline the captured primitive values as constants.
    closed_ast = ClosureValuesTransformer(
        vals=filtered_closure_vals).visit(fn_ast)
    if not always_returns(closed_ast.body):
        raise CompileError(
            'GerryOpt functions must always return a non-`None` value.')
    return closed_ast
def is_truthy(t: type) -> bool:
    """Determines if a type is considered truthy in the GerryOpt DSL."""
    members = get_args(t) if get_origin(t) is Union else (t, )
    return all(member in PRIMITIVE_TYPES for member in members)


def scalar_type(t: type) -> type:
    """Returns the type of an element X of a Vec[X] (identity otherwise)."""
    if get_origin(t) is not Vec:
        return t
    return get_args(t)[0]


def is_vec(t: type) -> bool:
    """Determines if a type is an instance of Vec[T]."""
    return Vec == get_origin(t)


def is_possibly_vec(t: type) -> bool:
    """Determines if a type is an instance of Vec[T] or Union[Vec[T], ...]."""
    origin = get_origin(t)
    if origin == Vec:
        return True
    return origin == Union and any(
        get_origin(member) == Vec for member in get_args(t))
class UndefinedVar:
    """A pseudotype for possibly undefined variables."""
# Typing context: mapping of in-scope name -> inferred type. A `TypeDelta`
# records the bindings introduced/widened by a statement (same shape).
TypeContext = TypeDelta = Dict[str, type]
# Inferred return type of a (partial) body; `None` means no return seen yet.
ReturnType = Optional[type]
# An identifier in compiled output.
CompiledIdentifier = str
class AST:
    # Root marker class for all compiled GerryOpt AST nodes.
    pass
class Expr(AST):
    # Marker base class for compiled expressions.
    pass
class Statement(AST):
    # Marker base class for compiled statements.
    pass
# Forward declarations so the classes below can reference these names in
# annotations/comments; both are shadowed by the real implementations
# defined later in this module.
def type_and_transform_expr(expr: ast.Expr,
                            ctx: TypeContext) -> Tuple[type, Expr]:
    raise NotImplementedError('stub for typing')
def type_and_transform_statements(
    statements: List[ast.AST], ctx: TypeContext, return_type: ReturnType
) -> Tuple[TypeDelta, ReturnType, List[Statement]]:
    raise NotImplementedError('stub for typing')
def type_union(*args: type) -> Optional[type]:
    """Finds the union of types, eliminating `None` where possible.

    Returns `None` only when every argument is `None` (or no arguments
    are given).
    """
    present = [t for t in args if t is not None]
    if not present:
        return None
    merged = present[0]
    for t in present[1:]:
        merged = Union[merged, t]
    return merged
def ctx_union(ctx: TypeContext, name: str, *args: type) -> type:
    """Finds the union of types with the existing type of `name` in `ctx`.

    If `name` is not bound in `ctx`, this is just the union of the directly
    passed types.
    """
    existing = (ctx[name], ) if name in ctx else ()
    return type_union(*existing, *args)
def defined_type_product(*args: type) -> Iterable:
    """Generates the Cartesian product of (union) types.

    Union types are expanded into their members; non-union types count as
    singletons.

    Raises:
        CompileError: If the product contains `UndefinedVar`.
    """
    expanded = []
    for t in args:
        members = get_args(t) if get_origin(t) is Union else (t, )
        if UndefinedVar in members:
            raise CompileError(
                'Cannot compute type product for potentially undefined variables.'
            )
        expanded.append(members)
    return product(*expanded)
@dataclass
class If(Statement):
    """Compiled `if` statement: a test expression plus two branch bodies."""
    test: Expr
    body: List['Statement']
    orelse: List['Statement']

    # Fix: this must be a classmethod — callers invoke
    # `If.type_and_transform(statement, ctx, return_type)` with three
    # arguments, which previously raised a TypeError.
    @classmethod
    def type_and_transform(
            cls, statement: ast.If, ctx: TypeContext, return_type: ReturnType
    ) -> Tuple[TypeDelta, ReturnType, Statement]:
        """Types both branches and merges their bindings and return types.

        Names bound in only one branch are widened with `UndefinedVar`,
        since they may be undefined after the `if`.
        """
        delta = {}
        # Fix: pass the typing context to the test expression (it was
        # previously typed against an empty context).
        test_type, test_ast = type_and_transform_expr(statement.test, ctx)
        if_types, if_return_type, if_asts = type_and_transform_statements(
            statement.body, ctx, return_type)
        else_types, else_return_type, else_asts = type_and_transform_statements(
            statement.orelse, ctx, return_type)
        if_names = set(if_types.keys())
        else_names = set(else_types.keys())
        # Fix: `ctx_union` takes the name being updated as its second
        # argument; the original passed only the types.
        for name in if_names & else_names:
            delta[name] = ctx_union(ctx, name, if_types[name], else_types[name])
        for name in if_names - else_names:
            delta[name] = ctx_union(ctx, name, if_types[name], UndefinedVar)
        for name in else_names - if_names:
            delta[name] = ctx_union(ctx, name, else_types[name], UndefinedVar)
        if if_return_type is not None:
            return_type = Union[return_type, if_return_type]
        if else_return_type is not None:
            return_type = Union[return_type, else_return_type]
        return delta, return_type, cls(test_ast, if_asts, else_asts)
@dataclass
class Return(Statement):
    """Compiled `return` statement."""
    value: Expr

    @classmethod
    def type_and_transform(cls, statement: ast.Return,
                           ctx: TypeContext) -> Tuple[ReturnType, Statement]:
        """Types the returned expression; its type becomes the branch
        return type.

        (Fixes: the parameter was annotated `ast.If` instead of
        `ast.Return`, and the node was built via `Return` rather than
        `cls`, unlike the sibling handlers.)
        """
        branch_return_type, return_ast = type_and_transform_expr(
            statement.value, ctx)
        return branch_return_type, cls(return_ast)
@dataclass
class Assign(Statement):
    """Compiled single-target assignment."""
    target: CompiledIdentifier
    value: Expr

    @classmethod
    def type_and_transform(cls, statement: ast.Assign,
                           ctx: TypeContext) -> Tuple[TypeDelta, Statement]:
        """Types the r.h.s. and widens the target's binding in `ctx`.

        Note that `ctx` is mutated in place; the returned delta records the
        (single) updated binding.
        """
        rhs_type, rhs_ast = type_and_transform_expr(statement.value, ctx)
        target = statement.targets[0].id
        ctx[target] = Union[ctx[target], rhs_type] if target in ctx else rhs_type
        return {target: ctx[target]}, cls(target, rhs_ast)
@dataclass
class Name(Expr):
    """Compiled name (variable reference) in load context."""
    id: CompiledIdentifier

    @classmethod
    def type_and_transform(cls, expr: ast.Name,
                           ctx: TypeContext) -> Tuple[type, 'Name']:
        """Looks the name's type up in the typing context.

        Raises:
            CompileError: For store-context names or unbound locals.
        """
        if isinstance(expr.ctx, ast.Store):
            raise CompileError('Cannot type name in store context.')
        if expr.id not in ctx:
            raise CompileError(
                f'Could not resolve type for unbound local "{expr.id}".')
        return ctx[expr.id], cls(expr.id)
@dataclass
class Constant(Expr):
    """Compiled primitive literal."""
    value: Primitive

    @classmethod
    def type_and_transform(cls, expr: ast.Constant,
                           ctx: TypeContext) -> Tuple[type, 'Constant']:
        """Types a literal; only int/float/bool constants are allowed."""
        literal = expr.value
        if not isinstance(literal, get_args(Primitive)):
            raise CompileError(f'Cannot type non-primitive constant {literal}')
        return type(literal), cls(literal)
# Opcodes for the boolean operators supported by the DSL.
BoolOpcode = Enum('BoolOpcode', 'AND OR')


@dataclass
class BoolOp(Expr):
    """Compiled boolean operation (`and`/`or`) over two or more operands."""
    op: BoolOpcode
    values: Iterable[Expr]

    # Mapping from `ast` boolean operator nodes to GerryOpt opcodes.
    OPS = {ast.And: BoolOpcode.AND, ast.Or: BoolOpcode.OR}

    @classmethod
    def type_and_transform(cls, expr: ast.BoolOp,
                           ctx: TypeContext) -> Tuple[type, 'BoolOp']:
        """Types a boolean operation; every operand must be truthy."""
        typed_args = [type_and_transform_expr(value, ctx)
                      for value in expr.values]
        arg_types, arg_asts = zip(*typed_args)
        if not all(is_truthy(t) for t in arg_types):
            raise CompileError(
                'All arguments to a boolean operator must be truthy.')
        return bool, cls(cls.OPS[type(expr.op)], arg_asts)
# Opcodes for the unary operators supported by the DSL.
UnaryOpcode = Enum('UnaryOpcode', 'UADD USUB INVERT NOT')
@dataclass
class UnaryOp(Expr):
    """Compiled unary operation."""
    op: UnaryOpcode
    operand: Expr
    # Mapping from `ast` unary operator nodes to GerryOpt opcodes.
    OPS = {
        ast.UAdd: UnaryOpcode.UADD,
        ast.USub: UnaryOpcode.USUB,
        ast.Invert: UnaryOpcode.INVERT,
        ast.Not: UnaryOpcode.NOT,
    }
    # (operator, scalar operand type) -> scalar result type.
    OP_TYPES = {
        (ast.UAdd, float): float,
        (ast.USub, float): float,
        # Invert not supported on floats
        (ast.Not, float): bool,
        (ast.UAdd, int): int,
        (ast.USub, int): int,
        (ast.Invert, int): int,
        (ast.Not, int): bool,
        (ast.UAdd, bool): int,
        (ast.USub, bool): int,
        (ast.Invert, bool): int,
        (ast.Not, bool): bool,
    }
    @classmethod
    def type_and_transform(cls, expr: ast.UnaryOp,
                           ctx: TypeContext) -> Tuple[type, 'UnaryOp']:
        """Types a unary operation, broadcasting elementwise over Vec operands.

        Raises:
            CompileError: If the operator, or the operator/operand type
                combination, is unsupported.
        """
        operand_type, operand_ast = type_and_transform_expr(expr.operand, ctx)
        type_lb = None  # least upper bound over all union members
        op_type = type(expr.op)
        try:
            expr_ast = cls(UnaryOp.OPS[op_type], operand_ast)
        except KeyError:
            raise CompileError(f'Unary operation {op_type} not supported.')
        # Check every member of a (possibly union-typed) operand.
        for (t, ) in defined_type_product(operand_type):
            try:
                expr_type = UnaryOp.OP_TYPES[(op_type, scalar_type(t))]
            except KeyError:
                raise CompileError(
                    f'Unary operation {op_type} not supported for type {t}.')
            # Vec operands broadcast: the result is a Vec of the scalar result.
            if is_vec(t):
                type_lb = type_union(Vec[expr_type], type_lb)
            else:
                type_lb = type_union(expr_type, type_lb)
        return type_lb, expr_ast
@dataclass
class IfExpr(Expr):
    """Compiled conditional (ternary) expression."""
    test: Expr
    body: Expr
    orelse: Expr

    @classmethod
    def type_and_transform(cls, expr: ast.IfExp,
                           ctx: TypeContext) -> Tuple[type, 'IfExpr']:
        """Types a conditional expression; its type is the branch union."""
        test_type, test_ast = type_and_transform_expr(expr.test, ctx)
        body_type, body_ast = type_and_transform_expr(expr.body, ctx)
        orelse_type, orelse_ast = type_and_transform_expr(expr.orelse, ctx)
        # Both branches are typed before the test is validated (preserving
        # the original evaluation/error order).
        if not is_truthy(test_type):
            raise CompileError('Test in conditional expression is not truthy.')
        return Union[body_type, orelse_type], cls(test_ast, body_ast,
                                                  orelse_ast)
# Opcodes for the comparison operators supported by the DSL.
CmpOpcode = Enum('CmpOpcode', 'EQ NOT_EQ LT LTE GT GTE')


@dataclass
class CmpOp(Expr):
    """Compiled (possibly chained) comparison."""
    ops: Sequence[CmpOpcode]
    comps: Sequence[Expr]

    # Mapping from `ast` comparison operator node *types* to opcodes.
    # (`is`, `in`, etc. are deliberately absent and rejected below.)
    OPS = {
        ast.Eq: CmpOpcode.EQ,
        ast.NotEq: CmpOpcode.NOT_EQ,
        ast.Lt: CmpOpcode.LT,
        ast.LtE: CmpOpcode.LTE,
        ast.Gt: CmpOpcode.GT,
        ast.GtE: CmpOpcode.GTE,
    }

    @classmethod
    def type_and_transform(cls, expr: ast.Compare,
                           ctx: TypeContext) -> Tuple[type, 'CmpOp']:
        """Types a comparison with NumPy-like broadcasting.

        Comparisons involving vectors yield `Vec[bool]`; scalar-only
        comparisons yield `bool`; chained vector comparisons are rejected.
        """
        raw_comps = [expr.left] + expr.comparators
        typed_exprs = [type_and_transform_expr(e, ctx) for e in raw_comps]
        types = [t for t, _ in typed_exprs]
        exprs = [e for _, e in typed_exprs]
        illegal_ops = set(type(op) for op in expr.ops) - set(CmpOp.OPS.keys())
        if illegal_ops:
            # Fix: build a single formatted message (the original passed
            # three positional arguments to CompileError).
            raise CompileError(f'Operations {illegal_ops} not supported.')
        type_lb = None
        if len(expr.ops) > 1 and any(is_possibly_vec(t) for t in types):
            # We roughly mimic NumPy semantics.
            raise CompileError('Cannot chain vector comparisons.')
        if all(is_possibly_vec(t) for t in types):
            type_lb = type_union(Vec[bool], type_lb)
        if not all(is_vec(t) for t in types):
            type_lb = type_union(bool, type_lb)
        # Fix: OPS is keyed by operator *type*; indexing with the operator
        # instance (`CmpOp.OPS[op]`) always raised KeyError.
        transformed_expr = cls([CmpOp.OPS[type(op)] for op in expr.ops], exprs)
        return type_lb, transformed_expr
# Opcodes for the binary operators supported by the DSL.
BinOpcode = Enum(
    'BinOpcode',
    'ADD SUB MULT DIV FLOOR_DIV MOD POW L_SHIFT R_SHIFT BIT_OR BIT_XOR BIT_AND MAT_MULT'
)


@dataclass
class BinOp(Expr):
    """Compiled binary operation (typing is partially work-in-progress)."""
    left: Expr
    op: BinOpcode
    right: Expr

    # Mapping from `ast` binary operator nodes to GerryOpt opcodes.
    OPS = {
        ast.Add: BinOpcode.ADD,
        ast.Sub: BinOpcode.SUB,
        ast.Mult: BinOpcode.MULT,
        ast.Div: BinOpcode.DIV,
        ast.FloorDiv: BinOpcode.FLOOR_DIV,
        ast.Mod: BinOpcode.MOD,
        ast.Pow: BinOpcode.POW,
        ast.LShift: BinOpcode.L_SHIFT,
        ast.RShift: BinOpcode.R_SHIFT,
        ast.BitOr: BinOpcode.BIT_OR,
        ast.BitXor: BinOpcode.BIT_XOR,
        ast.BitAnd: BinOpcode.BIT_AND,
        ast.MatMult: BinOpcode.MAT_MULT
    }
    # Arithmetic operators (NumPy-like promotion rules apply).
    REAL_OPCODES = {
        BinOpcode.ADD, BinOpcode.SUB, BinOpcode.MULT, BinOpcode.DIV,
        BinOpcode.FLOOR_DIV, BinOpcode.MOD, BinOpcode.POW
    }
    # Bitwise/shift operators (ints and bools only).
    BIT_OPCODES = {
        BinOpcode.L_SHIFT, BinOpcode.R_SHIFT, BinOpcode.BIT_OR,
        BinOpcode.BIT_XOR, BinOpcode.BIT_AND
    }

    @classmethod
    def type_and_transform(cls, expr: ast.BinOp,
                           ctx: TypeContext) -> Tuple[type, 'BinOp']:
        """Types a binary operation over all members of union-typed operands."""
        opcode = BinOp.OPS[type(expr.op)]
        lhs_type, lhs_ast = type_and_transform_expr(expr.left, ctx)
        rhs_type, rhs_ast = type_and_transform_expr(expr.right, ctx)
        type_lb = None
        for (lhs, rhs) in defined_type_product(lhs_type, rhs_type):
            if opcode in BinOp.REAL_OPCODES:
                # In general, we have:
                #   {float, int, bool} * float -> float
                #   float * {float, int, bool} -> float
                #   int * int  -> int
                #   int * bool  -> int
                #   bool * int  -> int
                #   bool * bool -> bool
                # There are a few exceptions to mirror NumPy semantics.
                #  * Subtraction of boolean vectors is not permitted.
                #  * DIV: {float, int, bool} * {float, int, bool} -> float
                #  * FLOOR_DIV, MOD, POW: {bool, int} * {bool, int} -> int
                # Fix: only reject subtraction when *boolean* vector operands
                # are involved (the original rejected all vector subtraction,
                # contradicting its own comment and NumPy semantics).
                if (opcode == BinOpcode.SUB and (is_vec(lhs) or is_vec(rhs))
                        and scalar_type(lhs) is bool
                        and scalar_type(rhs) is bool):
                    raise CompileError(
                        'Subtraction of boolean vectors is not permitted.')
                # Determine elementwise type.
                lhs_scalar = scalar_type(lhs)
                rhs_scalar = scalar_type(rhs)
                op_scalar = None
                # Fix: FLOOR_DIV/MOD/POW live in `BinOpcode` — the original
                # referenced nonexistent `BoolOpcode` members, which raised
                # AttributeError at runtime.
                if opcode == BinOpcode.DIV or lhs_scalar == float or rhs_scalar == float:
                    op_scalar = float
                elif (lhs_scalar == int or rhs_scalar == int
                      or (opcode in (BinOpcode.FLOOR_DIV, BinOpcode.MOD,
                                     BinOpcode.POW) and
                          (lhs_scalar == bool or rhs_scalar == bool))):
                    op_scalar = int
                else:
                    op_scalar = bool
                # Apply broadcasting rules.
                if is_vec(lhs) or is_vec(rhs):
                    type_lb = type_union(type_lb, Vec[op_scalar])
                else:
                    type_lb = type_union(type_lb, op_scalar)
            elif opcode in BinOp.BIT_OPCODES:
                # We have:
                #   int * {int, bool} -> int
                #   {int, bool} * int -> int
                #   bool -> bool
                # Bitwise operations are not supported on floats.
                pass  # TODO: bitwise typing not yet implemented.
            elif opcode == BinOpcode.MAT_MULT:
                pass  # TODO: matrix-multiply typing not yet implemented.
            else:
                raise CompileError(f'Unsupported binary operation {opcode}.')
        # Fix: return the typed expression (the original fell through and
        # implicitly returned None, breaking callers that unpack the pair).
        # `type_lb` remains None for the TODO branches above.
        return type_lb, cls(lhs_ast, opcode, rhs_ast)
class ASTEncoder(json.JSONEncoder):
    """JSON serializer for compiled ASTs (dataclass-aware)."""
    # dataclass encoding: https://stackoverflow.com/a/51286749

    def default(self, o):
        """Serializes dataclass nodes as plain dicts; defers otherwise."""
        if not is_dataclass(o):
            return super().default(o)
        # TODO: inject node type.
        return asdict(o)
# Dispatch table: `ast` expression node type -> compiled handler class.
# NOTE(review): ast.BinOp is absent — presumably pending completion of
# BinOp.type_and_transform above; confirm before relying on binary ops.
AST_EXPR_TO_COMPILED = {
    ast.UnaryOp: UnaryOp,
    ast.BoolOp: BoolOp,
    ast.Compare: CmpOp,
    ast.IfExp: IfExpr,
    ast.Constant: Constant,
    ast.Name: Name
}
def type_and_transform_expr(
        expr: ast.Expr,
        ctx: Optional[TypeContext] = None) -> Tuple[type, Expr]:
    """Dispatches an `ast` expression to its GerryOpt handler class.

    Raises:
        CompileError: For expression node types without a handler.
    """
    if ctx is None:
        ctx = {}
    handler = AST_EXPR_TO_COMPILED.get(type(expr))
    if handler is None:
        raise CompileError(f'expression type {type(expr)} unsupported or TODO')
    return handler.type_and_transform(expr, ctx)
def type_and_transform_statements(
    statements: List[ast.AST], ctx: TypeContext, return_type: ReturnType
) -> Tuple[TypeDelta, ReturnType, List[Statement]]:
    """Types and compiles a list of statements.

    Returns the delta of name -> type bindings introduced by the statements,
    the (possibly widened) return type, and the compiled statements. The
    statement handlers may mutate `ctx` in place.

    Raises:
        CompileError: On statement forms outside the DSL.
    """
    new_ctx = ctx.copy()
    compiled_statements = []
    delta = {}
    for statement in statements:
        new_return_type = None
        # Fix: reset per statement — previously `stmt_delta` was unbound if
        # the first statement was a Return, and stale afterwards.
        stmt_delta = {}
        if isinstance(statement, ast.Assign):
            stmt_delta, statement = Assign.type_and_transform(statement, ctx)
        elif isinstance(statement, ast.If):
            stmt_delta, new_return_type, statement = If.type_and_transform(
                statement, ctx, return_type)
        elif isinstance(statement, ast.Return):
            new_return_type, statement = Return.type_and_transform(
                statement, ctx)
        else:
            raise CompileError(
                f'Encountered invalid statement (type {type(statement)}).')
        compiled_statements.append(statement)
        for name, t in stmt_delta.items():
            # Fix: `ctx_union` expects the name being updated as its second
            # argument (the original passed only the type).
            delta[name] = new_ctx[name] = ctx_union(ctx, name, t)
        # Fix: only widen the return type when this statement actually
        # contributed one — the original computed Union[..., None] (or even
        # Union[None, None]) when it did not.
        if new_return_type is not None:
            return_type = (new_return_type if return_type is None else
                           Union[return_type, new_return_type])
    return delta, return_type, compiled_statements
def to_ast(fn: Callable, fn_type: str, graph: Graph, updaters: Updaters):
    """Compiles a function to a GerryOpt AST.

    Args:
        fn: The acceptance/constraint/score function to compile.
        fn_type: One of 'accept', 'constraint', or 'score'.
        graph: Dual graph whose columns the updaters tally.
        updaters: Mapping of updater name -> Tally.

    Returns:
        The validated, normalized Python function AST. NOTE(review):
        lowering into compiled GerryOpt statements is unfinished — the loop
        below is a placeholder and `column_types` is currently unused.

    Raises:
        CompileError: On unsupported function types or failed preprocessing.
    """
    if fn_type not in ('accept', 'constraint', 'score'):
        raise CompileError(
            'Can only compile acceptance, constraint, and score functions.')
    # Validates the tallied columns' types as a side effect.
    column_types = type_updater_columns(graph, updaters)
    fn_context = inspect.getclosurevars(fn)
    raw_fn_ast = load_function_ast(fn)
    fn_ast = preprocess_ast(raw_fn_ast, fn_context)
    # TODO: lower each normalized statement via Assign/If/Return handlers.
    for stmt in fn_ast.body:
        if isinstance(stmt, ast.Assign):
            pass
        elif isinstance(stmt, ast.If):
            pass
        elif isinstance(stmt, ast.Return):
            pass
    return fn_ast
| 36.294416 | 94 | 0.620629 | 14,391 | 0.503111 | 0 | 0 | 10,720 | 0.374773 | 0 | 0 | 6,652 | 0.232555 |
5e9f6762a15db846cc32799b17cfd011720d5591 | 3,122 | py | Python | tdi/dev_support/DevHelp.py | orozda/mdsplus | 101265b0f94554e06c0e461b59862a6d3b656f8c | [
"BSD-2-Clause"
] | null | null | null | tdi/dev_support/DevHelp.py | orozda/mdsplus | 101265b0f94554e06c0e461b59862a6d3b656f8c | [
"BSD-2-Clause"
] | null | null | null | tdi/dev_support/DevHelp.py | orozda/mdsplus | 101265b0f94554e06c0e461b59862a6d3b656f8c | [
"BSD-2-Clause"
] | null | null | null | #
# Copyright (c) 2017, Massachusetts Institute of Technology All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# Redistributions in binary form must reproduce the above copyright notice, this
# list of conditions and the following disclaimer in the documentation and/or
# other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
def DevHelp(arg, full=0, device_list=[None]):
    """Returns help text for an MDSplus device or device node.

    :param arg: either an MDSplus TreeNode (device part) or a device-type
        name string, possibly containing the wildcards '*' / '?'
    :param full: 1 for full pydoc class documentation, -1 for a one-line
        summary, anything else for the device class docstring
    :param device_list: mutable-default idiom used deliberately(?) as a
        cross-call cache of known device names — callers should not pass
        this argument. NOTE(review): confirm the cache is intended.
    """
    import pydoc
    import MDSplus
    if isinstance(arg, MDSplus.TreeNode):
        try:
            elt = int(arg.conglomerate_elt)
            if elt == 0:
                return ""
            if elt == 1:
                # Head node: recurse with the device's model name.
                return DevHelp(arg.record.model, full)
            else:
                # Part node: fetch the part's help string from the class.
                cls = arg.head.record.getDevice()
                return cls.parts[elt-2].get('help', "")
        except Exception as e:
            return "ERROR: %s" % (e,)
    else:
        full = int(full)
        devtype = arg.upper()
        if device_list[0] is None:
            # Populate the cached device-name list on first use.
            alldevices = MDSplus.Data.execute('MDSDEVICES()')
            device_list[0] = [item[0].strip() for item in alldevices]
        if ('*' in devtype) or ('?' in devtype):
            # Wildcard query: one summary line per matching device.
            devnames = []
            for device in device_list[0]:
                if MDSplus.Data.execute(
                        'MdsShr->StrMatchWild(descr($), descr($))',
                        (device.upper(), devtype)) & 1:
                    devnames.append(DevHelp(device, -1))
            return '\n'.join(devnames)
        else:
            try:
                cls = MDSplus.Device.PyDevice(devtype)
                if full == 1:
                    return pydoc.TextDoc().docclass(cls)
                elif full == -1:
                    return "%s: python device" % devtype
                else:
                    return cls.__doc__
            except Exception as e:
                # Not a Python device: fall back to the cached name list.
                for device in device_list[0]:
                    if device.upper() == devtype:
                        return "%s: tdi, java or shared library device" % (
                            device,)
                return "Error obtaining help on device %s: %s" % (devtype, e)
5e9f9dc3d6e213db9657bb6eb351051a106357c9 | 6,805 | py | Python | ncbi_single_use_genome.py | marco-mariotti/ncbi_single_use_genome | 976da1dc195955c344119e222cc731453797d893 | [
"MIT"
] | null | null | null | ncbi_single_use_genome.py | marco-mariotti/ncbi_single_use_genome | 976da1dc195955c344119e222cc731453797d893 | [
"MIT"
] | null | null | null | ncbi_single_use_genome.py | marco-mariotti/ncbi_single_use_genome | 976da1dc195955c344119e222cc731453797d893 | [
"MIT"
] | null | null | null | #! /usr/bin/env -S python3 -u
import os, shutil, sys, glob, traceback
from easyterm import *
help_msg="""This program downloads one specific NCBI assembly, executes certains operations, then cleans up data
### Input/Output:
-a genome NCBI accession
-o folder to download to
### Actions:
-c bash command template
-cf bash command template read from this file
-p python command template
-pf python command template read from this file
In all templates above, these placeholders can be used:
{accession} genome NCBI accession, e.g. GCA_000209535.1
{genomefile} path to genome fasta file
{taxid} taxonomy id
{species} species name, e.g. "Drosophila melanogaster"
{mspecies} masked species, e.g. "Drosophila_melanogaster"
### Other options:
-k keep files instead of cleaning them up at the end
-w max workers for downloads at once
-sh open shells for bash commands. Required for complex commands
(e.g. sequential commands, or using redirections)
-print_opt print currently active options
-h | --help print this help and exit"""
command_line_synonyms={'t':'temp'}
def_opt= {'a':'',
'o':'./',
'c':'',
'cf':'',
'p':'',
'pf':'',
'k':False,
'sh':False,
'w':1,
'temp':'/tmp/'}
temp_folder=None
##### start main program function
def main(args=None):
    """Download one NCBI assembly, run user-supplied commands on it, clean up.

    Nearly the whole program is encapsulated in this function, which runs
    when the script is executed directly. Alternatively it can be imported
    and called with an ``args`` dictionary of options equivalent to the
    command-line ``opt`` (see ``help_msg``/``def_opt`` at module top).

    Args:
        args (dict, optional): full options dictionary; when falsy, options
            are parsed from the command line. (Fixed: the default used to be
            a mutable ``{}``.)

    Raises:
        NoTracebackError: when -o or -a is missing.
    """
    # loading options
    if not args:
        opt = command_line_options(def_opt, help_msg,
                                   synonyms=command_line_synonyms)
    else:
        # NOTE(review): a partial dict raises KeyError later; callers are
        # expected to pass a complete options dict
        opt = args

    # read the bash/python command templates, inline or from file
    if opt['c'] or opt['cf']:
        bash_template_command = (opt['c']
                                 if opt['c'] else
                                 '\n'.join([x.strip() for x in open(opt['cf'])]))
    if opt['p'] or opt['pf']:
        py_template_command = (opt['p']
                               if opt['p'] else
                               '\n'.join([x.strip() for x in open(opt['pf'])]))

    if not opt['o']:
        raise NoTracebackError("ERROR you must provide an output folder with -o")
    outfolder = opt['o'].rstrip('/')
    if not os.path.exists(outfolder):
        os.makedirs(outfolder)

    if not opt['a']:
        raise NoTracebackError("ERROR you must provide an accession with -a")
    accession = opt['a']

    datadir = f'{outfolder}/dataset.{accession}'
    zipfile = datadir + '.zip'

    write('*** Options accepted: ', how='green')
    write(opt)
    write('')
    write('*** Download metadata (dehydrated)', how='green')

    # download dehydrated (metadata only, actual sequences fetched later)
    cmd_download_dehydra = f"""\
datasets download genome accession {accession} \
--reference --dehydrated \
--exclude-genomic-cds --exclude-gff3 --exclude-protein --exclude-rna \
--filename {zipfile} """
    run_cmd(cmd_download_dehydra,
            stdout=None, stderr=None)  # messages printed to screen

    write('*** Reformatting metadata', how='green')
    # get some metadata: taxonomy id and organism name
    cmd_format_tsv = f"""
dataformat tsv genome \
--package {zipfile} \
--fields tax-id,organism-name"""
    x = run_cmd(cmd_format_tsv).stdout
    taxid, species = x.split('\n')[1].split('\t')
    mspecies = mask_chars(species)

    # NOTE: accession, genomefile, taxid, species, mspecies are the documented
    # template placeholders; they are substituted via format(**locals()) below,
    # so these local names must not be renamed.
    write(f'accession: {accession}')
    write(f'taxid: {taxid}')
    write(f'species: {species}')
    write(f'mspecies: {mspecies}')

    write('*** Unzipping metadata, removing zipfile', how='green')
    cmd_unzip_dehydra = f"unzip -o -d {datadir} {zipfile}"
    run_cmd(cmd_unzip_dehydra,
            stdout=None, stderr=None)  # messages printed to screen
    write(f'removing {zipfile}')
    os.remove(zipfile)
    write('')

    write('*** Downloading genome data', how='green')
    # download / hydrate; hide the progress bar when not attached to a tty
    progressbar = '' if sys.stdout.isatty() else ' --no-progressbar '
    cmd_download_hydra = f"""
datasets rehydrate \
--directory {datadir} \
--match "/{accession}/" \
--max-workers {opt['w']} \
{progressbar} """
    run_cmd(cmd_download_hydra,
            stdout=None, stderr=None)  # messages printed to screen
    write('')

    write('*** Compacting chromosomes into a single fasta', how='green')
    fasta_regexp = f'{datadir}/ncbi_dataset/data/{accession}/*fna'
    genomefile = f'{datadir}/ncbi_dataset/data/{accession}/{accession}.fasta'
    # Fixed: previously `index+1` reported 1 chromosome even when no .fna
    # file was matched; enumerate(start=1) now yields an accurate count.
    n_chromosomes = 0
    with open(genomefile, 'wb') as wfd:
        for n_chromosomes, chromfile in enumerate(glob.iglob(fasta_regexp),
                                                  start=1):
            service(chromfile)
            with open(chromfile, 'rb') as fd:
                shutil.copyfileobj(fd, wfd)
    write(f'Concatenating {n_chromosomes} chromosomes or contigs \n'
          f' to genomefile: {genomefile}')

    write('*** Removing chromosomes fasta files', how='green')
    for chromfile in glob.iglob(fasta_regexp):
        os.remove(chromfile)

    if not any([opt[k] for k in ['c', 'p', 'cf', 'pf']]):
        write('')
        write('*** <No commands to be executed>', how='green')

    try:
        if opt['c'] or opt['cf']:
            write('')
            write('*** Running bash command', how='green')
            bash_cmd = bash_template_command.format(**locals())
            write(bash_cmd)
            run_cmd(bash_cmd,
                    shell=opt['sh'],
                    stdout=None, stderr=None)  # printed to screen if not redirected

        if opt['p'] or opt['pf']:
            write('')
            write('*** Running python command', how='green')
            py_cmd = py_template_command.format(**locals())
            write(py_cmd)
            # SECURITY: exec runs arbitrary user-supplied python code. This is
            # the documented purpose of -p/-pf, but never feed it untrusted input.
            exec(py_cmd)

    except Exception:
        write('')
        write('*** an ERROR occured !', how='red')
        traceback.print_exc()

    if not opt['k']:
        write('')
        write('*** Cleaning up all data', how='green')
        write(f'removing {datadir}')
        shutil.rmtree(datadir)
    else:
        write('')
        write('*** Leaving data in place', how='green')
        write(f'check {datadir}')
##### end main program function
### function executed when program execution is over:
def close_program():
    """Hook executed once the program finishes, normally or after an error.

    Currently a no-op; temporary-folder cleanup could be added here.
    """
    return None
if __name__ == "__main__":
    # Run main(); close_program() is called on both the success and the
    # failure path so cleanup always gets a chance to run.
    try:
        main()
        close_program()
    except Exception as e:
        close_program()
        # `from None` suppresses exception chaining for a cleaner traceback
        raise e from None
| 31.799065 | 112 | 0.638648 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3,874 | 0.569287 |
5ea0d191ed54273a061ed7e8a50245385460de2f | 224 | py | Python | World_1/002Usingformat.py | wesleyendliche/Python_exercises | 44cdcb921201eb0b11ff1ac4b01b4a86859c2ffe | [
"MIT"
] | null | null | null | World_1/002Usingformat.py | wesleyendliche/Python_exercises | 44cdcb921201eb0b11ff1ac4b01b4a86859c2ffe | [
"MIT"
] | null | null | null | World_1/002Usingformat.py | wesleyendliche/Python_exercises | 44cdcb921201eb0b11ff1ac4b01b4a86859c2ffe | [
"MIT"
] | null | null | null | nome = input('Digite seu nome: ')
name = input('Type your name: ')
print('É um prazer te conhecer, {}{}{}!'.format('\033[1;36m', nome, '\033[m'))
print('It is nice to meet you, {}{}{}!'.format('\033[4;30m', name, '\033[m'))
| 44.8 | 78 | 0.584821 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 145 | 0.644444 |
5ea112ef558fca9073cce1f7c0ebf8ece1f28a9a | 2,289 | py | Python | Module_06/tests/sauce_lab/test_checkout_details.py | JoseGtz/2021_python_selenium | c7b39479c78839ba2e2e2633a0f673a8b02fb4cb | [
"Unlicense"
] | null | null | null | Module_06/tests/sauce_lab/test_checkout_details.py | JoseGtz/2021_python_selenium | c7b39479c78839ba2e2e2633a0f673a8b02fb4cb | [
"Unlicense"
] | null | null | null | Module_06/tests/sauce_lab/test_checkout_details.py | JoseGtz/2021_python_selenium | c7b39479c78839ba2e2e2633a0f673a8b02fb4cb | [
"Unlicense"
] | null | null | null | """Test cases for inventory item"""
import pytest
from Module_06.src.elements.inventory_item import InventoryItem
from Module_06.src.pages.login import LoginPage
from Module_06.tests.common.test_base import TestBase
from Module_06.src.pages.checkout_details import CheckoutDetailsPage
from Module_06.src.pages.checkout_information import CheckoutInformationPage
# Default login credentials used by all tests in this module.
_DEF_USER = 'standard_user'
_DEF_PASSWORD = 'secret_sauce'
class TestCheckoutDetails(TestBase):
    """Checkout-details page tests.

    Every test logs in, adds the first inventory item to the cart and opens
    the cart before exercising one checkout action; that shared setup is
    factored into :meth:`_open_cart_with_first_item`.
    """

    def _open_cart_with_first_item(self):
        """Log in, add the first product to the cart and open the cart.

        Returns:
            tuple: ``(inventory, checkout_item)`` — the inventory page object
            and the checkout-details page object.
        """
        login = LoginPage(self.driver)
        login.open()
        inventory = login.login(_DEF_USER, _DEF_PASSWORD)
        first_item = inventory.products[0]
        first_item: InventoryItem
        first_item.add_to_cart()
        inventory.header.goto_cart()
        return inventory, CheckoutDetailsPage(self.driver, 5)

    @pytest.mark.sanity
    @pytest.mark.regression
    @pytest.mark.checkout_details
    def test_checkout_details(self):
        """Continue shopping from the cart and report the cart item count."""
        inventory, checkout_item = self._open_cart_with_first_item()
        checkout_item.continue_shopping()
        inventory.products.reload()
        print(f'Total elements in cart: {inventory.header.get_total_cart_items()}')

    @pytest.mark.regression
    @pytest.mark.checkout_details
    def test_checkout_information(self):
        """Start checkout, then cancel from the information page."""
        _, checkout_item = self._open_cart_with_first_item()
        checkout_item.checkout_btn()
        checkout_page = CheckoutInformationPage(self.driver, 5)
        checkout_page.cancel_checkout()
        print("Checkout Canceled")

    @pytest.mark.regression
    @pytest.mark.checkout_details
    def test_checkout_remove(self):
        """Remove the item from the checkout-details page."""
        _, checkout_item = self._open_cart_with_first_item()
        checkout_item.remove_item_checkout()
        print("Checkout Canceled")
| 36.333333 | 83 | 0.706859 | 1,863 | 0.813893 | 0 | 0 | 1,810 | 0.790738 | 0 | 0 | 251 | 0.109655 |
5ea1b42b0e5d60db5690353440e0919475138e12 | 2,217 | py | Python | tests/test_cuda_graph_helpers.py | hpi-epic/gpucsl | f461c47ce17105f7cf25aa65d39cb671021f07e4 | [
"MIT"
] | 5 | 2022-03-22T14:56:05.000Z | 2022-03-26T18:41:58.000Z | tests/test_cuda_graph_helpers.py | hpi-epic/gpucsl | f461c47ce17105f7cf25aa65d39cb671021f07e4 | [
"MIT"
] | 12 | 2022-03-26T10:39:04.000Z | 2022-03-31T20:43:54.000Z | tests/test_cuda_graph_helpers.py | hpi-epic/gpucsl | f461c47ce17105f7cf25aa65d39cb671021f07e4 | [
"MIT"
] | null | null | null | import cupy as cp
import numpy as np
import pandas as pd
import itertools
import math
import networkx as nx
from gpucsl.pc.kernel_management import get_module
# Kernel template instantiations to compile from the CUDA source.
function_names = [
    "compact<6,6>",
]
# Compile the graph-helper kernels; -D PYTHON_TEST exposes test entry points.
module = get_module("helpers/graph_helpers.cu", function_names, ("-D", "PYTHON_TEST"))
def test_compact_on_random_skeleton():
    """Compact a fixed 6x6 adjacency matrix.

    Each output row holds the node degree followed by the column indices of
    that node's neighbours (zero-padded).
    """
    kernel = module.get_function("compact<6,6>")

    d_skeleton = cp.array(
        [
            [0, 1, 1, 0, 0, 1],
            [1, 0, 1, 0, 1, 0],
            [0, 0, 0, 0, 0, 0],
            [1, 1, 1, 0, 1, 1],
            [1, 0, 1, 0, 0, 1],
            [0, 1, 1, 1, 0, 0],
        ],
        np.uint16,
    )

    expected_result = np.array(
        [
            [3, 1, 2, 5, 0, 0],
            [3, 0, 2, 4, 0, 0],
            [0, 0, 0, 0, 0, 0],
            [5, 0, 1, 2, 4, 5],
            [3, 0, 2, 5, 0, 0],
            [3, 1, 2, 3, 0, 0],
        ],
        np.uint32,
    )

    d_compacted_skeleton = cp.zeros((6, 6), np.uint32)

    kernel((1,), (6,), (d_skeleton, d_compacted_skeleton, 0, 6))

    # Fixed: use exact integer comparison on host arrays (consistent with the
    # other tests in this file) instead of cp.isclose, which is a float-
    # tolerance comparison and mixed a host numpy array with a device array.
    assert cp.array_equal(expected_result, d_compacted_skeleton.get())
def test_compact_on_fully_connected_skeleton():
    """Compact a fully connected 6x6 skeleton.

    Every row of the result must store the node degree (5) followed by the
    indices of all other nodes.
    """
    kernel = module.get_function("compact<6,6>")

    adjacency = cp.ones((6, 6), np.uint16)
    # row i: degree 5, then every column index except i
    expected = np.array(
        [[5] + [col for col in range(6) if col != row] for row in range(6)],
        np.uint32,
    )
    compacted = cp.zeros((6, 6), np.uint32)

    kernel((1,), (6,), (adjacency, compacted, 0, 6))

    assert cp.array_equal(expected, compacted.get())
def test_compact_on_random_big_skeleton():
    """Smoke-test the compact kernel on a large (5000x5000) random skeleton.

    Only checks that the kernel launches and completes under the CUDA
    profiler; no output values are asserted.
    """
    kernel = module.get_function("compact<6,6>")
    size = 5000
    d_skeleton = cp.random.choice([0, 1], size=(size, size)).astype(np.uint16)
    # NOTE(review): the output buffer is only 6x6 while `size` is 5000 and the
    # kernel is the compact<6,6> instantiation — this looks like a potential
    # size mismatch / out-of-bounds write; confirm against the kernel layout.
    d_compacted_skeleton = cp.zeros((6, 6), np.uint32)
    print((math.ceil(size / 512),))
    print((min(512, size),))
    cp.cuda.profiler.start()
    kernel(
        (math.ceil(size / 512),),
        (min(512, size),),
        (d_skeleton, d_compacted_skeleton, 0, size),
    )
    cp.cuda.profiler.stop()
| 24.097826 | 86 | 0.519621 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 99 | 0.044655 |
5ea267b7599a251894f7be39dbaec041e355e0b1 | 4,583 | py | Python | capsul/engine/module/python.py | servoz/capsul | 2d72228c096f1c43ecfca7f3651b353dc35e209e | [
"CECILL-B"
] | 5 | 2018-10-31T10:16:53.000Z | 2022-03-11T08:11:32.000Z | capsul/engine/module/python.py | servoz/capsul | 2d72228c096f1c43ecfca7f3651b353dc35e209e | [
"CECILL-B"
] | 78 | 2018-10-30T17:04:38.000Z | 2022-03-17T17:39:41.000Z | capsul/engine/module/python.py | servoz/capsul | 2d72228c096f1c43ecfca7f3651b353dc35e209e | [
"CECILL-B"
] | 11 | 2019-01-21T10:05:08.000Z | 2022-02-22T17:02:22.000Z | # -*- coding: utf-8 -*-
''' Python configuration module for CAPSUL
This config module allows the customization of python executable and python path in process execution. It can (as every config module) assign specific config values for different environments (computing resources, typically).
Python configuration is slightly different from other config modules in the way that it cannot be handled during execution inside a python library: python executable and modules path have to be setup before starting python and loading modules. So the config here sometimes has to be prepared from client side and hard-coded in the job to run.
For this reason, what we call here "python jobs" receive special handling. "Python jobs" are :class:`~capsul.process.process.Process` classes defining a :meth:`~capsul.process.process.Process._run_process` method, and not :meth:`~capsul.process.process.Process.get_commandline`. Processing is thus performed by python functions or methods, and needs the capsul library to run.
Python jobs are handled in workflow building (:mod:`capsul.pipeline.pipeline_workflow`), and jobs on engine side should not have to bother about it.
The python config module is not mandatory: if no specific configuration is needed, jobs are run using the python command from the path, following the client ``sys.executable`` short name (if the client runs ``/usr/bin/python3``, the engine will try to use ``python3`` from the ``PATH``.
The python config module is used optionally (if there is a config, it is used, otherwise no error is produced), and automatically for all jobs: no need to declare it in jobs :meth:`~capsul.process.process.Process.requirements` method.
Inside process execution, the module is otherwise handled like any other.
'''
from __future__ import absolute_import
import capsul.engine
import os
import sys
def init_settings(capsul_engine):
    """Declare the settings fields used by the python configuration module."""
    fields = [
        {'name': 'executable',
         'type': 'string',
         'description': 'Full path of the python executable'},
        {'name': 'path',
         'type': 'list_string',
         'description': 'paths to prepend to sys.path'},
    ]
    with capsul_engine.settings as settings:
        settings.ensure_module_fields('python', fields)
def activate_configurations():
    '''
    Activate the python module from the global configurations
    '''
    conf = capsul.engine.configurations.get('capsul.engine.module.python')
    if not conf:
        return
    paths = conf.get('path')
    if not paths:
        return
    # prepend the configured paths, dropping any duplicates already present
    sys.path = paths + [p for p in sys.path if p not in paths]
def edition_widget(engine, environment):
    ''' Edition GUI for python config - see
    :class:`~capsul.qt_gui.widgets.settings_editor.SettingsEditor`

    Builds a live controller widget holding the `executable` and `path`
    fields, pre-filled from the engine's current python configuration for
    the given environment. The returned widget's ``accept`` method writes
    the edited values back into the engine settings.
    '''
    # GUI imports are local so the module can be used headless
    from soma.qt_gui.qt_backend import Qt
    from soma.qt_gui.controller_widget import ScrollControllerWidget
    from soma.controller import Controller
    import types
    import traits.api as traits

    def validate_config(widget):
        # Persist the widget's controller values into the engine settings;
        # bound to the widget as its `accept` method below.
        controller = widget.controller_widget.controller
        with widget.engine.settings as session:
            conf = session.config('python', widget.environment)
            values = {'config_id': 'python',
                      'path': controller.path}
            # normalize "no executable" (None / Undefined / '') to None
            if controller.executable in (None,
                                         traits.Undefined, ''):
                values['executable'] = None
            else:
                values['executable'] = controller.executable
            if conf is None:
                session.new_config('python', widget.environment, values)
            else:
                for k in ('path', 'executable'):
                    setattr(conf, k, values[k])

    controller = Controller()
    controller.add_trait('executable',
                         traits.Str(desc='Full path of the python executable'))
    controller.add_trait('path',
                         traits.List(traits.Directory(), [],
                                     desc='paths to prepend to sys.path'))

    # pre-fill the controller from the currently selected configuration
    conf = engine.settings.select_configurations(
        environment, {'python': 'any'})
    if conf:
        controller.executable = conf.get(
            'capsul.engine.module.python', {}).get('executable',
                                                   traits.Undefined)
        controller.path = conf.get(
            'capsul.engine.module.python', {}).get('path', [])

    widget = ScrollControllerWidget(controller, live=True)
    widget.engine = engine
    widget.environment = environment
    widget.accept = types.MethodType(validate_config, widget)
    return widget
| 47.247423 | 362 | 0.669867 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2,346 | 0.511892 |
5ea28c59393a6e808bc7631cfe435d3d54ff90cc | 3,600 | py | Python | face.py/DetectFace.py | DNSKT/python | 081e439e9e9f2e9b58ace5ff0cba93b8c46d5e36 | [
"MIT"
] | 4 | 2021-10-02T02:14:06.000Z | 2022-01-13T01:54:16.000Z | face.py/DetectFace.py | DNSKT/python | 081e439e9e9f2e9b58ace5ff0cba93b8c46d5e36 | [
"MIT"
] | null | null | null | face.py/DetectFace.py | DNSKT/python | 081e439e9e9f2e9b58ace5ff0cba93b8c46d5e36 | [
"MIT"
] | null | null | null | import asyncio
import io
import glob
import os
import sys
import time
import uuid
import requests
from urllib.parse import urlparse
from io import BytesIO
# To install this module, run:
# python -m pip install Pillow
from PIL import Image, ImageDraw
from azure.cognitiveservices.vision.face import FaceClient
from msrest.authentication import CognitiveServicesCredentials
from azure.cognitiveservices.vision.face.models import TrainingStatusType, Person
# This key will serve all examples in this document.
# SECURITY NOTE(review): a subscription key is hard-coded in source; it should
# be loaded from an environment variable or secret store instead.
KEY = "650def957dcc45b080ffde1f72b8bac3"
# This endpoint will be used in all examples in this quickstart.
ENDPOINT = "https://facediscord.cognitiveservices.azure.com/"
# Create an authenticated FaceClient.
face_client = FaceClient(ENDPOINT, CognitiveServicesCredentials(KEY))
# Detect a face in an image that contains a single face
single_face_image_url = 'https://media.discordapp.net/attachments/912550999003127898/926672974092861470/IMG_20211231_221649_613.jpg'
single_image_name = os.path.basename(single_face_image_url)
# We use detection model 3 to get better performance.
detected_faces = face_client.face.detect_with_url(url=single_face_image_url, detection_model='detection_03')
if not detected_faces:
    raise Exception('No face detected from image {}'.format(single_image_name))
# Display the detected face ID in the first single-face image.
# Face IDs are used for comparison to faces (their IDs) detected in other images.
print('Detected face ID from', single_image_name, ':')
for face in detected_faces: print (face.face_id)
print()
# Save this ID for use in Find Similar
first_image_face_ID = detected_faces[0].face_id
# Detect the faces in an image that contains multiple faces
# Each detected face gets assigned a new ID
# NOTE(review): the leading space in this URL string looks accidental but is
# preserved here — confirm the service tolerates it before removing.
multi_face_image_url = " https://cdn.discordapp.com/attachments/766489274471940106/792984874545709126/IMG_20201228_011741.jpg"
multi_image_name = os.path.basename(multi_face_image_url)
# We use detection model 3 to get better performance.
detected_faces2 = face_client.face.detect_with_url(url=multi_face_image_url, detection_model='detection_03')
# Search through faces detected in group image for the single face from first image.
# First, create a list of the face IDs found in the second image.
second_image_face_IDs = list(map(lambda x: x.face_id, detected_faces2))
# Next, find similar face IDs like the one detected in the first image.
similar_faces = face_client.face.find_similar(face_id=first_image_face_ID, face_ids=second_image_face_IDs)
if not similar_faces:
    print('No similar faces found in', multi_image_name, '.')
# Print the details of the similar faces detected
else:
    print('Similar faces found in', multi_image_name + ':')
    for face in similar_faces:
        first_image_face_ID = face.face_id
        # The similar face IDs of the single face image and the group image do not need to match,
        # they are only used for identification purposes in each image.
        # The similar faces are matched using the Cognitive Services algorithm in find_similar().
        face_info = next(x for x in detected_faces2 if x.face_id == first_image_face_ID)
        if face_info:
            print('  Face ID: ', first_image_face_ID)
            print('  Face rectangle:')
            print('    Left: ', str(face_info.face_rectangle.left))
            print('    Top: ', str(face_info.face_rectangle.top))
            print('    Width: ', str(face_info.face_rectangle.width))
            print('    Height: ', str(face_info.face_rectangle.height))
5ea2f5c49120331dea159521e5e8c35ca8b6be45 | 2,741 | py | Python | examples/tesselation.py | 2dx/moderngl | 5f932560a535469626d79d22e4205f400e18f328 | [
"MIT"
] | 916 | 2019-03-11T19:15:20.000Z | 2022-03-31T19:22:16.000Z | examples/tesselation.py | 2dx/moderngl | 5f932560a535469626d79d22e4205f400e18f328 | [
"MIT"
] | 218 | 2019-03-11T06:05:52.000Z | 2022-03-30T16:59:22.000Z | examples/tesselation.py | 2dx/moderngl | 5f932560a535469626d79d22e4205f400e18f328 | [
"MIT"
] | 110 | 2019-04-06T18:32:24.000Z | 2022-03-21T20:30:47.000Z | #!/usr/bin/env python3
'''Simple example of using tesselation to render a cubic Bézier curve'''
import numpy as np
import moderngl
from ported._example import Example
class Tessellation(Example):
    # Demonstrates GL 4.0 tessellation: each patch of 4 control points is
    # tessellated into isolines that evaluate a cubic Bézier curve.
    title = "Tessellation"
    gl_version = (4, 0)

    def __init__(self, **kwargs):
        """Compile the shader program and upload the 4 Bézier control points."""
        super().__init__(**kwargs)

        self.prog = self.ctx.program(
            vertex_shader='''
                #version 400 core
                in vec2 in_pos;
                void main() { gl_Position = vec4(in_pos, 0.0, 1.0); }
            ''',
            tess_control_shader='''
                #version 400 core
                layout(vertices = 4) out;
                void main() {
                  // set tesselation levels, TODO compute dynamically
                  gl_TessLevelOuter[0] = 1;
                  gl_TessLevelOuter[1] = 32;
                  // pass through vertex positions
                  gl_out[gl_InvocationID].gl_Position = gl_in[gl_InvocationID].gl_Position;
                }
            ''',
            tess_evaluation_shader='''
                #version 400 core
                layout(isolines, fractional_even_spacing, ccw) in;
                // compute a point on a bezier curve with the points p0, p1, p2, p3
                // the parameter u is in [0, 1] and determines the position on the curve
                vec3 bezier(float u, vec3 p0, vec3 p1, vec3 p2, vec3 p3) {
                    float B0 = (1.0 - u) * (1.0 - u) * (1.0 - u);
                    float B1 = 3.0 * (1.0 - u) * (1.0 - u) * u;
                    float B2 = 3.0 * (1.0 - u) * u * u;
                    float B3 = u * u * u;
                    return B0 * p0 + B1 * p1 + B2 * p2 + B3 * p3;
                }
                void main() {
                    float u = gl_TessCoord.x;
                    vec3 p0 = vec3(gl_in[0].gl_Position);
                    vec3 p1 = vec3(gl_in[1].gl_Position);
                    vec3 p2 = vec3(gl_in[2].gl_Position);
                    vec3 p3 = vec3(gl_in[3].gl_Position);
                    gl_Position = vec4(bezier(u, p0, p1, p2, p3), 1.0);
                }
            ''',
            fragment_shader='''
                #version 400 core
                out vec4 frag_color;
                void main() { frag_color = vec4(1.0); }
            '''
        )

        # four vertices define a cubic Bézier curve; has to match the shaders
        self.ctx.patch_vertices = 4
        self.ctx.line_width = 5.0
        vertices = np.array([
            [-1.0, 0.0],
            [-0.5, 1.0],
            [0.5, -1.0],
            [1.0, 0.0],
        ])
        vbo = self.ctx.buffer(vertices.astype('f4'))
        self.vao = self.ctx.simple_vertex_array(self.prog, vbo, 'in_pos')

    def render(self, time, frame_time):
        """Clear the screen and draw the control points as one tessellated patch."""
        self.ctx.clear(0.2, 0.4, 0.7)
        self.vao.render(mode=moderngl.PATCHES)
self.vao.render(mode=moderngl.PATCHES)
if __name__ == '__main__':
    # Launch the example when executed directly.
    Tessellation.run()
| 28.552083 | 85 | 0.511127 | 2,518 | 0.917973 | 0 | 0 | 0 | 0 | 0 | 0 | 1,848 | 0.673715 |
5ea4c56b19379cf79c7f01fa78cbab208ced0a25 | 326 | py | Python | setup.py | rahul0705/smv | 01a4545be55f9355c8cc1d28918a8ab24d4a02e1 | [
"MIT"
] | null | null | null | setup.py | rahul0705/smv | 01a4545be55f9355c8cc1d28918a8ab24d4a02e1 | [
"MIT"
] | null | null | null | setup.py | rahul0705/smv | 01a4545be55f9355c8cc1d28918a8ab24d4a02e1 | [
"MIT"
] | null | null | null | """
author: Rahul Mohandas
"""
import setuptools
# Package metadata for the `smv` distribution.
setuptools.setup(
    name="smv",
    version="0.1",
    # ship every package in the tree except the test suite
    packages=setuptools.find_packages(exclude=["tests"]),
    author="Rahul Mohandas",
    author_email="rahul@rahulmohandas.com",
    description="It's like scp but for moving",
    license="MIT",
    test_suite="tests"
)
| 20.375 | 57 | 0.671779 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 130 | 0.398773 |
5ea4f8d3a7d608af36cb9f970a681dca5774ff4b | 905 | py | Python | py/ssm_report/charts.py | acorg/ssm-report | bddc33cb167f6ca766c56747529828807bebd803 | [
"MIT"
] | null | null | null | py/ssm_report/charts.py | acorg/ssm-report | bddc33cb167f6ca766c56747529828807bebd803 | [
"MIT"
] | null | null | null | py/ssm_report/charts.py | acorg/ssm-report | bddc33cb167f6ca766c56747529828807bebd803 | [
"MIT"
] | null | null | null | import logging; module_logger = logging.getLogger(__name__)
from pathlib import Path
# ----------------------------------------------------------------------
def get_chart(virus_type, assay, lab, infix="", chart_dir=Path("merges")):
    """Return the path of the .ace chart file for a virus type, assay and lab.

    B lineages ("bvic"/"byam") are shortened to "bv"/"by" in the filename.
    Raises RuntimeError when the expected file does not exist.
    """
    vt = virus_type[:2] if virus_type in ("bvic", "byam") else virus_type
    chart_filename = chart_dir / f"{lab.lower()}-{vt}-{assay.lower()}{infix}.ace"
    if not chart_filename.exists():
        raise RuntimeError(f"{chart_filename} not found")
    # deliberately not .resolve()'d: symlinks let charts be swapped without
    # regenerating the .sh scripts that reference these paths
    return chart_filename
# ======================================================================
### Local Variables:
### eval: (if (fboundp 'eu-rename-buffer) (eu-rename-buffer))
### End:
| 41.136364 | 123 | 0.554696 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 513 | 0.566851 |
5ea524e0a2aaf6646e0647b7e3b3bb487ad9b0d6 | 14,650 | py | Python | sciunit/base.py | russelljjarvis/sciun | f8f6ede84299dc700afe94b07ae4e98f87a19116 | [
"MIT"
] | 1 | 2020-05-28T00:35:23.000Z | 2020-05-28T00:35:23.000Z | sciunit/base.py | ChihweiLHBird/sciunit | f5669d165fa505c3a17ac17af3d3c78aafd44ae2 | [
"MIT"
] | 1 | 2020-12-29T04:28:57.000Z | 2020-12-29T04:28:57.000Z | sciunit/base.py | russelljjarvis/sciunit | f8f6ede84299dc700afe94b07ae4e98f87a19116 | [
"MIT"
] | null | null | null | """The base class for many SciUnit objects."""
import sys
PLATFORM = sys.platform
PYTHON_MAJOR_VERSION = sys.version_info.major
if PYTHON_MAJOR_VERSION < 3: # Python 2
raise Exception('Only Python 3 is supported')
import json, git, pickle, hashlib
import numpy as np
import pandas as pd
from pathlib import Path
from git.exc import GitCommandError, InvalidGitRepositoryError
from git.cmd import Git
from git.remote import Remote
from git.repo.base import Repo
from typing import Dict, List, Optional, Tuple, Union, Any
from io import StringIO
try:
import tkinter
except ImportError:
tkinter = None
KERNEL = ('ipykernel' in sys.modules)
HERE = Path(__file__).resolve().parent.name
class Versioned(object):
    """A Mixin class for SciUnit objects.

    Provides a version string based on the Git repository where the model
    is tracked. Provided in part by Andrew Davison in issue #53.
    """

    def get_repo(self, cached: bool = True) -> Repo:
        """Get a git repository object for this instance.

        Args:
            cached (bool, optional): Whether to use cached data. Defaults to True.

        Returns:
            Repo: The git repo for this instance, or None when the defining
            module is not inside a git repository.
        """
        module = sys.modules[self.__module__]
        # We use module.__file__ instead of module.__path__[0]
        # to include modules without a __path__ attribute.
        if hasattr(self.__class__, '_repo') and cached:
            repo = self.__class__._repo
        elif hasattr(module, '__file__'):
            path = Path(module.__file__).resolve()
            try:
                repo = git.Repo(path, search_parent_directories=True)
            except InvalidGitRepositoryError:
                repo = None
        else:
            repo = None
        # cache on the class so subsequent calls (and instances) reuse it
        self.__class__._repo = repo
        return repo

    def get_version(self, cached: bool = True) -> str:
        """Get a git version (i.e. a git commit hash) for this instance.

        Args:
            cached (bool, optional): Whether to use the cached data. Defaults to True.

        Returns:
            str: The commit hash ('*' appended when the working tree is
            dirty), or None when no repository is found.
        """
        if cached and hasattr(self.__class__, '_version'):
            version = self.__class__._version
        else:
            repo = self.get_repo()
            if repo is not None:
                head = repo.head
                version = head.commit.hexsha
                if repo.is_dirty():
                    version += "*"
            else:
                version = None
            self.__class__._version = version
        return version

    version = property(get_version)

    def get_remote(self, remote: str = 'origin') -> Remote:
        """Get a git remote object for this instance.

        Args:
            remote (str, optional): The remote name. Defaults to 'origin'.

        Returns:
            Remote: The named remote, the first remote when `remote` is not
            configured, or None when there is no repo or no remotes at all.
        """
        repo = self.get_repo()
        # Fixed: `repo.remotes[0]` used to raise IndexError on repositories
        # with zero remotes; now None is returned, which get_remote_url
        # already handles via its AttributeError branch.
        if repo is None or not repo.remotes:
            return None
        remotes = {r.name: r for r in repo.remotes}
        return remotes.get(remote, repo.remotes[0])

    def get_remote_url(self, remote: str = 'origin', cached: bool = True) -> str:
        """Get a git remote URL for this instance.

        Args:
            remote (str, optional): The remote name. Defaults to 'origin'.
            cached (bool, optional): Whether to use cached data. Defaults to True.

        Raises:
            ex: A GitCommandError not caused by missing ssh access rights.

        Returns:
            str: The remote URL (ssh-style git@ URLs normalized to http),
            or None when no remote is available.
        """
        if hasattr(self.__class__, '_remote_url') and cached:
            url = self.__class__._remote_url
        else:
            r = self.get_remote(remote)
            try:
                url = list(r.urls)[0]
            except GitCommandError as ex:
                if 'correct access rights' in str(ex):
                    # If ssh is not setup to access this repository,
                    # fall back to reading the URL from git config.
                    cmd = ['git', 'config', '--get', 'remote.%s.url' % r.name]
                    url = Git().execute(cmd)
                else:
                    raise ex
            except AttributeError:
                # r is None: no repository or no remotes configured
                url = None
            if url is not None and url.startswith('git@'):
                # normalize ssh-style URLs to http for display
                domain = url.split('@')[1].split(':')[0]
                path = url.split(':')[1]
                url = "http://%s/%s" % (domain, path)
            self.__class__._remote_url = url
        return url

    remote_url = property(get_remote_url)
class SciUnit(Versioned):
"""Abstract base class for models, tests, and scores."""
def __init__(self):
"""Instantiate a SciUnit object."""
self.unpicklable = []
#: A list of attributes that cannot or should not be pickled.
unpicklable = []
#: A URL where the code for this object can be found.
_url = None
#: A verbosity level for printing information.
verbose = 1
def __getstate__(self) -> dict:
"""Copy the object's state from self.__dict__.
Contains all of the instance attributes. Always uses the dict.copy()
method to avoid modifying the original state.
Returns:
dict: The state of this instance.
"""
state = self.__dict__.copy()
# Remove the unpicklable entries.
if hasattr(self, 'unpicklable'):
for key in set(self.unpicklable).intersection(state):
del state[key]
return state
def _state(self, state: dict=None, keys: list=None,
exclude: List[str]=None) -> dict:
"""Get the state of the instance.
Args:
state (dict, optional): The dict instance that contains a part of state info of this instance.
Defaults to None.
keys (list, optional): Some keys of `state`. Values in `state` associated with these keys will be kept
and others will be discarded. Defaults to None.
exclude (List[str], optional): The list of keys. Values in `state` that associated with these keys
will be removed from `state`. Defaults to None.
Returns:
dict: The state of the current instance.
"""
if state is None:
state = self.__getstate__()
if keys:
state = {key: state[key] for key in keys if key in state.keys()}
if exclude:
state = {key: state[key] for key in state.keys()
if key not in exclude}
state = deep_exclude(state, exclude)
return state
def _properties(self, keys: list=None, exclude: list=None) -> dict:
"""Get the properties of the instance.
Args:
keys (list, optional): If not None, only the properties that are in `keys` will be included in
the return data. Defaults to None.
exclude (list, optional): The list of properties that will not be included in return data. Defaults to None.
Returns:
dict: The dict of properties of the instance.
"""
result = {}
props = self.raw_props()
exclude = exclude if exclude else []
exclude += ['state', 'id']
for prop in set(props).difference(exclude):
if prop == 'properties':
pass # Avoid infinite recursion
elif not keys or prop in keys:
result[prop] = getattr(self, prop)
return result
def raw_props(self) -> list:
"""Get the raw properties of the instance.
Returns:
list: The list of raw properties.
"""
class_attrs = dir(self.__class__)
return [p for p in class_attrs
if isinstance(getattr(self.__class__, p, None), property)]
@property
def state(self) -> dict:
"""Get the state of the instance.
Returns:
dict: The state of the instance.
"""
return self._state()
@property
def properties(self) -> dict:
"""Get the properties of the instance.
Returns:
dict: The properties of the instance.
"""
return self._properties()
@classmethod
def dict_hash(cls, d: dict) -> str:
"""SHA224 encoded value of `d`.
Args:
d (dict): The dict instance to be SHA224 encoded.
Returns:
str: SHA224 encoded value of `d`.
"""
od = [(key, d[key]) for key in sorted(d)]
try:
s = pickle.dumps(od)
except AttributeError:
s = json.dumps(od, cls=SciUnitEncoder).encode('utf-8')
return hashlib.sha224(s).hexdigest()
@property
def hash(self) -> str:
"""A unique numeric identifier of the current model state.
Returns:
str: The unique numeric identifier of the current model state.
"""
return self.dict_hash(self.state)
def json(self, add_props: bool=False, keys: list=None, exclude: list=None, string: bool=True,
indent: None=None) -> str:
"""Generate a Json format encoded sciunit instance.
Args:
add_props (bool, optional): Whether to add additional properties of the object to the serialization. Defaults to False.
keys (list, optional): Only the keys in `keys` will be included in the json content. Defaults to None.
exclude (list, optional): The keys in `exclude` will be excluded from the json content. Defaults to None.
string (bool, optional): The json content will be `str` type if True, `dict` type otherwise. Defaults to True.
indent (None, optional): If indent is a non-negative integer or string, then JSON array elements and object members
will be pretty-printed with that indent level. An indent level of 0, negative, or "" will only
insert newlines. None (the default) selects the most compact representation. Using a positive integer
indent indents that many spaces per level. If indent is a string (such as "\t"), that string is
used to indent each level (source: https://docs.python.org/3/library/json.html#json.dump).
Defaults to None.
Returns:
str: The Json format encoded sciunit instance.
"""
result = json.dumps(self, cls=SciUnitEncoder,
add_props=add_props, keys=keys, exclude=exclude,
indent=indent)
if not string:
result = json.loads(result)
return result
    @property
    def _id(self) -> Any:
        # CPython object identity; only stable for the object's lifetime.
        return id(self)
@property
def _class(self) -> dict:
url = '' if self.url is None else self.url
import_path = '{}.{}'.format(
self.__class__.__module__,
self.__class__.__name__
)
return {'name': self.__class__.__name__,
'import_path': import_path,
'url': url}
    @property
    def id(self) -> str:
        # NOTE(review): `self.json` is a bound method, so str() yields the
        # method's repr (which embeds a memory address), not the JSON text.
        # If a JSON-based id was intended this should call `self.json()` --
        # confirm before changing.
        return str(self.json)
@property
def url(self) -> str:
return self._url if self._url else self.remote_url
class SciUnitEncoder(json.JSONEncoder):
    """Custom JSON encoder for SciUnit objects."""

    # Robustness fix: class-level defaults. `default()` reads these
    # attributes, but `__init__` only sets them when the corresponding
    # kwargs are passed; without defaults, using the encoder directly
    # (e.g. ``json.dumps(obj, cls=SciUnitEncoder)``) could raise
    # AttributeError for SciUnit instances.
    add_props = False
    keys = None
    exclude = None

    def __init__(self, *args, **kwargs):
        # NOTE: these options are stored on the *class*, so they are shared
        # by all encoder instances; the most recently constructed one wins.
        for key in ['add_props', 'keys', 'exclude']:
            if key in kwargs:
                setattr(self.__class__, key, kwargs[key])
                kwargs.pop(key)
        super(SciUnitEncoder, self).__init__(*args, **kwargs)

    def default(self, obj: Any) -> Union[str, dict, list]:
        """Try to encode the object.

        Args:
            obj (Any): Any object to be encoded.

        Raises:
            e: Could not JSON serialize the object.

        Returns:
            Union[str, dict, list]: Encoded object.
        """
        try:
            if isinstance(obj, pd.DataFrame):
                o = obj.to_dict(orient='split')
                if isinstance(obj, SciUnit):
                    # Score matrices use domain-specific axis names.
                    for old, new in [('data', 'scores'),
                                     ('columns', 'tests'),
                                     ('index', 'models')]:
                        o[new] = o.pop(old)
            elif isinstance(obj, np.ndarray) and len(obj.shape):
                o = obj.tolist()
            elif isinstance(obj, SciUnit):
                state = obj.state
                if self.add_props:
                    state.update(obj.properties)
                o = obj._state(state=state, keys=self.keys,
                               exclude=self.exclude)
            elif isinstance(obj, (dict, list, tuple, str, type(None), bool,
                                  float, int)):
                o = json.JSONEncoder.default(self, obj)
            else:  # Something we don't know how to serialize;
                   # just represent it as truncated string
                o = "%.20s..." % obj
        except Exception as e:
            print("Could not JSON encode object %s" % obj)
            raise e
        return o
class TestWeighted(object):
    """Base class for objects with test weights."""

    @property
    def weights(self) -> List[float]:
        """Return the test weights normalized so they sum to one.

        When no explicit weights are stored in ``self.weights_``, each of
        the ``len(self.tests)`` tests receives an equal share.

        Returns:
            List[float]: The normalized list of test weights.
        """
        n_tests = len(self.tests)
        if not self.weights_:
            # No explicit weights: distribute uniformly over all tests.
            return [1.0/n_tests for _ in range(n_tests)]
        assert all([w >= 0 for w in self.weights_]),\
            "All test weights must be >=0"
        total = sum(self.weights_)  # Sum of test weights
        assert total > 0, "Sum of test weights must be > 0"
        return [w/total for w in self.weights_]
def deep_exclude(state: dict, exclude: list) -> dict:
    """Mark excluded (possibly nested) keys in a state dict as removed.

    Non-tuple entries of `exclude` are ignored here; each tuple is
    interpreted as a path into the nested `state` dict, and the value at
    the end of the path is replaced by the string '*removed*'.  Paths with
    missing keys are silently skipped.

    Args:
        state (dict): A dict that represents the state of an instance.
        exclude (list): Attributes that will be marked as 'removed';
            tuples denote paths into nested dicts.

    Returns:
        dict: The same `state` object, modified in place.
    """
    tuples = [key for key in exclude if isinstance(key, tuple)]
    for loc in tuples:
        # Bug fix: restart the walk from the top of `state` for every path.
        # Previously the cursor was initialized once before the loop, so
        # each path after the first started from wherever the previous walk
        # had descended to and was effectively never applied.
        s = state
        for key in loc:
            try:
                s[key]
            except Exception:
                pass
            else:
                if key == loc[-1]:
                    s[key] = '*removed*'
                else:
                    s = s[key]
    return state
| 34.551887 | 138 | 0.553925 | 13,287 | 0.906962 | 0 | 0 | 2,320 | 0.158362 | 0 | 0 | 6,465 | 0.441297 |
5ea72399ea15c71aa2c24c3a7b572b15a149a2ff | 741 | py | Python | arpym/tools/regularized_payoff.py | dpopadic/arpmRes | ddcc4de713b46e3e9dcb77cc08c502ce4df54f76 | [
"MIT"
] | 6 | 2021-04-10T13:24:30.000Z | 2022-03-26T08:20:42.000Z | arpym/tools/regularized_payoff.py | dpopadic/arpmRes | ddcc4de713b46e3e9dcb77cc08c502ce4df54f76 | [
"MIT"
] | null | null | null | arpym/tools/regularized_payoff.py | dpopadic/arpmRes | ddcc4de713b46e3e9dcb77cc08c502ce4df54f76 | [
"MIT"
] | 6 | 2019-08-13T22:02:17.000Z | 2022-02-09T17:49:12.000Z | # -*- coding: utf-8 -*-"
from numpy import pi, exp, sqrt
from scipy.special import erf
def regularized_payoff(x, k_strk, h, method):
    """Compute a Gaussian-smoothed (regularized) call/put option payoff.

    The kink of the payoff at the strike is smoothed with a Gaussian
    kernel of bandwidth `h`. For details, see here:

    Parameters
    ----------
    x : array, shape (j_,)
        Underlying values at which the payoff is evaluated.
    k_strk : scalar
        Strike value.
    h : scalar
        Regularization bandwidth; the exact payoff is recovered as h -> 0.
    method: string
        Either "call" or "put".

    Returns
    -------
    v_h : array, shape (j_,)
        Regularized payoff values.

    Raises
    ------
    ValueError
        If `method` is neither "call" nor "put".  (Previously an unknown
        method fell through and raised UnboundLocalError at the return.)
    """
    # Step 1: Compute the payoff
    if method == "call":
        v_h = h / sqrt(2 * pi) * exp(-(k_strk - x)**2 / (2 * h**2)) \
              + (x - k_strk) / 2 * (1 + erf((x - k_strk) / sqrt(2 * h**2)))
    elif method == "put":
        v_h = h / sqrt(2 * pi) * exp(-(x - k_strk)**2 / (2 * h**2)) \
              - (x - k_strk)/2 * (1 - erf((x - k_strk)/sqrt(2 * h**2)))
    else:
        raise ValueError("method must be 'call' or 'put', got %r" % (method,))
    return v_h
| 22.454545 | 73 | 0.45749 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 277 | 0.373819 |
5eab19f2aca9d090175ce4ce170870b18995458b | 623 | py | Python | functions/solution/functions_numstr_human.py | giserh/book-python | ebd4e70cea1dd56986aa8efbae3629ba3f1ba087 | [
"MIT"
] | 1 | 2019-01-02T15:04:08.000Z | 2019-01-02T15:04:08.000Z | functions/solution/functions_numstr_human.py | giserh/book-python | ebd4e70cea1dd56986aa8efbae3629ba3f1ba087 | [
"MIT"
] | null | null | null | functions/solution/functions_numstr_human.py | giserh/book-python | ebd4e70cea1dd56986aa8efbae3629ba3f1ba087 | [
"MIT"
] | null | null | null | from typing import Union
# Mapping from a single numeric character (or the decimal point) to its
# English word; intended as a lookup table for `number_to_str` below.
NUMBER_DICT = {
    '0': 'zero',
    '1': 'one',
    '2': 'two',
    '3': 'three',
    '4': 'four',
    '5': 'five',
    '6': 'six',
    '7': 'seven',
    '8': 'eight',
    '9': 'nine',
    '.': 'and',
}
def number_to_str(number: Union[int, float]) -> str:
    """Spell out a number in English words.

    NOTE(review): this is an unimplemented stub -- the body is `pass`, so
    it currently returns None and the doctests below do not pass yet.

    >>> number_to_str(1969)
    'one thousand nine hundred sixty nine'

    >>> number_to_str(31337)
    'thirty one thousand three hundred thirty seven'

    >>> number_to_str(13.37)
    'thirteen and thirty seven hundredths'

    >>> number_to_str(31.337)
    'thirty one three hundreds thirty seven thousands'
    """
    pass
| 18.878788 | 54 | 0.548957 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 422 | 0.677368 |
5eacda4c41904fde53e82605bee0dcf17f4ef068 | 5,343 | py | Python | cJobObject.py | SkyLined/mWindowsAPI | d64d57bbf87d2a7b33cf7de89263553793484a84 | [
"CC-BY-4.0"
] | 7 | 2017-10-09T14:32:22.000Z | 2021-01-30T07:25:50.000Z | cJobObject.py | SkyLined/mWindowsAPI | d64d57bbf87d2a7b33cf7de89263553793484a84 | [
"CC-BY-4.0"
] | 2 | 2017-12-12T02:53:18.000Z | 2019-02-19T09:23:18.000Z | cJobObject.py | SkyLined/mWindowsAPI | d64d57bbf87d2a7b33cf7de89263553793484a84 | [
"CC-BY-4.0"
] | 1 | 2017-12-12T02:42:18.000Z | 2017-12-12T02:42:18.000Z | from mWindowsSDK import *;
from .fbIsValidHandle import fbIsValidHandle;
from .fbLastErrorIs import fbLastErrorIs;
from .fohOpenForProcessIdAndDesiredAccess import fohOpenForProcessIdAndDesiredAccess;
from .fsGetPythonISA import fsGetPythonISA;
from .fThrowLastError import fThrowLastError;
from .oSystemInfo import oSystemInfo;
# Select the 32- or 64-bit variant of the extended job limit structure to
# match the ISA of the running Python interpreter.
JOBOBJECT_EXTENDED_LIMIT_INFORMATION = {
  "x86": JOBOBJECT_EXTENDED_LIMIT_INFORMATION32,
  "x64": JOBOBJECT_EXTENDED_LIMIT_INFORMATION64,
}[fsGetPythonISA()];
class cJobObject(object):
  # Wrapper around a Win32 "job object": processes can be assigned to the
  # job and per-process / job-wide memory limits can be queried and set.
  def __init__(oSelf, *auProcessIds):
    # Create a new anonymous job object; optionally assign initial
    # processes by id (any failure to add is treated as fatal).
    oKernel32 = foLoadKernel32DLL();
    oSelf.__ohJob = oKernel32.CreateJobObjectW(NULL, NULL);
    if not fbIsValidHandle(oSelf.__ohJob):
      fThrowLastError("CreateJobObject(NULL, NULL)");
    for uProcessId in auProcessIds:
      assert oSelf.fbAddProcessForId(uProcessId, bThrowAllErrors = True), \
          "Yeah, well, you know, that's just like ehh.. your opinion, man.";
  def fbAddProcessForId(oSelf, uProcessId, bThrowAllErrors = False):
    # Assign the process with the given id to this job object.
    # Returns True if the process was added or already belongs to this job.
    # ERROR_ACCESS_DENIED from AssignProcessToJobObject is tolerated (unless
    # bThrowAllErrors) because it can mean the process is already in the job.
    ohProcess = fohOpenForProcessIdAndDesiredAccess(uProcessId, PROCESS_SET_QUOTA | PROCESS_TERMINATE);
    oKernel32 = foLoadKernel32DLL();
    try:
      if oKernel32.AssignProcessToJobObject(oSelf.__ohJob, ohProcess):
        return True;
      if bThrowAllErrors or not fbLastErrorIs(ERROR_ACCESS_DENIED):
        fThrowLastError("AssignProcessToJobObject(%s, %s)" % (repr(oSelf.__ohJob), repr(ohProcess)));
    finally:
      # Always release the process handle, even when an error is thrown.
      if not oKernel32.CloseHandle(ohProcess):
        fThrowLastError("CloseHandle(%s)" % (repr(ohProcess),));
    # We cannot add the process to the job, but maybe it is already added?
    ohProcess = fohOpenForProcessIdAndDesiredAccess(uProcessId, PROCESS_QUERY_LIMITED_INFORMATION);
    try:
      obProcessInJob = BOOLEAN();
      if not oKernel32.IsProcessInJob(ohProcess, oSelf.__ohJob, obProcessInJob.foCreatePointer()):
        fThrowLastError("IsProcessInJob(0x%X, ..., ...)" % (ohProcess,));
      return obProcessInJob != 0;
    finally:
      if not oKernel32.CloseHandle(ohProcess):
        fThrowLastError("CloseHandle(0x%X)" % (ohProcess,));
  def __foQueryExtendedLimitInformation(oSelf):
    # Query the job's JOBOBJECT_EXTENDED_LIMIT_INFORMATION structure.
    oExtendedLimitInformation = JOBOBJECT_EXTENDED_LIMIT_INFORMATION();
    odwReturnLength = DWORD();
    oKernel32 = foLoadKernel32DLL();
    if not oKernel32.QueryInformationJobObject(
      oSelf.__ohJob, # hJob
      JobObjectExtendedLimitInformation, # JobObjectInfoClass
      LPVOID(oExtendedLimitInformation, bCast = True), # lpJobObjectInfo
      oExtendedLimitInformation.fuGetSize(), # cbJobObjectInfoLength,
      odwReturnLength.foCreatePointer(), # lpReturnLength
    ):
      fThrowLastError("QueryInformationJobObject(hJob=%s, JobObjectInfoClass=0x%X, lpJobObjectInfo=0x%X, cbJobObjectInfoLength=0x%X, lpReturnLength=0x%X)" % (
        repr(oSelf.__ohJob),
        JobObjectExtendedLimitInformation,
        oExtendedLimitInformation.fuGetAddress(),
        oExtendedLimitInformation.fuGetSize(),
        odwReturnLength.fuGetAddress()
      ));
    # Sanity check: the API must have filled the entire structure.
    assert odwReturnLength == oExtendedLimitInformation.fuGetSize(), \
        "QueryInformationJobObject(hJob=%s, JobObjectInfoClass=0x%X, lpJobObjectInfo=0x%X, cbJobObjectInfoLength=0x%X, lpReturnLength=0x%X) => wrote 0x%X bytes" % (
          repr(oSelf.__ohJob),
          JobObjectExtendedLimitInformation,
          oExtendedLimitInformation.fuGetAddress(),
          oExtendedLimitInformation.fuGetSize(),
          odwReturnLength.fuGetAddress(),
          odwReturnLength.fuGetValue()
        );
    return oExtendedLimitInformation;
  def __fSetExtendedLimitInformation(oSelf, oExtendedLimitInformation):
    # Write a (possibly modified) extended limit structure back to the job.
    oKernel32 = foLoadKernel32DLL();
    if not oKernel32.SetInformationJobObject(
      oSelf.__ohJob, # hJob
      JobObjectExtendedLimitInformation, # JobObjectInfoClass
      LPVOID(oExtendedLimitInformation, bCast = True), # lpJobObjectInfo
      oExtendedLimitInformation.fuGetSize(), # cbJobObjectInfoLength,
    ):
      fThrowLastError("SetInformationJobObject(hJob=0x%X, JobObjectInfoClass=0x%X, lpJobObjectInfo=0x%X, cbJobObjectInfoLength=0x%X)" % \
          (oSelf.__ohJob, JobObjectExtendedLimitInformation, oExtendedLimitInformation.fuGetAddress(),
          oExtendedLimitInformation.fuGetSize()));
  def fSetMaxProcessMemoryUse(oSelf, uMemoryUseInBytes):
    # Limit the memory each individual process in the job may use.
    oExtendedLimitInformation = oSelf.__foQueryExtendedLimitInformation();
    oExtendedLimitInformation.ProcessMemoryLimit = int(uMemoryUseInBytes);
    oExtendedLimitInformation.BasicLimitInformation.LimitFlags |= JOB_OBJECT_LIMIT_PROCESS_MEMORY;
    oSelf.__fSetExtendedLimitInformation(oExtendedLimitInformation);
  def fSetMaxTotalMemoryUse(oSelf, uMemoryUseInBytes):
    # Limit the combined memory use of all processes in the job.
    oExtendedLimitInformation = oSelf.__foQueryExtendedLimitInformation();
    oExtendedLimitInformation.JobMemoryLimit = int(uMemoryUseInBytes);
    oExtendedLimitInformation.BasicLimitInformation.LimitFlags |= JOB_OBJECT_LIMIT_JOB_MEMORY;
    oSelf.__fSetExtendedLimitInformation(oExtendedLimitInformation);
  def fuGetMaxProcessMemoryUse(oSelf):
    # Peak memory used by any single process in the job, in bytes.
    oExtendedLimitInformation = oSelf.__foQueryExtendedLimitInformation();
    return int(oExtendedLimitInformation.PeakProcessMemoryUsed);
  def fuGetMaxTotalMemoryUse(oSelf):
    # Peak combined memory used by the whole job, in bytes.
    oExtendedLimitInformation = oSelf.__foQueryExtendedLimitInformation();
    return int(oExtendedLimitInformation.PeakJobMemoryUsed);
| 49.934579 | 164 | 0.763242 | 4,850 | 0.90773 | 0 | 0 | 0 | 0 | 0 | 0 | 821 | 0.153659 |
5ead65667024bc75288d96cebf09e19da2212ee1 | 325 | py | Python | pyatmos/class_atmos.py | lcx366/ATMOS | e079601ea4704d26defd7447ceed0b8be1f89e2f | [
"MIT"
] | 11 | 2019-12-12T01:15:22.000Z | 2022-03-11T02:38:10.000Z | pyatmos/class_atmos.py | lcx366/ATMOS | e079601ea4704d26defd7447ceed0b8be1f89e2f | [
"MIT"
] | 2 | 2021-06-19T23:41:18.000Z | 2022-03-24T22:59:11.000Z | pyatmos/class_atmos.py | lcx366/ATMOS | e079601ea4704d26defd7447ceed0b8be1f89e2f | [
"MIT"
] | null | null | null | class ATMOS(object):
'''
class ATMOS
- attributes:
- self defined
- methods:
- None
'''
def __init__(self,info):
self.info = info
for key in info.keys():
setattr(self, key, info[key])
def __repr__(self):
return 'Instance of class ATMOS' | 19.117647 | 43 | 0.513846 | 324 | 0.996923 | 0 | 0 | 0 | 0 | 0 | 0 | 124 | 0.381538 |
5ead8819552a60d1c0e97dd3a85e065b78457769 | 9,746 | py | Python | core/views/directions.py | Egorka96/med-org-portal | 3f55fb59daea03684b3bc6b5c394cd06cfd6e2df | [
"MIT"
] | 2 | 2020-03-02T17:29:14.000Z | 2020-05-28T13:19:49.000Z | core/views/directions.py | Egorka96/med-org-portal | 3f55fb59daea03684b3bc6b5c394cd06cfd6e2df | [
"MIT"
] | 88 | 2020-02-17T09:46:57.000Z | 2022-03-12T00:24:32.000Z | core/views/directions.py | Egorka96/med-org-portal | 3f55fb59daea03684b3bc6b5c394cd06cfd6e2df | [
"MIT"
] | 1 | 2020-04-17T15:56:51.000Z | 2020-04-17T15:56:51.000Z | import dataclasses
import datetime
import json
import os
import tempfile
from core.excel.directions import DirectionsExcel
from django.conf import settings
from django.contrib import messages
from django.contrib.auth.mixins import PermissionRequiredMixin
from django.core.files import File
from django.shortcuts import redirect
from django.urls import reverse, reverse_lazy
from django.views.generic.base import View
from docx.shared import Mm
from mis.direction import Direction
from mis.org import Org
from mis.service_client import Mis
from core.datatools.report import get_report_period
import core.generic.mixins
import core.generic.views
import core.datatools.barcode
from core import forms, models
class Search(PermissionRequiredMixin, core.generic.mixins.FormMixin, core.generic.mixins.RestListMixin,
             core.generic.views.ListView):
    """Searchable, paginated list of directions fetched from the MIS REST
    backend, with Excel export support."""
    form_class = forms.DirectionSearch
    title = 'Направления'
    permission_required = 'core.view_direction'
    paginate_by = 50
    excel_workbook_maker = DirectionsExcel
    template_name = settings.TEMPLATES_DICT.get("direction_list")
    mis_request_path = Mis.DIRECTIONS_LIST_URL
    def get_workbook_maker_kwargs(self, **kwargs):
        """Extend the Excel maker kwargs with org-column visibility and the
        REST request parameters."""
        kwargs = super().get_workbook_maker_kwargs(**kwargs)
        user_orgs = self.request.user.core.get_orgs()
        # Hide the org column when the user is restricted to a single org.
        kwargs['show_orgs'] = False if user_orgs and len(user_orgs) < 2 else True
        kwargs['mis_request_path'] = self.mis_request_path
        kwargs['filter_params'] = self.get_filter_params()
        return kwargs
    def get_excel_title(self):
        """Build the Excel sheet title from the page title plus the selected
        report period and organizations (when the filter form validates)."""
        title = self.get_title()
        form = self.get_form()
        if form.is_valid():
            title += get_report_period(
                date_from=form.cleaned_data.get('date_from'),
                date_to=form.cleaned_data.get('date_to')
            )
            if orgs := form.cleaned_data.get('orgs'):
                title += f'. Организации: {", ".join(str(org) for org in orgs)}'
        return title
    def get_initial(self):
        # Default the search to today's date.
        initial = super().get_initial()
        initial['date_from'] = datetime.date.today()
        return initial
    def get_filter_params(self):
        """Return the REST filter parameters: cleaned form data when valid,
        otherwise the form's initial values."""
        form = self.get_form()
        if form.is_valid():
            filter_params = form.cleaned_data
        else:
            filter_params = self.get_initial()
        return filter_params
    def process_response_results(self, objects):
        # Convert raw REST dicts into Direction objects for the template.
        return [Direction.dict_to_obj(obj) for obj in objects]
class Edit(PermissionRequiredMixin, core.generic.views.EditView):
    """Create a new direction or edit an existing one in the MIS backend."""
    template_name = 'core/directions/edit.html'
    form_class = forms.DirectionEdit
    data_method = 'post'
    pk_url_kwarg = 'number'
    def has_permission(self):
        # Unlike the default behavior (all perms required), any single
        # matching permission is sufficient here.
        perms = self.get_permission_required()
        return any([self.request.user.has_perm(perm) for perm in perms])
    def get_permission_required(self):
        # Viewing an existing direction (GET with a pk) is also allowed for
        # users with the plain view permission.
        permission_required = [self.get_edit_permission()]
        if self.request.method == 'GET' and self.kwargs.get(self.pk_url_kwarg):
            permission_required.append('core.view_direction')
        return permission_required
    def get_edit_permission(self):
        # change_* when editing an existing object, add_* when creating.
        if self.kwargs.get(self.pk_url_kwarg):
            return 'core.change_direction'
        else:
            return 'core.add_direction'
    def get_success_url(self):
        return reverse_lazy('core:direction_list')
    def get_initial(self):
        """Flatten the fetched Direction object into form initial data
        (nested dicts are reduced to ids / scalar fields)."""
        initial = super().get_initial()
        obj = self.get_object()
        if obj:
            initial.update(dataclasses.asdict(obj))
            if initial.get('org'):
                initial['org'] = initial['org']['id']
            if initial.get('law_items'):
                for l_i in initial['law_items']:
                    # Form field names are derived from the law name without
                    # the trailing Cyrillic suffix; 302н items are split per
                    # section.
                    field_name = f'law_items_{l_i["law"]["name"].replace("н", "")}'
                    if l_i["law"]["name"] == '302н':
                        field_name += f'_section_{l_i["section"]}'
                    initial.setdefault(field_name, []).append(l_i["id"])
            if initial.get('pay_method'):
                initial['pay_method'] = initial['pay_method']['id']
            if initial.get('insurance_policy'):
                initial['insurance_number'] = initial['insurance_policy']['number']
        return initial
    def get_form_kwargs(self):
        # The form needs the current user (e.g. to restrict org choices).
        kwargs = super().get_form_kwargs()
        kwargs['current_user'] = self.request.user
        return kwargs
    def get_object(self):
        # Fetch the direction from the MIS backend (None when creating).
        object_pk = self.kwargs.get(self.pk_url_kwarg)
        if object_pk:
            self.object = Direction.get(direction_id=object_pk)
            return self.object
    def post(self, request, *args, **kwargs):
        # Directions already confirmed in the MIS must not be edited.
        obj = self.get_object()
        if obj and obj.confirm_date:
            messages.error(self.request, 'Редактирование направления запрещено: '
                                         'по нему уже создана заявка на осмотр в медицинской информационной системе')
            return super().get(request, *args, **kwargs)
        return super().post(request, *args, **kwargs)
    def form_valid(self, form):
        """Persist the direction via the MIS API and surface the backend's
        success/error description to the user."""
        if self.kwargs.get(self.pk_url_kwarg):
            success, description = Direction.edit(direction_id=self.kwargs[self.pk_url_kwarg], params=form.cleaned_data)
        else:
            success, description = Direction.create(params=form.cleaned_data)
        if success:
            messages.success(self.request, description)
        else:
            messages.error(self.request, description)
            return self.form_invalid(form)
        return redirect(self.get_success_url())
    def get_context_data(self, **kwargs):
        # `can_edit` drives read-only rendering of the form in the template.
        context = super().get_context_data(**kwargs)
        context['can_edit'] = True
        if not self.request.user.has_perm(self.get_edit_permission()):
            context['can_edit'] = False
        obj = self.get_object()
        if obj and obj.confirm_date:
            context['can_edit'] = False
            messages.warning(self.request, 'Редактирование направления запрещено: '
                                           'по нему уже создана заявка на осмотр в медицинской информационной системе')
        return context
class Delete(PermissionRequiredMixin, core.generic.views.DeleteView):
    """Confirm-and-delete view for a direction stored in the MIS backend."""
    success_url = reverse_lazy('core:direction_list')
    breadcrumb = 'Удалить'
    permission_required = 'core.delete_direction'
    pk_url_kwarg = 'number'
    def get_object(self, *args, **kwargs):
        # Lazily fetch and cache the direction from the MIS.
        # NOTE(review): assumes `self.object` is pre-initialized (presumably
        # by core.generic.views.DeleteView); otherwise the first access would
        # raise AttributeError -- confirm against the base class.
        if self.object is None:
            object_pk = self.kwargs.get(self.pk_url_kwarg)
            self.object = Direction.get(direction_id=object_pk)
        return self.object
    def get_breadcrumbs(self):
        # Home -> list -> the direction itself -> "delete".
        direction = self.get_object()
        return [
            ('Главная', reverse('core:index')),
            ('Направления', reverse('core:direction_list')),
            (direction, reverse('core:direction_edit', kwargs={'number': direction.number})),
            (self.breadcrumb, ''),
        ]
    def delete(self, *args, **kwargs):
        """Delete via the MIS API; on failure re-render the confirmation
        page with the backend's error message."""
        success, description = Direction.delete(direction_id=self.kwargs.get(self.pk_url_kwarg))
        if success:
            messages.success(self.request, description)
        else:
            messages.error(self.request, description)
            return self.render_to_response(self.get_context_data())
        return redirect(self.success_url)
class Print(PermissionRequiredMixin, core.generic.mixins.DocxMixin, View):
    """Render a direction into a printable DOCX document."""
    permission_required = 'core.view_direction'
    print_message = 'Печать направления'
    pk_url_kwarg = 'number'
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Cache slot for the lazily fetched direction.
        self.object = None
    def get_file_name(self):
        return str(self.get_object())
    def get_print_template(self):
        """Pick the DOCX template: first an org-specific one, then the
        org-agnostic default (created from the bundled file if missing)."""
        obj = self.get_object()
        docx_template_file = None
        docx_templates = models.DirectionDocxTemplate.objects.exclude(org_ids='')
        for template in docx_templates:
            # org_ids is a JSON-encoded list of org ids.
            if obj.org.id in json.loads(template.org_ids):
                docx_template_file = template.file.path
                break
        if not docx_template_file:
            docx_template = models.DirectionDocxTemplate.objects.filter(org_ids='').first()
            if not docx_template:
                # Bootstrap the default template from the packaged file.
                docx_template = models.DirectionDocxTemplate.objects.create(
                    name='Основной шаблон',
                )
                with open(os.path.join(settings.BASE_DIR, 'core/templates/core/directions/print.docx'), 'rb') as f:
                    docx_template.file.save(
                        name='direction_print.docx',
                        content=File(f)
                    )
            docx_template_file = docx_template.file.path
        return docx_template_file
    def get_object(self, *args, **kwargs):
        # Lazily fetch and cache the direction from the MIS.
        if self.object is None:
            object_pk = self.kwargs.get(self.pk_url_kwarg)
            self.object = Direction.get(direction_id=object_pk)
        return self.object
    def get_print_context_data(self, **kwargs):
        """Assemble the DOCX rendering context: direction, user, org and a
        generated barcode image."""
        context = super().get_print_context_data(**kwargs)
        context['object'] = self.get_object()
        context['user'] = self.request.user
        if context['object'].org:
            context['org'] = Org.get(self.object.org.id)
        # attach the direction's barcode image
        direction_barcode_path = core.datatools.barcode.create_jpg(
            context['object'].number,
            tmp_dir=tempfile.mkdtemp(dir=settings.DIR_FOR_TMP_FILES),
            module_height=5,
            write_text=False
        )
        context['images'] = {
            'direction_barcode': core.generic.mixins.DocxImage(
                direction_barcode_path, width=Mm(40), height=Mm(15)
            )
        }
        return context
5eae8ac7a005dd1b0f71911185c7c5a0a7158078 | 361 | py | Python | problem/10000~19999/17206/17206.py3.py | njw1204/BOJ-AC | 1de41685725ae4657a7ff94e413febd97a888567 | [
"MIT"
] | 1 | 2019-04-19T16:37:44.000Z | 2019-04-19T16:37:44.000Z | problem/10000~19999/17206/17206.py3.py | njw1204/BOJ-AC | 1de41685725ae4657a7ff94e413febd97a888567 | [
"MIT"
] | 1 | 2019-04-20T11:42:44.000Z | 2019-04-20T11:42:44.000Z | problem/10000~19999/17206/17206.py3.py | njw1204/BOJ-AC | 1de41685725ae4657a7ff94e413febd97a888567 | [
"MIT"
] | 3 | 2019-04-19T16:37:47.000Z | 2021-10-25T00:45:00.000Z | t=int(input())
if not (1 <= t <= 100000): exit(-1)
dp=[0]*100001
dp[3]=3
for i in range(4,100001):
if i%3==0 or i%7==0: dp[i]=i
dp[i]+=dp[i-1]
query=[*map(int,input().split())]
if len(query)!=t: exit(-1)
for i in query:
if not (10 <= int(i) <= 80000): exit(-1)
print(dp[int(i)])
succ=False
try: input()
except: succ=True
if not succ: exit(-1) | 19 | 44 | 0.565097 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
5eae8d82dc5183916e03e85619c769e8bdd57923 | 21,108 | py | Python | quantarhei/qm/liouvillespace/heom.py | slamavl/quantarhei | d822bc2db86152c418e330a9152e7866869776f7 | [
"MIT"
] | 14 | 2016-10-16T13:26:05.000Z | 2021-11-09T11:40:52.000Z | quantarhei/qm/liouvillespace/heom.py | slamavl/quantarhei | d822bc2db86152c418e330a9152e7866869776f7 | [
"MIT"
] | 61 | 2016-09-19T10:45:56.000Z | 2021-11-10T13:53:06.000Z | quantarhei/qm/liouvillespace/heom.py | slamavl/quantarhei | d822bc2db86152c418e330a9152e7866869776f7 | [
"MIT"
] | 21 | 2016-08-30T09:09:28.000Z | 2022-03-30T03:16:35.000Z | # -*- coding: utf-8 -*-
"""
Representation of Hierarchical Equations of Motion
"""
import numpy
from ... import REAL, COMPLEX
from ..propagators.dmevolution import DensityMatrixEvolution
from ..hilbertspace.operators import ReducedDensityMatrix
from ..hilbertspace.operators import UnityOperator
from ...core.units import kB_int
from ..corfunctions.correlationfunctions import CorrelationFunction
class KTHierarchy:
    """ Kubo-Tanimura Hierarchy

    Parameters
    ----------
    ham : Hamiltonian
        System Hamiltonian

    sbi : SystemBathInteraction
        System bath interaction object

    depth : int
        Depth of the hierarchy

    >>> import quantarhei as qr
    >>> m1 = qr.Molecule([0.0, 1.0])
    >>> m2 = qr.Molecule([0.0, 1.0])
    >>> agg = qr.Aggregate([m1, m2])
    >>> agg.set_resonance_coupling(0,1,0.1)
    >>> agg.build()
    >>> ham = agg.get_Hamiltonian()
    >>> print(ham)
    <BLANKLINE>
    quantarhei.Hamiltonian object
    =============================
    units of energy 1/fs
    Rotating Wave Approximation (RWA) enabled : True
    Number of blocks : 2
    Block average energies:
    0 : 0.0
    1 : 1.0
    data =
    [[ 0.   0.   0. ]
     [ 0.   1.   0.1]
     [ 0.   0.1  1. ]]
    >>> sbi = qr.qm.TestSystemBathInteraction("dimer-2-env")
    >>> Hy = KTHierarchy(ham, sbi, 4)
    Warning: OverdampedBrownian spectral density
     - only high-temperature limit of this function is used in HEOM
    Warning: OverdampedBrownian spectral density
     - only high-temperature limit of this function is used in HEOM
    >>> print(Hy.dim)
    3
    >>> print(Hy.nbath)
    2
    >>> print(Hy.gamma)
    [ 0.01  0.01]
    >>> print(Hy.lam)
    [ 0.0037673  0.0037673]
    >>> print(Hy.depth)
    4
    >>> print(Hy.temp)
    300
    >>> print(Hy.hsize)
    15
    >>> print(Hy.Vs[0,:,:])
    [[ 0.  0.  0.]
     [ 0.  1.  0.]
     [ 0.  0.  0.]]
    >>> print(Hy.levels)
    [ 0  1  3  6 10]
    >>> print(Hy.levlengths)
    [1 2 3 4 5]
    >>> print(Hy.hinds)
    [[0 0]
     [1 0]
     [0 1]
     [2 0]
     [1 1]
     [0 2]
     [3 0]
     [2 1]
     [1 2]
     [0 3]
     [4 0]
     [3 1]
     [2 2]
     [1 3]
     [0 4]]

    """

    def __init__(self, ham, sbi, depth=2):
        """Build the hierarchy bookkeeping for a given system and bath."""
        self.ham = ham
        self.sbi = sbi
        self.depth = depth

        # dimension of the ADOs
        self.dim = ham.dim

        # number of baths
        self.nbath = self.sbi.N

        # FIXME
        # check that sbi only has correlation functions of Lorentz type
        for ii in range(self.nbath):
            cc = self.sbi.CC.get_correlation_function(ii,ii)
            prms = cc.params[0]
            if prms["ftype"] == CorrelationFunction.allowed_types[1]:
                tp = CorrelationFunction.allowed_types[1]
                print("Warning: "+tp+" spectral density\n"+
                      " - only high-temperature limit "+
                      "of this function is used in HEOM")
            elif prms["ftype"] in [CorrelationFunction.allowed_types[0],
                                   "Lorentz-Drude"]:
                pass
            else:
                raise Exception("Spectral density/Correlation function type:"+
                                prms["ftype"]+
                                "\nHEOM is not implemented for this function")

        # bath relaxation rates (inverse correlation times)
        self.gamma = numpy.zeros(self.nbath, dtype=REAL)
        for ii in range(self.nbath):
            self.gamma[ii] = 1.0/self.sbi.get_correlation_time(ii)

        # bath reorganization energies
        self.lam = numpy.zeros(self.nbath, dtype=REAL)
        for ii in range(self.nbath):
            self.lam[ii] = self.sbi.get_reorganization_energy(ii)

        self.temp = self.sbi.get_temperature()
        self.kBT = self.temp*kB_int

        # generation of hierarchy indices
        indxs = self.generate_indices(self.nbath, level=self.depth)
        self.hsize = 0
        for levels in indxs:
            self.hsize += len(levels)

        self.Vs = self.sbi.KK

        #self.rho = ReducedDensityMatrix(data=numpy.zeros((self.dim, self.dim),
        #                                           dtype=COMPLEX))
        # This needs to be basis controlled
        self.ado = None
        self.reset_ados()

        #
        # numpy representation of the hierarchy indices
        #
        # NOTE: `dtype=int` replaces the former `dtype=numpy.int`;
        # `numpy.int` was deprecated in NumPy 1.20 and removed in 1.24
        # (it was merely an alias of the builtin int, so behavior is
        # unchanged).
        #
        # indices where the levels start
        self.levels = numpy.zeros(depth+1, dtype=int)
        # lengths of the levels
        self.levlengths = numpy.zeros(depth+1, dtype=int)
        # indices
        self.hinds = self._convert_2_matrix(indxs)

        self.nm1 = numpy.zeros((self.hsize, self.nbath), dtype=int)
        self.np1 = numpy.zeros((self.hsize, self.nbath), dtype=int)
        self._make_nmp1()

        self.Gamma = numpy.zeros(self.hsize, dtype=REAL)
        self._make_Gamma()

        self.hpop = None

    def generate_indices(self, N, level=0):
        """Generates indices of the hierarchy up a certain level

        Parameters
        ----------
        N : int
            Number of indices in the hierarchy

        level : int
            Highest level of the hierarchy to be generated

        """
        # Breadth-first construction: level 0 is the zero vector; level k+1
        # consists of all distinct vectors obtained by incrementing one
        # component of a level-k vector.  (Two dead `if False:` branches,
        # including a call to the experimental _generate_indices_2_0to4,
        # were removed here.)
        lret = []
        level_prev = []
        inilist = [0]*N
        level_prev.append(inilist)
        lret.append(level_prev)
        for kk in range(level):
            new_level_prev = []
            for old_level in level_prev:
                for nn in range(N):
                    nlist = old_level.copy()
                    nlist[nn] += 1
                    # check if it is already in
                    if nlist not in new_level_prev:
                        new_level_prev.append(nlist)
            level_prev = new_level_prev
            lret.append(level_prev)
        return lret

    def _generate_indices_2_0to4(self, N=2, level=0):
        """Generation of hierarchy indices for a give problem

        Legacy experimental variant, hard-coded for N=2 and level <= 4;
        kept for reference but no longer called by generate_indices.
        """
        if N != 2:
            raise Exception("Experimental code, N different from 2"+
                            " not implemented")
        if level > 4:
            raise Exception("Experimental code, level > 4 not implemented")
        lret = []
        level0 = []
        level0.append([0,0])
        lret.append(level0)
        if level == 0:
            return lret
        level1 = []
        level1.append([1,0])
        level1.append([0,1])
        lret.append(level1)
        if level == 1:
            return lret
        level2 = []
        level2.append([2,0])
        level2.append([1,1])
        level2.append([0,2])
        lret.append(level2)
        if level == 2:
            return lret
        level3 = []
        level3.append([3,0])
        level3.append([2,1])
        level3.append([1,2])
        level3.append([0,3])
        lret.append(level3)
        if level == 3:
            return lret
        level4 = []
        level4.append([4,0])
        level4.append([3,1])
        level4.append([2,2])
        level4.append([1,3])
        level4.append([0,4])
        lret.append(level4)
        return lret

    def _convert_2_matrix(self, indxs):
        """Convert the list of levels of the hierarchy into an numpy array

        Also records, per level, its start offset (self.levels) and its
        length (self.levlengths).
        """
        hsize = 0
        lvl = 0
        start = 0
        self.levels[lvl] = start
        for levels in indxs:
            if lvl > 0:
                self.levels[lvl] = start
            lngth = len(levels)
            self.levlengths[lvl] = lngth
            hsize += lngth
            lvl +=1
            start = start+lngth
        mat = numpy.zeros((hsize, self.nbath), dtype=int)
        ii = 0
        for level in indxs:
            for inds in level:
                kk = 0
                for ind in inds:
                    mat[ii, kk] = ind
                    kk += 1
                ii += 1
        return mat

    def _make_nmp1(self):
        """ Makes the list of indices obtained from n by -1 or +1 operations

        nm1[n, k] (np1[n, k]) holds the hierarchy index whose vector equals
        hinds[n] with component k decremented (incremented), or -1 when no
        such member exists.
        """
        for nn in range(self.hsize):
            for kk in range(self.nbath):
                indxm = numpy.zeros(self.nbath, dtype=int)
                indxm[:] = self.hinds[nn,:]
                indxm[kk] -= 1
                indxp = numpy.zeros(self.nbath, dtype=int)
                indxp[:] = self.hinds[nn,:]
                indxp[kk] += 1
                venm = -1
                for ll in range(nn):
                    if numpy.array_equal(self.hinds[ll,:], indxm):
                        venm = ll
                venp = -1
                for ll in range(self.hsize):
                    if numpy.array_equal(self.hinds[ll,:], indxp):
                        venp = ll
                self.nm1[nn, kk] = venm
                self.np1[nn, kk] = venp

    def _make_Gamma(self):
        """ Decay factor of a given ADO

        Gamma[n] = sum_k hinds[n, k] * gamma[k].
        """
        for nn in range(self.hsize):
            for kk in range(self.nbath):
                self.Gamma[nn] += self.hinds[nn,kk]*self.gamma[kk]

    def reset_ados(self):
        """Creates memory of ADOs and sets them to zero

        """
        self.ado = numpy.zeros((self.hsize, self.dim, self.dim),
                               dtype=COMPLEX)

    def get_kernel(self, timeaxis):
        """Returns integration kernel for the time-non-local equation

        The kernel is built by propagating every elementary density-matrix
        basis element through the free hierarchy and sandwiching the result
        between the P->H and H->P projection superoperators.
        """
        N = self.dim
        Nt = timeaxis.length
        kernel = numpy.zeros((Nt,N,N,N,N), dtype=COMPLEX)
        khprop = KTHierarchyPropagator(timeaxis, self)
        rhoi = ReducedDensityMatrix(dim=self.dim)
        for ii in range(N):
            for jj in range(N):
                print("Starting:", ii, jj)
                rhoi = ReducedDensityMatrix(dim=self.dim)
                rhoi.data[ii,jj] = 1.0
                rhot = khprop.propagate(rhoi, free_hierarchy=True)
                kernel[:,:,:,ii,jj] = -rhot.data[:,:,:]
                print("... finished.")
        qhp = self._QHPsop()
        phq = self._PHQsop()
        for tk in range(Nt):
            k1 = numpy.tensordot(kernel[tk,:,:,:,:], qhp)
            kernel[tk,:,:,:,:] = numpy.tensordot(phq, k1)
        return kernel

    def _QHPsop(self):
        """Superoperator coupling the first hierarchy tier back to the
        system density matrix (Theta+ and Psi+ terms)."""
        N = self.dim
        delta = UnityOperator(dim=N).data
        qhp = numpy.zeros((N, N, N, N), dtype=COMPLEX)
        for kk in range(self.nbath):
            # Theta+ and Psi+
            nk = self.hinds[1,kk]
            jj = self.nm1[1,kk]
            print(kk, nk, jj)
            if nk*jj > 0:
                print("Calculating QHP:")
                for ii_i in range(N):
                    for jj_i in range(N):
                        for kk_i in range(N):
                            for ll_i in range(N):
                                # Theta
                                qhp[ii_i,jj_i,kk_i,ll_i] += \
                                nk*self.lam[kk]*self.gamma[kk]*\
                                (self.Vs[kk,ii_i,kk_i]*delta[jj_i,ll_i]
                               + delta[kk_i,ii_i]*self.Vs[kk,ll_i,jj_i])
                                # Psi
                                qhp[ii_i,jj_i,kk_i,ll_i] += \
                                1j*2.0*nk*self.lam[kk]*self.kBT* \
                                (self.Vs[kk,ii_i,kk_i]*delta[jj_i,ll_i]
                               - delta[kk_i,ii_i]*self.Vs[kk,ll_i,jj_i])
                print(" ...done")
        return qhp

    def _PHQsop(self):
        """Identity projection superoperator (P <- H direction)."""
        N = self.dim
        delta = UnityOperator(dim=N).data
        phq = numpy.zeros((N, N, N, N), dtype=COMPLEX)
        for ii in range(N):
            for jj in range(N):
                for kk in range(N):
                    for ll in range(N):
                        phq[ii,jj,kk,ll] = delta[ii,kk]*delta[jj,ll]
        return phq
class KTHierarchyPropagator:
    """Propagator of the Kubo-Tanimura hierarchy

    >>> import numpy
    >>> import quantarhei as qr
    >>> with qr.energy_units("1/cm"):
    ...     m1 = qr.Molecule([0.0, 10000.0])
    ...     m2 = qr.Molecule([0.0, 10000.0])
    ...     #m3 = qr.Molecule([0.0, 10000.0])
    >>> agg = qr.Aggregate([m1, m2])
    >>> with qr.energy_units("1/cm"):
    ...     agg.set_resonance_coupling(0,1,80.0)
    ...     #agg.set_resonance_coupling(0,2,100.0)
    >>> agg.build()
    >>> ham = agg.get_Hamiltonian()
    >>> sbi = qr.qm.TestSystemBathInteraction("dimer-2-env")
    >>> Hy = KTHierarchy(ham, sbi, 4)
    Warning: OverdampedBrownian spectral density
    - only high-temperature limit of this function is used in HEOM
    Warning: OverdampedBrownian spectral density
    - only high-temperature limit of this function is used in HEOM
    >>> rhoi = qr.ReducedDensityMatrix(dim=ham.dim)
    >>> rhoi.data[2,2] = 1.0
    >>> print(rhoi)
    <BLANKLINE>
    quantarhei.ReducedDensityMatrix object
    ======================================
    data =
    [[ 0.+0.j  0.+0.j  0.+0.j]
     [ 0.+0.j  0.+0.j  0.+0.j]
     [ 0.+0.j  0.+0.j  1.+0.j]]
    >>> time = qr.TimeAxis(0.0, 1000, 1.0)
    >>> kprop = KTHierarchyPropagator(time, Hy)
    >>> rhot = kprop.propagate(rhoi)

    #>>> import matplotlib.pyplot as plt
    #>>> N = time.length
    #>>> with qr.eigenbasis_of(ham):
    #...     plt.plot(time.data[0:N], rhot.data[0:N,1,1],"-b")
    #...     plt.plot(time.data[0:N], rhot.data[0:N,2,2],"-r")
    #...     plt.plot(time.data[0:N], numpy.real(rhot.data[0:N,1,2]),"-g")
    #>>> plt.show()
    """

    def __init__(self, timeaxis, hierarchy):
        # time grid used by the integrator
        self.timeaxis = timeaxis
        self.Nt = timeaxis.length
        self.dt = timeaxis.step
        self.hy = hierarchy

        #
        # RWA
        #
        if self.hy.ham.has_rwa:
            self.RWA = self.hy.ham.rwa_indices
            self.RWU = numpy.zeros(self.RWA.shape, dtype=self.RWA.dtype)

            # Diagonal matrix of RWA energies; subtracted from the
            # Hamiltonian in _ado_self_rhs to remove the fast oscillations.
            HH = self.hy.ham.data
            shape = HH.shape
            HOmega = numpy.zeros(shape, dtype=REAL)
            for ii in range(shape[0]):
                HOmega[ii,ii] = self.hy.ham.rwa_energies[ii]

            self.HOmega = HOmega

    def propagate(self, rhoi, L=4, report_hierarchy=False,
                  free_hierarchy=False):
        """Propagates the Kubo-Tanimura Hierarchy including the RDO

        rhoi : initial reduced density matrix
        L : order of the short-time Taylor expansion per step
        report_hierarchy : if True, record the trace of every ADO over time
        free_hierarchy : if True, the zeroth tier is frozen at zero and the
            initial condition is placed in tier 1 instead
        """
        rhot = DensityMatrixEvolution(timeaxis=self.timeaxis, rhoi=rhoi)

        if free_hierarchy:
            # first act with lifting superoperators
            self.hy.ado[1,:,:] = rhoi.data
            N = rhoi.dim
            Nt = self.timeaxis.length
            # NOTE(review): `ker` is filled below but never returned or
            # stored on self — confirm whether this is intentional.
            ker = numpy.zeros((Nt,N,N), dtype=COMPLEX)
            ker[0,:,:] = rhoi.data

            # Now we propagate normally; slevel is set to 1 so zero's order
            # does not update and stays zero
            slevel = 1

        else:
            # normally inital condition goes here
            self.hy.ado[0,:,:] = rhoi.data
            slevel = 0

        ado1 = self.hy.ado
        ado2 = self.hy.ado

        # no fine time-step for integro-differential solver
        self.Nref = 1

        if report_hierarchy:
            # we report population of hierarchy ADO
            Nt = rhot.data.shape[0]
            self.hy.hpop = numpy.zeros((Nt, self.hy.hsize), dtype=REAL)
            for kk in range(self.hy.hsize):
                self.hy.hpop[0,kk] = numpy.trace(self.hy.ado[kk,:,:])

        indx = 1
        for ii in self.timeaxis.data[1:self.Nt]:

            for jj in range(0,self.Nref):

                # L-th order short-time expansion: each pass applies the
                # right-hand side with step dt/ll and accumulates the
                # Taylor terms into ado2.
                for ll in range(1,L+1):

                    ado1 = self._ado_cros_rhs(ado1, (self.dt/ll), slevel) \
                           + self._ado_self_rhs(ado1, (self.dt/ll), slevel)

                    ado2 = ado2 + ado1

                ado1 = ado2

            self.hy.ado = ado2

            if free_hierarchy:
                ker[indx,:,:] = ado2[1,:,:]
            else:
                # tier 0 is the reduced density matrix itself
                rhot.data[indx,:,:] = ado2[0,:,:]

            if report_hierarchy:
                # we report population of hierarchy ADO
                for kk in range(self.hy.hsize):
                    self.hy.hpop[indx, kk] = numpy.trace(ado2[kk,:,:])

            indx += 1

        return rhot

    def _ado_self_rhs(self, ado1, dt, slevel=0):
        """Self contribution of the equation for the hierarchy ADOs
        """
        ado3 = numpy.zeros(ado1.shape, dtype=ado1.dtype)

        # in the RWA the rotating part of the Hamiltonian is removed
        if self.hy.ham.has_rwa:
            HH = self.hy.ham.data - self.HOmega
        else:
            HH = self.hy.ham.data

        for nn in range(slevel, self.hy.hsize):
            # -i[H, rho_n] - Gamma_n rho_n, scaled by the time step
            ado3[nn,:,:] = -dt*(1j*(numpy.dot(HH, ado1[nn,:,:])
                                - numpy.dot(ado1[nn,:,:],HH))
                                + self.hy.Gamma[nn]*ado1[nn,:,:])

        return ado3

    def _ado_cros_rhs(self, ado1, dt, slevel=0):
        """All cross-terms of the Hierarchy
        """
        ado3 = numpy.zeros(ado1.shape, dtype=ado1.dtype)
        rl = numpy.zeros((ado1.shape[1],ado1.shape[2]), dtype=ado1.dtype)
        rr = numpy.zeros((ado1.shape[1],ado1.shape[2]), dtype=ado1.dtype)

        for nn in range(slevel, self.hy.hsize):
            for kk in range(self.hy.nbath):

                # Theta+ and Psi+ : coupling to the lower tier (nm1)
                nk = self.hy.hinds[nn,kk]
                jj = self.hy.nm1[nn,kk]
                # NOTE(review): nm1 can hold -1 for "no lower ADO"; with
                # nk == 0 the product is 0 and the guard still passes,
                # indexing ado1[-1].  The nk factor zeroes the term, but
                # confirm the guard is as intended (cf. "> 0" in _QHPsop).
                if nk*jj >= 0:
                    rr = numpy.dot(self.hy.Vs[kk,:,:], ado1[jj,:,:])
                    rl = numpy.dot(ado1[jj,:,:],self.hy.Vs[kk,:,:])
                    # Theta
                    ado3[nn,:,:] += dt*nk*self.hy.lam[kk]*self.hy.gamma[kk]* \
                                    (rr+rl)
                    # Psi
                    ado3[nn,:,:] += (1j*dt)*2.0*nk* \
                                    self.hy.lam[kk]*self.hy.kBT* \
                                    (rr-rl)

                # Psi- : coupling to the higher tier (np1)
                jj = self.hy.np1[nn,kk]
                if jj > 0:
                    rr = numpy.dot(self.hy.Vs[kk,:,:], ado1[jj, :,:])
                    rl = numpy.dot(ado1[jj,:,:],self.hy.Vs[kk,:,:])
                    ado3[nn,:,:] += (1j*dt)*(rr-rl)

        return ado3
# def _QHP(self, rhoi, slevel=1):
# """One application of the hierarchy operators
#
# """
# ado1 = numpy.zeros((self.hy.hsize, self.hy.dim, self.hy.dim),
# dtype=COMPLEX)
# ado1[0,:,:] = rhoi.data[:,:]
#
# ador = self._ado_cros_rhs(ado1, dt=1.0, slevel=slevel)
# ador[0,:,:] = 0.0 # nullify the 0 order so that it cannot contribute
#
# return ador
# def _PHQ(self, adof, slevel=0):
# """
#
# """
# return self._ado_cros_rhs(adof, dt=1.0, slevel=slevel)
| 30.724891 | 93 | 0.45054 | 20,074 | 0.951014 | 0 | 0 | 0 | 0 | 0 | 0 | 6,287 | 0.297849 |
5eaeca45e629aa19975f979a92e385f19c770edc | 17,980 | py | Python | pyOSA/_OSA_Jupyter.py | gregmoille/InstrumentControl | 4cc8477e36f7c4ad4bf4f54036fdd8dd985b4133 | [
"MIT"
] | 3 | 2018-05-02T20:14:15.000Z | 2020-10-18T03:57:09.000Z | pyOSA/.ipynb_checkpoints/_OSA_Jupyter-checkpoint.py | gregmoille/InstrumentControl | 4cc8477e36f7c4ad4bf4f54036fdd8dd985b4133 | [
"MIT"
] | 1 | 2019-05-23T15:21:08.000Z | 2019-05-23T15:21:08.000Z | pyOSA/.ipynb_checkpoints/_OSA_Jupyter-checkpoint.py | gregmoille/InstrumentControl | 4cc8477e36f7c4ad4bf4f54036fdd8dd985b4133 | [
"MIT"
] | 2 | 2019-05-16T20:36:25.000Z | 2020-09-22T18:26:49.000Z | import numpy as np
import threading
import os
import plotly.graph_objects as go
import pandas as pd
import sys
import re
from scipy import constants as cts
from IPython.display import display, HTML
import time
work_dir = os.path.join(os.path.dirname(__file__), '../')
work_dir = os.path.abspath(work_dir)
path = os.path.abspath(work_dir + '/../')
if not work_dir in sys.path:
sys.path.insert(0, work_dir)
from pyOSA import Yokogawa
from pyOSA import uiOSA
# if len(sys.argv)>1:
# DEBUG = bool(re.search('true', sys.argv[0].lower()))
# print(f'Debugging: {DEBUG}')
# else:
# DEBUG = False
# DEBUG = False
# print(DEBUG)
class OSAjupyter(uiOSA):
    """Jupyter front-end for a Yokogawa optical spectrum analyzer (OSA).

    Wires the ipywidgets-based UI built by ``uiOSA`` to a ``Yokogawa``
    network client and mirrors the instrument state into the plotly figure
    ``self.figOSA`` (trace 0 is the live trace; further traces are "kept"
    snapshots).
    """

    # Spectral span (nm) of the supported OSA models, keyed by model name.
    OSAmodel = {'AQ6375B': dict(span=[1200.0, 2400.0]),
                'AQ6374': dict(span=[350.0, 1750.0]),
                'AQ6370D': dict(span=[600.0, 1700.0])}

    def __init__(self, **kwargs):
        super().__init__()
        self.DEBUG = kwargs.get('DEBUG', False)
        if self.DEBUG:
            print('Debuging mode ON')
        self.createUI()
        self.connected = False
        self._id_trace = 0      # number of traces "kept" in the figure
        self._scan = False      # True while a scan worker thread runs
        # -- connect the button --
        self._connectUI()

    def connect(self, change):
        """Toggle the connection to the instrument and sync the UI.

        On connect, retries up to five times; on success populates the
        model info, wavelength sliders, bandwidth, resolution and point
        count, and plots the current trace.
        """
        ip = self.ui.ip.value
        if change.new:
            try_connect = 0
            while try_connect < 5:
                with Yokogawa(ip=ip) as osa:
                    # -- fetch the OSA state
                    if osa.connected:
                        # FIX: do not count a successful attempt — the old
                        # code incremented here too, so a connection on the
                        # 5th attempt was reported as a failure below.
                        identity = osa.identity
                        if self.DEBUG:
                            print(f'Model:{identity}')
                            print(f'Model:{self.OSAmodel}')
                            print(f"Model:{self.OSAmodel[identity['model']]}")
                            print('Connected to the OSA')
                        try:
                            para = osa.settings
                            if self.DEBUG:
                                print('Fetched parameters')
                        except Exception as err:
                            # NOTE(review): if this fails, `para` stays
                            # undefined and the UI update below will raise.
                            print(err)
                        trace = osa.trace
                        if self.DEBUG:
                            print('Fetched traces')
                        break
                    else:
                        try_connect += 1
                        print('Did not connect, retrying...')
                        time.sleep(0.5)

            # reset the live trace before redrawing
            self.figOSA.data[0].x = []
            self.figOSA.data[0].y = []

            # -- updating the UI
            if try_connect >= 5:
                print("Couldn't connect to the OSA, please check the IP")
            else:
                self.connected = True
                if self.DEBUG:
                    print('Finished Connecting')
                model = identity['model']
                if self.DEBUG:
                    print(f"Model: {model}")
                self.ui.model.value = f"Maker: {identity['maker']}\n" + \
                    f"Model: {model}\n" + \
                    f"SN: {identity['SN']}\n\n" + \
                    f"Spectral range:\n\t {self.OSAmodel[model]['span'][0]}nm - {self.OSAmodel[model]['span'][1]}nm\n"

                # centre/span come back in metres; the widgets work in nm
                lbd_start = para['centwlgth'] - para['span']/2
                lbd_end = para['centwlgth'] + para['span']/2
                if self.DEBUG:
                    print(f'Start: {lbd_start}')
                    print(f'End: {lbd_end}')
                self.ui.λ.min = self.OSAmodel[model]['span'][0]
                self.ui.λ.max = self.OSAmodel[model]['span'][1]
                self.ui.λ.value = (1e9*lbd_start, 1e9*lbd_end)

                try:
                    self.ui.bandwidth.value = self._Bdwt_val[1e9*para['bdwdth']]
                except Exception as err:
                    if self.DEBUG:
                        print(f'Badnwidth Error: {err}')
                        print(f"Value: {1e9*para['bdwdth']}")
                try:
                    self.ui.res.index = int(para['resol'])
                except Exception as err:
                    if self.DEBUG:
                        print(f'Res Error: {err}')
                        print(f"Value: {para['resol']}")
                try:
                    self.ui.pts.value = int(para['pts'])
                except Exception as err:
                    if self.DEBUG:
                        print(f'Pts Error: {err}')
                        print(f"Value: {para['pts']}")

                self.figOSA.data[0].x = trace.lbd.values*1e9
                self.figOSA.data[0].y = trace.S.values
                # force plotly to recompute the ranges, then freeze them
                self.figOSA.update_xaxes(autorange=True)
                self.figOSA.update_xaxes(autorange=False)
                self.figOSA.update_xaxes(range=[self.figOSA.layout.xaxis.range[0],
                                                self.figOSA.layout.xaxis.range[1]])
                self.figOSA.update_yaxes(autorange=True)
                time.sleep(0.2)
                self.figOSA.update_yaxes(autorange=False)
                self.figOSA.update_yaxes(range=[-59, self.figOSA.layout.yaxis.range[-1]])
                time.sleep(0.5)
                self.figOSA.update_yaxes(range=[-85, self.figOSA.layout.yaxis.range[-1]])
        else:
            self.connected = False

    def refreshTrace(self, change):
        """Re-read the active trace from the instrument and redraw it."""
        ip = self.ui.ip.value
        if self.connected:
            with Yokogawa(ip=ip) as osa:
                if osa.connected:
                    trace = osa.trace
                    x = trace.lbd*1e9
                    if self.ui.freq_scale.value.lower() == 'frequency':
                        x = 1e-12*cts.c/(x*1e-9)
                    self.figOSA.data[0].x = x
                    self.figOSA.data[0].y = trace.S

    def _stopScan(self):
        """Stop the instrument sweep and signal the worker thread to end."""
        self._scan = False
        ip = self.ui.ip.value
        time.sleep(0.5)
        with Yokogawa(ip=ip) as osa:
            osa.scan = 'stop'
        print('stopped')
        self._scan = False

    def _singleScan(self):
        """Worker: trigger a single sweep and poll the trace until stopped."""
        self._scan = True
        ip = self.ui.ip.value
        with Yokogawa(ip=ip) as osa:
            self.figOSA.data[0].x = []
            self.figOSA.data[0].y = []
            osa.scan = 'single'
            print('Launching a single scan')
            while True:
                time.sleep(0.01)
                trace = osa.trace
                # FIX: test against None explicitly (consistent with
                # _repeatScan); truth-testing a DataFrame is ambiguous.
                if trace is not None:
                    x = trace.lbd*1e9
                    if self.ui.freq_scale.value.lower() == 'frequency':
                        x = 1e-12*cts.c/(x*1e-9)
                    self.figOSA.data[0].x = x
                    self.figOSA.data[0].y = trace.S
                else:
                    time.sleep(0.25)
                if not self._scan:
                    break

    def _repeatScan(self):
        """Worker: run repeated sweeps, refreshing the plot continuously."""
        self._scan = True
        ip = self.ui.ip.value
        with Yokogawa(ip=ip) as osa:
            self.figOSA.data[0].x = []
            self.figOSA.data[0].y = []
            osa.scan = 'repeat'
            print('Launching a Continuous scan')
            while True:
                time.sleep(0.01)
                trace = osa.trace
                if trace is not None:
                    x = trace.lbd*1e9
                    if self.ui.freq_scale.value.lower() == 'frequency':
                        x = 1e-12*cts.c/(x*1e-9)
                    self.figOSA.data[0].x = x
                    self.figOSA.data[0].y = trace.S
                else:
                    time.sleep(0.25)
                if not self._scan:
                    break

    def scanType(self, change):
        """Dispatch the scan-mode control to the scan worker threads."""
        mode = change.new.lower()
        if mode == 'stop':
            self._stopScan()
        elif not self._scan:
            if mode == 'single':
                threading.Thread(target=self._singleScan).start()
            elif mode == 'repeat':
                threading.Thread(target=self._repeatScan).start()

    def select_trace(self, change):
        """Make the selected instrument trace ('Trace A'...) active."""
        ip = self.ui.ip.value
        if self.connected:
            with Yokogawa(ip=ip) as osa:
                osa.trace = change.new.replace('Trace ', '')

    def update_λ(self, change):
        """Push the wavelength-range slider (nm) to the instrument."""
        ip = self.ui.ip.value
        if self.connected:
            centwlgth = (change.new[1] + change.new[0])/2
            span = (change.new[1] - change.new[0])
            time.sleep(1)
            with Yokogawa(ip=ip) as osa:
                para = osa.settings
            if self.DEBUG:
                print(para)
            # instrument registers are in metres
            para['centwlgth'] = centwlgth*1e-9
            para['span'] = span*1e-9
            if self.DEBUG:
                print(para)
            with Yokogawa(ip=ip) as osa:
                osa.settings = para
            self.figOSA.update_xaxes(range=change.new)

    def update_res(self, change):
        """Push the resolution selection to the instrument."""
        ip = self.ui.ip.value
        if self.connected:
            with Yokogawa(ip=ip) as osa:
                para = osa.settings
            para['resol'] = change.new
            with Yokogawa(ip=ip) as osa:
                osa.settings = para

    def update_bdwt(self, change):
        """Push the bandwidth selection (nm) and read back the result."""
        ip = self.ui.ip.value
        if self.connected:
            with Yokogawa(ip=ip) as osa:
                para = osa.settings
            para['bdwdth'] = float(change.new.replace(' nm', ''))*1e-9
            with Yokogawa(ip=ip) as osa:
                osa.settings = para
                para = osa.settings
            self.ui.bandwidth.value = self._Bdwt_val[1e9*para['bdwdth']]

    def update_points(self, change):
        """Push the sample-point count and read back the accepted value."""
        ip = self.ui.ip.value
        if self.connected:
            with Yokogawa(ip=ip) as osa:
                para = osa.settings
            para['pts'] = change.new
            with Yokogawa(ip=ip) as osa:
                osa.settings = para
                para = osa.settings
            self.ui.pts.value = int(para['pts'])

    def clear_all_trace(self, change):
        """Drop all kept traces and blank the live trace."""
        self._id_trace = 0
        self.figOSA.data = [self.figOSA.data[0]]
        self.figOSA.data[0].x = []
        self.figOSA.data[0].y = []

    def clear_keep_trace(self, change):
        """Drop all kept traces but leave the live trace on screen."""
        self._id_trace = 0
        self.figOSA.data = [self.figOSA.data[0]]

    def keep_trace(self, change):
        """Freeze a copy of the live trace as an extra figure trace."""
        self._id_trace += 1
        print('Keeping trace')
        tr = go.Scatter(x=self.figOSA.data[0].x,
                        y=self.figOSA.data[0].y)
        self.figOSA.add_trace(tr)
        print('Trace kept')

    def freq_scale(self, change):
        """Switch the x axis of every plotted trace between nm and THz."""
        print(change.new.lower())
        xdata = [trace.x for trace in self.figOSA.data]
        newx = [None]*len(xdata)
        if change.new.lower() == 'frequency':
            # nm -> THz:  f = c / lambda
            for ii in range(len(xdata)):
                newx[ii] = 1e-12 * cts.c/(xdata[ii]*1e-9)
            xlabel = 'Frequency (THz)'
        elif change.new.lower() == 'wavelength':
            # FIX: THz -> nm is lambda = c / f; the old code reused the
            # nm -> THz formula here, mangling the axis values.
            for ii in range(len(xdata)):
                newx[ii] = 1e9 * cts.c/(xdata[ii]*1e12)
            xlabel = 'Wavelength (nm)'
        for ii in range(len(self.figOSA.data)):
            self.figOSA.data[ii].x = newx[ii]
        self.figOSA.update_xaxes(title=xlabel, range=[np.min(newx), np.max(newx)])

    def save_data(self, change):
        """Save the plotted traces ('pc') or the instrument trace to parquet."""
        ip = self.ui.ip.value
        fname = self.ui.picker.selected
        if fname and not os.path.exists(fname):
            if self.ui.to_save.value.lower() == 'pc':
                lbd = self.figOSA.data[0].x*1e-9
                S = self.figOSA.data[0].y
                df = pd.DataFrame(dict(lbd=lbd, S=S))
                # FIX: append the KEPT traces — the old loop re-read
                # data[0] instead of data[ii] for every extra trace.
                for ii in range(1, len(self.figOSA.data)):
                    lbd = self.figOSA.data[ii].x*1e-9
                    S = self.figOSA.data[ii].y
                    dum = pd.DataFrame({f'lbd{ii}': lbd, f'S{ii}': S})
                    df = pd.concat([df, dum], axis=1)
                df.to_parquet(fname)
            else:
                with Yokogawa(ip=ip) as osa:
                    if osa.connected:
                        osa.trace.to_parquet(fname)
                    else:
                        print("Cannot coonect!!")

    def _connectUI(self):
        """Wire the widget callbacks to their handlers."""
        self.ui.cnct.observe(self.connect, 'value')
        self.ui.refresh_trace.on_click(self.refreshTrace)
        self.ui.trace.observe(self.select_trace, 'value')
        self.ui.λ.observe(self.update_λ, 'value')
        self.ui.bandwidth.observe(self.update_bdwt, 'value')
        self.ui.pts.observe(self.update_points, 'value')
        self.ui.res.observe(self.update_res, 'index')
        self.ui.clr.on_click(self.clear_all_trace)
        self.ui.clr_keep.on_click(self.clear_keep_trace)
        self.ui.keep.on_click(self.keep_trace)
        self.ui.save.on_click(self.save_data)
        self.ui.freq_scale.observe(self.freq_scale, 'value')
        self.ui.scan.observe(self.scanType, 'value')
# # ----------------------------------
# # -- Worker for scanning
# # ----------------------------------
# run_thread = True
# def worker(f, instr):
# while run_thread:
# try:
# #with Yokogawa(ip=ip) as instr:
# trace = instr.trace
# # x = np.linspace(600, 1700, 50001)
# # y = np.log10(np.random.rand(50001)*(1/np.cosh((x-(700+1850)/2)/10))**2)
# f.data[0].x = trace.lbd.values*1e9
# f.data[0].y = trace.S.values
# except:
# print('Comunication error')
# time.sleep(0.1)
# #with Yokogawa(ip=ip) as instr:
# trace = instr.trace
# # x = np.linspace(600, 1700, 50001)
# # y = np.log10(np.random.rand(50001)*(1/np.cosh((x-(700+1850)/2)/10))**2)
# f.data[0].x = trace.lbd.values*1e9
# f.data[0].y = trace.S.values
# time.sleep(0.1)
#
#
#
# # ----------------------------------
# # -- Setup the Connectors
# # ----------------------------------
#
#
# def scan_osa(change):
# global thread_osa
# global run_thread
# run_thread = False
# ip = ui.ip.value
# if connected:
# # osa.scan = change.new.lower()
# run_thread = False
# if change.new.lower() == 'single' or change.new.lower() == 'repeat':
# with Yokogawa(ip=ip) as osa:
# osa.scan = change.new.lower()
# run_thread = True
# thread_osa = threading.Thread(target=worker, args=(figOSA, osa))
# thread_osa.start()
# if change.new.lower() == 'stop':
# with Yokogawa(ip=ip) as osa:
# osa.scan = change.new.lower()
# print('Trying to kill the stuff')
# run_thread = False
#
#
# # ----------------------------------
# # -- connect callbacks and traits
# # ----------------------------------
# ui.cnct.observe(connect, 'value')
# ui.scan.observe(scan_osa,'value')
# ui.trace.observe(select_trace, 'value')
# ui.λ.observe(update_λ, 'value')
# ui.bandwidth.observe(update_bdwt, 'value')
# ui.pts.observe(update_points, 'value')
# ui.res.observe(update_res, 'index')
# ui.clr.on_click(clear_trace)
# ui.save.on_click(save_data)
# ui.freq_scale.observe(freq_scale, 'value')
#
#
# # ----------------------------------
# # -- Display
# # ----------------------------------
# box_layout = wdg.Layout(display='flex',
# flex_flow='column',
# flex_wrap = 'wrap',
# align_content = 'stretch',
# justify_content = 'center',
# align_items='stretch',
# width='28%')
# outp_layout = wdg.Layout(display='flex',
# flex_flow='column',
# flex_wrap = 'wrap',
# align_content = 'stretch',
# justify_content = 'center',
# align_items='stretch',
# width='72%')
# ui.picker.layout = wdg.Layout(display='flex',
# flex_flow='column',
# flex_wrap = 'wrap',
# align_content = 'stretch',
# justify_content = 'center',
# align_items='stretch',
# width='100%')
# cc = [ui.cnct,ui.freq_scale, ui.ip,ui.scan, ui.trace, ui.res,ui.bandwidth, ui.pts,ui.λ, ui.clr,ui.save,ui.picker]
# ctrl = wdg.Box(children = cc,layout = box_layout)
# otp = wdg.Box(children = [figOSA], layout = outp_layout)
# display(wdg.HBox([ctrl, otp]))
| 35.745527 | 136 | 0.469689 | 13,762 | 0.765023 | 0 | 0 | 0 | 0 | 0 | 0 | 5,421 | 0.301351 |
5eafe280d36c9d0913ae120c305742245a644cb5 | 5,036 | py | Python | tests/timelog.py | cmanley/viewvc | 18ce398586ff99ee13ac64f85c205efdf9c23bad | [
"BSD-2-Clause"
] | 2 | 2015-04-03T14:15:48.000Z | 2019-08-06T07:09:58.000Z | tests/timelog.py | cmanley/viewvc | 18ce398586ff99ee13ac64f85c205efdf9c23bad | [
"BSD-2-Clause"
] | null | null | null | tests/timelog.py | cmanley/viewvc | 18ce398586ff99ee13ac64f85c205efdf9c23bad | [
"BSD-2-Clause"
] | 1 | 2022-01-11T13:02:46.000Z | 2022-01-11T13:02:46.000Z |
import time
import profile
from vclib.ccvs import rcsparse
import viewvc
try:
import tparse
except ImportError:
tparse = None
def lines_changed(delta):
  """Return (added, deleted) line counts parsed from an RCS delta text."""
  added = 0
  deleted = 0
  pos = 0
  end = len(delta)
  while pos < end:
    command = delta[pos]
    space = delta.find(' ', pos + 1)
    newline = delta.find('\n', space + 1)
    # the start line is parsed for validation only; it is not needed here
    start_line = int(delta[pos + 1:space])
    count = int(delta[space + 1:newline])
    pos = newline + 1
    if command == 'd':
      deleted = deleted + count
    else:
      # 'a' command: count the added lines, then skip over the new text
      added = added + count
      while count > 0:
        nl = delta.find('\n', pos)
        assert nl > 0, 'missing a newline in the delta in the RCS file'
        pos = nl + 1
        count = count - 1
  return added, deleted
class FetchSink(rcsparse.Sink):
  """rcsparse sink that accumulates log data for a single RCS file.

  Collects the head revision, principal branch, tags and one LogEntry per
  revision, and annotates each entry with a "+added -deleted" change
  summary derived from the stored delta texts.
  """

  def __init__(self, which_rev=None):
    self.head = self.branch = ''
    self.tags = { }
    self.meta = { }       # revision -> (timestamp, author, state)
    self.revs = [ ]       # LogEntry objects, in the order seen
    self.base = { }       # revision -> the revision its delta is based on
    self.entries = { }    # revision -> LogEntry, for random access
    self.which = which_rev  # if set, keep only this one revision at the end

  def set_head_revision(self, revision):
    self.head = revision

  def set_principal_branch(self, branch_name):
    self.branch = branch_name

  def define_tag(self, name, revision):
    self.tags[name] = revision

  def define_revision(self, revision, timestamp, author, state,
                      branches, next):
    self.meta[revision] = (timestamp, author, state)
    # record delta bases: `next` and every branch start from this revision
    self.base[next] = revision
    for b in branches:
      self.base[b] = revision

  def set_revision_info(self, revision, log, text):
    timestamp, author, state = self.meta[revision]
    # RCS timestamps are local; shift by time.timezone to get UTC-based time
    entry = viewvc.LogEntry(revision, int(timestamp) - time.timezone, author,
                            state, None, log)

    # .revs is "order seen" and .entries is for random access
    self.revs.append(entry)
    self.entries[revision] = entry

    if revision != self.head:
      added, deleted = lines_changed(text)
      if revision.count('.') == 1:
        # on the trunk. reverse delta.
        changed = '+%d -%d' % (deleted, added)
        self.entries[self.base[revision]].changed = changed
      else:
        # on a branch. forward delta.
        changed = '+%d -%d' % (added, deleted)
        self.entries[revision].changed = changed

  def parse_completed(self):
    # if a specific revision was requested, reduce the list to just it
    if self.which:
      self.revs = [ self.entries[self.which] ]
def fetch_log2(full_name, which_rev=None):
  """Fetch (head, branch, tags, revs) of an RCS file via the rcsparse module.

  The file handle is now closed deterministically instead of being leaked
  to the garbage collector.
  """
  sink = FetchSink(which_rev)
  rcsfile = open(full_name, 'rb')
  try:
    rcsparse.parse(rcsfile, sink)
  finally:
    rcsfile.close()
  return sink.head, sink.branch, sink.tags, sink.revs
def fetch_log3(full_name, which_rev=None):
  """Fetch (head, branch, tags, revs) of an RCS file via the tparse module."""
  sink = FetchSink(which_rev)
  # tparse opens the file itself, so it takes the path rather than a handle
  tparse.parse(full_name, sink)
  return (sink.head, sink.branch, sink.tags, sink.revs)
def compare_data(d1, d2):
  """Compare two fetch_log results and print any differences.

  d1/d2 are (head, branch, tags, revs) tuples; the revision lists are
  sorted in place by revision number before the element-wise comparison.
  """
  if d1[:3] != d2[:3]:
    print 'd1:', d1[:3]
    print 'd2:', d2[:3]
    return
  if len(d1[3]) != len(d2[3]):
    print 'len(d1[3])=%d len(d2[3])=%d' % (len(d1[3]), len(d2[3]))
    return
  def sort_func(e, f):
    return cmp(e.rev, f.rev)
  d1[3].sort(sort_func)
  d2[3].sort(sort_func)
  import pprint
  for i in range(len(d1[3])):
    # compare the LogEntry attribute dictionaries entry by entry
    if vars(d1[3][i]) != vars(d2[3][i]):
      pprint.pprint((i, vars(d1[3][i]), vars(d2[3][i])))
def compare_fetch(full_name, which_rev=None):
  """Fetch the log of one RCS file with each backend and compare results."""
  # d1 and d2 are:
  #   ( HEAD revision, branch name, TAGS { name : revision }, [ LogEntry ] )
  d1 = viewvc.fetch_log(full_name, which_rev)
  d2 = fetch_log2(full_name, which_rev)
  print 'comparing external tools vs a parser module:'
  compare_data(d1, d2)
  if tparse:
    # the tparse extension is optional; only compare when it imported
    d2 = fetch_log3(full_name, which_rev)
    print 'comparing external tools vs the tparse module:'
    compare_data(d1, d2)
def compare_many(files):
  """Run compare_fetch over a list of RCS file paths."""
  for file in files:
    print file, '...'
    compare_fetch(file)
def time_stream(stream_class, filename, n=10):
d1 = d2 = d3 = d4 = 0
t = time.time()
for i in range(n):
ts = stream_class(open(filename, 'rb'))
while ts.get() is not None:
pass
t = time.time() - t
print t/n
def time_fetch(full_name, which_rev=None, n=1):
  """Benchmark viewvc.fetch_log against fetch_log2 over n runs each.

  Prints trimmed means (dropping the ~5% fastest/slowest runs) together
  with the min and max of the kept runs.
  """
  times1 = [ None ] * n
  times2 = [ None ] * n
  for i in range(n):
    t = time.time()
    viewvc.fetch_log(full_name, which_rev)
    times1[i] = time.time() - t
  for i in range(n):
    t = time.time()
    fetch_log2(full_name, which_rev)
    times2[i] = time.time() - t
  times1.sort()
  times2.sort()
  # trim the outliers at both ends of the sorted timings
  i1 = int(n*.05)
  i2 = int(n*.95)+1
  times1 = times1[i1:i2]
  times2 = times2[i1:i2]
  t1 = reduce(lambda x,y: x+y, times1, 0) / len(times1)
  t2 = reduce(lambda x,y: x+y, times2, 0) / len(times2)
  print "t1=%.4f (%.4f .. %.4f) t2=%.4f (%.4f .. %.4f)" % \
        (t1, times1[0], times1[-1], t2, times2[0], times2[-1])
def profile_stream(stream_class, filename, n=20):
  """Profile n full reads of filename through stream_class and print stats."""
  p = profile.Profile()
  def many_calls(filename, n):
    for i in xrange(n):
      ts = stream_class(open(filename, 'rb'))
      while ts.get() is not None:
        pass
  p.runcall(many_calls, filename, n)
  p.print_stats()
def profile_fetch(full_name, which_rev=None, n=10):
  """Profile n calls of fetch_log2 on one RCS file and print stats."""
  p = profile.Profile()
  def many_calls(full_name, which_rev, n):
    for i in xrange(n):
      fetch_log2(full_name, which_rev)
  p.runcall(many_calls, full_name, which_rev, n)
  p.print_stats()
| 27.977778 | 77 | 0.616958 | 1,542 | 0.306195 | 0 | 0 | 0 | 0 | 0 | 0 | 528 | 0.104845 |
5eb100dbb0d696d5a60712b084844f646f003f84 | 1,206 | py | Python | mysite/ct/migrations/0016_auto_20150626_0301.py | cjlee112/socraticqs2 | 2e7dd9d2ec687f68ca8ca341cf5f1b3b8809c820 | [
"Apache-2.0"
] | 8 | 2015-06-02T15:34:44.000Z | 2019-03-21T12:27:30.000Z | mysite/ct/migrations/0016_auto_20150626_0301.py | cjlee112/socraticqs2 | 2e7dd9d2ec687f68ca8ca341cf5f1b3b8809c820 | [
"Apache-2.0"
] | 761 | 2015-01-07T05:13:08.000Z | 2022-02-10T10:23:37.000Z | mysite/ct/migrations/0016_auto_20150626_0301.py | cjlee112/socraticqs2 | 2e7dd9d2ec687f68ca8ca341cf5f1b3b8809c820 | [
"Apache-2.0"
] | 12 | 2015-01-28T20:09:36.000Z | 2018-03-20T13:32:11.000Z | from django.db import models, migrations
class Migration(migrations.Migration):
    """Widen the concept/lesson title fields and extend the lesson kinds."""

    dependencies = [
        ('ct', '0015_migrate_fsm'),
    ]

    operations = [
        # concept.title: drop any previous length restriction differences
        migrations.AlterField(
            model_name='concept',
            name='title',
            field=models.CharField(max_length=200),
            preserve_default=True,
        ),
        # lesson.kind: refreshed choices list (adds e.g. 'faq', 'forum')
        migrations.AlterField(
            model_name='lesson',
            name='kind',
            field=models.CharField(default='base', max_length=50, choices=[('base', 'brief definition and explanation'), ('explanation', 'long explanation'), ('orct', 'Open Response Concept Test question'), ('mcct', 'Concept Inventory Test question'), ('exercise', 'exercise'), ('project', 'project'), ('practice', 'practice exam question'), ('answer', 'answer'), ('errmod', 'error model'), ('data', 'data'), ('case', 'Case Study'), ('e-pedia', 'Encyclopedia'), ('faq', 'frequently asked question'), ('forum', 'forum')]),
            preserve_default=True,
        ),
        migrations.AlterField(
            model_name='lesson',
            name='title',
            field=models.CharField(max_length=200),
            preserve_default=True,
        ),
    ]
5eb103b20a3f39de88460c4a6decebedc719548b | 51 | py | Python | src/pipeline/__init__.py | euranova/DAEMA | 29fec157c34afcc9abe95bc602a3012615b3c36b | [
"MIT"
] | 6 | 2021-09-17T02:09:29.000Z | 2022-03-20T04:15:15.000Z | src/pipeline/__init__.py | Jason-Xu-Ncepu/DAEMA | 29fec157c34afcc9abe95bc602a3012615b3c36b | [
"MIT"
] | null | null | null | src/pipeline/__init__.py | Jason-Xu-Ncepu/DAEMA | 29fec157c34afcc9abe95bc602a3012615b3c36b | [
"MIT"
] | 4 | 2021-06-29T22:57:18.000Z | 2022-03-09T09:19:17.000Z | """ Contains the pipeline used to test models. """
| 25.5 | 50 | 0.686275 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 50 | 0.980392 |
5eb119a3037c25e9d68b817c1a432c5bb93a20a4 | 1,954 | py | Python | import_result.py | IsobelMarguarethe/Bilby-GWTC-1-Analysis-and-Verification | 8829096f69f2dcd9ccd628e018c855179ecf0b49 | [
"MIT"
] | 6 | 2020-05-13T02:00:41.000Z | 2021-03-06T09:30:38.000Z | import_result.py | IsobelMarguarethe/Bilby-GWTC-1-Analysis-and-Verification | 8829096f69f2dcd9ccd628e018c855179ecf0b49 | [
"MIT"
] | null | null | null | import_result.py | IsobelMarguarethe/Bilby-GWTC-1-Analysis-and-Verification | 8829096f69f2dcd9ccd628e018c855179ecf0b49 | [
"MIT"
] | 4 | 2020-05-13T02:06:48.000Z | 2022-02-03T00:42:18.000Z | #!/usr/bin/env python
""" Script to convert a result into the standard format of this directory """
import argparse
import bilby
import numpy as np
# Set a random seed so the resampling is reproducible
# Set a random seed so the resampling is reproducible
np.random.seed(1234)

# -- command-line interface --------------------------------------------
parser = argparse.ArgumentParser()
parser.add_argument('result', help="The result file to import")
parser.add_argument('-e', '--event', help="The event name", required=True)
parser.add_argument('-N', '--Nsamples', default=50000, type=int,
                    help="Number of samples in the dat file")
parser.add_argument('-l', '--extra-label', nargs="*", default=None,
                    help="Extra elements to add to the label")
parser.add_argument('-a', '--approximant', type=str, default=None,
                    help="Waveform approximant, if not given uses result")
parser.add_argument('--sampler', type=str, default=None,
                    help="Sampler, if not given uses result")
parser.add_argument('--outdir', type=str, default=None,
                    help="Output directory to use")
args = parser.parse_args()

# -- load the bilby result and choose the output directory -------------
result = bilby.gw.result.CBCResult.from_json(args.result)
outdir = './gwtc-1_analysis_results/downsampled_posterior_samples/'
if args.outdir:
    outdir=args.outdir
result.outdir=outdir

# fall back to the metadata stored in the result file
if args.approximant is None:
    args.approximant = result.waveform_approximant
if args.sampler is None:
    args.sampler = result.sampler

# label the output after the event, plus any extra label parts
result.label = f"{args.event}_downsampled"
Nsamples = args.Nsamples
if args.extra_label is not None:
    result.label += "_" + "_".join(args.extra_label)

# clamp the requested sample count to what the posterior actually holds
if args.Nsamples > len(result.posterior):
    print("Requesting Nsamples={} when posterior only has {}"
          .format(args.Nsamples, len(result.posterior)))
    Nsamples = len(result.posterior)

# Save the result downsampled
result.posterior = result.posterior.sample(Nsamples)
result.save_posterior_samples(outdir=outdir)
result.save_to_file(outdir='./gwtc-1_analysis_results/bilby_result_files/')
5eb3c2e37e129386b93102ad60b647ab6808cb94 | 18,653 | py | Python | bauer_bsm/bsm/client.py | alkradhazar/test1 | addb391d9e8bac8a7d8ed8a5310fb695ce15eaf6 | [
"Apache-2.0"
] | 7 | 2020-07-07T07:43:41.000Z | 2022-01-21T22:31:33.000Z | bauer_bsm/bsm/client.py | alkradhazar/test1 | addb391d9e8bac8a7d8ed8a5310fb695ce15eaf6 | [
"Apache-2.0"
] | 2 | 2021-02-28T22:06:54.000Z | 2021-09-29T09:47:45.000Z | bauer_bsm/bsm/client.py | alkradhazar/test1 | addb391d9e8bac8a7d8ed8a5310fb695ce15eaf6 | [
"Apache-2.0"
] | 2 | 2021-09-24T03:49:19.000Z | 2022-03-02T12:53:00.000Z | # BSM Python library and command line tool
#
# Copyright (C) 2020 chargeIT mobility GmbH
#
# SPDX-License-Identifier: Apache-2.0
from . import config
from . import md
from . import util as butil
from ..crypto import util as cutil
from ..sunspec.core import client as sclient
from ..sunspec.core import suns
from ..sunspec.core.modbus import client as smodbus
from collections import namedtuple
from aenum import IntEnum
import sys
PY2 = sys.version_info[0] == 2
PY3 = sys.version_info[0] == 3
_BsmModelInstanceInfo = namedtuple('_BsmModelInstanceInfo', 'id, label, is_snapshot, aliases')
BSM_DEFAULT_BAUDRATE = 19200
BSM_DEFAULT_PARITY = sclient.PARITY_EVEN
BSM_DEFAULT_SLAVE_ID = 42
BSM_DEFAULT_TIMEOUT = 10
SUNSPEC_ID_REGS = 2
SUNSPEC_HEADER_REGS = 2
_BSM_BASE_OFFSET = 40000
_BSM_MODEL_INSTANCES = [
_BsmModelInstanceInfo(1, 'Common', False, ['common', 'cb']),
_BsmModelInstanceInfo(10, 'Serial Interface Header', False, ['serial_interface_header', 'sih']),
_BsmModelInstanceInfo(17, 'Serial Interface', False, ['serial_interface', 'si']),
_BsmModelInstanceInfo(203, 'AC Meter', False, ['ac_meter', 'tpm']),
_BsmModelInstanceInfo(64900, 'Signing Meter', False, ['bs_meter', 'bsm', 'sm']),
_BsmModelInstanceInfo(64902, 'Communication Module Firmware Hash', False, ['cm_firmware_hash', 'cfwh']),
_BsmModelInstanceInfo(64901, 'Signed Current Snapshot', True, ['signed_current_snapshot', 'scs']),
_BsmModelInstanceInfo(64901, 'Signed Turn-On Snapshot', True, ['signed_turn_on_snapshot', 'stons']),
_BsmModelInstanceInfo(64901, 'Signed Turn-Off Snapshot', True, ['signed_turn_off_snapshot', 'stoffs']),
_BsmModelInstanceInfo(64901, 'Signed Start Snapshot', True, ['signed_start_snapshot', 'sss']),
_BsmModelInstanceInfo(64901, 'Signed End Snapshot', True, ['signed_end_snapshot', 'ses']),
_BsmModelInstanceInfo(64903, 'OCMF Signed Current Snapshot', False, ['ocmf_signed_current_snapshot', 'oscs']),
_BsmModelInstanceInfo(64903, 'OCMF Signed Turn-On Snapshot', False, ['ocmf_signed_turn_on_snapshot', 'ostons']),
_BsmModelInstanceInfo(64903, 'OCMF Signed Turn-Off Snapshot', False, ['ocmf_signed_turn_off_snapshot', 'ostoffs']),
_BsmModelInstanceInfo(64903, 'OCMF Signed Start Snapshot', False, ['ocmf_signed_start_snapshot', 'osss']),
_BsmModelInstanceInfo(64903, 'OCMF Signed End Snapshot', False, ['ocmf_signed_end_snapshot', 'oses']),
]
def _blob_point_value(point):
    """Return the raw register data of a SunSpec point as bytes."""
    raw = point.value_base

    # pySunSpec maps the "unimplemented" register pattern 0xffff to None;
    # restore the literal bit pattern because BLOB data needs it verbatim.
    if raw is None:
        raw = suns.SUNS_UNIMPL_UINT16

    return point.point_type.to_data(raw, 2 * point.point_type.len)
class _BlobProxy:
"""
Proxy for exposing BLOB data from a SunSpecClientDevice convenience
wrapper.
This proxy does not read model data. This needs to be done beforehand
through the model object.
"""
def __init__(self, device):
self.device = device
def __getattr__(self, name):
model = getattr(self.device, name, None)
blob = None
if model is not None:
core_model = model.model
blob = core_model.device.repeating_blocks_blob(core_model)
return blob
# TODO: What about initializing the value from the actual model symbols?
class SnapshotType(IntEnum):
    """Kinds of signed snapshots referenced by the BSM snapshot helpers."""
    CURRENT = 0
    TURN_ON = 1
    TURN_OFF = 2
# TODO: What about initializing the value from the actual model symbols?
class SnapshotStatus(IntEnum):
    """Values of a snapshot's status data point as used by the client."""
    VALID = 0
    INVALID = 1
    UPDATING = 2
    FAILED_GENERAL = 3
    FAILED_NOT_ENABLED = 4
    FAILED_FEEDBACK = 5
class BsmClientDevice(sclient.ClientDevice):
    """
    SunSpec Modbus client for a BSM device whose model layout is known in
    advance (see _BSM_MODEL_INSTANCES).

    Attributes:

        aliases_list
            All aliases for the model instance from models_list at the
            corresponding index.

        model_aliases
            Dictionary mapping model instance aliases to the instances from
            models_list. This includes BSM snapshots.

        snapshot_aliases
            Dictionary mapping model instance aliases of snapshots to the
            instances from models_list.
    """
    def __init__(self, device_type=sclient.RTU, slave_id=BSM_DEFAULT_SLAVE_ID,
            name=None, pathlist=None, baudrate=BSM_DEFAULT_BAUDRATE,
            parity=BSM_DEFAULT_PARITY, ipaddr=None,
            ipport=None, timeout=BSM_DEFAULT_TIMEOUT, trace=False,
            max_count=smodbus.REQ_COUNT_MAX):
        """
        Creates the underlying ClientDevice and registers the fixed BSM model
        layout. No device scan is performed (see _init_bsm_models).
        """
        super(BsmClientDevice, self).__init__(device_type, slave_id=slave_id, name=name,
            pathlist=pathlist, baudrate=baudrate, parity=parity,
            ipaddr=ipaddr, ipport=ipport, timeout=timeout, trace=trace,
            max_count=max_count)

        self.aliases_list = []
        self.model_aliases = {}
        self.snapshot_aliases = {}

        self._init_bsm_models()

    def _fixup_curve_name(self, name):
        """
        Returns our canonical curve name in case of an alias. Users are not
        bothered with this variety.
        """
        if name in config.BSM_CURVE_ALIASES:
            name = config.BSM_CURVE_NAME
        return name

    def _init_bsm_models(self):
        """
        Initializes BSM models for the known layout for this device. This saves
        the time for scanning the device.
        """
        # Models are laid out back-to-back after the SunSpec ID marker; the
        # register address advances by each model's length plus one header.
        address = _BSM_BASE_OFFSET + SUNSPEC_ID_REGS + SUNSPEC_HEADER_REGS

        for info in _BSM_MODEL_INSTANCES:
            model = sclient.ClientModel(self, info.id, addr=address, mlen=0)
            model.load()

            self.add_model(model)
            self.aliases_list.append(info.aliases)

            # Provide model instances as well by name. The BSM snapshots use
            # all the same model and a name comes in quite handy for referring
            # to them.
            self._register_aliases(self.model_aliases, info.aliases, model)
            if info.is_snapshot:
                self._register_aliases(self.snapshot_aliases, info.aliases, model)

            address += model.len + SUNSPEC_HEADER_REGS

    def _register_aliases(self, dictionary, aliases, model):
        """Maps every alias from ``aliases`` to ``model`` in ``dictionary``."""
        for alias in aliases:
            dictionary[alias] = model

    def create_snapshot(self, alias):
        """
        Triggers creation of the snapshot with the given alias by writing the
        "updating" status to its status data point. Does not wait for the
        device to finish (see get_snapshot for that).
        """
        snapshot = self.snapshot_aliases[alias]
        status = snapshot.points[config.SNAPSHOT_STATUS_DATA_POINT_ID]

        status.value = SnapshotStatus.UPDATING
        status.write()

    def get_public_key(self, read_data=True, output_format='der'):
        """
        Returns the device's public key from the BSM model instance BLOB, or
        None when the model has no BLOB layout.
        """
        bsm = self.model_aliases[config.BSM_INSTANCE_ALIAS]
        result = None

        if read_data:
            bsm.read_points()

        if self.has_repeating_blocks_blob_layout(bsm):
            public_key = self.repeating_blocks_blob(bsm)
            result = cutil.public_key_data_from_blob(public_key, config.BSM_MESSAGE_DIGEST, output_format=output_format)

        return result

    def get_snapshot(self, alias):
        """
        Triggers creation of the snapshot with the given alias and waits for
        its completion. Returns the snapshot model instance when the device
        reports it valid and None otherwise.
        """
        snapshot = self.snapshot_aliases[alias]
        status = snapshot.points[config.SNAPSHOT_STATUS_DATA_POINT_ID]

        self.create_snapshot(alias)
        snapshot.read_points()
        # Busy-waits by re-reading the snapshot until the device reports a
        # final status (there is no delay between the polls).
        while status.value == SnapshotStatus.UPDATING:
            snapshot.read_points()

        if status.value == SnapshotStatus.VALID:
            return snapshot
        else:
            return None

    def has_repeating_blocks_blob_layout(self, model):
        """
        Returns whether the repeating blocks of the given model are likely to
        contain BLOB data.
        """
        result = False

        # The repeating blocks are likely to contain a BLOB if they contain a
        # single uint16 element without unit symbol and scale factor.
        if len(model.blocks) > 1:
            first_repeating = model.blocks[1]
            if len(first_repeating.points_list) == 1:
                repeating_point = first_repeating.points_list[0]
                repeating_type = repeating_point.point_type
                result = repeating_type.type == suns.SUNS_TYPE_UINT16 \
                    and repeating_type.units is None \
                    and repeating_type.sf is None

        return result

    def lookup_model(self, name):
        """
        Case-insensitively looks up a model by the given name or alias.
        Returns None when neither matches.
        """
        models = filter(lambda x: x.model_type.name.lower() == name.lower(),
            self.models_list)
        model = None
        # filter returns a list on Python 2 but a lazy iterator on Python 3.
        if PY2:
            if len(models) >= 1:
                model = models[0]
        if PY3:
            model = next(models, None)

        # Fall back to the alias dictionary when no model name matched.
        if not model:
            model = butil.dict_get_case_insensitive(self.model_aliases, name)

        return model

    def lookup_model_and_point(self, model_name, point_id):
        """
        Case-insensitively looks up a data point along with its model by the
        given point name and model name or alias. Returns a (model, point)
        tuple whose elements may be None when not found.
        """
        model = self.lookup_model(model_name)
        point = None

        if model:
            point = self.lookup_point_in_model(model, point_id)

        return (model, point)

    def lookup_point_in_model(self, model, point_id):
        """
        Case-insensitively looks up a data point by its name in the given
        model.
        """
        points = filter(lambda x: x.point_type.id.lower() == point_id.lower(), model.points_list)
        if PY2:
            # NOTE(review): unlike the Python 3 branch below, this raises
            # IndexError when no point matches instead of returning None --
            # confirm whether the asymmetry is intended.
            return points[0]
        if PY3:
            return next(points, None)

    def lookup_snapshot(self, name):
        """
        Case-insensitively looks up a snapshot model by the given name or
        alias.
        """
        return butil.dict_get_case_insensitive(self.snapshot_aliases, name)

    def model_instance_label(self, model):
        """
        Returns a label for the given model instance.
        """
        # Labels live in _BSM_MODEL_INSTANCES at the same index as the model
        # in models_list. Implicitly returns None for unknown models.
        for index, current_model in enumerate(self.models_list):
            if model == current_model:
                return _BSM_MODEL_INSTANCES[index].label

    # I did not find a mechanism for conveniently reading BLOB data from
    # repeating blocks in pySunSpec.
    #
    # TODO: If BLOBs provided via repeated blocks is the default mechanism for
    # binary data, What about integrating this support into Model or
    # DeviceModel?
    def repeating_blocks_blob(self, model):
        """
        Collects BLOB data from the repeating blocks of the given model.

        The same result could be achieved by just reading the data directly from
        the client device by ClientDevice.read. This functions collects already
        read data (scattered in the individual data points) to avoid the more
        time-consuming read from the client device.

        Returns:
            The BLOB data as byte string or None, if there is no BLOB data.
        """
        result = None

        if self.has_repeating_blocks_blob_layout(model):
            repeating = model.blocks[1:]
            points = map(lambda b: b.points_list[0], repeating)
            data = map(_blob_point_value, points)
            result = b''.join(data)

            # Trim blob data if an explicit length is given by the model.
            blob_bytes = self.repeating_blocks_blob_explicit_length_bytes(model)
            if blob_bytes is not None:
                result = result[:blob_bytes]

        return result

    def repeating_blocks_blob_explicit_length_bytes(self, model):
        """
        Returns the explicit BLOB data length (in bytes) if a model has an
        appropriate data point. This needs to be an uint16 data point named
        'Bx' when the repeating block data point is named 'x'.
        """
        result = None

        blob_id = self.repeating_blocks_blob_id(model)
        bytes_id = 'B' + blob_id
        bytes_point = model.blocks[0].points.get(bytes_id, None)
        if bytes_point:
            bytes_type = bytes_point.point_type
            # The length point must be a plain uint16 (no unit, no scale
            # factor) to be trusted.
            if bytes_point and bytes_type.type == suns.SUNS_TYPE_UINT16 \
                and bytes_type.units is None \
                and bytes_type.sf is None:
                result = bytes_point.value

        return result

    def repeating_blocks_blob_id(self, model):
        """
        Returns the BLOB data point ID from the repeating blocks of the given
        model.

        Returns:
            The data point ID or None, if there is no BLOB data.
        """
        result = None

        if self.has_repeating_blocks_blob_layout(model):
            result = model.blocks[1].points_list[0].point_type.id

        return result

    def verify_snapshot(self, alias, read_data=True, trace=None):
        """
        Verifies snapshot data for the given alias.

        By default both, the BSM model containing the public key and the
        snapshot are read before verification.

        Raises AssertionError when the signature register count does not
        match the snapshot's block layout or the curve is not the expected
        one; returns False on any other verification failure.
        """
        result = False

        bsm = self.model_aliases[config.BSM_INSTANCE_ALIAS]
        snapshot = self.snapshot_aliases[alias]

        if read_data:
            bsm.read_points()
            snapshot.read_points()

        public_key_data = self.get_public_key(read_data=False)
        public_key = cutil.public_key_from_blob(public_key_data, config.BSM_MESSAGE_DIGEST)
        curve_name = self._fixup_curve_name(public_key.curve.name)

        signature_regs = snapshot.points[config.SNAPSHOT_SIGNATURE_REGS_DATA_POINT_ID].value
        # One fixed block plus one repeating block per signature register.
        assert len(snapshot.blocks) == signature_regs + 1
        signature = snapshot.device.repeating_blocks_blob(snapshot)

        if trace:
            trace('Verifying {} ...'.format(snapshot.model_type.id))
            trace('Curve: {}'.format(curve_name))
            trace('Public key: {}'.format(public_key_data.hex()))
            trace('Signature: {}'.format(signature.hex()))

        if len(public_key_data) == 0:
            if trace:
                trace('Failed. Device has no public key.')
            result = False
        elif len(signature) == 0:
            if trace:
                trace('Failed. Snapshot contains no signature.')
            result = False
        else:
            assert curve_name == config.BSM_CURVE_NAME

            if trace:
                trace('Computing SHA-256 digest for snapshot data:')
            digest = md.md_for_snapshot_data(snapshot, trace=trace)
            if trace:
                trace('Snapshot data SHA-256 digest: {}'.format(digest.hex()))

            if cutil.verify_signed_digest(public_key_data, config.BSM_MESSAGE_DIGEST, signature, digest):
                if trace:
                    trace('Success.')
                result = True
            else:
                if trace:
                    trace('Failed.')
                result = False

        return result
class SunSpecBsmClientDevice(sclient.SunSpecClientDeviceBase):
    """
    BsmClientDevice convenience wrapper for scripting, unit testing, and many
    more.

    In addition to the model attributes from SunSpecClientDeviceBase, it also
    provides attributes for the model instance aliases from BsmClientDevice
    and a ``blobs`` proxy for convenient BLOB access.
    """
    def __init__(self, device_type=sclient.RTU, slave_id=BSM_DEFAULT_SLAVE_ID, name=None,
            pathlist=None, baudrate=BSM_DEFAULT_BAUDRATE,
            parity=BSM_DEFAULT_PARITY, ipaddr=None, ipport=None,
            timeout=BSM_DEFAULT_TIMEOUT, trace=False, scan_progress=None,
            scan_delay=None, max_count=smodbus.REQ_COUNT_MAX):
        """
        Creates the wrapped BsmClientDevice and exposes its models, aliases,
        and BLOBs as attributes. ``scan_progress`` and ``scan_delay`` are
        accepted for signature compatibility but unused, since the BSM layout
        is known and not scanned.
        """
        device = BsmClientDevice(device_type, slave_id, name, pathlist,
            baudrate, parity, ipaddr, ipport, timeout, trace, max_count)
        # Bug fix: use an explicit class reference instead of
        # super(self.__class__, self), which recurses infinitely as soon as
        # this class is subclassed.
        super(SunSpecBsmClientDevice, self).__init__(device)

        # Also provide attributes for model aliases.
        self._add_alias_attributes()

        # Also provide convenient access to BLOBs (from models and aliases).
        setattr(self, 'blobs', _BlobProxy(self))

    def _snapshot_alias(self, snapshot):
        """
        Returns the alias of the given wrapped snapshot model, or None when it
        is not a known snapshot.
        """
        alias = None

        for a, m in self.device.snapshot_aliases.items():
            if m is snapshot.model:
                alias = a
                break

        return alias

    def _add_alias_attributes(self):
        """
        Registers the attribute model instances under the aliases given by the
        client as well.
        """
        for index, model in enumerate(self.device.models_list):
            aliases = self.device.aliases_list[index]
            if aliases:
                attribute_model = self._get_attribute_model(model)
                for alias in aliases:
                    setattr(self, alias, attribute_model)

    def _get_attribute_model(self, model):
        """
        "Scrapes" corresponding attribute model instance from this object's
        attributes. This is done because there is no list of them (by now).
        """
        models = getattr(self, model.model_type.name)
        result = None

        if type(models) is list:
            # Pick the corresponding attribute model instance from the list in
            # case of multiple instances of the same model.
            result = next(filter(lambda x: x is not None and x.model == model, models), None)
        else:
            result = models

        return result

    def create_snapshot(self, snapshot):
        """Triggers creation of the given wrapped snapshot (no waiting)."""
        alias = self._snapshot_alias(snapshot)
        self.device.create_snapshot(alias)

    def get_public_key(self, output_format='der'):
        """Reads and returns the device's public key (see BsmClientDevice)."""
        return self.device.get_public_key(output_format=output_format)

    def get_snapshot(self, snapshot):
        """
        Triggers creation of the given wrapped snapshot and waits for its
        completion. Returns the wrapped snapshot on success, None otherwise.
        """
        alias = self._snapshot_alias(snapshot)
        result = None

        if self.device.get_snapshot(alias) is not None:
            # If the wrapped device returns something we were successful.
            # Return the wrapped snapshot model whose underlying model has
            # been updated.
            result = snapshot

        return result

    def verify_snapshot(self, snapshot, read_data=True, trace=None):
        """
        Verifies snapshot data for the given SunSpecClientModelBase instance.

        By default both, the BSM model containing the public key and the
        snapshot are read before verification.
        """
        alias = self._snapshot_alias(snapshot)
        result = False

        if alias is not None:
            result = self.device.verify_snapshot(alias, read_data=read_data, trace=trace)

        return result
| 34.800373 | 134 | 0.633785 | 15,311 | 0.820833 | 0 | 0 | 0 | 0 | 0 | 0 | 5,888 | 0.31566 |
5eb3c974fa92731f0fdca10f3d676b0f6e0ffd2f | 5,664 | py | Python | users/models.py | pnwclw/cyfmazyr | dfeca513c7334335426d226ec3834af598b08b8c | [
"MIT"
] | 1 | 2020-07-18T11:20:29.000Z | 2020-07-18T11:20:29.000Z | users/models.py | panwaclaw/cyfmazyr | dfeca513c7334335426d226ec3834af598b08b8c | [
"MIT"
] | 8 | 2020-05-24T14:08:12.000Z | 2021-09-08T02:03:52.000Z | users/models.py | pnwclw/cyfmazyr | dfeca513c7334335426d226ec3834af598b08b8c | [
"MIT"
] | 1 | 2020-05-24T12:24:40.000Z | 2020-05-24T12:24:40.000Z | from django.contrib.auth.models import AbstractUser
from django.db import models
from django.utils import timezone
from django.utils.translation import ugettext as _
from simple_history.models import HistoricalRecords
from phonenumber_field.modelfields import PhoneNumberField
from internals.models import School, University
from .templatetags.users import device, location
# Shared choices for the ``sex`` fields on both the Parent and User models.
SEX_CHOICES = [
    ('male', _('Male')),
    ('female', _('Female'))
]
class Parent(models.Model):
    """A student's parent/guardian with name, job, and contact details."""

    class Meta:
        verbose_name = _('Parent')
        verbose_name_plural = _('Parents')

    sex = models.CharField(max_length=128, choices=SEX_CHOICES)
    last_name = models.CharField(max_length=128, verbose_name=_('Last Name'))
    first_name = models.CharField(max_length=128, verbose_name=_('First Name'))
    middle_name = models.CharField(max_length=128, null=True, blank=True, verbose_name=_('Middle Name'))
    job = models.CharField(max_length=128, verbose_name=_('Job'))
    phone_number = PhoneNumberField(verbose_name=_('Parent Phone Number'))

    def __str__(self):
        # "<full name>, <job>, <phone>" summary.
        return f"{self.get_full_name()}, {self.job}, {self.phone_number}"

    def get_full_name(self):
        """Return "<last> <first> <middle>".

        NOTE(review): when middle_name is unset this renders the literal
        string "None" -- confirm whether that is acceptable.
        """
        return f"{self.last_name} {self.first_name} {self.middle_name}"
class User(AbstractUser):
    """Application user extending Django's AbstractUser with personal,
    school, and family information. Changes are tracked via
    ``simple_history``."""

    class Meta:
        verbose_name = _('User')
        verbose_name_plural = _('Users')

    GROUP_CHOICES = [
        ('junior', _('Junior')),
        ('middle', _('Middle')),
        ('senior', _('Senior')),
    ]

    def get_photo_upload_path(instance, filename):
        """Build the storage path for a profile photo.

        The uploaded file is renamed to an upload timestamp while keeping its
        original extension, and stored under a per-user directory.
        """
        ext = filename.split('.')[-1]
        dt = timezone.now()
        new_filename = f"{dt.year}-{dt.month:02d}-{dt.day:02d}-{dt.hour:02d}-{dt.minute:02d}-{dt.second:02d}.{ext}"
        # Bug fix: the generated filename was computed but previously not
        # used in the returned path.
        return f"profiles/{instance.id}/{new_filename}"

    sex = models.CharField(max_length=128, choices=SEX_CHOICES, verbose_name=_('Sex'))
    middle_name = models.CharField(max_length=128, null=True, blank=True, verbose_name=_('Middle Name'))
    phone_number = PhoneNumberField(verbose_name=_('Phone Number'))
    group = models.CharField(default='junior', max_length=20, choices=GROUP_CHOICES, verbose_name=_('Group'))
    birthday = models.DateField(default=timezone.now, verbose_name=_('Birthday'))
    parents = models.ManyToManyField(Parent)
    school = models.ForeignKey(School, null=True, on_delete=models.SET_NULL, verbose_name=_('School'))
    photo = models.ImageField(upload_to=get_photo_upload_path, null=True, blank=True, verbose_name=_('Profile Photo'))
    klass = models.PositiveIntegerField(null=True, blank=True,
                                        choices=[(i, str(i)) for i in range(1, 12)], verbose_name=_('Class'))
    symbol = models.CharField(max_length=1, null=True, blank=True, verbose_name=_('Class Symbol'))
    history = HistoricalRecords()

    def get_full_name(self):
        """Return "<last> <first> <middle>" (renders "None" for a missing
        middle name, matching Parent.get_full_name)."""
        return f"{self.last_name} {self.first_name} {self.middle_name}"

    def __str__(self):
        return f"{self.get_full_name()} (ID: {self.id}, {self.phone_number})"
class Profile(models.Model):
    """Links a User to a Telegram account."""

    # NOTE(review): parent_link=True is normally only meaningful for
    # multi-table inheritance -- confirm it is intentional here.
    user = models.OneToOneField(User, on_delete=models.CASCADE, null=True, parent_link=True, verbose_name=_('User'))
    # Numeric Telegram user id.
    telegram_id = models.IntegerField()

    def __str__(self):
        return self.user.get_full_name()
class Student(models.Model):
    """University enrollment record attached one-to-one to a User."""

    # NOTE(review): parent_link=True is normally only meaningful for
    # multi-table inheritance -- confirm it is intentional here.
    user = models.OneToOneField(User, on_delete=models.CASCADE, null=True, parent_link=True, verbose_name=_('User'))
    university = models.ForeignKey(University, null=True, on_delete=models.SET_NULL, verbose_name=_('University'))
    admission_year = models.PositiveIntegerField(verbose_name=_('Admission Year'))
class SessionManager(models.Manager):
    """Manager that (de)serializes session dictionaries via SessionStore."""

    use_in_migrations = True

    def encode(self, session_dict):
        """
        Returns the given session dictionary serialized and encoded as a string.
        """
        # SessionStore is imported at the bottom of this module to avoid a
        # circular import.
        return SessionStore().encode(session_dict)

    def save(self, session_key, session_dict, expire_date):
        """Persist a session row, or remove it when the dict is empty, and
        return the (possibly unsaved) model instance."""
        s = self.model(session_key, self.encode(session_dict), expire_date)
        if session_dict:
            s.save()
        else:
            s.delete()  # Clear sessions with no data.
        return s
class Session(models.Model):
    """
    Session objects containing user session information.

    Django provides full support for anonymous sessions. The session
    framework lets you store and retrieve arbitrary data on a
    per-site-visitor basis. It stores data on the server side and
    abstracts the sending and receiving of cookies. Cookies contain a
    session ID -- not the data itself.

    Additionally this session object provides the following properties:
    ``user``, ``user_agent`` and ``ip``.
    """
    class Meta:
        verbose_name = _('session')
        verbose_name_plural = _('sessions')

    session_key = models.CharField(_('session key'), max_length=40, primary_key=True)
    # Encoded payload; decode with get_decoded().
    session_data = models.TextField(_('session data'))
    expire_date = models.DateTimeField(_('expiry date'), db_index=True)
    # Extensions over Django's stock Session model:
    user = models.ForeignKey(User, null=True, on_delete=models.CASCADE)
    user_agent = models.CharField(null=True, blank=True, max_length=200)
    last_activity = models.DateTimeField(auto_now=True)
    ip = models.GenericIPAddressField(null=True, blank=True, verbose_name='IP')

    objects = SessionManager()

    def __str__(self):
        return f"Session {self.session_key} (User: {self.user}, Location: {location(self.ip)}, Device: {device(self.user_agent)}, Expires: {self.expire_date})"

    def get_decoded(self):
        """Return the deserialized session dictionary."""
        return SessionStore(None, None).decode(self.session_data)
# At bottom to avoid circular import
from .backends.db import SessionStore # noqa: E402 isort:skip
| 38.27027 | 159 | 0.69827 | 5,094 | 0.899364 | 0 | 0 | 0 | 0 | 0 | 0 | 1,554 | 0.274364 |
5eb46db6b4f0d64982209cc3ae42fc847fe7c287 | 84 | py | Python | google_play_scraper/constants/google_play.py | mrommel/google-play-scraper | 3bfcef87bc754192162841bc057919762b8bf548 | [
"MIT"
] | null | null | null | google_play_scraper/constants/google_play.py | mrommel/google-play-scraper | 3bfcef87bc754192162841bc057919762b8bf548 | [
"MIT"
] | null | null | null | google_play_scraper/constants/google_play.py | mrommel/google-play-scraper | 3bfcef87bc754192162841bc057919762b8bf548 | [
"MIT"
] | null | null | null | from enum import Enum
class Sort(int, Enum):
    """Sort orders for Google Play listings.

    The integer values are sent as the sort code in requests; presumably
    they correspond to Google Play's internal sort parameter -- confirm
    against the request builders that consume this enum.
    """
    NEWEST = 2
    MOST_RELEVANT = 1
| 12 | 22 | 0.654762 | 59 | 0.702381 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
5eb4a2f00f9d8a891e52f77fa6e0a7cf21d67a41 | 1,282 | py | Python | pystruct/utils/graph.py | dugu9sword/pystruct | 07c1e02c62d138f79c2114b8c14ca7e2b65540b4 | [
"BSD-2-Clause"
] | null | null | null | pystruct/utils/graph.py | dugu9sword/pystruct | 07c1e02c62d138f79c2114b8c14ca7e2b65540b4 | [
"BSD-2-Clause"
] | null | null | null | pystruct/utils/graph.py | dugu9sword/pystruct | 07c1e02c62d138f79c2114b8c14ca7e2b65540b4 | [
"BSD-2-Clause"
] | null | null | null | import numpy as np
def make_grid_edges(x, neighborhood=4, return_lists=False):
    """Build the edge list of a grid graph over the first two axes of ``x``.

    Nodes are numbered row-major over ``x.shape[:2]``. With ``neighborhood=4``
    the result holds horizontal then vertical edges; ``neighborhood=8`` appends
    the two diagonal directions. Returns a single int64 array of shape
    (n_edges, 2), or the per-direction list of arrays if ``return_lists``.
    """
    if neighborhood not in [4, 8]:
        raise ValueError("neighborhood can only be '4' or '8', got %s" %
                         repr(neighborhood))
    n_rows, n_cols = x.shape[0], x.shape[1]
    node_ids = np.arange(n_rows * n_cols, dtype=np.int64).reshape(n_rows, n_cols)

    horizontal = np.c_[node_ids[:, :-1].ravel(), node_ids[:, 1:].ravel()]
    vertical = np.c_[node_ids[:-1, :].ravel(), node_ids[1:, :].ravel()]
    edge_groups = [horizontal, vertical]

    if neighborhood == 8:
        diag_up = np.c_[node_ids[1:, :-1].ravel(), node_ids[:-1, 1:].ravel()]
        diag_down = np.c_[node_ids[:-1, :-1].ravel(), node_ids[1:, 1:].ravel()]
        edge_groups += [diag_up, diag_down]

    if return_lists:
        return edge_groups
    return np.vstack(edge_groups)
def edge_list_to_features(edge_list):
    """One-hot edge-direction features for a two-group edge list.

    Edges from the first group get feature [1, 0]; all remaining edges get
    [0, 1]. Returns a float array of shape (n_edges, 2).
    """
    stacked = np.vstack(edge_list)
    n_first = len(edge_list[0])
    features = np.zeros((stacked.shape[0], 2))
    features[:n_first, 0] = 1.0
    features[n_first:, 1] = 1.0
    return features
def generate_binary_edges(length, window):
    """Pairs of chain positions up to ``window`` steps apart.

    All gap-1 pairs come first, then gap-2, and so on:

    >>> generate_binary_edges(6, 2)
    [(0, 1), (1, 2), (2, 3), (3, 4), (4, 5), (0, 2), (1, 3), (2, 4), (3, 5)]
    """
    return [(start, start + gap)
            for gap in range(1, window + 1)
            for start in range(length - gap)]
| 32.871795 | 80 | 0.556942 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 137 | 0.106864 |
5eb5c4c0903c665a41afc0516f248dfc593530f7 | 1,939 | py | Python | Muscollo/Tests/plot_inverse_dynamics.py | chrisdembia/opensim-moco | ff67eaeae4d3f1a02d3a0dec78347a70802692d7 | [
"Apache-2.0"
] | 2 | 2020-05-16T11:13:13.000Z | 2020-11-17T09:20:39.000Z | Muscollo/Tests/plot_inverse_dynamics.py | chrisdembia/opensim-moco | ff67eaeae4d3f1a02d3a0dec78347a70802692d7 | [
"Apache-2.0"
] | null | null | null | Muscollo/Tests/plot_inverse_dynamics.py | chrisdembia/opensim-moco | ff67eaeae4d3f1a02d3a0dec78347a70802692d7 | [
"Apache-2.0"
] | 1 | 2020-07-23T22:24:26.000Z | 2020-07-23T22:24:26.000Z | # -------------------------------------------------------------------------- #
# OpenSim Muscollo: plot_inverse_dynamics.py #
# -------------------------------------------------------------------------- #
# Copyright (c) 2017 Stanford University and the Authors #
# #
# Author(s): Christopher Dembia #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); you may #
# not use this file except in compliance with the License. You may obtain a #
# copy of the License at http://www.apache.org/licenses/LICENSE-2.0 #
# #
# Unless required by applicable law or agreed to in writing, software #
# distributed under the License is distributed on an "AS IS" BASIS, #
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
# See the License for the specific language governing permissions and #
# limitations under the License. #
# -------------------------------------------------------------------------- #
import sys
import pylab as pl
import pandas as pd

# Plot the filtered desired moments against an actual inverse dynamics
# result, one subplot per column, for visual comparison.
if len(sys.argv) != 2:
    raise Exception("Requires actual inverse dynamics csv file as argument.")

# Desired moments dumped by the tool into the working directory (no header).
filtered = pd.read_csv('DEBUG_desiredMoments.csv', index_col=0, header=None)
# Actual inverse dynamics file supplied on the command line; the first three
# rows are metadata and are skipped.
actual = pd.read_csv(sys.argv[1], index_col=0, skiprows=3)

fig = pl.figure()
num_columns = len(filtered.columns)
for i in range(num_columns):
    # Subplots are 1-indexed, stacked vertically.
    ax = fig.add_subplot(num_columns, 1, i + 1)
    ax.plot(filtered.index, filtered[filtered.columns[i]], label='filtered')
    ax.plot(actual.index, actual[actual.columns[i]], label='actual')
pl.legend()
pl.show()
5eb5ffe69b5c83729920cda7555dd6bc8ad0aa83 | 6,485 | py | Python | xd/xview3/vessel_length/dataset/__init__.py | smly/xview3-kohei-solution | f6933ff437240c6c07fd61c3bd4290b639d17531 | [
"MIT"
] | 2 | 2022-01-14T08:00:34.000Z | 2022-01-17T12:42:44.000Z | xd/xview3/vessel_length/dataset/__init__.py | smly/xview3-kohei-solution | f6933ff437240c6c07fd61c3bd4290b639d17531 | [
"MIT"
] | null | null | null | xd/xview3/vessel_length/dataset/__init__.py | smly/xview3-kohei-solution | f6933ff437240c6c07fd61c3bd4290b639d17531 | [
"MIT"
] | 1 | 2022-01-31T21:25:21.000Z | 2022-01-31T21:25:21.000Z | from dataclasses import dataclass
from pathlib import Path
import cv2
import numpy as np
import pandas as pd
import torch
from omegaconf import DictConfig
from torch.utils.data import DataLoader, Dataset
from xd.utils.configs import dynamic_load
@dataclass
class XView3DataSource:
    """Default locations of the raw xView3 label CSVs and scene directories."""

    # Ground-truth detection labels per split.
    train_csv: Path = Path("data/input/xview3/train.csv")
    train_shore_dir: Path = Path("data/input/xview3/train")
    validation_csv: Path = Path("data/input/xview3/validation.csv")
    validation_shore_dir: Path = Path("data/input/xview3/validation")
@dataclass
class XView3PreprocessedV2:
    """Default locations of the preprocessed (VH/VV/bathymetry v2) artifacts."""

    # Pre-chipped PNG tiles per split.
    train_image_dir: Path = Path(
        "data/working/xview3/preprocess_vh_vv_bathymetry_v2/train"
    )
    train_csv: Path = Path(
        "data/working/xview3/preprocess_vh_vv_bathymetry_v2/train.csv"
    )
    validation_image_dir: Path = Path(
        "data/working/xview3/preprocess_vh_vv_bathymetry_v2/validation"
    )
    validation_mask_dir: Path = Path(
        "data/working/xview3/preprocess_vh_vv_bathymetry_v2/validation_masks"
    )
    validation_csv: Path = Path(
        "data/working/xview3/preprocess_vh_vv_bathymetry_v2/validation.csv"
    )
    # Validation labels augmented with a cross-validation fold index column.
    validation_kfold_csv: Path = Path(
        "data/working/xview3/preprocess_vh_vv_bathymetry_v2/validation_kfold.csv"  # noqa
    )
def get_dataloaders(args, conf, num_workers=32, batch_size=64):
    """
    Build train/validation dataloaders for vessel-length regression.

    Args:
        args: Namespace carrying the cross-validation ``fold`` index.
        conf: Experiment configuration consumed by XView3VesselLengthDataset.
        num_workers: Worker processes per DataLoader (previously hard-coded
            to 32; now a backward-compatible parameter).
        batch_size: Batch size for both loaders (previously hard-coded to 64).

    Returns:
        Dict with "train" (shuffled, drops the last partial batch) and "val"
        (ordered, keeps all samples) DataLoader instances.
    """
    train_dataset = XView3VesselLengthDataset(conf, fold=args.fold, is_test=False)
    val_dataset = XView3VesselLengthDataset(conf, fold=args.fold, is_test=True)
    return {
        "train": DataLoader(train_dataset, num_workers=num_workers,
                            batch_size=batch_size, drop_last=True, shuffle=True),
        "val": DataLoader(val_dataset, num_workers=num_workers,
                          batch_size=batch_size, drop_last=False, shuffle=False),
    }
class XView3VesselLengthDataset(Dataset):
    """
    Crop-level dataset for vessel-length regression.

    Each item is a square image crop centered on a labeled detection, paired
    with the detection's clipped, log1p-encoded vessel length.
    """
    def __init__(self, conf: DictConfig, fold: int = 0, is_test: bool = False):
        # Side length of the square crop per detection; must be even so the
        # detection sits exactly at the crop center (see __getitem__).
        self.crop_size = conf.dataset.crop_size
        # Side length of the pre-chipped PNG tiles on disk.
        self.chip_image_size = conf.dataset.chip_image_size
        self.length_upper = conf.dataset.length_upper  # 95 percentile point
        self.length_lower = conf.dataset.length_lower  # 5 percentile point
        assert self.crop_size % 2 == 0

        self.kfold_csv = XView3PreprocessedV2().validation_kfold_csv
        self.train_csv = XView3DataSource().train_csv
        self.train_imdir = XView3PreprocessedV2().train_image_dir
        self.val_imdir = XView3PreprocessedV2().validation_image_dir

        self.is_test = is_test
        if is_test:
            # Evaluation: only the held-out validation fold.
            self.df = self.load_valtst_scaled_vessel_length(fold=fold)
        else:
            # Training: full train split plus the non-held-out validation folds.
            self.df = self.load_train_val_scaled_vessel_length(fold=fold)

        self.train_transform, self.test_transform = dynamic_load(
            conf.train.augmentation.func
        )(**conf.train.augmentation.kwargs)

    def __len__(self):
        return len(self.df)

    def decode_vessel_length(self, vals: np.ndarray):
        """Inverse of the log1p encoding used by the loaders below."""
        return np.expm1(vals) + self.length_lower

    def load_valtst_scaled_vessel_length(self, fold: int = 0):
        """Rows of the held-out validation fold with encoded vessel lengths."""
        dfv = pd.read_csv(self.kfold_csv)
        dfv = dfv[~dfv["vessel_length_m"].isna()][
            [
                "vessel_length_m",
                # "is_vessel",
                # "is_fishing",
                "scene_id",
                "detect_scene_row",
                "detect_scene_column",
                "fold_idx",
            ]
        ]
        # Clip to the [lower, upper] percentile range, then log1p the offset.
        dfv["vessel_length_log1p_encoded"] = np.log1p(
            np.minimum(
                np.maximum(dfv["vessel_length_m"], self.length_lower),
                self.length_upper,
            )  # noqa
            - self.length_lower
        )
        dfv = dfv[dfv["fold_idx"] == fold]
        return dfv

    def load_train_val_scaled_vessel_length(self, fold: int = 0):
        """Train rows plus non-held-out validation folds, with encoded lengths.

        Train rows carry no fold_idx column, so after the concat their
        fold_idx is NaN -- __getitem__ relies on this to pick the image dir.
        """
        df = pd.read_csv(self.train_csv)
        df = df[~df["vessel_length_m"].isna()][
            [
                "vessel_length_m",
                # "is_vessel",
                # "is_fishing",
                "scene_id",
                "detect_scene_row",
                "detect_scene_column",
            ]
        ]

        dfv = pd.read_csv(self.kfold_csv)
        dfv = dfv[~dfv["vessel_length_m"].isna()][
            [
                "vessel_length_m",
                # "is_vessel",
                # "is_fishing",
                "scene_id",
                "detect_scene_row",
                "detect_scene_column",
                "fold_idx",
            ]
        ]
        dfv = dfv[dfv["fold_idx"] != fold]
        df = pd.concat([df, dfv])

        # Rescale: clip to [lower, upper] percentiles, then log1p the offset.
        df["vessel_length_log1p_encoded"] = np.log1p(
            np.minimum(
                np.maximum(df["vessel_length_m"], self.length_lower),
                self.length_upper,
            )  # noqa
            - self.length_lower
        )
        return df

    def __getitem__(self, index):
        r = self.df.iloc[index]
        # Chip filename encodes the scene and the tile's row/column index.
        filename = "{}_{}_{}.png".format(
            r["scene_id"],
            r["detect_scene_row"] // self.chip_image_size,
            r["detect_scene_column"] // self.chip_image_size,
        )
        # Detection center within the chip.
        yc = r["detect_scene_row"] % self.chip_image_size
        xc = r["detect_scene_column"] % self.chip_image_size

        # NaN fold_idx marks rows that came from train.csv (see loader above).
        if np.isnan(r["fold_idx"]):
            im_orig = cv2.imread(str(self.train_imdir / filename))
        else:
            im_orig = cv2.imread(str(self.val_imdir / filename))

        # Zero-padded crop centered on the detection; out-of-chip regions
        # stay black.
        im_crop = np.zeros((self.crop_size, self.crop_size, 3), dtype=np.uint8)
        d = int(self.crop_size / 2)
        y0, y1, x0, x1 = yc - d, yc + d, xc - d, xc + d
        top, left, bottom, right = 0, 0, self.crop_size, self.crop_size
        if yc - d < 0:
            top = d - yc
            y0 = 0
        if xc - d < 0:
            left = d - xc
            x0 = 0
        if yc + d > self.chip_image_size:
            # Negative value used as a negative slice end below; since
            # crop_size == 2 * d this equals
            # crop_size - (yc + d - chip_image_size), the correct end row.
            bottom = self.chip_image_size - d - yc
            y1 = self.chip_image_size
        if xc + d > self.chip_image_size:
            # Same negative-index trick as for ``bottom`` above.
            right = self.chip_image_size - d - xc
            x1 = self.chip_image_size
        im_crop[top:bottom, left:right] = im_orig[y0:y1, x0:x1]

        if self.is_test:
            im_crop = self.test_transform(image=im_crop)["image"]
        else:
            im_crop = self.train_transform(image=im_crop)["image"]
        # HWC uint8 -> CHW float tensor.
        im_crop = torch.from_numpy(im_crop.transpose((2, 0, 1))).float()
        return im_crop, torch.from_numpy(
            np.array([r["vessel_length_log1p_encoded"]])
        )  # noqa
| 33.086735 | 89 | 0.59522 | 5,699 | 0.878797 | 0 | 0 | 1,029 | 0.158674 | 0 | 0 | 1,225 | 0.188897 |
5eb6ed393de918f8c3120b183de9a52d1c9d90da | 216 | py | Python | master/scripts/paths.py | OPU-Surveillance-System/monitoring | 2c2c657c74fce9a5938d986372f9077708617d9c | [
"MIT"
] | 4 | 2020-12-24T11:51:28.000Z | 2022-02-08T09:02:38.000Z | master/scripts/paths.py | OPU-Surveillance-System/monitoring | 2c2c657c74fce9a5938d986372f9077708617d9c | [
"MIT"
] | 1 | 2021-11-16T02:54:35.000Z | 2021-11-16T02:54:35.000Z | master/scripts/paths.py | OPU-Surveillance-System/monitoring | 2c2c657c74fce9a5938d986372f9077708617d9c | [
"MIT"
] | null | null | null | """
Define the environment paths
"""
#Path variables
TEMPLATE_PATH = "/home/scom/documents/opu_surveillance_system/monitoring/master/"
STATIC_PATH = "/home/scom/documents/opu_surveillance_system/monitoring/static/"
| 27 | 81 | 0.805556 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 181 | 0.837963 |
5eb703c2f5ca89811146d4e9b20de24a3405a5d5 | 3,181 | py | Python | tests/strategies/test_local_strategy.py | gijswobben/customs | 72c0d071fe35ed84eb6d6371eb651edcd13a1044 | [
"MIT"
] | null | null | null | tests/strategies/test_local_strategy.py | gijswobben/customs | 72c0d071fe35ed84eb6d6371eb651edcd13a1044 | [
"MIT"
] | null | null | null | tests/strategies/test_local_strategy.py | gijswobben/customs | 72c0d071fe35ed84eb6d6371eb651edcd13a1044 | [
"MIT"
] | null | null | null | from flask.globals import request
import pytest
from flask import Flask
from typing import Dict
from customs import Customs
from customs.exceptions import UnauthorizedException
from customs.strategies import LocalStrategy
def test_local_strategy_initialization_without_customs():
class Local(LocalStrategy):
def get_or_create_user(self, user: Dict) -> Dict:
return super().get_or_create_user(user)
def validate_credentials(self, username: str, password: str) -> Dict:
return super().validate_credentials(username, password)
with pytest.warns(UserWarning):
print(Customs.get_instance())
strategy = Local()
assert strategy.name == "local"
def test_local_strategy_initialization_with_customs():
class Local(LocalStrategy):
def get_or_create_user(self, user: Dict) -> Dict:
return super().get_or_create_user(user)
def validate_credentials(self, username: str, password: str) -> Dict:
return super().validate_credentials(username, password)
# Create customs
app = Flask("TESTS")
app.secret_key = "630738a8-3b13-4311-8018-87554d6f7e85"
Customs(app)
# Create the strategy
strategy = Local()
assert strategy.name == "local"
# Cleanup of the Customs object used for testing
Customs.remove_instance()
def test_local_strategy_extract_crendentials():
class Local(LocalStrategy):
def get_or_create_user(self, user: Dict) -> Dict:
return super().get_or_create_user(user)
def validate_credentials(self, username: str, password: str) -> Dict:
return super().validate_credentials(username, password)
# Create customs
app = Flask("TESTS")
app.secret_key = "630738a8-3b13-4311-8018-87554d6f7e85"
Customs(app)
# Create the strategy
strategy = Local()
with app.test_request_context("/?test=123", json={"bla": "bla"}):
credentials = strategy.extract_credentials(request)
assert credentials == {}
with app.test_request_context("/?username=test&password=test"):
credentials = strategy.extract_credentials(request)
assert "username" in credentials
assert "password" in credentials
# Cleanup of the Customs object used for testing
Customs.remove_instance()
def test_local_strategy_authenticate():
class Local(LocalStrategy):
def get_or_create_user(self, user: Dict) -> Dict:
return super().get_or_create_user(user)
def validate_credentials(self, username: str, password: str) -> Dict:
return {}
# Create customs
app = Flask("TESTS")
app.secret_key = "630738a8-3b13-4311-8018-87554d6f7e85"
Customs(app)
# Create the strategy
strategy = Local()
with app.test_request_context("/?test=123", json={"bla": "bla"}):
with pytest.raises(UnauthorizedException):
user = strategy.authenticate(request)
with app.test_request_context("/?username=test&password=test"):
user = strategy.authenticate(request)
assert user == {}
# Cleanup of the Customs object used for testing
Customs.remove_instance()
| 30.586538 | 77 | 0.688463 | 1,090 | 0.34266 | 0 | 0 | 0 | 0 | 0 | 0 | 530 | 0.166614 |
5eb7f47546bca179adc2ac8b071991f097ead1c9 | 3,105 | py | Python | package/spack-omega-h/package.py | ctuning/ck-spack | 307934efce1be2d4f104251275c82fbc70127105 | [
"BSD-3-Clause"
] | 1 | 2018-07-17T07:45:09.000Z | 2018-07-17T07:45:09.000Z | package/spack-omega-h/package.py | ctuning/ck-spack | 307934efce1be2d4f104251275c82fbc70127105 | [
"BSD-3-Clause"
] | null | null | null | package/spack-omega-h/package.py | ctuning/ck-spack | 307934efce1be2d4f104251275c82fbc70127105 | [
"BSD-3-Clause"
] | null | null | null | ##############################################################################
# Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class OmegaH(CMakePackage):
"""Omega_h is a C++11 library providing data structures and algorithms
for adaptive discretizations. Its specialty is anisotropic triangle and
tetrahedral mesh adaptation. It runs efficiently on most modern HPC
hardware including GPUs.
"""
homepage = "https://github.com/ibaned/omega_h"
url = "https://github.com/ibaned/omega_h/archive/v9.13.4.tar.gz"
version('9.13.4', '035f9986ec07ad97ae0aa1e171872307')
variant('shared', default=True, description='Build shared libraries')
variant('mpi', default=True, description='Activates MPI support')
variant('zlib', default=True, description='Activates ZLib support')
depends_on('mpi', when='+mpi')
depends_on('zlib', when='+zlib')
def cmake_args(self):
args = ['-DUSE_XSDK_DEFAULTS:BOOL=ON']
if '+shared' in self.spec:
args.append('-DBUILD_SHARED_LIBS:BOOL=ON')
else:
args.append('-DBUILD_SHARED_LIBS:BOOL=OFF')
if '+mpi' in self.spec:
args.append('-DOmega_h_USE_MPI:BOOL=ON')
args.append('-DCMAKE_CXX_COMPILER:FILEPATH={0}'.format(
self.spec['mpi'].mpicxx))
else:
args.append('-DOmega_h_USE_MPI:BOOL=OFF')
if '+zlib' in self.spec:
args.append('-DTPL_ENABLE_ZLIB:BOOL=ON')
args.append('-DTPL_ZLIB_INCLUDE_DIRS:STRING={0}'.format(
self.spec['zlib'].prefix.include))
args.append('-DTPL_ZLIB_LIBRARIES:STRING={0}'.format(
self.spec['zlib'].libs))
else:
args.append('-DTPL_ENABLE_ZLIB:BOOL=OFF')
return args
def flag_handler(self, name, flags):
flags = list(flags)
if name == 'cxxflags':
flags.append(self.compiler.cxx11_flag)
return (None, None, flags)
| 41.959459 | 78 | 0.644767 | 1,847 | 0.594847 | 0 | 0 | 0 | 0 | 0 | 0 | 2,066 | 0.665378 |
5eb834e28210f09f415768f60fec78d4bb5c4bb0 | 984 | py | Python | core/data.py | mohamadnosratian/Pricer | a19fddc526add55385abd9569c0d3bdbd0cb79c4 | [
"MIT"
] | null | null | null | core/data.py | mohamadnosratian/Pricer | a19fddc526add55385abd9569c0d3bdbd0cb79c4 | [
"MIT"
] | 1 | 2020-11-29T15:26:47.000Z | 2020-11-29T15:26:47.000Z | core/data.py | mohamadnosratian/Pricer | a19fddc526add55385abd9569c0d3bdbd0cb79c4 | [
"MIT"
] | null | null | null | import TmConv
import time
class Data():
def __init__(self, content):
self.content = content
self.corrent = '-'
self.object = {}
def update(self):
Cp, name = self.content.Update()
if Cp != self.corrent:
self.corrent = Cp
self.object = {
"name": name,
"corrent": self.corrent,
"date": self.time_date(),
}
def time_date(self):
[y, m, d] = TmConv.gregorian_to_jalali(
time.localtime().tm_year, time.localtime().tm_mon, time.localtime().tm_mday)
h = time.localtime().tm_hour
_m = time.localtime().tm_min
s = time.localtime().tm_sec
return [y, m, d, h, _m, s]
def rearange(self, f):
st = ''
for i in range(0, len(f), 3):
holder = f[i:i+3]
st += holder + "," if i < len(f) - 3 else holder
return st
def arange(self, f):
return int(f.replace(',', ''))
| 24.6 | 88 | 0.505081 | 955 | 0.970528 | 0 | 0 | 0 | 0 | 0 | 0 | 34 | 0.034553 |
5eb8db949bc6779860bde61b3f9a676998243338 | 8,002 | py | Python | src/sage/categories/super_modules.py | LaisRast/sage | 5fb2a6ea44400e469caee82748cf863ca0c5f724 | [
"BSL-1.0"
] | null | null | null | src/sage/categories/super_modules.py | LaisRast/sage | 5fb2a6ea44400e469caee82748cf863ca0c5f724 | [
"BSL-1.0"
] | null | null | null | src/sage/categories/super_modules.py | LaisRast/sage | 5fb2a6ea44400e469caee82748cf863ca0c5f724 | [
"BSL-1.0"
] | null | null | null | r"""
Super modules
"""
#*****************************************************************************
# Copyright (C) 2015 Travis Scrimshaw <tscrim at ucdavis.edu>
#
# Distributed under the terms of the GNU General Public License (GPL)
# http://www.gnu.org/licenses/
#******************************************************************************
from sage.categories.category_types import Category_over_base_ring
from sage.categories.covariant_functorial_construction import CovariantConstructionCategory
# Note, a commutative algebra is not a commutative super algebra,
# therefore the following whitelist.
axiom_whitelist = frozenset(["Facade", "Finite", "Infinite",
"FiniteDimensional", "Connected", "WithBasis",
"FinitelyGeneratedAsLambdaBracketAlgebra",
# "Commutative", "Cocommutative",
"Supercommutative", "Supercocommutative",
"Associative", "Inverse", "Unital", "Division",
"AdditiveCommutative", "AdditiveAssociative",
"AdditiveInverse", "AdditiveUnital",
"NoZeroDivisors", "Distributive"])
class SuperModulesCategory(CovariantConstructionCategory, Category_over_base_ring):
@classmethod
def default_super_categories(cls, category, *args):
"""
Return the default super categories of `F_{Cat}(A,B,...)` for
`A,B,...` parents in `Cat`.
INPUT:
- ``cls`` -- the category class for the functor `F`
- ``category`` -- a category `Cat`
- ``*args`` -- further arguments for the functor
OUTPUT:
A join category.
This implements the property that subcategories constructed by
the set of whitelisted axioms is a subcategory.
EXAMPLES::
sage: HopfAlgebras(ZZ).WithBasis().FiniteDimensional().Super() # indirect doctest
Category of finite dimensional super hopf algebras with basis over Integer Ring
"""
axioms = axiom_whitelist.intersection(category.axioms())
C = super(SuperModulesCategory, cls).default_super_categories(category, *args)
return C._with_axioms(axioms)
def __init__(self, base_category):
"""
EXAMPLES::
sage: C = Algebras(QQ).Super()
sage: C
Category of super algebras over Rational Field
sage: C.base_category()
Category of algebras over Rational Field
sage: sorted(C.super_categories(), key=str)
[Category of graded algebras over Rational Field,
Category of super modules over Rational Field]
sage: AlgebrasWithBasis(QQ).Super().base_ring()
Rational Field
sage: HopfAlgebrasWithBasis(QQ).Super().base_ring()
Rational Field
"""
super(SuperModulesCategory, self).__init__(base_category, base_category.base_ring())
_functor_category = "Super"
def _repr_object_names(self):
"""
EXAMPLES::
sage: AlgebrasWithBasis(QQ).Super() # indirect doctest
Category of super algebras with basis over Rational Field
"""
return "super {}".format(self.base_category()._repr_object_names())
class SuperModules(SuperModulesCategory):
r"""
The category of super modules.
An `R`-*super module* (where `R` is a ring) is an `R`-module `M` equipped
with a decomposition `M = M_0 \oplus M_1` into two `R`-submodules
`M_0` and `M_1` (called the *even part* and the *odd part* of `M`,
respectively).
Thus, an `R`-super module automatically becomes a `\ZZ / 2 \ZZ`-graded
`R`-module, with `M_0` being the degree-`0` component and `M_1` being the
degree-`1` component.
EXAMPLES::
sage: Modules(ZZ).Super()
Category of super modules over Integer Ring
sage: Modules(ZZ).Super().super_categories()
[Category of graded modules over Integer Ring]
The category of super modules defines the super structure which
shall be preserved by morphisms::
sage: Modules(ZZ).Super().additional_structure()
Category of super modules over Integer Ring
TESTS::
sage: TestSuite(Modules(ZZ).Super()).run()
"""
def super_categories(self):
"""
EXAMPLES::
sage: Modules(ZZ).Super().super_categories()
[Category of graded modules over Integer Ring]
Nota bene::
sage: Modules(QQ).Super()
Category of super modules over Rational Field
sage: Modules(QQ).Super().super_categories()
[Category of graded modules over Rational Field]
"""
return [self.base_category().Graded()]
def extra_super_categories(self):
r"""
Adds :class:`VectorSpaces` to the super categories of ``self`` if
the base ring is a field.
EXAMPLES::
sage: Modules(QQ).Super().extra_super_categories()
[Category of vector spaces over Rational Field]
sage: Modules(ZZ).Super().extra_super_categories()
[]
This makes sure that ``Modules(QQ).Super()`` returns an
instance of :class:`SuperModules` and not a join category of
an instance of this class and of ``VectorSpaces(QQ)``::
sage: type(Modules(QQ).Super())
<class 'sage.categories.super_modules.SuperModules_with_category'>
.. TODO::
Get rid of this workaround once there is a more systematic
approach for the alias ``Modules(QQ)`` -> ``VectorSpaces(QQ)``.
Probably the latter should be a category with axiom, and
covariant constructions should play well with axioms.
"""
from sage.categories.modules import Modules
from sage.categories.fields import Fields
base_ring = self.base_ring()
if base_ring in Fields():
return [Modules(base_ring)]
else:
return []
class ParentMethods:
pass
class ElementMethods:
def is_even_odd(self):
"""
Return ``0`` if ``self`` is an even element or ``1``
if an odd element.
.. NOTE::
The default implementation assumes that the even/odd is
determined by the parity of :meth:`degree`.
Overwrite this method if the even/odd behavior is desired
to be independent.
EXAMPLES::
sage: cat = Algebras(QQ).WithBasis().Super()
sage: C = CombinatorialFreeModule(QQ, Partitions(), category=cat)
sage: C.degree_on_basis = sum
sage: C.basis()[2,2,1].is_even_odd()
1
sage: C.basis()[2,2].is_even_odd()
0
"""
return self.degree() % 2
def is_even(self):
"""
Return if ``self`` is an even element.
EXAMPLES::
sage: cat = Algebras(QQ).WithBasis().Super()
sage: C = CombinatorialFreeModule(QQ, Partitions(), category=cat)
sage: C.degree_on_basis = sum
sage: C.basis()[2,2,1].is_even()
False
sage: C.basis()[2,2].is_even()
True
"""
return self.is_even_odd() == 0
def is_odd(self):
"""
Return if ``self`` is an odd element.
EXAMPLES::
sage: cat = Algebras(QQ).WithBasis().Super()
sage: C = CombinatorialFreeModule(QQ, Partitions(), category=cat)
sage: C.degree_on_basis = sum
sage: C.basis()[2,2,1].is_odd()
True
sage: C.basis()[2,2].is_odd()
False
"""
return self.is_even_odd() == 1
| 35.40708 | 93 | 0.567733 | 6,741 | 0.842414 | 0 | 0 | 943 | 0.117846 | 0 | 0 | 6,149 | 0.768433 |
5eb917289cf2645c455634d8b05490fcdaf7e3cd | 2,616 | py | Python | emissor/processing/api.py | cltl/GMRCAnnotation | cc4c7f0c9cbbce0eb6c7dee4d39d128f91b85839 | [
"MIT"
] | null | null | null | emissor/processing/api.py | cltl/GMRCAnnotation | cc4c7f0c9cbbce0eb6c7dee4d39d128f91b85839 | [
"MIT"
] | 18 | 2021-01-12T15:18:07.000Z | 2021-03-23T12:30:57.000Z | emissor/processing/api.py | cltl/EMISSOR | 68504c859c36b1b65b2c0002d065028b3d5b5d08 | [
"MIT"
] | null | null | null | from abc import ABC
from typing import Iterable, Any, Tuple, Mapping
from emissor.persistence import ScenarioStorage
from emissor.persistence.persistence import ScenarioController
from emissor.representation.scenario import Signal, Modality
class DataPreprocessor(ABC):
def preprocess(self):
raise NotImplementedError("")
@property
def name(self) -> str:
return self.__class__.__name__
def __enter__(self):
pass
def __exit__(self, exc_type, exc_val, exc_tb):
pass
class ScenarioInitializer(ABC):
def initialize_scenario(self, scenario_id: str, storage: ScenarioStorage):
raise NotImplementedError("")
def initialize_modality(self, scenario: ScenarioController, modality: Modality):
raise NotImplementedError("")
@property
def name(self) -> str:
return self.__class__.__name__
@property
def parallel(self) -> bool:
return False
def __enter__(self):
pass
def __exit__(self, exc_type, exc_val, exc_tb):
pass
def scenario_key(self, storage: ScenarioStorage) -> Any:
return None
class SignalProcessor(ABC):
def process_signal(self, scenario: ScenarioController, signal: Signal):
raise NotImplementedError("")
def process_signals(self, scenario: ScenarioController, signals: Mapping[Modality, Iterable[Signal]]):
for modality, signals in signals.items():
for signal in signals:
self.process_signal(scenario, signal)
def process_scenario(self, scenario: ScenarioController):
scenario.load_signals(self.modalities)
self.process_signals(scenario, scenario.signals)
@property
def name(self) -> str:
return self.__class__.__name__
@property
def parallel(self) -> bool:
return False
@property
def modalities(self) -> Tuple[Modality]:
return ()
def __enter__(self):
pass
def __exit__(self, exc_type, exc_val, exc_tb):
pass
def scenario_key(self, storage: ScenarioStorage) -> Any:
return None
def signal_key(self, storage: ScenarioStorage) -> Any:
return lambda signal: signal.time.start
class ProcessorPlugin:
def create_preprocessor(self) -> DataPreprocessor:
return None
def create_initializer(self) -> ScenarioInitializer:
return None
def create_processors(self) -> Iterable[SignalProcessor]:
return []
@property
def name(self) -> str:
return self.__class__.__name__
@property
def priority(self) -> int:
return 0
| 24.679245 | 106 | 0.675076 | 2,360 | 0.902141 | 0 | 0 | 553 | 0.211391 | 0 | 0 | 8 | 0.003058 |
5eb921e1f305cdd4c49679c44d5fbbbb4958bfcf | 336 | py | Python | pseudocode/scratch_code.py | AndrewBeers/DifferentialPrivacy_For_Quotes | db63d9b9d533f37b9f2c546ffef1d4dd883cd3cb | [
"MIT"
] | null | null | null | pseudocode/scratch_code.py | AndrewBeers/DifferentialPrivacy_For_Quotes | db63d9b9d533f37b9f2c546ffef1d4dd883cd3cb | [
"MIT"
] | 2 | 2021-09-08T01:38:38.000Z | 2022-01-13T02:11:04.000Z | pseudocode/scratch_code.py | AndrewBeers/DifferentialPrivacy_For_Quotes | db63d9b9d533f37b9f2c546ffef1d4dd883cd3cb | [
"MIT"
] | null | null | null | from colour import Color
from pprint import pprint
def test_function():
red = Color("red")
green = Color("green")
colors = list(red.range_to(green, 20))
# pprint(colors)
# pprint(dir(colors[0]))
for color in colors:
print(color.get_hex())
return
if __name__ == '__main__':
test_function() | 16 | 42 | 0.627976 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 62 | 0.184524 |
5eb9b284f901406c6cbd4aa345e318f7d639ee36 | 2,779 | py | Python | src/ros_libav/common.py | ORANKWON/ros_libav | d8d1417d0dd422db717de593718df240316b77ed | [
"MIT"
] | null | null | null | src/ros_libav/common.py | ORANKWON/ros_libav | d8d1417d0dd422db717de593718df240316b77ed | [
"MIT"
] | null | null | null | src/ros_libav/common.py | ORANKWON/ros_libav | d8d1417d0dd422db717de593718df240316b77ed | [
"MIT"
] | 1 | 2019-03-02T04:41:36.000Z | 2019-03-02T04:41:36.000Z | # @Author: Jose Rojas
# @Date: 2018-07-10T08:20:17-07:00
# @Email: jrojas@redlinesolutions.co
# @Project: ros-libav-node
# @Last modified by: jrojas
# @Last modified time: 2018-07-13T17:26:29-07:00
# @License: MIT License
# @Copyright: Copyright @ 2018, Jose Rojas
import rospy
import av
import av.filter
class Node(object):
EVENT_START = 1
EVENT_END = 2
def __init__(self, type):
self.crop = self.get_param('libav_{}_crop'.format(type), None)
self.vflip = self.get_param('libav_{}_flip_v'.format(type), None)
self.hflip = self.get_param('libav_{}_flip_h'.format(type), None)
self.filter_chain = []
self.filter_graph = None
def get_param(self, name, default=None):
local = rospy.get_param("~{}".format(name), None)
parent = rospy.get_param("{}".format(name), None)
glb = rospy.get_param("/{}".format(name), None)
if local is not None:
return local
if parent is not None:
return parent
if glb is not None:
return glb
return default
def pixel_encoding(self, format):
encoding = None
if format == 'rgb8':
encoding = 'rgb24'
elif format == 'bgr8':
encoding = 'bgr24'
return encoding
def initialize_filter_graph(self, stream, format):
crop = self.crop
vflip = self.vflip
hflip = self.hflip
graph = av.filter.Graph()
fchain = []
ow = iw = stream.width
oh = ih = stream.height
if crop is not None or vflip is not None or hflip is not None:
fchain.append(graph.add_buffer(width=iw, height=ih, format=format))
if crop is not None:
fchain.append(graph.add("crop", crop))
fchain[-2].link_to(fchain[-1])
# scale video
box = crop.split(":")
rospy.logwarn('cropping to box {}'.format(box))
ow = iw = box[0]
oh = ih = box[1]
if vflip:
fchain.append(graph.add("vflip"))
fchain[-2].link_to(fchain[-1])
rospy.loginfo('flipping vertically')
if hflip:
fchain.append(graph.add("hflip"))
fchain[-2].link_to(fchain[-1])
rospy.loginfo('flipping horizontally')
if len(fchain) > 0:
fchain.append(graph.add("buffersink")) # graph must end with buffersink...?
fchain[-2].link_to(fchain[-1])
self.filter_chain = fchain
self.filter_graph = graph
return ow, oh
def process_filter_chain(self, frame):
if len(self.filter_chain) > 0:
self.filter_chain[0].push(frame)
frame = self.filter_chain[-1].pull()
return frame
| 28.357143 | 88 | 0.570349 | 2,466 | 0.88737 | 0 | 0 | 0 | 0 | 0 | 0 | 499 | 0.179561 |
5eba6763e45275c05c6ea71d177bcb0b20fdb6c3 | 266 | py | Python | zeri_di_funzioni/stima_ordine.py | Ro0t-set/metodi-numerici | d586bce78f94f1aa6736218c9123064a57cb87ec | [
"MIT"
] | 5 | 2021-05-21T12:22:18.000Z | 2021-07-02T16:48:32.000Z | zeri_di_funzioni/stima_ordine.py | AlePioggia/metodi-numerici | a231f4a857074d811eb230b1114545f05fabe65f | [
"MIT"
] | null | null | null | zeri_di_funzioni/stima_ordine.py | AlePioggia/metodi-numerici | a231f4a857074d811eb230b1114545f05fabe65f | [
"MIT"
] | 2 | 2021-06-22T10:20:58.000Z | 2021-07-02T19:58:24.000Z | # -*- coding: utf-8 -*-
"""
Metodo per la stima dell'ordine
"""
import numpy as np
def stima_ordine(xks,num_iterazioni):
k = num_iterazioni - 3
return np.log(abs(xks[k+2]-xks[k+3])/abs(xks[k+1]-xks[k+2])) / np.log(abs(xks[k+1]-xks[k+2])/abs(xks[k]-xks[k+1])) | 33.25 | 118 | 0.62406 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 62 | 0.233083 |
5eba75a29b860145b0023e5e394f13be95f75dca | 539 | py | Python | src/game_objects/projectiles/projectile.py | ozcer/Project-Ooze | 28eb84995f4fa283366e3f04edb7e393d5281ac5 | [
"MIT"
] | 1 | 2018-10-10T02:11:50.000Z | 2018-10-10T02:11:50.000Z | src/game_objects/projectiles/projectile.py | ozcer/Project-Ooze | 28eb84995f4fa283366e3f04edb7e393d5281ac5 | [
"MIT"
] | 29 | 2018-03-16T05:07:18.000Z | 2018-04-03T03:58:32.000Z | src/game_objects/projectiles/projectile.py | ozcer/FlaPy-Bird | 28eb84995f4fa283366e3f04edb7e393d5281ac5 | [
"MIT"
] | 1 | 2018-03-18T00:27:12.000Z | 2018-03-18T00:27:12.000Z | import pygame
from src.game_objects.dynamic import Dynamic
from src.game_objects.foes.foe import Foe
class Projectile(Dynamic):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
def decayable(self):
"""
Overriding decayable in GameObject
:return: bool
"""
active_zone = self.game.surface.get_rect()
return not active_zone.colliderect(self.rect)
def draw(self):
super().draw()
def update(self):
super().update()
| 21.56 | 53 | 0.612245 | 434 | 0.805195 | 0 | 0 | 0 | 0 | 0 | 0 | 80 | 0.148423 |
5ebac3e5f4a5272b4168613de726fdb25fcb20e1 | 1,773 | py | Python | API/app/getLatLng.py | EdmundoSanchezM/Trobify | 08733546737cd8baf2537eb00dcf37b1f6445a8f | [
"MIT"
] | null | null | null | API/app/getLatLng.py | EdmundoSanchezM/Trobify | 08733546737cd8baf2537eb00dcf37b1f6445a8f | [
"MIT"
] | null | null | null | API/app/getLatLng.py | EdmundoSanchezM/Trobify | 08733546737cd8baf2537eb00dcf37b1f6445a8f | [
"MIT"
] | null | null | null | import requests
import urllib.parse
from bs4 import BeautifulSoup
from geopy.geocoders import Nominatim
def parseCoord (coord):
start = 0
for i in range(0, len(coord)):
if coord[i].isnumeric () or coord[i] == '-' :
start = i
break
coordAsStr = coord[start: - 1].split (", ")
coords = (float (coordAsStr[0]), float (coordAsStr[1]))
return coords
def parseAddr (address):
A = address
B = A.split (',')
finale = str ()
for i in B:
finale = finale + i.strip () + ", "
return finale[:-3]
def getAddress (url):
response = requests.get (url)
soup = BeautifulSoup(response.text, 'lxml')
address = str ()
highFives = soup.find_all ('h5', attrs={'class':'card-title'})
for i in highFives:
if i.text == 'Ubicación':
address = i.next_sibling.text
break
address = parseAddr (address)
try:
coord = soup.find('img', attrs={'class':'img-static-map'})['onclick']
coord = parseCoord (coord)
return (address, coord)
except:
try:
print (address, ": No coordinates found, will attempt to find coordinates through address...")
geolocator = Nominatim(user_agent="trobify")
location = geolocator.geocode(address)
coord = (float (location.latitude), float (location.longitude))
print (address, ": Coordinates found")
return (address, coord)
except:
print (address, ": Couldn't find coordinates, entry will be ignored")
return None
addr = getAddress ("https://century21mexico.com/propiedad/402980_casa-en-venta-en-bosque-de-echegaray-naucalpan-estado-de-mexico-mexico")
print (addr)
print (addr[1][0]) | 30.568966 | 137 | 0.598985 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 368 | 0.207441 |
5ebb524fc20cdce67043f7ce16122bebdb4e6bf2 | 390 | py | Python | python/echoserver.py | vesche/snippets | 7a9d598df99c26c4e0c63669f9f95a94eeed0d08 | [
"Unlicense"
] | 7 | 2016-01-03T19:42:07.000Z | 2018-10-23T14:03:12.000Z | python/echoserver.py | vesche/snippets | 7a9d598df99c26c4e0c63669f9f95a94eeed0d08 | [
"Unlicense"
] | null | null | null | python/echoserver.py | vesche/snippets | 7a9d598df99c26c4e0c63669f9f95a94eeed0d08 | [
"Unlicense"
] | 1 | 2018-03-09T08:52:01.000Z | 2018-03-09T08:52:01.000Z | #!/usr/bin/env python
import socket
import sys
if len(sys.argv) == 2:
port = int(sys.argv[1])
else:
print('Usage: ./echoserver.py <port>')
sys.exit(1)
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.bind(('127.0.0.1', port))
s.listen(1)
while 1:
client, address = s.accept()
data = client.recv(1024)
if data:
client.send(data)
client.close()
| 17.727273 | 53 | 0.623077 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 63 | 0.161538 |
5ebb91cb5e4e0255ed257a4d8c5da3f46959b083 | 1,416 | py | Python | Py Apple Dynamics V7.3 SRC/PA-Dynamics V7.3/PA_ATTITUDE.py | musen142/py-apple-dynamics | 95f831ecf9c9167e9709c63deabc989eda6bf669 | [
"Apache-2.0"
] | 1 | 2022-01-18T11:47:29.000Z | 2022-01-18T11:47:29.000Z | Py Apple Dynamics V7.3 SRC/PA-Dynamics V7.3/PA_ATTITUDE.py | musen142/py-apple-dynamics | 95f831ecf9c9167e9709c63deabc989eda6bf669 | [
"Apache-2.0"
] | null | null | null | Py Apple Dynamics V7.3 SRC/PA-Dynamics V7.3/PA_ATTITUDE.py | musen142/py-apple-dynamics | 95f831ecf9c9167e9709c63deabc989eda6bf669 | [
"Apache-2.0"
] | null | null | null | from math import sin,cos,pi
def cal_ges(PIT,ROL,l,b,w,x,Hc):
YA=0
P=PIT*pi/180
R=ROL*pi/180
Y=YA*pi/180
#腿1
ABl_x=l/2 - x -(l*cos(P)*cos(Y))/2 + (b*cos(P)*sin(Y))/2
ABl_y=w/2 - (b*(cos(R)*cos(Y) + sin(P)*sin(R)*sin(Y)))/2 - (l*(cos(R)*sin(Y) - cos(Y)*sin(P)*sin(R)))/2
ABl_z= - Hc - (b*(cos(Y)*sin(R) - cos(R)*sin(P)*sin(Y)))/2 - (l*(sin(R)*sin(Y) + cos(R)*cos(Y)*sin(P)))/2
#腿2
AB2_x=l/2 - x - (l*cos(P)*cos(Y))/2 - (b*cos(P)*sin(Y))/2
AB2_y=(b*(cos(R)*cos(Y) + sin(P)*sin(R)*sin(Y)))/2 - w/2 - (l*(cos(R)*sin(Y) - cos(Y)*sin(P)*sin(R)))/2
AB2_z=(b*(cos(Y)*sin(R) - cos(R)*sin(P)*sin(Y)))/2 - Hc - (l*(sin(R)*sin(Y) + cos(R)*cos(Y)*sin(P)))/2
#腿3
AB3_x=(l*cos(P)*cos(Y))/2 - x - l/2 + (b*cos(P)*sin(Y))/2
AB3_y=w/2 - (b*(cos(R)*cos(Y) + sin(P)*sin(R)*sin(Y)))/2 + (l*(cos(R)*sin(Y) - cos(Y)*sin(P)*sin(R)))/2
AB3_z=(l*(sin(R)*sin(Y) + cos(R)*cos(Y)*sin(P)))/2 - (b*(cos(Y)*sin(R) - cos(R)*sin(P)*sin(Y)))/2 - Hc
#腿4
AB4_x=(l*cos(P)*cos(Y))/2 - x - l/2 - (b*cos(P)*sin(Y))/2
AB4_y=(b*(cos(R)*cos(Y) + sin(P)*sin(R)*sin(Y)))/2 - w/2 + (l*(cos(R)*sin(Y) - cos(Y)*sin(P)*sin(R)))/2
AB4_z=(b*(cos(Y)*sin(R) - cos(R)*sin(P)*sin(Y)))/2 - Hc + (l*(sin(R)*sin(Y) + cos(R)*cos(Y)*sin(P)))/2
x1=ABl_x
y1=ABl_z
x2=AB2_x
y2=AB2_z
x3=AB4_x
y3=AB4_z
x4=AB3_x
y4=AB3_z
return x1,x2,x3,x4,y1,y2,y3,y4
| 32.930233 | 109 | 0.47952 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 20 | 0.014045 |
5ebc7941277bb454426f173139c1ec78075f5c4f | 7,477 | py | Python | src/tau_clients/parsers.py | vmware-samples/tau-clients | d7d07885dec9a1e44a5acf76ee62a33064629ab0 | [
"BSD-2-Clause"
] | 2 | 2021-07-27T12:02:28.000Z | 2021-12-30T13:47:23.000Z | src/tau_clients/parsers.py | vmware-samples/tau-clients | d7d07885dec9a1e44a5acf76ee62a33064629ab0 | [
"BSD-2-Clause"
] | 4 | 2021-07-27T12:03:05.000Z | 2022-02-11T13:55:05.000Z | src/tau_clients/parsers.py | vmware-samples/tau-clients | d7d07885dec9a1e44a5acf76ee62a33064629ab0 | [
"BSD-2-Clause"
] | null | null | null | # Copyright 2021 VMware, Inc.
# SPDX-License-Identifier: BSD-2
import ipaddress
from typing import Any
from typing import Dict
from typing import Optional
from urllib import parse
import tau_clients
try:
import pymisp
except ImportError:
raise ImportError("This module requires 'pymisp' to be installed.") from None
class ResultParserMISP:
"""This is a parser to extract *basic* information from a result dictionary."""
# Max number of objects to be created to avoid slowness
MAX_SAME_TYPE_OBJ_CREATION = 30
@staticmethod
def validate_hostname(input_str: str) -> str:
"""
Validate a hostname.
:param str input_str: the host name
:return: the validated hostname
:raises ValueError: in case of any validation errors
"""
# skip null values
if not input_str:
raise ValueError
# remove explicit roots
if input_str[-1] == ".":
input_str = input_str[:-1]
# skip reverse resolutions
if "in-addr.arpa" in input_str:
raise ValueError
# skip hostnames
if "." not in input_str:
raise ValueError
# skip IP addresses
try:
_ = ipaddress.ip_address(input_str)
except ValueError:
pass
else:
raise ValueError("IP addresses are not allowed")
return input_str
def __init__(self, techniques_galaxy: Optional[Dict[str, str]] = None):
"""Constructor."""
self.techniques_galaxy = techniques_galaxy or {}
def parse(self, analysis_link: str, result: Dict[str, Any]) -> pymisp.MISPEvent:
"""
Parse the analysis result into a MISP event.
:param str analysis_link: the analysis link
:param dict[str, any] result: the JSON returned by the analysis client.
:rtype: pymisp.MISPEvent
:return: a MISP event
"""
misp_event = pymisp.MISPEvent()
# Add analysis subject info
if "url" in result["analysis_subject"]:
o = pymisp.MISPObject("url")
o.add_attribute("url", result["analysis_subject"]["url"])
else:
o = pymisp.MISPObject("file")
o.add_attribute("md5", type="md5", value=result["analysis_subject"]["md5"])
o.add_attribute("sha1", type="sha1", value=result["analysis_subject"]["sha1"])
o.add_attribute("sha256", type="sha256", value=result["analysis_subject"]["sha256"])
o.add_attribute(
"mimetype",
category="Payload delivery",
type="mime-type",
value=result["analysis_subject"]["mime_type"],
)
misp_event.add_object(o)
# Add HTTP requests from url analyses
network_dict = result.get("report", {}).get("analysis", {}).get("network", {})
obj_count = 0
for request in network_dict.get("requests", []):
if not request["url"] or not request["ip"]:
continue
parsed_uri = parse.urlparse(request["url"])
o = pymisp.MISPObject(name="http-request")
o.add_attribute("host", parsed_uri.netloc)
o.add_attribute("method", "GET")
o.add_attribute("uri", request["url"])
o.add_attribute("ip-dst", request["ip"])
misp_event.add_object(o)
obj_count += 1
if obj_count > self.MAX_SAME_TYPE_OBJ_CREATION:
break
# Add network behaviors from files
for subject in result.get("report", {}).get("analysis_subjects", []):
# Add DNS requests
obj_count = 0
for dns_query in subject.get("dns_queries", []):
try:
hostname = self.validate_hostname(dns_query.get("hostname"))
except ValueError:
continue
o = pymisp.MISPObject(name="domain-ip")
o.add_attribute("hostname", type="hostname", value=hostname)
for ip in dns_query.get("results", []):
o.add_attribute("ip", type="ip-dst", value=ip)
misp_event.add_object(o)
obj_count += 1
if obj_count > self.MAX_SAME_TYPE_OBJ_CREATION:
break
# Add HTTP conversations (as network connection and as http request)
obj_count = 0
for http_conv in subject.get("http_conversations", []):
o = pymisp.MISPObject(name="network-connection")
o.add_attribute("ip-src", http_conv["src_ip"])
o.add_attribute("ip-dst", http_conv["dst_ip"])
o.add_attribute("src-port", http_conv["src_port"])
o.add_attribute("dst-port", http_conv["dst_port"])
o.add_attribute("hostname-dst", http_conv["dst_host"])
o.add_attribute("layer3-protocol", "IP")
o.add_attribute("layer4-protocol", "TCP")
o.add_attribute("layer7-protocol", "HTTP")
misp_event.add_object(o)
method, path = http_conv["url"].split(" ")[:2]
if http_conv["dst_port"] == 80:
uri = f"http://{http_conv['dst_host']}{path}"
else:
uri = f"http://{http_conv['dst_host']}:{http_conv['dst_port']}{path}"
o = pymisp.MISPObject(name="http-request")
o.add_attribute("host", http_conv["dst_host"])
o.add_attribute("method", method)
o.add_attribute("uri", uri)
o.add_attribute("ip-dst", http_conv["dst_ip"])
misp_event.add_object(o)
obj_count += 2
if obj_count > self.MAX_SAME_TYPE_OBJ_CREATION:
break
# Add sandbox info like score and sandbox type
o = pymisp.MISPObject(name="sandbox-report")
sandbox_type = "saas" if tau_clients.is_task_link_hosted(analysis_link) else "on-premise"
o.add_attribute("score", result["score"])
o.add_attribute("sandbox-type", sandbox_type)
o.add_attribute("{}-sandbox".format(sandbox_type), "vmware-nsx-defender")
o.add_attribute("permalink", analysis_link)
misp_event.add_object(o)
# Add behaviors
if result.get("malicious_activity", []):
o = pymisp.MISPObject(name="sb-signature")
o.add_attribute("software", "VMware NSX Defender")
for activity in result.get("malicious_activity"):
a = pymisp.MISPAttribute()
a.from_dict(type="text", value=activity)
o.add_attribute("signature", **a)
misp_event.add_object(o)
# Add mitre techniques
for techniques in result.get("activity_to_mitre_techniques", {}).values():
for technique in techniques:
for misp_technique_id, misp_technique_name in self.techniques_galaxy.items():
if technique["id"].casefold() in misp_technique_id.casefold():
# If report details a sub-technique, trust the match
# Otherwise trust it only if the MISP technique is not a sub-technique
if "." in technique["id"] or "." not in misp_technique_id:
misp_event.add_tag(misp_technique_name)
break
return misp_event
| 41.77095 | 97 | 0.57269 | 7,148 | 0.955998 | 0 | 0 | 872 | 0.116624 | 0 | 0 | 2,338 | 0.312692 |
5ebca2415c32be73b3b846d60b82069b036022ff | 12,398 | py | Python | HSI_Data_Preparation.py | fjpsxh/CNN_HSIC_MRF | 83f3202b512e3cdee1d0555593a55c6d0d6aa4fb | [
"MIT"
] | 56 | 2017-05-07T18:08:35.000Z | 2021-08-31T08:57:56.000Z | HSI_Data_Preparation.py | zoeyingz/CNN_HSIC_MRF | 83f3202b512e3cdee1d0555593a55c6d0d6aa4fb | [
"MIT"
] | 2 | 2018-05-25T11:14:47.000Z | 2018-06-14T06:12:27.000Z | HSI_Data_Preparation.py | zoeyingz/CNN_HSIC_MRF | 83f3202b512e3cdee1d0555593a55c6d0d6aa4fb | [
"MIT"
] | 22 | 2017-10-25T02:32:20.000Z | 2021-08-31T08:58:01.000Z | # -*- coding: utf-8 -*-
"""
Created on Sat Feb 18 16:21:13 2017
@author: Xiangyong Cao
This code is modified based on https://github.com/KGPML/Hyperspectral
"""
import scipy.io
import numpy as np
from random import shuffle
import random
import scipy.ndimage
from skimage.util import pad
import os
import time
import pandas as pd
from utils import patch_size,convertToOneHot
import math
from sklearn.decomposition import PCA
flag_augment = True # true if adopt data-augmentation strategy
start_time = time.time()
## Load data
# Expects the Indian Pines cube and its ground-truth map under ./Data;
# commented alternatives switch the script to PaviaU or simulated data.
DATA_PATH = os.path.join(os.getcwd(),"Data")
Data = scipy.io.loadmat(os.path.join(DATA_PATH, 'Indian_pines.mat'))['indian_pines']
Label = scipy.io.loadmat(os.path.join(DATA_PATH, 'Indian_pines_gt.mat'))['indian_pines_gt']
#Data = scipy.io.loadmat(os.path.join(DATA_PATH, 'PaviaU.mat'))['paviaU']
#Label = scipy.io.loadmat(os.path.join(DATA_PATH, 'PaviaU_gt.mat'))['paviaU_gt']
#Data = scipy.io.loadmat(os.path.join(DATA_PATH, 'Simu_data.mat'))['Simu_data']
#Label = scipy.io.loadmat(os.path.join(DATA_PATH, 'Simu_label.mat'))['Simu_label']
## Some constant parameters
Height, Width, Band = Data.shape[0], Data.shape[1], Data.shape[2]
# Label value 0 marks unlabelled pixels, so it is excluded from the class count.
Num_Classes = len(np.unique(Label))-1 # Simu: len(np.unique(Label))
## Scale the HSI Data between [0,1]
Data = Data.astype(float)
for band in range(Band):
    # Min-max normalisation, applied independently per spectral band.
    Data[:,:,band] = (Data[:,:,band]-np.min(Data[:,:,band]))/(np.max(Data[:,:,band])-np.min(Data[:,:,band]))
#data_all = Data.transpose(2,0,1).transpose(0,2,1).reshape(Band,-1).transpose(1,0)
#n_components = 40
#pca = PCA(n_components=n_components)
#data_all = pca.fit_transform(data_all)
#Data = data_all.transpose(1,0).reshape(n_components,Width,Height).transpose(0,2,1).transpose(1,2,0)
## padding the data beforehand
Height, Width, Band = Data.shape[0], Data.shape[1], Data.shape[2]
# Symmetric padding of (patch_size-1)/2 pixels on every side so a full patch
# can be extracted around each original pixel, including at the image border.
Data_Padding = np.zeros((Height+int(patch_size-1),Width+int(patch_size-1),Band))
for band in range(Band):
    Data_Padding[:,:,band] = pad(Data[:,:,band],int((patch_size-1)/2),'symmetric')
def Patch(height_index,width_index):
    """Cut one (Band, patch_size, patch_size) patch out of the padded cube.

    The patch's top-left corner sits at (height_index, width_index) in
    Data_Padding, which (given the symmetric padding above) centres it on the
    original pixel at the same coordinates.
    """
    bands_first = np.transpose(Data_Padding, (2, 0, 1))
    rows = slice(height_index, height_index + patch_size)
    cols = slice(width_index, width_index + patch_size)
    return np.array(bands_first[:, rows, cols])
## Collect patches for each class
# Classes[c]       -> list of patches whose centre pixel carries label c+1
# Classes_Index[c] -> column-major flat index (j * Height + i) of each such pixel
Classes,Classes_Index = [],[]
for k in range(Num_Classes):
    Classes.append([])
    Classes_Index.append([])
All_Patches, All_Labels = [],[]
# Scan column by column so the flat index formula j * Height + i holds.
for j in range(0,Width):
    for i in range(0,Height):
        curr_inp = Patch(i,j)
        curr_ind = j * Height + i
        curr_tar = Label[i,j]
        All_Patches.append(curr_inp)
        All_Labels.append(curr_tar)
        #Ignore patches with unknown landcover type for the central pixel
        if(curr_tar!=0):
            Classes[curr_tar-1].append(curr_inp)
            Classes_Index[curr_tar-1].append(curr_ind)
All_data = {}
All_data['patch'] = All_Patches
All_data['labels'] = All_Labels
# Per-class sample counts, used below to size the train/test split.
Num_Each_Class=[]
for k in range(Num_Classes):
    Num_Each_Class.append(len(Classes[k]))
def DataDivide(Classes_k,Num_Train_Each_Class_k,Classes_Index_k):
    """Split one class's patches (and their flat pixel indices) into
    training and test subsets.

    The RNG is re-seeded on every call, so the split is reproducible.
    Returns (train_patch, test_patch, train_index, test_index).
    """
    np.random.seed(0)
    chosen = np.random.choice(len(Classes_k), Num_Train_Each_Class_k, replace=False)
    train_patch = [Classes_k[c] for c in chosen]
    train_index = [Classes_Index_k[c] for c in chosen]
    chosen_set = set(int(c) for c in chosen)
    # Remaining positions, in ascending order (same order as np.setdiff1d).
    rest = [c for c in range(len(Classes_k)) if c not in chosen_set]
    test_patch = [Classes_k[c] for c in rest]
    test_index = [Classes_Index_k[c] for c in rest]
    return train_patch, test_patch, train_index, test_index
## Make a train and test split
# method 1: fixed proportion
train_frac = 0.05
Num_Train_Each_Class = [math.ceil(train_frac*x) for x in Num_Each_Class]
Num_Train_Each_Class = [int(x) for x in Num_Train_Each_Class]
# method 2: a fixed number for each class
#Num_Train_Each_Class = [40] * Num_Classes
Num_Test_Each_Class = list(np.array(Num_Each_Class) - np.array(Num_Train_Each_Class))
Train_Patch, Train_Label, Test_Patch, Test_Label = [],[],[],[]
TestIndex, TrainIndex = [], []
# Split every class independently so each keeps its own training fraction.
for k in range(Num_Classes):
    train_patch, test_patch, train_index, test_index = DataDivide(Classes[k],Num_Train_Each_Class[k],
                                                                  Classes_Index[k])
    TestIndex.extend(test_index)
    TrainIndex.extend(train_index)
    #Make training and test splits
    # Train_Patch stays nested per class (flattened after augmentation below);
    # Test_Patch is flattened right away.
    Train_Patch.append(train_patch) # patches_of_current_class[:-test_split_size]
    Test_Patch.extend(test_patch) # patches_of_current_class[-test_split_size:]
    Test_Label.extend(np.full(Num_Test_Each_Class[k], k, dtype=int))
Train_Label = []
for k in range(Num_Classes):
    Train_Label.append([k]*Num_Train_Each_Class[k])
Resample_Num_Count = Num_Train_Each_Class
# Augment the data with random flipped and rotated patches
# NOTE(review): this is an alias, not a copy — appends to Train_Patch[k] below
# are also visible through fixed_Train_Patch[k]; the source index j is capped
# at the original per-class count, so only original patches get transformed.
fixed_Train_Patch = Train_Patch
if flag_augment:
    Resample_Num_Count = []
    times = 10 # can be tuned
    # Grow each class to times * Num_Train_Each_Class[k] patches by appending
    # randomly transformed copies of the original training patches.
    for k in range(Num_Classes):
        for l in range(times*Num_Train_Each_Class[k]):
            if(len(Train_Patch[k])<times*Num_Train_Each_Class[k]):
                # num selects one of four transforms; j selects a source patch.
                num = random.randint(0,3)
                j = random.randint(0,Num_Train_Each_Class[k]-1)
                if num == 0 :
                    #Flip patch up-down
                    flipped_patch = np.flipud(fixed_Train_Patch[k][j])
                if num == 1 :
                    #Flip patch left-right
                    flipped_patch = np.fliplr(fixed_Train_Patch[k][j])
                if num == 2 :
                    #add gaussian noise
                    flipped_patch = Train_Patch[k][j] + np.random.normal(0,0.01,size = fixed_Train_Patch[k][j].shape)
                if num == 3 :
                    #Rotate patch by a random angle
                    no = random.randrange(-180,180,30)
                    flipped_patch = scipy.ndimage.interpolation.rotate(fixed_Train_Patch[k][j], no,axes=(1, 0),
                        reshape=False, output=None, order=3, mode='constant', cval=0.0, prefilter=False)
                Train_Patch[k].append(flipped_patch)
                Train_Label[k].append(k)
        Resample_Num_Count.append(len(Train_Patch[k]))
OS_Aug_Num_Training_Each = []
for k in range(Num_Classes):
    OS_Aug_Num_Training_Each.append(len(Train_Label[k]))
if flag_augment == False:
    OS_Aug_Num_Training_Each = Resample_Num_Count
# release list to elements
# Flatten the per-class nesting into two parallel flat lists.
Temp1,Temp2 = [],[]
for k in range(Num_Classes):
    Temp1.extend(Train_Patch[k])
    Temp2.extend(Train_Label[k])
Train_Patch = Temp1
Train_Label = Temp2
Train_Patch = np.array(Train_Patch)
# Convert the labels to One-Hot vector
Train_Label = np.array(Train_Label)
Test_Label = np.array(Test_Label)
Train_Label = convertToOneHot(Train_Label,num_classes=Num_Classes)
Test_Label = convertToOneHot(Test_Label,num_classes=Num_Classes)
## Save the patches in segments
# Train Data
# The training set is written as num_train_file .mat shards; the remainder of
# an uneven split is appended to the last shard.
train_dict = {}
num_train = len(Train_Patch)
num_train_file = 10
num_each_file = int(num_train / num_train_file)
res_num = num_train - num_train_file * num_each_file
Num_Each_File = num_each_file * np.ones((1,num_train_file),dtype=int)
Num_Each_File = Num_Each_File[0]
Num_Each_File[num_train_file-1] = Num_Each_File[num_train_file-1] + res_num
start = 0
for i in range(num_train_file):
    file_name = 'Train_'+str(patch_size)+'_'+str(i+1)+'.mat'
    train_dict["train_patch"] = Train_Patch[start:start+Num_Each_File[i]]
    train_dict["train_labels"] = Train_Label[start:start+Num_Each_File[i],:]
    scipy.io.savemat(os.path.join(DATA_PATH, file_name),train_dict)
    start = start + Num_Each_File[i]
# Test Data
# Same sharding scheme as for the training data above.
test_dict = {}
num_test = len(Test_Patch)
num_test_file = 10
num_each_file = int(num_test / num_test_file)
res_num = num_test - num_test_file * num_each_file
Num_Each_File = num_each_file * np.ones((1,num_test_file),dtype=int)
Num_Each_File = Num_Each_File[0]
Num_Each_File[num_test_file-1] = Num_Each_File[num_test_file-1] + res_num
start = 0
for i in range(num_test_file):
    file_name = 'Test_'+str(patch_size)+'_'+str(i+1)+'.mat'
    test_dict["test_patch"] = Test_Patch[start:start+Num_Each_File[i]]
    test_dict["test_labels"] = Test_Label[start:start+Num_Each_File[i],:]
    scipy.io.savemat(os.path.join(DATA_PATH, file_name),test_dict)
    start += Num_Each_File[i]
def add_DataSet(first,second,data_type):
    """Concatenate two shard dicts of the same kind along the sample axis.

    Each dict holds '<data_type>_patch' and '<data_type>_labels' arrays (as
    produced by the shard-saving loop above).

    Raises:
        ValueError: if data_type is neither 'train' nor 'test'.  (The original
        fell through both branches and died with UnboundLocalError.)
    """
    if data_type not in ('train', 'test'):
        raise ValueError("data_type must be 'train' or 'test', got %r" % (data_type,))
    patch_key = data_type + '_patch'
    labels_key = data_type + '_labels'
    Combined_data = {
        patch_key: np.concatenate((first[patch_key], second[patch_key]), axis=0),
        labels_key: np.concatenate((first[labels_key], second[labels_key]), axis=0),
    }
    return Combined_data
def Prepare_data():
    """Reload the sharded .mat files and merge them back into two dicts.

    Returns (Training_data, Test_data), each shaped like the dicts produced
    by add_DataSet.
    """
    Training_data = None
    for i in range(num_train_file):
        file_name = 'Train_'+str(patch_size)+'_'+str(i+1)+'.mat'
        shard = scipy.io.loadmat(os.path.join(DATA_PATH, file_name))
        # First shard seeds the accumulator; the rest are concatenated on.
        Training_data = shard if i == 0 else add_DataSet(Training_data, shard, 'train')
    Test_data = None
    for i in range(num_test_file):
        file_name = 'Test_'+str(patch_size)+'_'+str(i+1)+'.mat'
        shard = scipy.io.loadmat(os.path.join(DATA_PATH, file_name))
        Test_data = shard if i == 0 else add_DataSet(Test_data, shard, 'test')
    return Training_data, Test_data
Training_data, Test_data = Prepare_data()
print('Training Data:')
print(Training_data['train_patch'].shape)
print('Test Data:')
print(Test_data['test_patch'].shape)
# Data Summary
# The random initial values are immediately overwritten column by column.
df = pd.DataFrame(np.random.randn(Num_Classes, 4),
                  columns=['Total', 'Training', 'OS&Aug', 'Testing'])
df['Total'] = Num_Each_Class
df['Training'] = Num_Train_Each_Class
df['OS&Aug'] = OS_Aug_Num_Training_Each
df['Testing'] = Num_Test_Each_Class
print("=======================================================================")
print("Data Summary")
print("=======================================================================")
print('The size of the original HSI data is (%d,%d,%d)'%(Height,Width,Band))
print('The size of Training data is (%d)'%(num_train))
print('The size of Test data is (%d)'%(num_test))
print('The size of each sample is (%d,%d,%d)'%(Band,patch_size,patch_size))
print('-----------------------------------------------------------------------')
print("The Data Division is")
print(df)
duration_time = time.time() - start_time
print("=======================================================================")
print('Data Preparation is Completed! (It takes %.5f seconds)'%(duration_time))
print("=======================================================================")
def load_index_data():
    """Rebuild train/test dicts from index files saved in the working dir.

    TrainIndex.mat / TestIndex.mat hold flat pixel indices into All_data;
    raw labels are shifted by -1 (0 is background) before one-hot encoding.
    Returns (TrainData, TestData, train_index, test_index).
    """
    data_path = os.getcwd()
    train_index = scipy.io.loadmat(os.path.join(data_path, 'TrainIndex.mat'))['TrainIndex'][0]
    test_index = scipy.io.loadmat(os.path.join(data_path, 'TestIndex.mat'))['TestIndex'][0]

    def gather(indices, patch_key, label_key):
        # One patch/label pair per flat pixel index.
        patches = np.array([All_data['patch'][i] for i in indices])
        labels = np.array([All_data['labels'][i] for i in indices])
        return {patch_key: patches,
                label_key: convertToOneHot(labels - 1, num_classes=Num_Classes)}

    TrainData = gather(train_index, 'train_patch', 'train_labels')
    TestData = gather(test_index, 'test_patch', 'test_labels')
    return TrainData, TestData, train_index, test_index
5ebd6ec4df2b4d6b96a69a2fe794a4cefe53fc90 | 1,253 | py | Python | fedml_api/model/linear/har/encoders.py | ziqi-zhang/FedML | 83cfc1245609c50f21d13be5f180f67576c57735 | [
"Apache-2.0"
] | null | null | null | fedml_api/model/linear/har/encoders.py | ziqi-zhang/FedML | 83cfc1245609c50f21d13be5f180f67576c57735 | [
"Apache-2.0"
] | null | null | null | fedml_api/model/linear/har/encoders.py | ziqi-zhang/FedML | 83cfc1245609c50f21d13be5f180f67576c57735 | [
"Apache-2.0"
] | null | null | null |
import torch
from torch import nn
from pdb import set_trace as st
def get_encoder(model_type):
    """Look up an encoder class defined in this module by name.

    The name is normalised to Capitalized form ('cnn1' -> 'Cnn1').  The
    original used eval() on the caller-supplied string, which executes
    arbitrary code; a plain globals() lookup resolves the same module-level
    names safely.

    Raises:
        ValueError: if no encoder with that name exists in this module.
    """
    model_type = model_type.lower().capitalize()
    try:
        return globals()[model_type]
    except KeyError:
        raise ValueError("Unknown encoder type: {}".format(model_type))
class Cnn1(nn.Module):
    """Small 1-D CNN classifier: two conv layers, max-pool, two dense layers.

    data_size[0] gives the number of input channels.  The hard-coded Linear
    input (1984 = 32 channels * 62 steps) corresponds to an input length of
    128 samples.  The forward pass ends in a Softmax, so each output row is a
    probability distribution over the n_classes classes.
    """

    def __init__(self, data_size, n_classes):
        super(Cnn1, self).__init__()
        self.n_chan = data_size[0]
        self.n_classes = n_classes
        # Feature extractor.
        self.conv1 = nn.Conv1d(self.n_chan, 32, kernel_size=3, stride=1)
        self.conv2 = nn.Conv1d(32, 32, kernel_size=3, stride=1)
        self.drop = nn.Dropout(p=0.5)
        self.pool = nn.MaxPool1d(kernel_size=2, stride=2)
        # Classifier head.
        self.lin3 = nn.Linear(1984, 100)
        self.lin4 = nn.Linear(100, self.n_classes)
        self.softmax = nn.Softmax(dim=1)

    def forward(self, x):
        out = self.conv1(x).relu()
        out = self.conv2(out).relu()
        out = self.pool(self.drop(out))
        # Collapse (channels, steps) into one feature vector per sample.
        out = torch.flatten(out, start_dim=1)
        out = self.drop(self.lin3(out))
        out = self.lin4(out)
        return self.softmax(out)
| 26.659574 | 72 | 0.584198 | 1,063 | 0.848364 | 0 | 0 | 0 | 0 | 0 | 0 | 132 | 0.105347 |
5ebf381a379c6a121f8f5413969634f97d6a2668 | 263 | py | Python | validadores/regras/valor.py | mayronceccon/olist-python-labs-project | 217188212158ba2401866e06173d762e346ebeee | [
"MIT"
] | null | null | null | validadores/regras/valor.py | mayronceccon/olist-python-labs-project | 217188212158ba2401866e06173d762e346ebeee | [
"MIT"
] | null | null | null | validadores/regras/valor.py | mayronceccon/olist-python-labs-project | 217188212158ba2401866e06173d762e346ebeee | [
"MIT"
] | null | null | null | from .regra import Regra
class Valor(Regra):
    """Rule that checks whether a value can be read as a number.

    The original converted the value eagerly in __init__ (so bad input raised
    a bare ValueError before is_valid() ever ran) and then type-checked the
    stored float, a test that could never fail.  The raw value is now kept
    and converted inside is_valid(), so invalid input is reported through
    this rule's own exception.
    """

    def __init__(self, valor):
        # Keep the raw value; validation happens in is_valid().
        self.__valor = valor

    def is_valid(self):
        """Raise Exception('Valor inválido') when the value is not numeric."""
        try:
            float(self.__valor)
        except (TypeError, ValueError):
            raise Exception('Valor inválido')
| 23.909091 | 77 | 0.65019 | 236 | 0.893939 | 0 | 0 | 0 | 0 | 0 | 0 | 17 | 0.064394 |
5ebfcba39af614fbdd7ab0a691121807d8fb70ac | 4,075 | py | Python | src/python/pants/backend/core/tasks/scm_publish.py | hythloday/pants | 107e9b0957f6949ac4bd535fbef8d2d8cba05c5c | [
"Apache-2.0"
] | 11 | 2015-01-20T01:39:41.000Z | 2019-08-08T07:27:44.000Z | src/python/pants/backend/core/tasks/scm_publish.py | hythloday/pants | 107e9b0957f6949ac4bd535fbef8d2d8cba05c5c | [
"Apache-2.0"
] | 1 | 2016-03-15T20:35:18.000Z | 2016-03-15T20:35:18.000Z | src/python/pants/backend/core/tasks/scm_publish.py | fakeNetflix/square-repo-pants | 28a018c7f47900aec4f576c81a52e0e4b41d9fec | [
"Apache-2.0"
] | 5 | 2015-03-30T02:46:53.000Z | 2018-03-08T20:10:43.000Z | # coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (nested_scopes, generators, division, absolute_import, with_statement,
print_function, unicode_literals)
from abc import abstractmethod
import re
from pants.base.exceptions import TaskError
class Version(object):
  """Base type for release versions, either semantic or named.

  NOTE(review): @abstractmethod below has no enforcement effect because this
  class does not use ABCMeta/ABC as its metaclass; subclasses are expected,
  but not forced, to override version().
  """
  @staticmethod
  def parse(version):
    """Attempts to parse the given string as Semver, then falls back to Namedver."""
    try:
      return Semver.parse(version)
    except ValueError:
      return Namedver.parse(version)

  @abstractmethod
  def version(self):
    """Returns the string representation of this Version."""
class Namedver(Version):
  """A free-form version label that is explicitly *not* a semantic version."""

  _VALID_NAME = re.compile('^[-_A-Za-z0-9]+$')

  @classmethod
  def parse(cls, version):
    """Validates and wraps a named version string.

    Rejects strings with characters outside [-_A-Za-z0-9] and strings that
    would parse as semantic versions.
    """
    if not cls._VALID_NAME.match(version):
      raise ValueError("Named versions must be alphanumeric: '{0}'".format(version))
    looks_like_semver = True
    try:
      Semver.parse(version)
    except ValueError:
      looks_like_semver = False
    if looks_like_semver:
      raise ValueError("Named versions must not be valid semantic versions: '{0}'".format(version))
    return Namedver(version)

  def __init__(self, version):
    self._version = version

  def version(self):
    """Returns the raw name this version was created with."""
    return self._version

  def __eq__(self, other):
    return self._version == other._version

  def __cmp__(self, other):
    # Named versions have no ordering; comparing them is always an error.
    raise ValueError("{0} is not comparable to {1}".format(self, other))

  def __repr__(self):
    return 'Namedver({0})'.format(self.version())
class Semver(Version):
  """A semantic version: major.minor.patch, with an optional -SNAPSHOT patch."""

  @staticmethod
  def parse(version):
    """Parses 'major.minor.patch' into a Semver.

    Raises ValueError unless there are exactly three dot-separated integer
    components.  (The original raised a bare ValueError with no message for
    the wrong-component-count case.)
    """
    components = version.split('.', 3)
    if len(components) != 3:
      raise ValueError('Invalid version %s - must be of the form major.minor.patch' % version)
    major, minor, patch = components
    def to_i(component):
      try:
        return int(component)
      except (TypeError, ValueError):
        raise ValueError('Invalid revision component %s in %s - '
                         'must be an integer' % (component, version))
    return Semver(to_i(major), to_i(minor), to_i(patch))

  def __init__(self, major, minor, patch, snapshot=False):
    self.major = major
    self.minor = minor
    self.patch = patch
    self.snapshot = snapshot

  def bump(self):
    # A bump of a snapshot discards snapshot status
    return Semver(self.major, self.minor, self.patch + 1)

  def make_snapshot(self):
    """Returns a copy of this version flagged as a -SNAPSHOT."""
    return Semver(self.major, self.minor, self.patch, snapshot=True)

  def version(self):
    return '%s.%s.%s' % (
      self.major,
      self.minor,
      ('%s-SNAPSHOT' % self.patch) if self.snapshot else self.patch
    )

  def __eq__(self, other):
    return self.__cmp__(other) == 0

  def __cmp__(self, other):
    # Order by major, then minor, then patch; a snapshot sorts after the
    # plain release carrying the same patch number.
    diff = self.major - other.major
    if not diff:
      diff = self.minor - other.minor
      if not diff:
        diff = self.patch - other.patch
        if not diff:
          if self.snapshot and not other.snapshot:
            diff = 1
          elif not self.snapshot and other.snapshot:
            diff = -1
          else:
            diff = 0
    return diff

  def __repr__(self):
    return 'Semver(%s)' % self.version()
class ScmPublish(object):
  """Mixin providing SCM safety checks and commit helpers for publish tasks."""

  def __init__(self, scm, restrict_push_branches):
    self.restrict_push_branches = frozenset(restrict_push_branches or ())
    self.scm = scm

  def check_clean_master(self, commit=False):
    """In commit mode, refuses to proceed unless the work tree is clean and
    the current branch is one we may push from; otherwise only notes that
    the check is being skipped."""
    if not commit:
      print('Skipping check for a clean %s in test mode.' % self.scm.branch_name)
      return
    if self.restrict_push_branches:
      branch = self.scm.branch_name
      if branch not in self.restrict_push_branches:
        raise TaskError('Can only push from %s, currently on branch: %s' % (
          ' '.join(sorted(self.restrict_push_branches)), branch
        ))
    changed_files = self.scm.changed_files()
    if changed_files:
      raise TaskError('Can only push from a clean branch, found : %s' % ' '.join(changed_files))

  def commit_pushdb(self, coordinates):
    """Records the publish data for the given coordinates in SCM."""
    self.scm.commit('pants build committing publish data for push of %s' % coordinates)
| 29.316547 | 99 | 0.659387 | 3,678 | 0.902577 | 0 | 0 | 1,215 | 0.29816 | 0 | 0 | 843 | 0.206871 |
5ec056e8a3fe7a1dd0d7eedbbca11575fb19f604 | 122 | py | Python | abc/abc204/b/main.py | tonko2/AtCoder | 5d617072517881d226d7c8af09cb88684d41af7e | [
"Xnet",
"X11",
"CECILL-B"
] | 2 | 2022-01-22T07:56:58.000Z | 2022-01-24T00:29:37.000Z | abc/abc204/b/main.py | tonko2/AtCoder | 5d617072517881d226d7c8af09cb88684d41af7e | [
"Xnet",
"X11",
"CECILL-B"
] | null | null | null | abc/abc204/b/main.py | tonko2/AtCoder | 5d617072517881d226d7c8af09cb88684d41af7e | [
"Xnet",
"X11",
"CECILL-B"
] | null | null | null | N = int(input())
A = list(map(int, input().split()))
# Total overshoot past 10, summed over the elements exceeding 10.
ans = sum(a - 10 for a in A if a > 10)
print(ans)
5ec20deb24c94cf9ee316404a6b77dc4dd8b8ff6 | 842 | py | Python | students/K33401/laboratory_works/Egorov_Michil/laboratory_work_2/room/models.py | EgorovM/ITMO_ICT_WebDevelopment_2021-2022 | 35c41ba024d7a3cd89654bd4db23f7d447e0f0a2 | [
"MIT"
] | null | null | null | students/K33401/laboratory_works/Egorov_Michil/laboratory_work_2/room/models.py | EgorovM/ITMO_ICT_WebDevelopment_2021-2022 | 35c41ba024d7a3cd89654bd4db23f7d447e0f0a2 | [
"MIT"
] | null | null | null | students/K33401/laboratory_works/Egorov_Michil/laboratory_work_2/room/models.py | EgorovM/ITMO_ICT_WebDevelopment_2021-2022 | 35c41ba024d7a3cd89654bd4db23f7d447e0f0a2 | [
"MIT"
] | null | null | null | from django.db import models
class Room(models.Model):
    """A study room with a subject category, capacity and audio flag."""
    # Machine value -> human-readable label (Russian UI strings).
    SUBJECTS = (
        ('math', 'Математика'),
        ('inf', 'Информатика'),
        ('othr', 'Другое')
    )
    # Machine value -> hex colour used when rendering a room's subject badge.
    SUBJECTS_COLOR = (
        ('math', '#28a745'),
        ('inf', '#007bff'),
        ('othr', '#6c757d')
    )
    name = models.CharField(max_length=32)
    subject = models.CharField(max_length=4, choices=SUBJECTS)
    description = models.TextField()
    # NOTE(review): creator is stored as a plain string, not a user FK;
    # max_length=162 looks arbitrary - confirm against the auth model.
    creator = models.CharField(max_length=162)
    max_people = models.IntegerField(default=5)
    audio_works = models.BooleanField(default=False)
    # auto_now updates this on every save, so it is "last modified", not
    # strictly a publication date.
    pub_date = models.DateTimeField(auto_now=True)
    def __str__(self):
        return self.name
    def subject_name(self):
        """Human-readable label for this room's subject."""
        return dict(self.SUBJECTS)[self.subject]
    def subject_color(self):
        """Display colour for this room's subject."""
        return dict(self.SUBJECTS_COLOR)[self.subject]
| 24.764706 | 62 | 0.627078 | 837 | 0.963176 | 0 | 0 | 0 | 0 | 0 | 0 | 121 | 0.139241 |
5ec2acbce4a69ac08d74be58cb9979e47b281c7d | 412 | py | Python | pytalk/obsolete/message.py | henryz2004/pytalk | 76d84985ddb1e049cc4db2d5d11b52c49927164e | [
"MIT"
] | 3 | 2018-02-27T00:47:38.000Z | 2021-06-04T20:50:13.000Z | pytalk/obsolete/message.py | henryz2004/pytalk | 76d84985ddb1e049cc4db2d5d11b52c49927164e | [
"MIT"
] | null | null | null | pytalk/obsolete/message.py | henryz2004/pytalk | 76d84985ddb1e049cc4db2d5d11b52c49927164e | [
"MIT"
] | null | null | null | from pynet import socket_utility
class Message:
    """
    Wraps a single outgoing string message.

    NOTE(review): the previous docstring claimed the class also stores
    whether the message is a notification (NTF) or plain message (MSG), but
    no such flag exists in the code - only the text is kept.
    """
    def __init__(self, message):
        self.message = message
    def prepare(self, max_bytes=None):
        """Return the wire-ready form of this message via socket_utility."""
        return socket_utility.prepare(self.message, max_bytes=max_bytes)
    def __repr__(self):
        return self.message
| 22.888889 | 97 | 0.650485 | 372 | 0.902913 | 0 | 0 | 0 | 0 | 0 | 0 | 110 | 0.26699 |
5ec47955f1ecf4b81bd290a76d77ba6d233be01c | 1,181 | py | Python | backend/keplerapi/api/migrations/0010_auto_20200627_2003.py | ADSPI/kepler | db45471e3428eea93579e48f130f255a1f5974c4 | [
"MIT"
] | 2 | 2020-02-16T15:23:21.000Z | 2020-03-07T12:39:56.000Z | backend/keplerapi/api/migrations/0010_auto_20200627_2003.py | creativepisystem/kepler | 307708666a1913fbb369a57b5fca04b20209929d | [
"MIT"
] | null | null | null | backend/keplerapi/api/migrations/0010_auto_20200627_2003.py | creativepisystem/kepler | 307708666a1913fbb369a57b5fca04b20209929d | [
"MIT"
] | 1 | 2020-05-13T00:07:18.000Z | 2020-05-13T00:07:18.000Z | # Generated by Django 3.0.3 on 2020-06-27 20:03
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: adds newest-first default ordering to three
    models, adds accepted/finished timestamps to HiredService, and replaces
    the choices on Interests.interest.  Generated code - edit comments only."""

    dependencies = [
        ('api', '0009_auto_20200625_2347'),
    ]

    operations = [
        # Newest-first default ordering for list views.
        migrations.AlterModelOptions(
            name='hiredservice',
            options={'ordering': ['-created_at']},
        ),
        migrations.AlterModelOptions(
            name='person',
            options={'ordering': ['-created_at']},
        ),
        migrations.AlterModelOptions(
            name='service',
            options={'ordering': ['-created_at']},
        ),
        # Nullable timestamps: unset until the service is accepted/finished.
        migrations.AddField(
            model_name='hiredservice',
            name='accepted_at',
            field=models.DateTimeField(null=True),
        ),
        migrations.AddField(
            model_name='hiredservice',
            name='finished_at',
            field=models.DateTimeField(null=True),
        ),
        migrations.AlterField(
            model_name='interests',
            name='interest',
            field=models.CharField(choices=[('1', 'EXEMPLO1'), ('2', 'EXEMPLO2'), ('3', 'EXEMPLO3'), ('0', 'OTHER')], max_length=2),
        ),
    ]
| 28.804878 | 132 | 0.54022 | 1,088 | 0.921253 | 0 | 0 | 0 | 0 | 0 | 0 | 301 | 0.254869 |
5ec50ef5996e125d5eaf7ab56c50549bb75fe8e9 | 1,184 | py | Python | Scripts/ReverseSearch/social_media.py | balswyan/senior-capstone-fall-2018 | 8740614f0db917bfdc5131095fe566a92b806e73 | [
"MIT"
] | 1 | 2020-03-03T01:01:41.000Z | 2020-03-03T01:01:41.000Z | Scripts/ReverseSearch/social_media.py | balswyan/senior-capstone-fall-2018 | 8740614f0db917bfdc5131095fe566a92b806e73 | [
"MIT"
] | null | null | null | Scripts/ReverseSearch/social_media.py | balswyan/senior-capstone-fall-2018 | 8740614f0db917bfdc5131095fe566a92b806e73 | [
"MIT"
] | null | null | null | import urllib2
from cookielib import CookieJar
import os
import re
import time
import json
cookies = CookieJar()
opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cookies))
opener.addheaders = [('User-agent', 'Mozilla/5.0 (X11; Linux i686) AppleWebKit/537.17 '
                 '(KHTML, like Gecko) Chrome/24.0.1312.27 Safari/537.17')]
# Build social-media search URLs for every missing person in the input file.
with open("missing_people.json") as f:
    people = json.load(f)
for person in people['results']:
    facebook_profile = 'https://www.facebook.com/search/top/?q='+ person['firstname'] + '%20' + person['lastname']
    facebook_post ='https://www.facebook.com/search/posts/?q='+ person['firstname'] + '%20' + person['lastname']
    facebook_news = 'https://www.facebook.com/search/str/' + person['firstname'] + '%20' + person['lastname'] + '/links-keyword/stories-news-pivot'
    instagram_tags = 'https://www.instagram.com/explore/tags/'+ person['firstname'] + person['lastname']
    twitter_search = 'https://twitter.com/search?q='+ person['firstname'] + '%20' + person['lastname'] + '&src=typd'
    # Bug fix: the original was missing the '+' before '?src=hash', which is a
    # syntax error (implicit concatenation only works between two literals).
    twitter_hashtag = 'https://twitter.com/hashtag/' + person['firstname'] + person['lastname'] + '?src=hash'
    # Bug fix: the original `print(What ever you want)` placeholder did not
    # parse; emit the URLs that were just built instead.
    for url in (facebook_profile, facebook_post, facebook_news,
                instagram_tags, twitter_search, twitter_hashtag):
        print(url)
| 47.36 | 145 | 0.686655 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 575 | 0.485642 |
5ec54d097af06658b5b632d52a4f3101a7a15d52 | 1,939 | py | Python | Server.py | ReFormationPro/SPSC | d3be0637a095ddfcfe2d32272b5a6ca962b393da | [
"Apache-2.0"
] | null | null | null | Server.py | ReFormationPro/SPSC | d3be0637a095ddfcfe2d32272b5a6ca962b393da | [
"Apache-2.0"
] | null | null | null | Server.py | ReFormationPro/SPSC | d3be0637a095ddfcfe2d32272b5a6ca962b393da | [
"Apache-2.0"
] | null | null | null | import socket as Socket
import threading as Threading
from pprint import pprint
import Packet
def main():
    """Launch the server thread; the interactive console is currently disabled."""
    server = startServer()
    # getCommands(server)  # operator console, intentionally commented out
    print("Goodbye.")
def getCommands(serverThread):
    """Minimal admin console: loops until the operator halts the server."""
    print("Server Commands")
    while True:
        choice = input("0- Halt")
        if choice != "0":
            print("Unknown Command")
            continue
        print("Halting the server.")
        serverThread.stop()
        break
def startServer():
    """Create, start and return the accept-loop thread."""
    serverThread = ServerThread()
    serverThread.start()
    return serverThread
class ServerThread(Threading.Thread):
    """Accept loop: listens on localhost:8888 and spawns one handler thread
    per client connection.

    isRunning is a cooperative stop flag; note that accept() blocks, so
    stop() only takes effect after the next connection arrives.
    """
    isRunning = True

    def __init__(self):
        super(ServerThread, self).__init__()

    def stop(self):
        """Ask the accept loop to exit after the next accepted connection."""
        self.isRunning = False

    def run(self):
        socket = Socket.socket()
        socket.bind(("localhost", 8888))
        socket.listen(99)
        while self.isRunning:
            clsocket, claddr = socket.accept()
            print(claddr)
            # Hand the client socket to its handler thread.  Bug fix: the
            # original closed clsocket right here, yanking the connection out
            # from under the ClientHandlerThread it had just started; the
            # handler now owns the socket and closes it when the client is done.
            self._processClient(clsocket, claddr)
        socket.close()

    def _processClient(self, sock, addr):
        #sock.send((b'A')*25)
        # Keep a module-level reference to the most recent handler thread.
        global latest
        latest = ClientHandlerThread(sock, addr)
        latest.start()
class ClientHandlerThread(Threading.Thread):
    """Per-client protocol loop driven by the Packet module: offers a close
    or echo choice on each round until the client asks to close."""
    isRunning = True
    PACKET_HEADER = 1  # header size handed to Packet.getPacket

    def __init__(self, socket, addr):
        super(ClientHandlerThread, self).__init__()
        self.socket = socket
        self.addr = addr

    def stop(self):
        """Ask the protocol loop to exit before its next round."""
        self.isRunning = False

    def run(self):
        while self.isRunning:
            Packet.sendPacket(self.socket, {"msg": "0- Close 1- Echo"})
            cmd = Packet.getPacket(self.socket, self.PACKET_HEADER)
            if cmd["type"] == "0":
                Packet.sendPacket(self.socket, {"msg": "Goodbye friend."})
                print("Closing the connection.")
                # Bug fix: the original called socket.close() on a name that
                # does not exist here (the module is imported as Socket),
                # raising NameError instead of closing the client connection.
                self.socket.close()
                self.stop()
                break
            elif cmd["type"] == "1":
                print("type 1 received")
                print(cmd["msg"])
                Packet.sendPacket(self.socket, {"msg": "haha I deceived you"})
            else:
                print("Unknown Command")
# Guard the entry point so importing this module does not start the server.
if __name__ == "__main__":
    main()
| 23.361446 | 70 | 0.639505 | 1,428 | 0.736462 | 0 | 0 | 0 | 0 | 0 | 0 | 288 | 0.14853 |
5ec5c3c5ad95f9da38c7ad657235db69b5b006c7 | 354 | py | Python | hackerrank/algorithms/general/kangaroo/kangaroo.py | nicklambourne/hackerrank-solutions | 9536aa16a67325566e6d3ebea5d5f2c5bf12a05d | [
"MIT"
] | null | null | null | hackerrank/algorithms/general/kangaroo/kangaroo.py | nicklambourne/hackerrank-solutions | 9536aa16a67325566e6d3ebea5d5f2c5bf12a05d | [
"MIT"
] | null | null | null | hackerrank/algorithms/general/kangaroo/kangaroo.py | nicklambourne/hackerrank-solutions | 9536aa16a67325566e6d3ebea5d5f2c5bf12a05d | [
"MIT"
] | null | null | null | #!/bin/python3
import sys
def kangaroo(x1, v1, x2, v2):
    """Return "YES" if two kangaroos starting at x1/x2 with hop sizes v1/v2
    ever occupy the same spot after the same number of hops, else "NO".
    """
    if x1 == x2:
        # Same starting spot: already together.  This also fixes the original
        # ZeroDivisionError when x1 == x2 and v1 == v2.
        return "YES"
    if (x1 < x2 and v1 <= v2) or (x2 < x1 and v2 <= v1):
        # The kangaroo that starts behind never gains ground.
        return "NO"
    # The trailing kangaroo closes the gap by |v1 - v2| per hop; they meet
    # exactly when the starting gap is a whole multiple of that.
    if ((x2 - x1) % (v2 - v1)) == 0:
        return "YES"
    return "NO"
# Read the four space-separated integers and report whether the kangaroos meet.
x1, v1, x2, v2 = [int(token) for token in input().strip().split(' ')]
result = kangaroo(x1, v1, x2, v2)
print(result)
| 22.125 | 56 | 0.519774 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 30 | 0.084746 |
5ec62c30a034128c3c15a108e434ab6f4d99a322 | 216 | py | Python | plugins/espcms_cms.py | cflq3/getcms | 6cf07da0ea3ec644866df715cff1f311a46ee378 | [
"MIT"
] | 22 | 2016-09-01T08:27:07.000Z | 2021-01-11T13:32:59.000Z | plugins/espcms_cms.py | cflq3/getcms | 6cf07da0ea3ec644866df715cff1f311a46ee378 | [
"MIT"
] | null | null | null | plugins/espcms_cms.py | cflq3/getcms | 6cf07da0ea3ec644866df715cff1f311a46ee378 | [
"MIT"
] | 20 | 2015-11-07T19:09:48.000Z | 2018-05-02T03:10:41.000Z | #!/usr/bin/env python
# encoding: utf-8
def run(whatweb, pluginname):
    """espcms fingerprint plugin entry point.

    Flags the target as espcms when either the fetched page content or the
    stock template footer file matches the signature.
    """
    # Signature match against the fetched page body.
    whatweb.recog_from_content(pluginname, "espcms")
    # Known static template path shipped with espcms installs.
    whatweb.recog_from_file(pluginname, "templates/wap/cn/public/footer.html", "espcms")
| 21.6 | 88 | 0.736111 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 91 | 0.421296 |
5ec64e7203ff5e47285eff2de68b2cba358d3dc6 | 116 | py | Python | examples/02_current_price.py | sharebook-kr/coinonepy | 9d946a20d2228f90f3fe8dfb557e16a4ce214151 | [
"MIT"
] | 2 | 2021-08-31T09:06:05.000Z | 2021-12-03T12:26:49.000Z | examples/02_current_price.py | sharebook-kr/coinonepy | 9d946a20d2228f90f3fe8dfb557e16a4ce214151 | [
"MIT"
] | null | null | null | examples/02_current_price.py | sharebook-kr/coinonepy | 9d946a20d2228f90f3fe8dfb557e16a4ce214151 | [
"MIT"
] | 1 | 2021-12-03T12:26:50.000Z | 2021-12-03T12:26:50.000Z | import coinonepy
# Fetch the latest BTC ticker from Coinone and print the last trade price.
ticker = coinonepy.get_current_price("btc")
last_price = float(ticker['last'])
print(last_price)
5ec664692a8004415ba033ef9d8d62ed175a5bc8 | 2,115 | py | Python | tests/techniques/test_train_policy_gradient.py | alphagamatoe/AlphaToe | a7cd0969aa46dfd151a22ed8b9aec1a894747b17 | [
"MIT"
] | 172 | 2016-09-27T12:23:10.000Z | 2022-01-19T09:52:11.000Z | tests/techniques/test_train_policy_gradient.py | afcarl/AlphaToe | 1220f4f883dbbd7ac1d84092bdaf04ca18a4dbc2 | [
"MIT"
] | 13 | 2018-07-19T09:42:28.000Z | 2018-09-25T15:08:05.000Z | tests/techniques/test_train_policy_gradient.py | afcarl/AlphaToe | 1220f4f883dbbd7ac1d84092bdaf04ca18a4dbc2 | [
"MIT"
] | 63 | 2016-09-27T13:00:51.000Z | 2021-04-04T04:34:37.000Z | import functools
from unittest import TestCase
from common.base_game_spec import BaseGameSpec
from common.network_helpers import create_network
from games.tic_tac_toe import TicTacToeGameSpec
from games.tic_tac_toe_x import TicTacToeXGameSpec
from techniques.train_policy_gradient import train_policy_gradients
class _VerySimpleGameSpec(BaseGameSpec):
    """Two-cell toy game used to sanity-check the training loop quickly.

    The winner is whatever side occupies cell 0, so a policy only has to
    learn to play move 0.
    """

    def __init__(self):
        # Deliberately skip BaseGameSpec.__init__; this stub needs no setup.
        pass

    def new_board(self):
        return [0, 0]

    def apply_move(self, board_state, move, side):
        board_state[move] = side
        return board_state

    def has_winner(self, board_state):
        return board_state[0]

    def available_moves(self, board_state):
        moves = []
        for position, occupant in enumerate(board_state):
            if occupant == 0:
                moves.append(position)
        return moves

    def board_dimensions(self):
        return 2,
class TestTrainPolicyGradient(TestCase):
    """Integration tests: policy-gradient training should reach a decent
    win rate against a random opponent."""

    def test_learn_simple_game(self):
        # The two-cell game is trivially learnable, so expect near-perfect play.
        spec = _VerySimpleGameSpec()
        network_factory = functools.partial(create_network, 2, (4,))
        _, win_rate = train_policy_gradients(spec, network_factory, None,
                                             learn_rate=0.1,
                                             number_of_games=1000,
                                             print_results_every=100,
                                             batch_size=20,
                                             randomize_first_player=False)
        self.assertGreater(win_rate, 0.9)

    def test_tic_tac_toe(self):
        # Full tic-tac-toe: only require clearly-better-than-random play.
        spec = TicTacToeGameSpec()
        network_factory = functools.partial(create_network, spec.board_squares(),
                                            (100, 100, 100,))
        _, win_rate = train_policy_gradients(spec, network_factory, None,
                                             learn_rate=1e-4,
                                             number_of_games=60000,
                                             print_results_every=1000,
                                             batch_size=100,
                                             randomize_first_player=False)
        self.assertGreater(win_rate, 0.4)
| 39.90566 | 106 | 0.580142 | 1,797 | 0.849645 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
5ec6853372a2b5e21b95ce1c9271c4a2d1694982 | 2,865 | py | Python | paddlespeech/text/speechtask/punctuation_restoration/model/lstm.py | hysunflower/PaddleSpeech | bf393573a4f738b163c672e075999b97f445a39a | [
"Apache-2.0"
] | null | null | null | paddlespeech/text/speechtask/punctuation_restoration/model/lstm.py | hysunflower/PaddleSpeech | bf393573a4f738b163c672e075999b97f445a39a | [
"Apache-2.0"
] | null | null | null | paddlespeech/text/speechtask/punctuation_restoration/model/lstm.py | hysunflower/PaddleSpeech | bf393573a4f738b163c672e075999b97f445a39a | [
"Apache-2.0"
] | null | null | null | # Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import paddle
import paddle.nn as nn
import paddle.nn.initializer as I
class RnnLm(nn.Layer):
    """LSTM language model emitting per-token punctuation logits.

    Pipeline: token ids -> embedding -> dropout -> (multi-layer) LSTM ->
    dropout -> linear projection to ``punc_size`` classes. ``forward``
    returns both the raw logits and their softmax.
    """

    def __init__(self,
                 vocab_size,
                 punc_size,
                 hidden_size,
                 num_layers=1,
                 init_scale=0.1,
                 dropout=0.0):
        super(RnnLm, self).__init__()
        self.hidden_size = hidden_size
        self.num_layers = num_layers
        self.init_scale = init_scale
        self.punc_size = punc_size

        def uniform_attr():
            # Fresh ParamAttr per parameter, all U(-init_scale, +init_scale).
            return paddle.ParamAttr(
                initializer=I.Uniform(low=-init_scale, high=init_scale))

        self.embedder = nn.Embedding(
            vocab_size, hidden_size, weight_attr=uniform_attr())
        self.lstm = nn.LSTM(
            input_size=hidden_size,
            hidden_size=hidden_size,
            num_layers=num_layers,
            dropout=dropout,
            weight_ih_attr=uniform_attr(),
            weight_hh_attr=uniform_attr())
        self.fc = nn.Linear(
            hidden_size,
            punc_size,
            weight_attr=uniform_attr(),
            bias_attr=uniform_attr())
        self.dropout = nn.Dropout(p=dropout)
        self.softmax = nn.Softmax()

    def forward(self, inputs):
        """Return (logits, probabilities), each shaped [batch*time, punc_size]."""
        embedded = self.dropout(self.embedder(inputs))
        hidden_seq, (_, _) = self.lstm(embedded)
        projected = self.fc(self.dropout(hidden_seq))
        logits = paddle.reshape(projected, shape=[-1, self.punc_size])
        return logits, self.softmax(logits)
class CrossEntropyLossForLm(nn.Layer):
    """Summed per-position cross-entropy loss for the LM outputs."""

    def __init__(self):
        super(CrossEntropyLossForLm, self).__init__()

    def forward(self, y, label):
        """Compute the sum over time of the batch-mean cross entropy."""
        target = paddle.unsqueeze(label, axis=2)
        per_token = paddle.nn.functional.cross_entropy(
            input=y, label=target, reduction='none')
        per_token = paddle.squeeze(per_token, axis=[2])
        # Average over the batch axis, then sum across time steps.
        return paddle.sum(paddle.mean(per_token, axis=[0]))
| 33.313953 | 74 | 0.613962 | 2,178 | 0.760209 | 0 | 0 | 0 | 0 | 0 | 0 | 603 | 0.210471 |
5ec76f7b2d8bd24f38f18fcf89397e41c3adc670 | 3,448 | py | Python | sdk/python/core/tests/test_sanity_augmentation.py | YDK-Solutions/ydk | 7ab961284cdc82de8828e53fa4870d3204d7730e | [
"ECL-2.0",
"Apache-2.0"
] | 125 | 2016-03-15T17:04:13.000Z | 2022-03-22T02:46:17.000Z | sdk/python/core/tests/test_sanity_augmentation.py | YDK-Solutions/ydk | 7ab961284cdc82de8828e53fa4870d3204d7730e | [
"ECL-2.0",
"Apache-2.0"
] | 818 | 2016-03-17T17:06:00.000Z | 2022-03-28T03:56:17.000Z | sdk/python/core/tests/test_sanity_augmentation.py | YDK-Solutions/ydk | 7ab961284cdc82de8828e53fa4870d3204d7730e | [
"ECL-2.0",
"Apache-2.0"
] | 93 | 2016-03-15T19:18:55.000Z | 2022-02-24T13:55:07.000Z | # ----------------------------------------------------------------
# Copyright 2016 Cisco Systems
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ------------------------------------------------------------------
"""
test_sanity_bundle_augmentation.py
Unittest for bundle augmentation.
"""
from __future__ import absolute_import
import sys
import unittest
from ydk.services import CRUDService
from ydk.providers import NetconfServiceProvider
from ydk.models.augmentation import ietf_aug_base_1
from ydk.models.augmentation import ietf_aug_base_2
from test_utils import assert_with_error
from test_utils import ParametrizedTestCase
from test_utils import get_device_info
class SanityYang(unittest.TestCase):
    """CRUD sanity tests for the ietf-aug-base-1/2 augmentation bundles.

    Connection parameters (hostname, username, password, ...) are injected
    as class attributes by ParametrizedTestCase before setUpClass runs.
    """

    @classmethod
    def setUpClass(cls):
        # NOTE(review): reads cls.on_demand while __main__ passes
        # non_demand= -- confirm ParametrizedTestCase maps the name.
        cls.ncc = NetconfServiceProvider(
            cls.hostname,
            cls.username,
            cls.password,
            cls.port,
            cls.protocol,
            cls.on_demand,
            cls.common_cache,
            cls.timeout)
        cls.crud = CRUDService()

    def setUp(self):
        # Start every test from a clean device state.
        self.crud.delete(self.ncc, ietf_aug_base_1.Cpython())
        self.crud.delete(self.ncc, ietf_aug_base_2.Cpython())

    def tearDown(self):
        self.crud.delete(self.ncc, ietf_aug_base_1.Cpython())
        self.crud.delete(self.ncc, ietf_aug_base_2.Cpython())

    def test_aug_base_1(self):
        """Create augmented leaves from bundle 1 and read them back."""
        cpython = ietf_aug_base_1.Cpython()
        cpython.doc.ydktest_aug_1.aug_one = 'aug one'
        cpython.doc.ydktest_aug_2.aug_two = 'aug two'
        cpython.doc.ydktest_aug_4.aug_four = 'aug four'
        cpython.lib.ydktest_aug_1.ydktest_aug_nested_1.aug_one = 'aug one'
        cpython.lib.ydktest_aug_2.ydktest_aug_nested_2.aug_two = 'aug two'
        cpython.lib.ydktest_aug_4.ydktest_aug_nested_4.aug_four = 'aug four'
        cpython.doc.disutils.four_aug_list.enabled = True
        item1 = cpython.doc.disutils.four_aug_list.Ldata()
        item2 = cpython.doc.disutils.four_aug_list.Ldata()
        item1.name, item1.number = 'one', 1
        # BUG FIX: was "item2.name, item1.number = 'two', 2", which left
        # item2.number unset and clobbered item1.number.
        item2.name, item2.number = 'two', 2
        # NOTE(review): item1/item2 are never appended to the list node;
        # confirm whether they should be attached before create().
        self.crud.create(self.ncc, cpython)
        cpython_read = self.crud.read(self.ncc, ietf_aug_base_1.Cpython())
        self.assertEqual(cpython, cpython_read)

    def test_aug_base_2(self):
        """Create the single augmented leaf from bundle 2 and read it back."""
        cpython = ietf_aug_base_2.Cpython()
        cpython.tools.aug_four = 'aug four'
        self.crud.create(self.ncc, cpython)
        cpython_read = self.crud.read(self.ncc, ietf_aug_base_2.Cpython())
        self.assertEqual(cpython, cpython_read)
if __name__ == '__main__':
    # Pull device/connection settings from the harness, run the suite,
    # and exit non-zero on failure (CI-friendly).
    device, non_demand, common_cache, timeout = get_device_info()
    suite = unittest.TestSuite()
    suite.addTest(ParametrizedTestCase.parametrize(
        SanityYang,
        device=device,
        non_demand=non_demand,
        common_cache=common_cache,
        timeout=timeout))
    ret = not unittest.TextTestRunner(verbosity=2).run(suite).wasSuccessful()
    sys.exit(ret)
| 34.138614 | 77 | 0.674304 | 1,848 | 0.535963 | 0 | 0 | 320 | 0.092807 | 0 | 0 | 872 | 0.2529 |
5eca0176387a7151273d1f8238c5afd0d8ffbd54 | 36,675 | py | Python | src/telem.py | swharden/Telem-A-Gator | e2fcca77e9ae68ec5d002409e3d46c67ddc8ebb8 | [
"MIT"
] | null | null | null | src/telem.py | swharden/Telem-A-Gator | e2fcca77e9ae68ec5d002409e3d46c67ddc8ebb8 | [
"MIT"
] | null | null | null | src/telem.py | swharden/Telem-A-Gator | e2fcca77e9ae68ec5d002409e3d46c67ddc8ebb8 | [
"MIT"
] | 1 | 2019-06-18T13:58:19.000Z | 2019-06-18T13:58:19.000Z | import time
import os
import glob
import datetime
import numpy
import threading
import subprocess
#import scipy.stats
from PyQt4 import QtCore, QtGui
import matplotlib
matplotlib.use('TkAgg')
matplotlib.rcParams['backend'] = 'TkAgg'
import pylab
def shortenTo(s,maxsize=100):
    """Return s if it fits within maxsize characters, else an elided form.

    The elided form keeps the first maxsize//2 characters and roughly the
    last maxsize//2 characters around a "..." separator.
    """
    if len(s)<=maxsize: return s
    # BUG FIX: "/" was Python 2 integer division; on Python 3 maxsize/2 is
    # a float and slicing raises TypeError. "//" behaves identically on
    # both. Note -maxsize//2 floors toward -inf, so an odd maxsize keeps
    # one more trailing char than leading -- preserved for compatibility.
    first=s[:maxsize//2]
    last=s[-maxsize//2:]
    return first+"..."+last
def messagebox(title,msg):
    """Show a modal PyQt4 information dialog with the given title and message."""
    #tempApp = QtGui.QApplication(sys.argv)
    QtGui.QMessageBox.information(QtGui.QDialog(),title,msg)
    #tempApp.exit(0)
def com2lst(s):
    """separate CSVs to a list, returning [s] if no commas."""
    return s.split(",") if "," in s else [s]
def ep2dt(ep):
    """convert an epoch time to a datetime object.

    ep may be numeric or a numeric string; it is coerced via float().
    """
    return datetime.datetime.fromtimestamp(float(ep))
def ep2st(ep):
    """convert epoch seconds to a string-formatted date (Y/m/d H:M:S)."""
    return dt2st(ep2dt(ep))
def ep2fn(ep):
    """convert epoch seconds to a file-ready date (dash-separated)."""
    dt=ep2dt(ep)
    return dt.strftime('%Y-%m-%d-%H-%M-%S')
def ep2xl(ep):
    # NOTE(review): incomplete stub -- computes dt but returns None.
    # Presumably meant to format an epoch for Excel; confirm intent.
    dt=ep2dt(ep)
def dt2ep(dt):
    """convert a datetime object to epoch seconds (local time)."""
    return time.mktime(dt.timetuple())
def dt2st(dt):
    """convert a datetime object to string-formatted date (Y/m/d H:M:S)."""
    return dt.strftime('%Y/%m/%d %H:%M:%S')
def st2dt(st):
    """convert a string-formatted date to a datetime object."""
    # str() tolerates Qt string types passed in from the GUI.
    return datetime.datetime.strptime(str(st), '%Y/%m/%d %H:%M:%S')
def st2ep(st):
    """convert a string-formatted date (Y/m/d H:M:S) to epoch seconds."""
    st=str(st)
    return dt2ep(st2dt(st))
def stripWhiteSpace(s):
    """Return s with leading/trailing space characters removed.

    Only ' ' is stripped (tabs and newlines are kept), matching the
    original loop-based behavior; unlike the loops, this also tolerates
    empty or all-space input instead of raising IndexError.
    """
    return s.strip(" ")
threads=[] # module-level registry of ThreadCMDs workers (never pruned)
def threadCmd(cmd):
    """Run cmd in a ThreadCMDs worker.

    NOTE: start() is immediately followed by join(), so despite using a
    thread this blocks the caller until the command finishes.
    """
    global threads
    threads.append(ThreadCMDs())
    threads[-1].cmd=cmd
    threads[-1].start()
    threads[-1].join()
def launchPath(path):
    """Open path in Windows Explorer (Windows-only; blocks via threadCmd)."""
    cmd="explorer.exe "+os.path.abspath(path)
    threadCmd(cmd)
class ThreadCMDs(threading.Thread):
    """Thread that runs self.cmd via subprocess and captures its output.

    Set .cmd (whitespace-split, no shell) before start(); after join(),
    .stdout and .stderr hold the captured byte streams.
    """

    def __init__(self):
        threading.Thread.__init__(self)
        self.stdout = None
        self.stderr = None
        self.cmd = "cmd.exe"

    def run(self):
        proc = subprocess.Popen(
            self.cmd.split(),
            shell=False,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE)
        self.stdout, self.stderr = proc.communicate()
class TelemSession:
"""Telemetry conversion and analysis session class.
Load this once, and interact with it accordingly.
"""
    def __init__(self):
        """Set up session state, load the default scheme, and log startup."""
        self.schemeClear()
        self.dataClear()
        self.log=[]                 # entries are [epoch, level, message]
        self.printLogLevel=15       # print debug() messages at or below this level
        self.secPerLine=10          # seconds represented by one raw data sample
        self.processing=False
        self.uimain=False           # Qt main window; set by the GUI wrapper
        self.app=False              # QApplication; set by the GUI wrapper
        self.abortNow=False         # GUI sets True to cancel schemeExecute()
        self.schemeLoad()
        #self.status="waiting"
        self.debug("loaded telemetry session class",4)
######################
### SCHEME OPTIONS ###
######################
def scheme2txt(self,scheme,showIt=False):
"""Convert a scheme to text. Optionally print it to console."""
keys=scheme.keys()
keys.sort()
out="# AUTOMATICALLY GENERATED SCHEME:\n"
for key in keys:
val=scheme[key]
if type(val)==str:
val='"'+val+'"'
val=val.replace("\\","/")
out+="%s: %s\n"%(key,val)
return out
def schemeLoad(self,fname="scheme_default.ini"):
"""load a scheme.ini file and populate the scheme."""
self.debug("loading scheme from "+fname,3)
if fname==None: fname="scheme_default.ini"
if not os.path.exists(fname):
self.debug("Default scheme not found!\nWill generate a new one.",5)
self.schemeCreateDefault()
self.schemeSave()
return
f=open(fname)
raw=f.readlines()
f.close()
for line in raw:
if len(line)<3: continue
line=line.replace("\n","")
if line[0] in [" ","#","\n","\r"]: continue
if not ":" in line: continue
var,val=line.split(":",1)
val=stripWhiteSpace(val)
val=eval(val)
self.scheme[var]=val
self.debug("setting [%s] to [%s] (%s)"%(var,val,type(val)))
self.listAvailable()
self.schemeRecalculate()
def schemeSave(self,fname="scheme_default.ini"):
"""save a scheme to a file."""
self.debug("saving scheme to "+fname,3)
out=self.scheme2txt(self.scheme)
self.debug("saving scheme:",fname)
f=open(fname,'w')
f.write(out)
f.close()
    def schemeRecalculate(self):
        """go through and do math for auto-generated fields.

        Expands "all" animal/feature selections into concrete CSV lists
        (a one-way operation), converts binnum+binunit into binsize in
        seconds, and derives the span/point counts for the baseline and
        experiment windows. Any failure is swallowed and logged.
        """
        self.listAvailable()
        try:
            if self.scheme["animals"]=="all":
                self.scheme["animals"]=",".join(self.animals)
            if self.scheme["features"]=="all":
                self.scheme["features"]=",".join(self.features)
            # binunit: 0=sec, 1=min, 2=hr, 3=day -> binsize in seconds
            if self.scheme["binunit"]==0: self.scheme["binsize"]=int(float(self.scheme["binnum"]))
            if self.scheme["binunit"]==1: self.scheme["binsize"]=int(float(self.scheme["binnum"])*int(60))
            if self.scheme["binunit"]==2: self.scheme["binsize"]=int(float(self.scheme["binnum"])*int(60*60))
            if self.scheme["binunit"]==3: self.scheme["binsize"]=int(float(self.scheme["binnum"])*int(60*60*24))
            if self.scheme["sweep"]==True: #24 hour sweeps
                self.scheme["expSpanSec"]=60*60*24
                self.scheme["baseSpanSec"]=60*60*24
                self.scheme["basePoints"]=int(self.scheme["baseSpanSec"]/self.scheme["binsize"])
                self.scheme["expPoints"]=int(self.scheme["expSpanSec"]/self.scheme["binsize"])
            else:
                self.scheme["expSpanSec"]=int(st2ep(self.scheme["expB"])-st2ep(self.scheme["expA"]))
                self.scheme["baseSpanSec"]=int(st2ep(self.scheme["baseB"])-st2ep(self.scheme["baseA"]))
                self.scheme["basePoints"]=int(self.scheme["baseSpanSec"]/self.scheme["binsize"])
                self.scheme["expPoints"]=int(self.scheme["expSpanSec"]/self.scheme["binsize"])
        except Exception:
            # NOTE(review): broad catch hides which field was bad; the
            # partial update above may leave the scheme inconsistent.
            self.debug("could not recalculate!",5)
    def schemeGood(self):
        """Returns True if the scheme is valid.

        Validation is not implemented yet; this always returns True.
        """
        # TO DO
        return True
def schemeShow(self):
keys=self.scheme.keys()
keys.sort()
for key in keys:
self.debug("%s = %s"%(key,self.scheme[key]),5)
    def schemeClear(self):
        """Completely clear the scheme (reset to an empty dict)."""
        self.scheme={}
    def schemeCreateDefault(self):
        """Generate example/demo scheme with hard-coded 2012 demo dates."""
        self.scheme["location"]=os.path.abspath("./data-npy")
        self.scheme["input"]=os.path.abspath("./data-txt")
        self.scheme["output"]=r"./output"
        self.scheme["features"]="all"
        self.scheme["animals"]="all"
        self.scheme["baseA"]="2012/05/23 19:00:00"
        self.scheme["baseB"]="2012/06/08 19:00:00"
        self.scheme["baseT"]="baseline"
        self.scheme["expA"]="2012/06/08 19:00:00"
        self.scheme["expB"]="2012/06/19 19:00:00"
        self.scheme["expT"]="experiment"
        self.scheme["baseline"]=True
        self.scheme["sweep"]=True
        self.scheme["binnum"]=1
        self.scheme["binunit"]=2 # 0=sec, 1=min, 2=hr, 3=day
        self.scheme["stdev"]=False  # False -> dataAverage() reports standard error
        ### FIGURE OPTIONS
        self.scheme["plotPrimary"]=True
        self.scheme["plotSecondary"]=False
        self.scheme["plotErrorBars"]=True
        self.scheme["plotKey"]=0
        self.scheme["plotExperiment"]=True
        self.scheme["plotBaseline"]=True
        self.scheme["plotNormalized"]=False
        ### THE FOLLOWING ARE AUTO-CALCULATED BY schemeRecalculate()
        #self.scheme["binsize"]=None #DO NOT SET THIS!
        #self.scheme["expSpanSec"]=None #DO NOT SET THIS!
        self.schemeRecalculate()
#######################
### DATA CONVERSION ###
#######################
    def convert(self):
        """Given a folder of .txt data files, generate npy files.

        Reads every *.txt under scheme["input"], writes .npy files to
        scheme["location"], updating the GUI progress bars as it goes.
        NOTE(review): the trailing UI updates run unconditionally and
        assume self.uimain is set -- confirm headless use is unsupported.
        """
        folderIn=os.path.abspath(self.scheme["input"])
        folderOut=os.path.abspath(self.scheme["location"])
        files=glob.glob(folderIn+"/*.txt")
        for i in range(len(files)):
            if self.uimain and self.app:
                self.uimain.progConvertAnimal.setMaximum(len(files))
                self.uimain.progConvertAnimal.setValue(i+1)
                self.uimain.lblConvertAnimal.setText(os.path.split(files[i])[1])
                self.app.processEvents()
            self.convertTxt2Npy(files[i],folderOut)
        self.uimain.progConvertAnimal.setValue(0)
        self.uimain.progConvertFeature.setValue(0)
        self.uimain.lblConvertAnimal.setText("complete")
        self.uimain.lblConvertFeature.setText("complete")
        messagebox("COMPLETE","file conversion complete!")
    def convertTxt2Npy(self,fnameIn,pathOut):
        """Takes an input .txt raw data file and outputs multiple .npy data files.
        ORIGINAL DATA FORMAT:
        For this to work, the export settings in the telemetry analysis software
        have to be configured as such:
        select all data, click export.
        File name: [I].txt (example: T12.txt)
        Time mode: elapsed time (seconds)
        Data format: width=3, precision=3
        checkbox enabled: Import compatible
        OUTPUT:
        Output format is numpy binary files (.npy) of evenly spaced data.
        Each point represents 10 seconds of time.
        Missing data are replaced by numpy.NaN
        """
        filePathIn,fileNameIn=os.path.split(fnameIn)
        self.debug("LOADING: "+fnameIn)
        self.uimain.lblConvertFeature.setText("loading ...")
        self.app.processEvents()
        f=open(fnameIn)
        raw=f.read()
        f.close()
        raw=raw.split("\n")
        animals=[] #[T5,T5,T5]
        features=[] #[Activity,Diastolic,Heart Rate]
        data=[]
        self.debug("READING DATA")
        for i in range(len(raw)):
            line=raw[i]
            if len(line)<10: continue
            if line[0]=="#": # WE HAVE A HEADER LINE
                if "Time: " in line:
                    # export start time; all row timestamps are relative to it
                    ep_start=st2ep(line.split(": ")[1])
                if "Col: " in line:
                    # column headers look like "Col: T12.Activity,..."
                    animal,feature=line.split(": ")[1].split(",")[0].split(".")
                    animals.append(animal)
                    features.append(feature)
            else: # WE HAVE A DATA LINE
                data.append(line.split(","))
        self.debug("CONVERTING TO MATRIX")
        self.uimain.lblConvertFeature.setText("converting to matrix ...")
        self.app.processEvents()
        data=numpy.array(data,dtype=float)
        self.debug("RESHAPING DATA")
        self.uimain.lblConvertFeature.setText("reshaping data ...")
        self.app.processEvents()
        # one row per timestamp: [time, feature1, feature2, ...]
        data=numpy.reshape(data,(-1,len(animals)+1))
        data[:,0]=data[:,0]+ep_start #turn time stamps into epoch
        if self.uimain and self.app:
            self.uimain.progConvertFeature.setMaximum(len(features))
            self.app.processEvents()
        for i in range(len(features)):
            if self.uimain and self.app:
                self.uimain.progConvertFeature.setValue(i+1)
                #self.uimain.lblConvertFeature.setText(features[i])
                self.app.processEvents()
            tag="%s-%s-%d"%(animals[i],features[i],ep_start)+"-even.npy"
            fname=os.path.join(pathOut,tag)
            self.debug("CONVERTING TO EVENLY SPACED DATA")
            self.uimain.lblConvertFeature.setText("spacing data ...")
            self.app.processEvents()
            timestamps=data[:,0].astype(int)
            values=data[:,i+1]
            # sample slot index for each row; relies on integer division
            # of int arrays (10-second grid)
            indices=(timestamps-timestamps[0])/self.secPerLine
            dayData=numpy.empty(indices[-1]+1,dtype=float)
            dayData[:]=numpy.nan       # gaps in the recording stay NaN
            dayData[indices]=values
            self.debug("SAVING "+tag)
            self.uimain.lblConvertFeature.setText("saving %s ..."%tag)
            self.app.processEvents()
            numpy.save(fname,dayData)
        return
        # to do
####################
### DATA LOADING ###
####################
    def listAvailable(self):
        """returns [animals,features] from scheme["location"].

        Also rebuilds self.animalInfo, self.animals, self.features from
        the "<animal>-<feature>-<startEp>-even.npy" filenames found.
        """
        animals,features=[],[]
        self.animalInfo=[] #[animal,startEp,endEp]
        fnames=glob.glob(self.scheme["location"]+"/*-even.npy")
        for fname in fnames:
            fn,ft=os.path.split(fname)
            ft=ft.split("-")
            if not ft[0] in animals:
                animals.append(ft[0])
            startEp=int(ft[2])
            # NOTE(review): numpy.memmap defaults to uint8, so shape[0]
            # is the file's byte length (npy header included), not the
            # sample count -- endEp is approximate; confirm intent.
            length=numpy.memmap(fname).shape[0]
            info=[ft[0],startEp,startEp+length*self.secPerLine]
            #self.debug(str(info),5)
            self.animalInfo.append(info)
            if not ft[1] in features: features.append(ft[1])
        self.animals=animals
        self.features=features
        return [animals,features]
    def selectedTimes(self):
        """Return [firstEp, lastEp] covering all currently selected animals.

        Scans self.animalInfo for animals in scheme["animals"]; also
        caches the result in self.selectedExtremes. Returns [None, None]
        when no data files are known.
        """
        if len(self.animalInfo)==0: return [None,None]
        first=None
        last=None
        selAnimals=com2lst(self.scheme["animals"])
        for info in self.animalInfo:
            if info[0] in selAnimals:
                if first==None or info[1]<first: first=info[1]
                if last==None or info[2]>last: last=info[2]
        self.selectedExtremes=[first,last]
        return [first,last]
    def loadNpy(self,fname):
        """load a filename of a .npy and return [data,animal,feature,startEp,endEp].
        You probably don't need to call this directly. loadData() calls it.

        The metadata is parsed from the filename, which must follow the
        "<animal>-<feature>-<startEp>-even.npy" convention.
        """
        fpath,ftag=os.path.split(fname)
        #self.debug("\n\n",5)
        self.debug("loading "+ftag,2)
        data=numpy.load(fname) # pulls the whole thing to ram
        #data=numpy.memmap(fname) # MEMORY MAPPING IS FASTER IF BETTER DATA TYPE
        animal,feature,startEp,mode=ftag.split(".")[0].split("-")
        startEp=int(startEp)
        endEp=startEp+len(data)*self.secPerLine
        return [data,animal,feature,startEp,endEp]
    def loadData(self,animal=None,feature=None,location=None,startEpCut=False,endEpCut=False,binsize=False,sweep=False):
        """simple way to get data from animal/feature combo. return [x],[[ys]].
        if binsize is given (sec), binning will occur.
        If startEp and/or endEp are given (epoch), trimming will occur.
        if sweep == False:
            returns [X], [[Y]]
            where x = time epochs
        if sweep == True: (day starts at the time of startEpCut)
            returns [X], [[Y],[Y],[Y]]
            where x = ticks 0-24hr
        UPDATE: returns [xs,data,startX,startX+self.secPerLine2*len(data[0])]

        NOTE(review): several divisions below ('/' on ints) rely on
        Python 2 integer-division semantics.
        """
        ### DEMO DATA ###################################
        #startEpCut=st2ep("2012/06/01 19:00:00")
        #endEpCut=st2ep("2012/06/10 19:00:00")
        #binsize=60*60 #in seconds
        #sweep=True
        #################################################
        if location==None:
            location=self.scheme["location"]
        self.secPerLine2=self.secPerLine
        fnames=glob.glob(location+"/%s-%s*-even.npy"%(animal,feature))
        if len(fnames)==0:
            self.debug("%s - %s does not exist!"%(animal,feature),2)
            return []
        fname=fnames[0]
        data,animal,feature,startEp,endEp=self.loadNpy(fname)
        self.debug("data shape before cutting/padding: %s"%str(data.shape))
        if startEpCut==False: startEpCut=startEp
        if endEpCut==False: endEpCut=endEp
        expectedPoints=int((endEpCut-startEpCut)/self.secPerLine)
        offsetStart=int(startEpCut-startEp)/self.secPerLine
        if startEpCut:
            if offsetStart<0:
                # left padding is necessary
                padding=numpy.empty(abs(offsetStart))
                padding[:]=numpy.nan
                data=numpy.concatenate((padding,data))
            elif offsetStart>0:
                #left trimming is necessary
                data=data[offsetStart:]
        if endEpCut:
            if len(data)<expectedPoints:
                # right padding is necessary
                padding=numpy.empty(expectedPoints-len(data))
                padding[:]=numpy.nan
                data=numpy.concatenate((data,padding))
            elif len(data)>expectedPoints:
                # right trimming is necessary
                data=data[:expectedPoints]
        self.debug("data shape after cutting/padding: %s"%str(data.shape))
        if binsize:
            self.debug("binning to %s"%binsize,5)
            binSamples=int(binsize/self.secPerLine) #number of samples per bin
            self.secPerLine2=self.secPerLine*binSamples #seconds per sample
            if len(data) % binSamples: # we need to extend this to the appropriate bin size
                hangover=len(data) % binSamples
                needed=numpy.empty(binSamples-hangover)
                needed[:]=numpy.NaN
                data=numpy.append(data,needed)
            data=numpy.reshape(data,(len(data)/binSamples,binSamples))
            #data=numpy.ma.masked_invalid(data).mean(axis=1) #this is bad because it makes NaN become 0
            #data=numpy.mean(data,axis=1) #now it's binned!
            ### THIS PART IS NEW #################################
            # per-bin mean over finite samples only, so NaN gaps do not
            # drag the average toward zero
            avgs=numpy.empty(len(data))
            for i in range(len(data)):
                line=data[i]
                line=line[numpy.where(numpy.isfinite(line))[0]]
                avgs[i]=numpy.average(line)
            data=avgs
            ######################################################
        self.debug("data shape at end of binning: %s"%str(data.shape))
        if sweep:
            self.debug("sweeping",5)
            samplesPerDay=int(60*60*24/self.secPerLine2)
            if len(data) % samplesPerDay: # we need to extend this to the appropriate bin size
                hangover=len(data) % samplesPerDay
                needed=numpy.empty(samplesPerDay-hangover)
                needed[:]=numpy.nan
                data=numpy.append(data,needed)
            days=len(data)/float(samplesPerDay)
            # one row per 24-hour day; x axis becomes hour-of-day ticks
            data=numpy.reshape(data,(int(days),int(len(data)/days)))
            xs=numpy.arange(0,24.0,24.0/float(len(data[0])))
        else:
            #data=numpy.array([data])
            data=numpy.atleast_2d(data)
            xs=range(int(startEpCut),int(startEpCut+self.secPerLine2*len(data[0])),int(self.secPerLine2))
            for i in range(len(xs)): xs[i]=ep2dt(xs[i])
        self.debug("data shape at end of sweeping: %s"%str(data.shape))
        if numpy.max(data)==0 or numpy.ma.count(data)==0:
            self.debug("%s - %s - NO DATA!"%(animal,feature),2)
            return []
        self.debug("returning data of size: %d"%len(data[0]))
        return [xs,data,startEpCut,startEpCut+self.secPerLine2*len(data[0])]
#######################
### DATA STATISTICS ###
#######################
    def dataAverage(self,data):
        """Given [[ys],[ys],[ys]] return [avg,err]. If stderr=False, return stdev.

        NaN samples are masked out of the statistics; when
        scheme["stdev"] is False the error is the standard error
        (stdev / sqrt(finite count per column)), otherwise the stdev.
        """
        if data is None or not data.any():
            self.debug("averager got None value",5)
            return [[],[]]
        if len(data)==1:
            self.debug("only a single data stream, nothing to average",5)
            return [data[0],numpy.zeros(len(data[0]))]
        avg=numpy.mean(numpy.ma.masked_invalid(data),axis=0)
        err=numpy.std(numpy.ma.masked_invalid(data),axis=0)
        cnt=numpy.isfinite(data).sum(0)
        if self.scheme["stdev"]==False:
            err=err/numpy.sqrt(cnt) #standard error
        if numpy.sum(numpy.isfinite(data))==0:
            self.debug("Averager got nothing but NaN. Giving back NaN.",5)
            avg[:]=numpy.NaN
            err[:]=numpy.NaN
        # convert masked columns back to plain NaN for the callers
        avg[numpy.ma.getmask(avg)]=numpy.nan
        err[numpy.ma.getmask(err)]=numpy.nan
        return [avg,err]
#################
### ANALYSIS ###
#################
    def dataClear(self):
        """reset data={} where format is as follows:
        data["feature"]=[x,E,ER,[Es,Es,Es],B,BR,[Bs,Bs,Bs],N]
        where:
        x - experiment x time points
        E - experiment average trace
        ER - experiment average error
        Es - experiment individual traces
        x2 - baseline x time points
        B - baseline average trace
        BR - baseline average error
        Bs - baseline individual traces
        N - normalized value (E-B) +/ ER
        In reality, are better stats necesary???
        """
        self.data={}
    def schemeExecute(self):
        """Run the full analysis described by self.scheme.

        For every feature x animal, loads the experiment (and optionally
        baseline) windows, averages across animals, computes the
        normalized difference, and stores the result in self.data under
        the feature name. GUI progress is updated and the run can be
        cancelled via self.abortNow.
        """
        self.schemeShow()
        self.debug("executing analysis",2)
        self.schemeRecalculate()
        self.dataClear()
        self.processing=True
        animals=com2lst(self.scheme["animals"])
        features=com2lst(self.scheme["features"])
        timeExecuteStart=time.time()
        if not os.path.exists(self.scheme["output"]):
            os.makedirs(self.scheme["output"])
        # data["feature"]=[x,E,ER,Es, x2,B,BR,Bs,N,NR]
        #                  0 1 2  3  4  5 6  7  8 9
        #dataLine=[x,Eavg,Eerr,linearEs,x2,Bavg,Berr,linearBs,norm,normErr]
        x,Eavg,Eerr,linearEs,x2,Bavg,Berr,linearBs,norm,normErr=[None]*10
        for i in range(len(features)):
            # per-animal average traces, NaN until filled
            linearEs=numpy.empty((len(animals),self.scheme["expPoints"]))
            linearEs[:]=numpy.NaN
            linearBs=numpy.empty((len(animals),self.scheme["basePoints"]))
            linearBs[:]=numpy.NaN
            for j in range(len(animals)):
                feature=features[i]
                animal=animals[j]
                progress=len(animals)*i+j
                if self.uimain and self.app:
                    if self.abortNow==True:
                        self.abortNow=False
                        return
                    self.uimain.progExecute.setMaximum(len(features)*len(animals))
                    self.uimain.progExecute.setValue(progress+1)
                    self.uimain.lblStatus.setText("processing %s - %s"%(animal,feature))
                    self.app.processEvents()
                dataLine=[None]*9
                dataPack=self.loadData(animal,feature,self.scheme["location"],st2ep(self.scheme["expA"]),st2ep(self.scheme["expB"]),int(self.scheme["binsize"]),self.scheme["sweep"])
                if len(dataPack)>0:
                    x,Es,timeA,timeB=dataPack
                    EsweepAvg,EsweepErr=self.dataAverage(Es)
                    if len(animals)==1:
                        # single animal: keep its per-day sweeps directly
                        Eavg,Eerr=EsweepAvg,EsweepErr
                        linearEs=Es
                    else:
                        linearEs[j][:]=EsweepAvg
                if self.scheme["baseline"]==True:
                    dataPack=self.loadData(animal,feature,self.scheme["location"],st2ep(self.scheme["baseA"]),st2ep(self.scheme["baseB"]),int(self.scheme["binsize"]),self.scheme["sweep"])
                    if len(dataPack)>0:
                        x2,Bs,baseA,baseB=dataPack
                        BsweepAvg,BsweepErr=self.dataAverage(Bs)
                        if len(animals)==1:
                            Bavg,Berr=BsweepAvg,BsweepErr
                            linearBs=Bs
                        else:
                            linearBs[j]=BsweepAvg
                pass # last thing to do for each animal
            if len(animals)>1:
                # grand average across the per-animal traces
                Eavg,Eerr=self.dataAverage(linearEs)
                Bavg,Berr=self.dataAverage(linearBs)
            if self.scheme["baseline"]==True:
                if len(Eavg)==len(Bavg):
                    norm=Eavg-Bavg
                    # errors combined in quadrature
                    normErr=numpy.sqrt(Eerr*Eerr+Berr*Berr)
                else:
                    self.debug("can't create baseline because lengths are uneven.")
            dataLine=[x,Eavg,Eerr,linearEs,x2,Bavg,Berr,linearBs,norm,normErr]
            self.data[feature]=dataLine
            pass #last thing to do for each feature
        timeExecute=time.time()-timeExecuteStart
        self.debug("scheme analyzed in %.03f seconds."%timeExecute,3)
        if self.uimain and self.app:
            if self.abortNow==True:
                self.abortNow=False
                return
            self.uimain.lblStatus.setText("scheme analyzed in %.03f seconds."%timeExecute)
            self.uimain.progExecute.setMaximum(len(features)*len(animals))
            self.uimain.progExecute.setValue(0)
#####################
### DATA PLOTTING ###
#####################
    def plotPopup(self):
        """Show the current plotKey figure in a blocking pylab window."""
        self.uimain.btnLaunchInteractive.setEnabled(False)
        self.plotFigure()
        pylab.show()
        self.uimain.btnLaunchInteractive.setEnabled(True)
    def summaryPopup(self):
        """Show the data-availability summary in a blocking pylab window."""
        self.schemeRecalculate()
        self.uimain.btnSummary.setEnabled(False)
        self.plotSummary()
        pylab.show()
        self.uimain.btnSummary.setEnabled(True)
    def plotSummary(self,fig=None):
        """plots summary figure for all animals in the current folder.

        One dotted horizontal line per animal marks when data exist
        (hour-binned); the baseline/experiment windows are shaded.
        NOTE(review): axes.spines.itervalues() is Python-2-only.
        """
        self.debug("generating plot summary figure...",3)
        if not fig: fig=pylab.figure()
        axes=fig.gca()
        selAnimals=com2lst(self.scheme["animals"])
        for i in range(len(selAnimals)):
            self.debug("generating plot summary figure... plotting animal %d of %d"%(i,len(selAnimals)),3)
            animal=selAnimals[i]
            feature=com2lst(self.scheme["features"])[0]
            data=self.loadData(animal,feature,binsize=60*60,sweep=False)
            if len(data)==0: continue
            xs,data,startX,endX=data
            # flat line at height i wherever this animal has samples
            ys=data[0]*0
            ys=ys+i
            axes.plot(xs,ys,'.')
        for spine in axes.spines.itervalues():
            spine.set_visible(False)
        axes.set_yticklabels(selAnimals)
        axes.yaxis.set_major_locator(matplotlib.ticker.FixedLocator(range(len(selAnimals))))
        for xlabel in axes.get_xaxis().get_ticklabels():
            xlabel.set_rotation(90)
        fig.subplots_adjust(bottom=.35,left=.08, right=0.98)
        fig.set_facecolor("#FFFFFF")
        axes.set_title("DATA SUMMARY")
        axes.autoscale()
        axes.set_ylim((-.5,i+1.5))
        x1,x2=axes.get_xlim()
        x1=x1-3
        x2=x2+3
        axes.set_xlim((x1,x2))
        if self.scheme["baseline"]:
            # shade and label the baseline and experiment date windows
            axes.axvspan(st2dt(self.scheme["baseA"]),st2dt(self.scheme["baseB"]),facecolor="b",alpha=.1)
            axes.text(ep2dt((st2ep(self.scheme["baseA"])+st2ep(self.scheme["baseB"]))/2),i+1,"baseline",color='blue',horizontalalignment='center',verticalalignment='top')
            axes.axvspan(st2dt(self.scheme["expA"]),st2dt(self.scheme["expB"]),facecolor="g",alpha=.1)
            axes.text(ep2dt((st2ep(self.scheme["expA"])+st2ep(self.scheme["expB"]))/2),i+1,"experiment",color='green',horizontalalignment='center',verticalalignment='top')
        self.debug("generating plot summary figure... COMPLETE!",3)
        return fig
    def plotFigure(self,figure=None):
        """given a figure and data key, make a pretty telemetry graph.

        Plots the feature selected by scheme["plotKey"] from self.data:
        primary averages, secondary per-sweep traces, error bars, and the
        normalized (experiment - baseline) trace, per the plot* flags.
        NOTE(review): several conditions use ".any" without calling it --
        a bound method is always truthy, so those guards never filter;
        confirm whether ".any()" was intended. Also self.data.keys()[key]
        relies on Python 2 (dict views aren't indexable on Python 3).
        """
        if not figure: figure=pylab.figure()
        axes=figure.gca()
        key=self.scheme["plotKey"]
        self.debug("plotting data for key %d (%s)"%(key,self.data.keys()[key]),3)
        key=self.data.keys()[key]
        d=self.data[key]
        if self.scheme["plotSecondary"]==True:
            if numpy.array(d[3]).any and self.scheme["plotExperiment"]:
                for yvals in d[3]:
                    # SECONDARY EXPERIMENTAL
                    axes.plot(d[0],yvals,'g-',alpha=.2)
            if numpy.array(d[7]).any and self.scheme["baseline"] and self.scheme["plotBaseline"]:
                for yvals in d[7]:
                    # SECONDARY BASELINE
                    axes.plot(d[4],yvals,'b-',alpha=.2)
        if self.scheme["plotPrimary"]==True:
            if numpy.array(d[1]).any and self.scheme["plotExperiment"]:
                # PRIMARY EXPERIMENTAL
                axes.plot(d[0],d[1],'g-',label="experiment")
            if numpy.array(d[5]).any and self.scheme["baseline"] and self.scheme["plotBaseline"]:
                # PRIMARY BASELINE
                axes.plot(d[4],d[5],'b-',label="baseline")
            if self.scheme["plotNormalized"] and d[8].any():
                # NORMALIZED
                axes.plot(d[0],d[8],'r-')
        if self.scheme["plotErrorBars"]==True:
            if numpy.array(d[1]).any and self.scheme["plotExperiment"]:
                # EXPERIMENTAL ERROR BARS
                axes.errorbar(d[0],d[1],yerr=d[2],fmt='g.')
            if numpy.array(d[5]).any and self.scheme["baseline"] and self.scheme["plotBaseline"]:
                # BASELINE ERROR BARS
                axes.errorbar(d[4],d[5],yerr=d[6],fmt='b.')
            if numpy.array(d[8]).any and self.scheme["plotNormalized"]==True:
                # NORMALIZED ERROR BARS
                axes.errorbar(d[0],d[8],yerr=d[9],fmt='r.')
        for xlabel in axes.get_xaxis().get_ticklabels():
            #TODO make labels offset by the 24 hour day start time
            xlabel.set_rotation(90)
        axes.set_title("%s - %s"%(self.scheme["animals"],key))
        axes.grid()
        figure.subplots_adjust(bottom=.35,left=.08, right=0.98)
        figure.set_facecolor("#FFFFFF")
        if self.scheme["sweep"]: axes.set_xlim([0,24])
        #figure.canvas.draw()
        return figure
###################
### DATA OUTPUT ###
###################
# data["feature"]=[x,E,ER,Es, x2,B,BR,Bs,N,NR]
# 0 1 2 3 4 5 6 7 8 9
def outputHTML(self,launchItToo=True):
self.outputImages()
out='<html><body><div align="center">'
out+="<h1>Telem-A-Gator</h2>"
out+="<h2>Summary Report</h2>"
out+='<img src="summary.png"><br>'
keys=self.data.keys()
for i in range(len(keys)):
out+='<img src="%s"><br>'%(keys[i]+".png")
out+="<h2>Scheme Data:</h2>"
out+=self.scheme2txt(self.scheme).replace("\n","<br>")
out+="</div></body></html>"
f=open(os.path.join(self.scheme["output"],"summary.html"),'w')
f.write(out)
f.close()
if launchItToo:
cmd="explorer.exe "+os.path.abspath(os.path.join(self.scheme["output"],"summary.html"))
self.debug("running: "+cmd,3)
threadCmd(cmd)
return
    def outputImages(self):
        """save every feature in data{} as an image.

        Writes <feature>.png for each analyzed feature plus summary.png
        and the scheme used, all into scheme["output"].
        """
        keys=self.data.keys()
        for i in range(len(keys)):
            self.debug("generating image for %s"%keys[i])
            self.scheme["plotKey"]=i    # plotFigure reads the key from the scheme
            self.plotFigure()
            self.debug("saving "+keys[i]+".png")
            pylab.savefig(os.path.join(self.scheme["output"],keys[i]+".png"))
            pylab.close()
        self.plotSummary()
        pylab.savefig(os.path.join(self.scheme["output"],"summary.png"))
        pylab.close()
        self.schemeSave(os.path.join(self.scheme["output"],"schemeUsed.ini"))
        self.debug("image export complete.")
def generateCSV(self,dates,avg,err,sweeps,fname):
"""given some data, format it as a proper CSV file."""
fout=os.path.join(self.scheme["output"],fname)
#matrix=numpy.array([dates,avg,err,sweeps])
if dates==None or avg==None:
#no data
return
animals=com2lst(self.scheme["animals"])
rows=3
if sweeps:
rows+=len(sweeps)
cols = len(avg)
matrix=numpy.zeros((rows,cols),dtype=numpy.object)
for i in range(len(dates)):
if type(dates[i])<>float:
dates[i]=str(dates[i])
matrix[0,:len(dates)]=dates
matrix[1,:len(avg)]=avg
matrix[2,:len(err)]=err
if sweeps:
for i in range(len(sweeps)):
matrix[3+i,:]=sweeps[i]
matrix=numpy.rot90(matrix,1)
matrix=matrix[::-1]
labels=matrix[0]
self.debug("saving %s"%(fname))
out="Time,Average,Error"
if sweeps:
for i in range(len(sweeps)):
if len(animals)>1:
out+=","+animals[i]
else:
out+=",DAY %d"%(i+1)
out+="\n"
for line in matrix:
line=line.tolist()
for i in range(len(line)):
line[i]=str(line[i])
if line[i]=='nan':
line[i]=''
out+=",".join(line)+"\n"
f=open(fout,'w')
f.write(out)
f.close()
self.schemeSave(os.path.join(self.scheme["output"],"schemeUsed.ini"))
    def outputExcel(self):
        """Write every feature in data{} out as CSV files.

        Produces <feature>-experiment.csv, <feature>-baseline.csv and
        <feature>-normalized.csv via generateCSV(). (The old docstring
        incorrectly said "as an image".)
        """
        keys=self.data.keys()
        for i in range(len(keys)):
            self.debug("generating Excel file for %s"%(keys[i]))
            dataLine=self.data[keys[i]]
            self.generateCSV(dataLine[0],dataLine[1],dataLine[2],dataLine[3],keys[i]+"-experiment.csv")
            self.generateCSV(dataLine[4],dataLine[5],dataLine[6],dataLine[7],keys[i]+"-baseline.csv")
            self.generateCSV(dataLine[0],dataLine[8],dataLine[9],None,keys[i]+"-normalized.csv")
        self.debug("Excel output complete.")
######################
### MISC PROCESSES ###
######################
def makeCrashLog(self):
    """Dump the whole session log to ./log/crashlog-<timestamp>.txt and
    pop up a message telling the user where the report was saved.

    Bug fix: the "FULL LOG OUTPUT" banner was assigned with '=' instead of
    '+=', which silently discarded the "MOST RECENT SCHEME" header line.
    """
    sep = "#"*20
    out = sep+" MOST RECENT SCHEME "+sep
    out += "\n\n\n"+sep+" FULL LOG OUTPUT "+sep+"\n\n\n"
    #self.schemeShow()
    for line in self.log:
        # each log entry is [epoch seconds, level, message]
        t, l, m = line
        out += "[%s]%s%s\n" % (ep2st(t), "-"*l, m)
    fname = "crashlog-%s.txt" % (ep2fn(time.time()))
    #fname="crashlog.txt"
    f = open('./log/'+fname, 'w')
    f.write(out)
    f.close()
    messagebox("BUG REPORT", "saved bug report as:\n"+fname)
def debug(self,msg,level=3):
"""save messages to session log with optional significance.
levels:
1 - critical, show pop-up window, exit
2 - critical, show pop-up window
3 - important
4 - casual
5 - ridiculous

NOTE(review): the code below only pops a window for level < 2 (i.e.
level 1) and never exits, so the level 1/2 descriptions above do not
match the implementation — confirm which is intended.
"""
self.log.append([time.time(),level,msg])
if level<2:
messagebox("IMPORTANT",msg)
# Echo to the console when the message is at or above the print threshold;
# indentation by level gives a crude visual hierarchy.
if level<=self.printLogLevel:
print " "*level+msg
# Mirror the message into the GUI (status label + log pane) when present.
if self.uimain and self.app:
self.uimain.lblDebug.setText(shortenTo(msg.replace("\n","")))
self.uimain.textDebug.appendPlainText(msg)
self.app.processEvents()
def showDebug(self,maxLevel=5):
"""Print every raw log entry to the console.

NOTE(review): the maxLevel parameter is accepted but never used — the
loop prints all entries regardless of level.
"""
for item in self.log:
print item
# Manual smoke test: load a known scheme, run it, and show the figure.
if __name__ == "__main__":
print "DONT RUN ME DIRECTLY."
TG=TelemSession()
# TG.summaryPopup()
TG.schemeLoad("SCOTT.ini")
TG.schemeExecute()
TG.plotFigure()
# #TG.makeCrashLog()
pylab.show()
| 37.309257 | 187 | 0.531425 | 34,389 | 0.937669 | 0 | 0 | 0 | 0 | 0 | 0 | 10,238 | 0.279155 |
5eca186387175ce199864fab1e34908e3cd7c9c5 | 2,000 | py | Python | Gathered CTF writeups/2019-10-12-hitcon/lost_key/modulus.py | mihaid-b/CyberSakura | f60e6b6bfd6898c69b84424b080090ae98f8076c | [
"MIT"
] | 1 | 2022-03-27T06:00:41.000Z | 2022-03-27T06:00:41.000Z | Gathered CTF writeups/2019-10-12-hitcon/lost_key/modulus.py | mihaid-b/CyberSakura | f60e6b6bfd6898c69b84424b080090ae98f8076c | [
"MIT"
] | null | null | null | Gathered CTF writeups/2019-10-12-hitcon/lost_key/modulus.py | mihaid-b/CyberSakura | f60e6b6bfd6898c69b84424b080090ae98f8076c | [
"MIT"
] | 1 | 2022-03-27T06:01:42.000Z | 2022-03-27T06:01:42.000Z | import random
from crypto_commons.generic import bytes_to_long, multiply, factor, long_to_bytes
from crypto_commons.netcat.netcat_commons import nc, receive_until_match, receive_until, send
from crypto_commons.rsa.rsa_commons import gcd_multi
def prepare_values():
"""Build four plaintext integers A, B, C, D with A*B == C*D.

The shared integer prefix encodes "X: " so every value, rendered back to
bytes, starts with the server-expected "X: " marker and fits in 16 bytes.
The multiplicative relation A*B == C*D (== x*y*z*v) is what later lets
the ciphertext combination cancel down to a multiple of N.
NOTE: Python 2 code — `len(factors) / 2` relies on integer division and
str/bytes equivalence in the asserts.
"""
prefix = bytes_to_long("X: ")
factors, _ = factor(prefix)
random.shuffle(factors)
# Split the prefix's prime factors into two co-factors base1 * base2 == prefix.
base1 = multiply(factors[:len(factors) / 2])
base2 = multiply(factors[len(factors) / 2:])
assert base1 * base2 == prefix
# Shift left by 5 bytes to leave room after the "X: " prefix, then derive
# two pairs differing only in the final byte (…0 and …1).
shift = 5
x = base1 * 256 ** shift + 0
y = base2 * 256 ** shift + 0
z = base1 * 256 ** shift + 1
v = base2 * 256 ** shift + 1
A = x * y
B = z * v
C = x * v
D = y * z
assert (A * B == C * D == x * y * z * v)
# Every value must still decode with the "X: " prefix and stay short.
for x in [A, B, C, D]:
assert (long_to_bytes(x)[:3] == 'X: ')
assert (len(long_to_bytes(x)) < 16)
return A, B, C, D
def get_kn():
"""Query the remote oracle once and return one multiple k*N of the modulus.

Connects to the CTF service, submits the four related plaintexts from
prepare_values(), and uses CTA*CTB - CTD*CTC: since A*B == C*D, the two
products are congruent mod N, so their difference is a multiple of N.
"""
host = "3.115.26.78"
port = 31337
s = nc(host, port)
receive_until_match(s, "! ")
# Ciphertext of the flag, printed by the server on connect (unused here).
flag_ct = receive_until(s, "\n")[:-1]
# Strip the 3-byte "X: " prefix — the server prepends it before encrypting.
plaintexts = [long_to_bytes(x)[3:] for x in prepare_values()]
results = []
for pt in plaintexts:
receive_until_match(s, ": ")
send(s, pt.encode("hex"))
res = receive_until(s, "\n")[:-1]
results.append(res)
s.close()
CTA = int(results[0], 16)
CTB = int(results[1], 16)
CTC = int(results[2], 16)
CTD = int(results[3], 16)
# A*B == C*D implies CTA*CTB ≡ CTD*CTC (mod N); difference is k*N.
kn = (CTA * CTB) - (CTD * CTC)
print("Got k*N", kn)
return kn
def main():
    """Collect several independent k*N samples and recover N as their GCD."""
    samples = [get_kn() for _ in range(5)]
    candidate = gcd_multi(samples)
    print('possible n', candidate)


main()
def sanity():
"""Offline self-check of the math behind get_kn().

Generates a throwaway RSA modulus and verifies that encrypting the four
prepared values yields CTA*CTB ≡ CTD*CTC (mod n), i.e. the difference
used online really is a multiple of the modulus.
"""
from Crypto.Util.number import getPrime
e = 65537
p = getPrime(512)
q = getPrime(512)
n = p * q
A, B, C, D = prepare_values()
CTA = pow(A, e, n)
CTB = pow(B, e, n)
CTC = pow(C, e, n)
CTD = pow(D, e, n)
assert ((CTA * CTB) % n == (CTD * CTC) % n)
assert ((CTA * CTB) - (CTD * CTC)) % n == 0
# sanity()
| 23.255814 | 93 | 0.557 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 75 | 0.0375 |
5eca3822d3d88e640f8cd392a9134c9bdd311c55 | 1,458 | py | Python | pincer/middleware/ready.py | shivamdurgbuns/Pincer | aa27d6d65023ea62a2d0c09c1e9bc0fe4763e0c3 | [
"MIT"
] | null | null | null | pincer/middleware/ready.py | shivamdurgbuns/Pincer | aa27d6d65023ea62a2d0c09c1e9bc0fe4763e0c3 | [
"MIT"
] | null | null | null | pincer/middleware/ready.py | shivamdurgbuns/Pincer | aa27d6d65023ea62a2d0c09c1e9bc0fe4763e0c3 | [
"MIT"
] | null | null | null | # Copyright Pincer 2021-Present
# Full MIT License can be found in `LICENSE` at the project root.
"""
non-subscription event sent immediately after connecting,
contains server information
"""
from __future__ import annotations
from typing import TYPE_CHECKING
from ..commands import ChatCommandHandler
from ..exceptions import InvalidPayload
from ..objects.user.user import User
from ..utils.conversion import construct_client_dict
if TYPE_CHECKING:
from typing import Tuple
from ..utils.types import Coro
from ..core.dispatch import GatewayDispatch
async def on_ready_middleware(
self,
payload: GatewayDispatch
) -> Tuple[str]:
"""|coro|
Middleware for ``on_ready`` event.

Stores the bot user and a guild-id map on the client, then kicks off
chat-command registration.

Parameters
----------
payload : :class:`~pincer.core.dispatch.GatewayDispatch`
The data received from the ready event

Raises
------
InvalidPayload
When the payload lacks the ``user`` key or the ``guilds`` key.

Returns
-------
Tuple[:class:`str`]
``on_ready``
"""
user = payload.data.get("user")
guilds = payload.data.get("guilds")
# `guilds` may legitimately be an empty list, so test `is None` for it
# while `user` must be a non-empty mapping.
if not user or guilds is None:
raise InvalidPayload(
"A `user` and `guilds` key/value pair is expected on the "
"`ready` payload event."
)
self.bot = User.from_dict(construct_client_dict(self, user))
# Guild objects arrive later (unavailable at ready); map id -> None for now.
self.guilds = dict(map(lambda i: (i["id"], None), guilds))
await ChatCommandHandler(self).initialize()
return "on_ready",
# Middleware registration hook: the dispatcher imports this module and calls
# export() to obtain the coroutine handling the event.
def export() -> Coro:
return on_ready_middleware
| 24.711864 | 70 | 0.682442 | 0 | 0 | 0 | 0 | 0 | 0 | 834 | 0.572016 | 581 | 0.398491 |
5ecb8246d4044f46b66c6135aafacc666491da6d | 327 | py | Python | 1177.py | oliveiraeverton/uri | 91e87dd1c9d18facd0b276d73caf53ed41d8eaea | [
"MIT"
] | null | null | null | 1177.py | oliveiraeverton/uri | 91e87dd1c9d18facd0b276d73caf53ed41d8eaea | [
"MIT"
] | null | null | null | 1177.py | oliveiraeverton/uri | 91e87dd1c9d18facd0b276d73caf53ed41d8eaea | [
"MIT"
] | null | null | null | vetor = []
entradaUsuario = int(input())
# Build the 1000-element array N[i] = i mod entradaUsuario.  The original
# counter/reset loop computed exactly this cyclic sequence; the modulo form
# is the idiomatic equivalent.
if entradaUsuario > 0:
    vetor = [i % entradaUsuario for i in range(1000)]
else:
    # Degenerate input (<= 0): the original reset condition never fired,
    # so the sequence is simply 0..999.
    vetor = list(range(1000))
# Print every element in the judge-required "N[i] = v" format.
for indice in range(1000):
    print("N[{}] = {}".format(indice, vetor[indice]))
| 18.166667 | 51 | 0.639144 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 12 | 0.036697 |
5ece95cf5808af191aa4776b596abeebfb595d6b | 1,620 | py | Python | conftest.py | RussellJQA/stg-python-webdriver-cert | 73fa7b02f38fe534e074d727f96994a92636ba7b | [
"MIT"
] | 4 | 2021-04-29T22:03:36.000Z | 2021-10-20T11:25:55.000Z | conftest.py | RussellJQA/stg-python-webdriver-cert | 73fa7b02f38fe534e074d727f96994a92636ba7b | [
"MIT"
] | null | null | null | conftest.py | RussellJQA/stg-python-webdriver-cert | 73fa7b02f38fe534e074d727f96994a92636ba7b | [
"MIT"
] | 1 | 2021-05-30T12:56:13.000Z | 2021-05-30T12:56:13.000Z | """
This module implements some pytest fixtures for use with Selenium WebDriver.
"""
import os
import time
import pytest
# pip installed
from dotenv import find_dotenv, load_dotenv
from selenium.webdriver import Chrome
from selenium.webdriver.remote.webdriver import WebDriver
from selenium.webdriver.support.wait import WebDriverWait
from webdriver_manager.chrome import ChromeDriverManager
@pytest.fixture
def driver() -> WebDriver:
"""Per-test Chrome WebDriver: installs the matching chromedriver, yields a
maximized browser, and quits it (after an optional demo delay) on teardown."""
# Setup: Code before the 'yield' statement is run before each test
driver: Chrome = Chrome(ChromeDriverManager().install(
)) # Install and initialize Chrome WebDriver for Selenium
driver.maximize_window()
yield driver
# Cleanup/Teardown: Code after the 'yield' statement is run after each test
# Load environment variables from .env file
load_dotenv(find_dotenv())
seconds_to_sleep_before_webdriver_quit = int(
os.environ.get("SECONDS_TO_SLEEP_BEFORE_WEBDRIVER_QUIT", "0"))
# Only do this when the corresponding environment variable has specifically been set to enable it
# [as for development or demonstration purposes --
# to allow (during test execution) the then current Web page to be observed].
if seconds_to_sleep_before_webdriver_quit:
time.sleep(seconds_to_sleep_before_webdriver_quit)
driver.quit()
@pytest.fixture
def wait(driver: WebDriver) -> WebDriverWait:
""" WebDriverWait allows us to wait until a condition is True.
For example, wait until an element is displayed

Depends on the `driver` fixture so each test's wait is bound to its own
browser instance.
"""
return WebDriverWait(driver, timeout=10) # timeout is the max number of seconds to wait for.
| 31.153846 | 101 | 0.758025 | 0 | 0 | 917 | 0.566049 | 1,221 | 0.753704 | 0 | 0 | 779 | 0.480864 |
5ecf1e2171d2690342787b732c0021b9436bde5e | 4,029 | py | Python | api/lib/datafetcher/DataFetcher.py | NLeRoy917/indcovid | cbc94ad3e9993364743fae92f553ef2d154bee18 | [
"Apache-2.0"
] | null | null | null | api/lib/datafetcher/DataFetcher.py | NLeRoy917/indcovid | cbc94ad3e9993364743fae92f553ef2d154bee18 | [
"Apache-2.0"
] | null | null | null | api/lib/datafetcher/DataFetcher.py | NLeRoy917/indcovid | cbc94ad3e9993364743fae92f553ef2d154bee18 | [
"Apache-2.0"
] | 1 | 2021-03-24T15:46:19.000Z | 2021-03-24T15:46:19.000Z | import requests
import time
import pandas
class DataFetcher():
    """
    Python interface for the CKAN Indiana Coronavirus Data Site.
    URL: https://hub.mph.in.gov/dataset?q=COVID
    """
    # One shared HTTP session (connection reuse + common headers) for all
    # instances of this class.
    _session = requests.Session()
    _session.headers = {
        'application': 'IndCovid.com',
        'User-Agent': 'NLeRoy917@gmail.com',
        'Content-Type': 'application/json'
    }
    _SLEEP_MIN = 0.2 # Enforce minimum wait time between url calls (seconds)

    def __init__(self, dir='./tmp/', timeout=1000, sleep_time=0.5):
        """
        init DataFetcher Object
        - dir - string - the directory to save files into
        - timeout - int - the time to wait in second before disconnecting download requests
        - sleep_time - float - time to force sleeping
        """
        self.api_base = 'https://hub.mph.in.gov/dataset/'
        self.timeout = timeout
        self.sleep_time = sleep_time
        # dataset name -> path fragment of the downloadable CKAN resource
        self._data_sources = {
            'covid-19-demographics': '62ddcb15-bbe8-477b-bb2e-175ee5af8629/resource/2538d7f1-391b-4733-90b3-9e95cd5f3ea6/download/covid_report_demographics.xlsx'
        }
        self.dir = dir

    def get_data(self, dataset):
        """
        Make a call to the url to get the data we want.
        Returns the requests.Response, or None on timeout/request error.
        """
        uri = self.api_base + self._data_sources.get(dataset)
        try:
            response = self._session.get(uri)
        except requests.Timeout as e:
            print("Timeout raised and caught:\n{e}".format(e=e))
            response = None
        except requests.RequestException as e:
            print("Error raised and caught:\n{e}".format(e=e))
            response = None
        # Enforce rate limiting
        time.sleep(max(self._SLEEP_MIN, self.sleep_time))
        return response

    def generate_url(self, dataset):
        """
        Generate a url link to an excel file that can be downloaded or passed to pandas to create dataframes
        """
        return self.api_base + self._data_sources.get(dataset)

    def get_latest_data(self):
        """Download the demographics workbook into self.dir."""
        res = self.get_data('covid-19-demographics')
        with open(self.dir + 'covid_19_demographics.xlsx', 'wb') as xl:
            xl.write(res.content)

    def _read_demographics(self, sheet, label_column):
        """Shared parser for the downloaded demographics workbook.

        Reads *sheet* of the Excel file and returns one dict per row.  The
        *label_column* ('RACE' or 'ETHNICITY') is always exposed under the
        'Race' key so both public readers keep their historical, identical
        output schema.
        """
        df = pandas.read_excel(self.dir + 'covid_19_demographics.xlsx', sheet)
        case_demographics = []
        for index, row in df.iterrows():
            case_demographics.append({
                'Race': row[label_column],
                'COVID_TEST': row['COVID_TEST'],
                'COVID_COUNT': row['COVID_COUNT'],
                'COVID_DEATHS': row['COVID_DEATHS'],
                'COVID_TEST_PCT': row['COVID_TEST_PCT'],
                'COVID_COUNT_PCT': row['COVID_COUNT_PCT'],
                'COVID_DEATHS_PCT': row['COVID_DEATHS_PCT']
            })
        return case_demographics

    def read_case_demographics_race(self):
        """Parse the 'Race' sheet; returns a list of per-row dicts."""
        return self._read_demographics('Race', 'RACE')

    def read_case_demographics_ethnicity(self):
        """Parse the 'Ethnicity' sheet; returns a list of per-row dicts.

        NOTE: the label key is deliberately 'Race' (not 'Ethnicity') so this
        method's rows share one schema with read_case_demographics_race().
        """
        return self._read_demographics('Ethnicity', 'ETHNICITY')
# Manual smoke test: download the workbook once, then time repeated reads.
if __name__ == '__main__':
# create datafetcher object
fetcher = DataFetcher()
# download the excel file to local storage
# NOTE(review): get_data() returns None on network failure, in which case
# res.content below raises AttributeError — acceptable for a manual run.
res = fetcher.get_data('covid-19-demographics')
with open('covid_19_demographics.xlsx','wb') as xl:
xl.write(res.content)
# open file and read/print data 10 times to assess speed
for i in range(10):
df = pandas.read_excel('covid_19_demographics.xlsx','Race')
print(df)
| 34.144068 | 161 | 0.602135 | 3,534 | 0.877141 | 0 | 0 | 0 | 0 | 0 | 0 | 1,781 | 0.442045 |
5ed167138cffc066c8275849b2ee1e74108aad6f | 1,631 | py | Python | pybots/src/audio/vario.py | aivian/robots | 6827886916e36432ce1d806f0a78edef6c9270d9 | [
"MIT"
] | null | null | null | pybots/src/audio/vario.py | aivian/robots | 6827886916e36432ce1d806f0a78edef6c9270d9 | [
"MIT"
] | null | null | null | pybots/src/audio/vario.py | aivian/robots | 6827886916e36432ce1d806f0a78edef6c9270d9 | [
"MIT"
] | 1 | 2021-09-24T17:08:30.000Z | 2021-09-24T17:08:30.000Z | from audio.sounds import beep
import numpy as np
import time
class VarioTone(object):
""" A class to make vario sounds

Maps the current climb value (self._val, m/s) to beep pitch, length and
spacing: higher climb -> higher frequency, shorter and faster beeps.
NOTE(review): leading indentation was lost when this file was archived;
the statements below are flattened but kept byte-identical.
"""
def __init__(self, max_val=5.0):
""" Constructor
Arguments:
max_val: optional (defaults to 5), the saturation vario reading in
meters per second
Returns:
class instance
"""
super(VarioTone, self).__init__()
self._max = max_val
self._is_running = False
# Current and previous vario readings (m/s); set externally.
self._val = 0.0
self._last_val = 0.0
# Timing state for the beep scheduler in _service().
self._beep_time = time.time()
self._beep_duration = 0.0
self._beep_dt = 1.0
# NOTE(review): _thread is assigned but never used in this class.
self._thread = None
def start_vario(self):
""" Start the vario running

Blocks the calling thread, servicing beeps until stop_vario() is called.
"""
self._is_running = True
while self._is_running is True:
self._service()
# Poll at least every 100 ms, faster when beeps are short.
dt = min(0.1, self._beep_dt/2.0)
time.sleep(dt)
def stop_vario(self):
""" stop the vario
"""
self._is_running = False
def _service(self):
""" make the beeps

Linearly interpolates pitch 260->3000 Hz, beep length 0.3->0.03 s and
spacing factor 1.0->0.1 as the (clipped) vario value rises to _max.
"""
f = 260.0 + (3000.0 - 260.0)/self._max*(
np.clip(self._val, 0.0, self._max))
dt = 0.3 + (0.03 - 0.3)/self._max*(
np.clip(self._val, 0.0, self._max))
spacing = 1.0 + (0.1 - 1.0)/self._max*(
np.clip(self._val, 0.0, self._max))
# Re-trigger when the previous beep interval has elapsed or the reading
# changed.  NOTE(review): _last_val is never updated after __init__, so
# the change test is true whenever _val != 0 — confirm whether an
# assignment to _last_val is missing here.
if (time.time() - self._beep_time > self._beep_duration or
abs(self._val - self._last_val) > 0.0):
if self._val > 0.001 :
self._beep_duration = dt*spacing + dt
self._beep_dt = dt
beep(dt, f)
| 26.306452 | 78 | 0.521766 | 1,567 | 0.96076 | 0 | 0 | 0 | 0 | 0 | 0 | 335 | 0.205395 |
5ed2d6e06ea77385ae38bbc942bc6b4df97670f0 | 1,309 | py | Python | design.py | Raj-kar/Shuffle-Game-with-python | 1b0a765559850137bc49e503b6a79a03c3e8fb12 | [
"MIT"
] | null | null | null | design.py | Raj-kar/Shuffle-Game-with-python | 1b0a765559850137bc49e503b6a79a03c3e8fb12 | [
"MIT"
] | null | null | null | design.py | Raj-kar/Shuffle-Game-with-python | 1b0a765559850137bc49e503b6a79a03c3e8fb12 | [
"MIT"
] | null | null | null | from functions import decorate, ascii_text
def rules(): # Some Game rules, first shown at screen !
"""Print the welcome banner and the game-rule summary via decorate()."""
decorate(" ************************************************************ ")
decorate(" * * ")
decorate(" * Welcome to Word jumbling, Suffle, re-arange Game! * ")
decorate(" * * ")
decorate(" ************************************************************ ")
decorate("Game Rules --->> Two-player game | Each time a player enters a word and the game shows the word in shuffle form.")
decorate(
"Then player 2 will guess it. If the correct, then player 2 enter a word, and player 1 will guess it !")
decorate(
"Both the player will get three hints, one each time if they can't answer the word at once ..!")
decorate("The Game will run, untill player exit it !")
def loading_screen(p1, p2): # welcome player 1 and 2
"""Greet both players by name and announce that p1 goes first."""
ascii_text(f"WELCOME {p1} and {p2}")
decorate(f"We start with {p1} turn ..!")
decorate("Don't show the word to your opponent !")
# -> decorate is a function defined in functions.py.
# -> It works like the built-in print function, but prints statements in different colors.
| 52.36 | 129 | 0.524828 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 989 | 0.755539 |
5ed3010660f532f36ff748813256fddc0ccb424a | 1,224 | py | Python | Supernova/database/pack.py | Feliconut/PurdueCS324-LSST | 79273ba0ef8ed9aab9e08c736c22631a8e250f6e | [
"Apache-2.0"
] | null | null | null | Supernova/database/pack.py | Feliconut/PurdueCS324-LSST | 79273ba0ef8ed9aab9e08c736c22631a8e250f6e | [
"Apache-2.0"
] | null | null | null | Supernova/database/pack.py | Feliconut/PurdueCS324-LSST | 79273ba0ef8ed9aab9e08c736c22631a8e250f6e | [
"Apache-2.0"
] | 1 | 2021-02-24T03:54:33.000Z | 2021-02-24T03:54:33.000Z | from shutil import copy, make_archive, rmtree
from os import mkdir, remove
from os.path import join, exists
from .io import DATA_PATH, fetch_locus
def pack(name, locus_ids, include_alerts=False):
    """Bundle the given loci into <name>.zip.

    Copies each locus file and its lightcurve (plus, optionally, every alert
    attached to the locus) from DATA_PATH into a scratch folder, zips the
    folder, and removes the scratch folder again.

    Raises FileExistsError when <name>.zip already exists.
    """
    if exists(name + '.zip'):
        raise FileExistsError(name + '.zip')
    DST_PATH = name + '_temp'
    print(f'Creating temp folder ./{name}_temp ...')
    mkdir(DST_PATH)
    try:
        mkdir(join(DST_PATH, 'loci'))
        mkdir(join(DST_PATH, 'lightcurves'))
        mkdir(join(DST_PATH, 'alerts'))
        print(f'Copying necessary files ...')
        for locus_id in locus_ids:
            copy(join(DATA_PATH, 'loci', locus_id),
                 join(DST_PATH, 'loci', locus_id))
            copy(join(DATA_PATH, 'lightcurves', locus_id + '.lc'),
                 join(DST_PATH, 'lightcurves', locus_id + '.lc'))
            if include_alerts:
                for alert in fetch_locus(locus_id).alerts:
                    alert_id = alert.alert_id
                    copy(join(DATA_PATH, 'alerts', alert_id),
                         join(DST_PATH, 'alerts', alert_id))
        print(f'Making {name}.zip ...')
        make_archive(name, 'zip', DST_PATH)
        print(f'Complete. Clearing temp files')
    finally:
        # Bug fix: always remove the scratch folder, even when a copy or the
        # archiving step fails part-way through (the original leaked it).
        rmtree(DST_PATH)
    print(f'Complete.')
| 38.25 | 70 | 0.606209 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 254 | 0.207516 |
5ed383700e85d30633a0a565fb47e10ed17d6cee | 975 | py | Python | utils/keychain.py | remcoroyen/ShallotNetwork | d8f36533d613839f70b8eab30112d85229764af2 | [
"MIT"
] | null | null | null | utils/keychain.py | remcoroyen/ShallotNetwork | d8f36533d613839f70b8eab30112d85229764af2 | [
"MIT"
] | null | null | null | utils/keychain.py | remcoroyen/ShallotNetwork | d8f36533d613839f70b8eab30112d85229764af2 | [
"MIT"
] | null | null | null | from Crypto.Util import number
def random_prime(bit_size):
"""Return a random prime of exactly *bit_size* bits (pycryptodome helper)."""
return number.getPrime(bit_size)
def random_int(bit_size):
"""Return a random integer of at most *bit_size* bits (pycryptodome helper)."""
return number.getRandomInteger(bit_size)
class KeyChain:
    """In-memory store mapping key ids to key material.

    Keys and ids are held in two parallel lists; the public API
    (new_key / has_key / get_key / destroy_key / clear) is unchanged.
    """

    def __init__(self):
        self.keys = []    # key material, parallel to self.keyids
        self.keyids = []  # identifiers, parallel to self.keys

    def new_key(self, key, key_id):
        """Store *key* under *key_id* (duplicate ids are not checked)."""
        self.keys.append(key)
        self.keyids.append(key_id)

    def has_key(self, key_id):
        """Return True when *key_id* is present in the chain."""
        return key_id in self.keyids

    def get_key(self, key_id):
        """Return the key stored under *key_id*, or None when absent."""
        try:
            return self.keys[self.keyids.index(key_id)]
        except ValueError:
            return None

    def destroy_key(self, key_id):
        """Remove *key_id* and its key; print a notice when it is missing."""
        try:
            index = self.keyids.index(key_id)
        except ValueError:
            print('Key not found, none removed')
        else:
            self.keyids.pop(index)
            self.keys.pop(index)

    def clear(self):
        """Destroy every stored key.

        Bug fix: iterate over a *copy* of the id list.  The original looped
        over self.keyids while destroy_key() popped from that same list,
        which skipped every other entry and left keys behind.
        """
        for key_id in list(self.keyids):
            self.destroy_key(key_id)
| 23.780488 | 55 | 0.57641 | 801 | 0.821538 | 0 | 0 | 0 | 0 | 0 | 0 | 29 | 0.029744 |
5ed3c8a738f51e8e4457aa68a1a92a2c4383f96b | 731 | py | Python | python3/dec-hexIPformatter.py | 7RU7H/hacking-scripts | 1672db32ab0875b8105c62deaa72b7b756ceac5f | [
"MIT"
] | null | null | null | python3/dec-hexIPformatter.py | 7RU7H/hacking-scripts | 1672db32ab0875b8105c62deaa72b7b756ceac5f | [
"MIT"
] | null | null | null | python3/dec-hexIPformatter.py | 7RU7H/hacking-scripts | 1672db32ab0875b8105c62deaa72b7b756ceac5f | [
"MIT"
] | null | null | null | """
You can run this in the following format:
For decimal: python3 ip2dh.py D <Ip-address>
For Hexadecimal: python3 ip2dh.py H <Ip-address>
https://gist.github.com/mzfr
"""
#!/usr/bin/python3
import sys
# Usage guard: expect <format> and <IPv4 address> on the command line.
if len(sys.argv) < 3:
print('\nYou must give desired format and IPv4 address as input...')
print('e.g.: D 192.168.10.100')
print('Valid formats D=Decimal H=Hexadecimal\n')
sys.exit(1)
# Requested output format: 'D' (decimal) or 'H' (hexadecimal).
Format = sys.argv[1]
def long(ip):
    """Convert a dotted-quad IPv4 string to its integer value."""
    value = 0
    for octet in ip.split('.'):
        # Accumulate base-256: equivalent to o0*2**24 + o1*2**16 + o2*2**8 + o3.
        value = value * 256 + int(octet)
    return value
ip = long(sys.argv[2])
# Print in the requested base.  NOTE(review): an unrecognized Format letter
# produces no output at all (no error message).
if Format == 'D':
print('\nIP as Decimal format: %s' % (ip))
if Format == 'H':
print('\nIP as Hexadecimal format: %s' % (hex(ip)))
| 22.151515 | 72 | 0.619699 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 385 | 0.526676 |
5ed4ceefc3e109900c35d9955460f833cabd84e2 | 485 | py | Python | manage.py | diogenesjusto/flask_leaderboard | 86dac90785e01747ffbde99e6ba65cf42e4c016e | [
"MIT"
] | 5 | 2020-06-15T02:56:39.000Z | 2021-12-28T19:18:18.000Z | manage.py | diogenesjusto/flask_leaderboard | 86dac90785e01747ffbde99e6ba65cf42e4c016e | [
"MIT"
] | 2 | 2019-12-01T15:50:05.000Z | 2021-12-17T07:54:23.000Z | manage.py | diogenesjusto/flask_leaderboard | 86dac90785e01747ffbde99e6ba65cf42e4c016e | [
"MIT"
] | 9 | 2020-01-19T11:21:33.000Z | 2022-02-22T06:28:52.000Z | from flask import Flask
from flask_sqlalchemy import SQLAlchemy
from flask_script import Manager
from flask_migrate import Migrate, MigrateCommand
from config import Config
from main import User, Submission
# Flask application wired with SQLAlchemy, Flask-Migrate and Flask-Script;
# `python manage.py db <cmd>` drives schema migrations.
app = Flask(__name__)
# app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///app.db'
app.config.from_object(Config)
db = SQLAlchemy(app)
migrate = Migrate(app, db)
manager = Manager(app)
manager.add_command('db', MigrateCommand)
if __name__ == '__main__':
manager.run()
| 22.045455 | 60 | 0.77732 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 74 | 0.152577 |
5ed5c293015f6fb8f00d8af0364b706eaa0ed94e | 3,176 | py | Python | settings.py | brandonivey/alertas | b4bc9adad1ea01a7f15c3867ef9da00197e33301 | [
"BSD-3-Clause"
] | null | null | null | settings.py | brandonivey/alertas | b4bc9adad1ea01a7f15c3867ef9da00197e33301 | [
"BSD-3-Clause"
] | null | null | null | settings.py | brandonivey/alertas | b4bc9adad1ea01a7f15c3867ef9da00197e33301 | [
"BSD-3-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
"""
eve-app settings
"""
# Use the MongoHQ sandbox as our backend.
MONGO_HOST = 'localhost'
MONGO_PORT = 27017
MONGO_USERNAME = ''
MONGO_PASSWORD = ''
MONGO_DBNAME = 'notifications'
# also, correctly set the API entry point
# SERVER_NAME = 'localhost'
# Enable reads (GET), inserts (POST) and DELETE for resources/collections
# (if you omit this line, the API will default to ['GET'] and provide
# read-only access to the endpoint).
RESOURCE_METHODS = ['GET', 'POST', 'DELETE']
# Enable reads (GET), edits (PATCH) and deletes of individual items
# (defaults to read-only item access).
ITEM_METHODS = ['GET', 'PATCH', 'DELETE']
# We enable standard client cache directives for all resources exposed by the
# API. We can always override these global settings later.
CACHE_CONTROL = 'max-age=20'
CACHE_EXPIRES = 20
# CORS: allow any origin, and whitelist the request/response headers below.
X_DOMAINS = '*'
X_HEADERS = ['Authorization','If-Match','Access-Control-Expose-Headers','Content-Type','Pragma','Cache-Control']
X_EXPOSE_HEADERS = ['Origin', 'X-Requested-With', 'Content-Type', 'Accept']
# Closed set of business units an incident may be filed against.
units = ['ATG', 'ATC Mobile', 'Dealer Site', 'ATC', 'RealDeal', 'KBB', 'Tradein', 'vAuto', 'Fastlane', 'ATC SYC', 'VinSolution', 'HomeNet', 'ATX', 'CRM']
# Resource definition: an incident (status + unit are mandatory).
incident = {
# if 'item_title' is not provided API will just strip the final
# 's' from resource name, and use it as the item_title.
# 'item_title': 'incident',
'schema': {
'title': {
'type': 'string',
'minlength': 1,
'maxlength': 128,
},
'status': {
'type': 'string',
'allowed': ['red', 'yellow', 'green'],
'required': True,
},
'unit': {
'type': 'string',
'allowed': units,
'required': True,
},
'summary': {
'type': 'string',
'minlength': 1,
'maxlength': 512,
},
'created_by': {
'type': 'string',
'maxlength': 32,
},
}
}
# Resource definition: an update attached to an incident.
update = {
# We choose to override global cache-control directives for this resource.
'cache_control': 'max-age=10,must-revalidate',
'cache_expires': 10,
'schema': {
'created_by': {
'type': 'string',
'maxlength': 32,
},
'description': {
'type': 'string',
'minlength': 1,
'maxlength': 512,
'required': True,
},
'incident': {
'type': 'objectid',
'required': True,
# referential integrity constraint: value must exist in the
# 'incidents' collection. Since we aren't declaring a 'field' key,
# will default to `incidents._id` (or, more precisely, to whatever
# ID_FIELD value is).
'data_relation': {
'resource': 'incidents',
# make the owner embeddable with ?embedded={"incident":1}
'embeddable': True
},
},
}
}
# The DOMAIN dict explains which resources will be available and how they will
# be accessible to the API consumer.
DOMAIN = {
'incidents': incident,
'updates': update,
}
| 28.872727 | 153 | 0.560139 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2,021 | 0.636335 |
5ed705bdaa66e16d951b96579ba77a4976ae6a2d | 66 | py | Python | pygame_ui/__init__.py | oof6969696969/pygame_ui | ca59652f30718dd8c578d994239d3a2d7aadae9c | [
"MIT"
] | null | null | null | pygame_ui/__init__.py | oof6969696969/pygame_ui | ca59652f30718dd8c578d994239d3a2d7aadae9c | [
"MIT"
] | null | null | null | pygame_ui/__init__.py | oof6969696969/pygame_ui | ca59652f30718dd8c578d994239d3a2d7aadae9c | [
"MIT"
] | null | null | null | from lib.pygame_ui import UIManager, Widgets, Shapes, load_theme
| 33 | 65 | 0.818182 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
5ed74807e632739391f37e5ba60bcdb485e2cdb5 | 2,884 | py | Python | test/test_Motion/test_ForceMomentSystem.py | henrystoldt/MAPLEAF | af970d3e8200832f5e70d537b15ad38dd74fa551 | [
"MIT"
] | 15 | 2020-09-11T19:25:07.000Z | 2022-03-12T16:34:53.000Z | test/test_Motion/test_ForceMomentSystem.py | henrystoldt/MAPLEAF | af970d3e8200832f5e70d537b15ad38dd74fa551 | [
"MIT"
] | null | null | null | test/test_Motion/test_ForceMomentSystem.py | henrystoldt/MAPLEAF | af970d3e8200832f5e70d537b15ad38dd74fa551 | [
"MIT"
] | 3 | 2021-12-24T19:39:53.000Z | 2022-03-29T01:06:28.000Z | #Created by: Declan Quinn
#May 2019
#To run tests:
#In this file: [test_StandardAtmosphere.py]
#In all files in the current directory: [python -m unittest discover]
#Add [-v] for verbose output (displays names of all test functions)
import unittest
from MAPLEAF.Motion import ForceMomentSystem
from MAPLEAF.Motion import Vector
class TestForceMomentSystem(unittest.TestCase):
"""Checks that adding ForceMomentSystem objects and re-expressing the
result at a new location (getAt) produces the expected resultant
force/moment pairs."""
def setUp(self):
# Applied systems: ForceMomentSystem(force, location, moment).
self.appliedForce1 = ForceMomentSystem(Vector(0, 0, 10), Vector(1, 0, 0), Vector(0,0,0))
self.appliedForce2 = ForceMomentSystem(Vector(0, 0, 10), Vector(2, 0, 0), Vector(0,0,0))
self.appliedForce3 = ForceMomentSystem(Vector(10, 0, 0), Vector(0, 1, 0), Vector(0,0,0))
# Hand-computed resultants expressed at the origin.
self.correctForce1 = ForceMomentSystem(Vector(0, 0, 20), Vector(0,0,0), Vector(0, -30, 0))
self.correctForce2 = ForceMomentSystem(Vector(10, 0, 10), Vector(0,0,0), Vector(0, -10, -10))
def test_combineForceMomentSystems(self):
combinedForce = self.appliedForce1 + self.appliedForce2
forceAtCG = combinedForce.getAt(Vector(0,0,0))
self.assertEqual(forceAtCG, self.correctForce1)
combinedForce2 = self.appliedForce1 + self.appliedForce3
forceAtCG2 = combinedForce2.getAt(Vector(0,0,0))
self.assertEqual(forceAtCG2, self.correctForce2)
def test_combineForceMomentSystems_2(self):
# Example Question 16 - http://www.ce.siue.edu/examples/Worked_examples_Internet_text-only/Data_files-Worked_Exs-Word_&_pdf/Equivalent_forces.pdf
# Define Force-Moment 1
force1 = Vector(0, -3.464, -2)
m1 = Vector(0, -51.962, -30)
location1 = Vector(4, 1.5, 4.402)
fms1 = ForceMomentSystem(force1, location1, m1)
# Define Force-Moment 2
force2 = Vector(-6, 0, 0)
m2 = Vector(-80, 0, 0)
location2 = Vector(8, 1.5, 1)
fms2 = ForceMomentSystem(force2, location2, m2)
# Combine
combinedForce = fms1 + fms2
combinedForceAtOrigin = combinedForce.getAt(Vector(0,0,0))
# Define correct/expected result
expectedResultantForce = Vector(-6, -3.464, -2)
expectedResultantMoment = Vector(12.249, 2, -4.856) # Only includes moments generated by forces, not the moments applied
resultantLocation = Vector(0,0,0)
expectedResult = ForceMomentSystem(expectedResultantForce, resultantLocation, expectedResultantMoment)
# Compare (moment check subtracts the applied moments m1, m2 so only the
# force-generated moment is compared, to 3 decimal places)
from test.testUtilities import assertVectorsAlmostEqual
assertVectorsAlmostEqual(self, combinedForceAtOrigin.force, expectedResult.force)
assertVectorsAlmostEqual(self, combinedForceAtOrigin.location, expectedResult.location)
assertVectorsAlmostEqual(self, combinedForceAtOrigin.moment - m1 - m2, expectedResult.moment, 3)
#If this file is run by itself, run the tests above
if __name__ == '__main__':
unittest.main()
| 43.69697 | 153 | 0.694521 | 2,450 | 0.849515 | 0 | 0 | 0 | 0 | 0 | 0 | 597 | 0.207004 |
5ed76a1d4a9d801f30aced725248325dce473b59 | 2,349 | py | Python | rebench/environment.py | tobega/ReBench | 123a9187f74d32f93b823dd0c354244aecd7437e | [
"MIT"
] | null | null | null | rebench/environment.py | tobega/ReBench | 123a9187f74d32f93b823dd0c354244aecd7437e | [
"MIT"
] | null | null | null | rebench/environment.py | tobega/ReBench | 123a9187f74d32f93b823dd0c354244aecd7437e | [
"MIT"
] | null | null | null | import getpass
import os
import subprocess
from cpuinfo import get_cpu_info
from psutil import virtual_memory
try:
from urllib.parse import urlparse
except ImportError:
# Python 2.7
from urlparse import urlparse
def _encode_str(out):
as_string = out.decode('utf-8')
if as_string and as_string[-1] == '\n':
as_string = as_string[:-1]
return as_string
def _exec(cmd):
"""Run *cmd* (argv list) and return its stdout as a stripped string,
or None when the command exits non-zero."""
try:
out = subprocess.check_output(cmd)
except subprocess.CalledProcessError:
return None
return _encode_str(out)
def determine_source_details():
"""Collect git metadata (remote URL with password removed, branch/tag,
commit id/message, author and committer) for the working directory."""
result = dict()
try:
repo_url = subprocess.check_output(['git', 'ls-remote', '--get-url'])
except subprocess.CalledProcessError:
repo_url = ''
# NOTE(review): on Python 3 check_output returns bytes while the fallback
# is str; urlparse handles both, but the "{}@{}".format below would
# stringify bytes components as "b'...'", and _encode_str on the str
# fallback would fail (str has no .decode) — confirm py3 behaviour.
parsed = urlparse(repo_url)
if parsed.password:
# remove password
parsed = parsed._replace(
netloc="{}@{}".format(parsed.username, parsed.hostname))
result['repoURL'] = _encode_str(parsed.geturl())
result['branchOrTag'] = _exec(['git', 'show', '-s', '--format=%D', 'HEAD'])
result['commitId'] = _exec(['git', 'rev-parse', 'HEAD'])
result['commitMsg'] = _exec(['git', 'show', '-s', '--format=%B', 'HEAD'])
result['authorName'] = _exec(['git', 'show', '-s', '--format=%aN', 'HEAD'])
result['committerName'] = _exec(['git', 'show', '-s', '--format=%cN', 'HEAD'])
result['authorEmail'] = _exec(['git', 'show', '-s', '--format=%aE', 'HEAD'])
result['committerEmail'] = _exec(['git', 'show', '-s', '--format=%cE', 'HEAD'])
return result
def determine_environment():
"""Snapshot the benchmarking environment: user, host, OS, CPU, memory,
and kernel/architecture software entries.

NOTE: relies on os.uname(), which is POSIX-only.
"""
result = dict()
result['userName'] = getpass.getuser()
# CI systems set CI=true; anything else counts as a manual run.
result['manualRun'] = not ('CI' in os.environ and os.environ['CI'] == 'true')
u_name = os.uname()
result['hostName'] = u_name[1]
result['osType'] = u_name[0]
cpu_info = get_cpu_info()
result['cpu'] = cpu_info['brand']
# hz_advertised_raw is (mantissa, exponent); reconstruct Hz.
result['clockSpeed'] = (cpu_info['hz_advertised_raw'][0]
* (10 ** cpu_info['hz_advertised_raw'][1]))
result['memory'] = virtual_memory().total
result['software'] = []
result['software'].append({'name': 'kernel', 'version': u_name[3]})
result['software'].append({'name': 'kernel-release', 'version': u_name[2]})
result['software'].append({'name': 'architecture', 'version': u_name[4]})
return result
| 32.178082 | 83 | 0.611324 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 651 | 0.277139 |
5ed81a7d9ae972c6969d8b4ad2ed74cbe180da8e | 1,138 | py | Python | pipupgrade/cli/__init__.py | codingCoffee/pipupgrade | cdb2921fbe294afe9da5caf7cf07c222a3780acb | [
"MIT"
] | null | null | null | pipupgrade/cli/__init__.py | codingCoffee/pipupgrade | cdb2921fbe294afe9da5caf7cf07c222a3780acb | [
"MIT"
] | null | null | null | pipupgrade/cli/__init__.py | codingCoffee/pipupgrade | cdb2921fbe294afe9da5caf7cf07c222a3780acb | [
"MIT"
] | null | null | null | # imports - compatibility imports
from __future__ import print_function
from pipupgrade._compat import input
# imports - standard imports
import inspect
# imports - module imports
from pipupgrade.cli.parser import get_parsed_args
from pipupgrade.util import get_if_empty, merge_dict
# Responses treated as "yes" by confirm(); bare Enter defaults to yes.
_ACCEPTABLE_YES = ("", "y", "Y")
# ANSI SGR escape sequences used to colour terminal output.
BOLD = "\033[0;1m"
UNDERLINE = "\033[0;4m"
RED = "\033[0;31m"
GREEN = "\033[0;32m"
YELLOW = "\033[0;33m"
CYAN = "\033[0;36m"
# Reset all attributes back to the terminal default.
CLEAR = "\033[0m"
def confirm(query):
    """Prompt the user with *query*; True for Enter, 'y' or 'Y'."""
    response = input("{} [Y/n]: ".format(query))
    return response in _ACCEPTABLE_YES
def format(string, type_):
    """Wrap *string* in the ANSI prefix *type_*, resetting attributes after.

    NOTE: intentionally shadows the built-in ``format`` for API parity.
    """
    return type_ + string + CLEAR
def echo(string, nl = True):
    """Print *string*; suppress the trailing newline when *nl* is falsy."""
    terminator = "\n" if nl else ""
    print(string, end = terminator)
def command(fn):
"""Decorator turning *fn* into a CLI entry point: calls it with its own
defaults overridden by the parsed command-line arguments.

NOTE(review): the CLI is parsed at decoration time (module import), not
at call time, and wrapper() silently discards any *args/**kwargs passed
by the caller — confirm both are intended.
NOTE: inspect.getargspec is deprecated and removed in Python 3.11.
"""
argspec = inspect.getargspec(fn)
keys = argspec.args
values = get_if_empty(argspec.defaults, [ ])
# NOTE(review): zip pairs from the front, but argspec.defaults aligns to
# the *last* parameters — defaults map to the wrong names whenever some
# parameters lack defaults.
fnargs = dict(zip(keys, values))
parsed = get_parsed_args()
merged = merge_dict(fnargs, parsed.__dict__)
def wrapper(*args, **kwargs):
return fn(**merged)
return wrapper
0d5987b219b64d6b388ff98a861c6bc4ea2a00e4 | 321 | py | Python | csv_to_table/urls.py | KariSpace/CRM_Sedicomm | cb19e90ca99c7a50a1841afbfb878191f62dec5c | [
"MIT"
] | null | null | null | csv_to_table/urls.py | KariSpace/CRM_Sedicomm | cb19e90ca99c7a50a1841afbfb878191f62dec5c | [
"MIT"
] | null | null | null | csv_to_table/urls.py | KariSpace/CRM_Sedicomm | cb19e90ca99c7a50a1841afbfb878191f62dec5c | [
"MIT"
] | null | null | null |
from . import views
from django.contrib.auth import views as auth_views
from django.urls import path
# Route table for the csv_to_table app; route names match the view functions.
urlpatterns = [
path('csv_upload/', views.csv_table, name='csv_table'),
path('today/', views.today_table, name='today_table'),
path('search/', views.search, name='search'),
] | 26.75 | 59 | 0.71028 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 87 | 0.271028 |
0d5a30331837c0027e55c2d8e2c93ae9b5c82120 | 810 | py | Python | tests/test_data_utils.py | claydodo/lineout | f3de9ac40c62c8231a63dbf64d91aa2aa299f9c6 | [
"Unlicense"
] | null | null | null | tests/test_data_utils.py | claydodo/lineout | f3de9ac40c62c8231a63dbf64d91aa2aa299f9c6 | [
"Unlicense"
] | null | null | null | tests/test_data_utils.py | claydodo/lineout | f3de9ac40c62c8231a63dbf64d91aa2aa299f9c6 | [
"Unlicense"
] | null | null | null | from unittest import TestCase
from src.lineout.data import *
class TestDataUtils(TestCase):
    """Unit tests for get_result_list (imported from src.lineout.data)."""

    def test_get_result_list(self):
        records = [{'id': 1, 'name': 'a'}, {'id': 2, 'name': 'b'}]
        # Paginated envelope wrapping the same records.
        paginated = {
            'count': 2,
            'previous': None,
            'next': None,
            'results': records
        }
        # A plain list passes through untouched; a paginated envelope is
        # unwrapped to its 'results' list.
        self.assertListEqual(get_result_list(records), records)
        self.assertListEqual(get_result_list(paginated), records)
        # Anything else — including a dict whose 'results' is not a list —
        # is rejected with ValueError.
        for bogus in ({'foo': 'bar'}, {'results': {'foo': 'bar'}}):
            with self.assertRaises(ValueError):
                get_result_list(bogus)
| 27.931034 | 71 | 0.57037 | 746 | 0.920988 | 0 | 0 | 0 | 0 | 0 | 0 | 87 | 0.107407 |
0d5ad59ba224418983bf8048ad6cf393b546dc31 | 738 | py | Python | tests/core/sdo/tests.py | aceofwings/Cantactular | a6eb8d7128fd1388d3e75c1a8415123d1d5930e1 | [
"MIT"
] | 3 | 2017-01-26T01:37:42.000Z | 2018-07-22T02:42:52.000Z | tests/core/sdo/tests.py | aceofwings/Cantactular | a6eb8d7128fd1388d3e75c1a8415123d1d5930e1 | [
"MIT"
] | 1 | 2017-07-07T18:02:20.000Z | 2017-07-07T18:02:20.000Z | tests/core/sdo/tests.py | aceofwings/Evt-Gateway | a6eb8d7128fd1388d3e75c1a8415123d1d5930e1 | [
"MIT"
] | null | null | null | # import unittest
# import os
# from gateway.can.sdo.message import SdoMessage
# from gateway.can.sdo.message import CommandByte
# class TestSdoMessage(unittest.TestCase):
# def setUp(self):
# self.sdoMessage = SdoMessage.getMessege(0x1018,0x01,0x00)
# self.anotherMessage = SdoMessage.getMessege(0x2220,0x00,0x00)
#
# self.rawUpload = b'@\x18\x10\x02\x02\x00\x00\x00\x00\x00\x00\x00'
# self.rawDownload = b'+\x18\x10\x01\x00\x00\x00\x00\x00\x00\x00\x00'
#
# def tearDown(self):
# pass
# def test_toBytes(self):
# """Translate the SDO message correctly"""
# pass
# def test_init(self):
# """See if intialize will have correct default value"""
# pass
| 35.142857 | 77 | 0.650407 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 718 | 0.9729 |
0d5bea25696485b9e77b020b40679ffea0d8c825 | 6,295 | py | Python | app/views/v2/scan_results.py | TLSInventory/backend | 81e660e6c54168e0574636771f6425c9a6ed3599 | [
"MIT"
] | 1 | 2021-03-13T09:46:46.000Z | 2021-03-13T09:46:46.000Z | app/views/v2/scan_results.py | TLSInventory/backend | 81e660e6c54168e0574636771f6425c9a6ed3599 | [
"MIT"
] | 14 | 2020-09-20T10:00:24.000Z | 2021-07-10T14:28:54.000Z | app/views/v2/scan_results.py | TLSInventory/backend | 81e660e6c54168e0574636771f6425c9a6ed3599 | [
"MIT"
] | 2 | 2021-01-17T20:24:45.000Z | 2021-01-24T12:03:51.000Z | from typing import List
import app.views.v1.misc
import app.db_models as db_models
from . import bp
from flask import request, jsonify
from loguru import logger
import flask_jwt_extended
import app.db_schemas as db_schemas
import app.utils.authentication_utils as authentication_utils
import app.actions as actions
def get_user_targets_only(user_id: int) -> dict:
    """Serialize the user's targets, annotating each with whether its scan
    order is currently active ("yes"/"no")."""
    rows = db_models.db.session \
        .query(db_models.ScanOrder, db_models.Target) \
        .filter(db_models.ScanOrder.target_id == db_models.Target.id) \
        .filter(db_models.ScanOrder.user_id == user_id) \
        .all()
    serialized = db_schemas.TargetSchema(many=True).dump([row.Target for row in rows])
    # The annotation step relies on dump() producing one dict per row.
    assert len(rows) == len(serialized), "ERROR - Current implementation relies on having the same len for two fields"
    for row, target_dict in zip(rows, serialized):
        target_dict["active"] = 'yes' if row.ScanOrder.active else 'no'
    return serialized
@bp.route('/history/scans_timeline', methods=['GET'])
@bp.route('/history/scans_timeline/<int:x_days>', methods=['GET'])
def api_scan_result_history_without_certs(user_id=None, x_days=30):
    """JSON timeline of the user's scans over the last *x_days*, keyed by
    ScanResultsHistory id and stripped of cipher/certificate payloads.

    Falls back to the JWT identity when *user_id* is not supplied.
    """
    if user_id is None:
        user_id = authentication_utils.get_user_id_from_jwt_or_exception()
    history = actions.get_scan_history(user_id, x_days)
    if history is None:
        return "[]", 200
    server_info_schema = db_schemas.ServerInfoSchemaWithoutCiphers()
    timeline = {}
    for x in history:
        try:
            timeline[x.ScanResultsHistory.id] = {
                "timestamp": x.ScanResultsHistory.timestamp,
                "server_info": server_info_schema.dump(x.ServerInfo),
                "target_id": x.Target.id,
                "scan_result_id": x.ScanResultsSimplified.scanresult_id if x.ScanResultsSimplified else None,
            }
        except Exception as e:
            # Log the offending row, then let the error propagate.
            logger.error(f"{x} | {e}")
            raise
    return jsonify(timeline)
@bp.route('/history/scan_results_simplified', methods=['GET'])
@bp.route('/history/scan_results_simplified/<int:x_days>', methods=['GET'])
def api_get_users_scan_results_simplified(user_id=None, x_days=30):
    """Simplified scan results (certificates stripped) for the user's scan
    history over the last *x_days*, keyed by result id."""
    if user_id is None:
        user_id = authentication_utils.get_user_id_from_jwt_or_exception()
    history = actions.get_scan_history(user_id, x_days)
    if history is None:
        return "[]", 200
    # Some history rows have no simplified result attached; drop the Nones.
    simplified = [row.ScanResultsSimplified for row in history if row.ScanResultsSimplified]
    dumped = db_schemas.ScanResultsSimplifiedWithoutCertsSchema().dump(simplified, many=True)
    return jsonify(db_schemas.convert_arr_of_dicts_to_dict_of_dicts(dumped))
@bp.route('/history/certificate_chains', methods=['GET'])
@bp.route('/history/certificate_chains/<int:x_days>', methods=['GET'])
def api_get_users_certificate_chains(user_id=None, x_days=30):
    """Certificate chains (without the certificate bodies) referenced by
    the user's scan history over the last *x_days*, keyed by chain id."""
    if user_id is None:
        user_id = authentication_utils.get_user_id_from_jwt_or_exception()
    chains = actions.get_certificate_chains(user_id, x_days)
    dumped = db_schemas.CertificateChainSchemaWithoutCertificates().dump(chains, many=True)
    return jsonify(db_schemas.convert_arr_of_dicts_to_dict_of_dicts(dumped))
@bp.route('/history/certificates', methods=['GET'])
@bp.route('/history/certificates/<int:x_days>', methods=['GET'])
def api_get_users_certificates(user_id=None, x_days=30):
    """Full certificate records referenced by the user's certificate chains
    over the last *x_days*, keyed by certificate id."""
    if user_id is None:
        user_id = authentication_utils.get_user_id_from_jwt_or_exception()
    # Chains first, then the certificates they reference.
    chains = actions.get_certificate_chains(user_id, x_days)
    certificates = actions.get_certificates(chains)
    dumped = db_schemas.CertificateSchema().dump(certificates, many=True)
    return jsonify(db_schemas.convert_arr_of_dicts_to_dict_of_dicts(dumped))
def convert_scan_results_to_v1(a, b, c, d, e) -> List[dict]:
    """Stitch the per-entity v2 payloads back into the flat v1 structure.

    Args (a-d are dicts keyed by stringified DB ids, as produced by the
    endpoints above; mutated in place):
        a: scan timeline entries
        b: simplified scan results
        c: certificate chains (``chain_arr`` lists certificate ids)
        d: certificates
        e: list of target dicts for the user

    Returns the timeline entries as a list, each enriched with its
    simplified result, resolved certificate chains and target.
    """
    # Resolve each chain's certificate ids into the full certificate dicts.
    for chain_key in c:
        c[chain_key]["certificate_chain"] = [d[str(x)] for x in c[chain_key]["chain_arr"]]
    # Attach the resolved chains to every simplified result.
    for scan_result_id in b:
        received_certificate_chain_list_id = b[scan_result_id].get("received_certificate_chain_list_id")
        if received_certificate_chain_list_id:
            b[scan_result_id]["received_certificate_chain_list"] = c[str(received_certificate_chain_list_id)]
        b[scan_result_id]["verified_certificate_chains_list"] = \
            [c[str(x)] for x in b[scan_result_id]["verified_certificate_chains_lists_ids_arr"]]
    targets_by_id = db_schemas.convert_arr_of_dicts_to_dict_of_dicts(e)
    # Enrich each timeline entry with its simplified result and target.
    for single_scan_attempt_id in a:
        entry = a[single_scan_attempt_id]
        scan_result_id = entry["scan_result_id"]
        if scan_result_id:
            entry["result_simplified"] = b[str(scan_result_id)]
        entry["target"] = targets_by_id[entry["target_id"]]
    # dict preserves insertion order, so this matches the original
    # append-in-key-order construction.
    return list(a.values())
@bp.route('/history/scan_results', methods=['GET'])
@bp.route('/history/scan_results/<int:x_days>', methods=['GET'])
def api_scan_results_history_v2(user_id=None, x_days=30):
    """Assemble a v1-compatible scan-result history by querying the
    per-entity v2 endpoints and stitching their payloads back together,
    sorted by timestamp."""
    if user_id is None:
        user_id = authentication_utils.get_user_id_from_jwt_or_exception()
    logger.debug("before API requests")
    timeline = api_scan_result_history_without_certs(user_id, x_days).json
    simplified = api_get_users_scan_results_simplified(user_id, x_days).json
    chains = api_get_users_certificate_chains(user_id, x_days).json
    certificates = api_get_users_certificates(user_id, x_days).json
    targets = get_user_targets_only(user_id)
    logger.debug("after API requests")
    combined = convert_scan_results_to_v1(timeline, simplified, chains, certificates, targets)
    combined = sorted(combined, key=lambda entry: entry["timestamp"])
    logger.debug("after conversion of scan_results for backwards compatibility")
    return jsonify(combined)
| 38.384146 | 147 | 0.727244 | 0 | 0 | 0 | 0 | 4,087 | 0.649245 | 0 | 0 | 1,191 | 0.189198 |
0d5bf4fc7b9f858d534dd913cfd8d7635732cd1a | 3,305 | py | Python | gameOfLife.py | andrewKv/pythonGraphicsPrograms | a24b3e56e729820f191fed58c2a01aa9b947a7b6 | [
"MIT"
] | null | null | null | gameOfLife.py | andrewKv/pythonGraphicsPrograms | a24b3e56e729820f191fed58c2a01aa9b947a7b6 | [
"MIT"
] | null | null | null | gameOfLife.py | andrewKv/pythonGraphicsPrograms | a24b3e56e729820f191fed58c2a01aa9b947a7b6 | [
"MIT"
] | null | null | null | from Graphics import *
# Pixel size of one grid square.
CELL_SIZE = 20
# Board dimensions in cells.  NOTE(review): ROWS and COLUMNS are used
# interchangeably elsewhere (showEmptyGrid, getNeighbs) — keep them equal.
ROWS, COLUMNS = 40, 40
class Cell:
    """One square of the life grid, drawn at a fixed pixel position."""

    def __init__(self, pos):
        self.pos = pos            # top-left corner in pixels: [x, y]
        self.alive = False
        self.flipNextGen = False  # marked in pass 1, applied in pass 2

    def switch(self):
        """Toggle this cell between alive and dead."""
        self.alive = not self.alive

    def draw(self, win):
        """Render the cell on *win*: black when alive, white when dead."""
        left, top = self.pos
        square = Rectangle(Point(left, top), Point(left + CELL_SIZE, top + CELL_SIZE))
        square.setFill("black" if self.alive else "white")
        square.draw(win)
def showEmptyGrid():
    """Create the game window and a flat, row-major list of dead cells.

    Note: the outer (row) range uses COLUMNS and the inner uses ROWS;
    identical here only because both are 40.
    """
    win = GraphWin("Game of Life", 500, 500)
    cells = []
    for top in range(0, COLUMNS * CELL_SIZE, CELL_SIZE):
        for left in range(0, ROWS * CELL_SIZE, CELL_SIZE):
            cell = Cell([left, top])
            cells.append(cell)
            cell.draw(win)
    return win, cells
def clickToGrid(pos):
    """Snap a mouse click (a graphics Point) to the nearest cell corner."""
    def snap(value):
        # Round to the nearest multiple of CELL_SIZE.
        return int(CELL_SIZE * round(float(value) / CELL_SIZE))
    return snap(pos.getX()), snap(pos.getY())
def inputToGrid(win, cGrid):
    """Let the user toggle cells by clicking; stop once space is pressed.

    Returns the (mutated) cell list.
    """
    while True:
        clicked = clickToGrid(win.getMouse())
        for cell in cGrid:
            if cell.pos == [clicked[0], clicked[1]]:
                cell.switch()
                cell.draw(win)
        # The key check happens after each click, mirroring the original.
        if win.checkKey() == "space":
            break
    return cGrid
def getNeighbs(c, cGrid):
    """Count the live neighbours of cell *c* in the flat, row-major grid.

    Bug fix: the Top and Bottom checks were unguarded, so top-row cells
    read a negative index (silently wrapping to the bottom row and giving
    wrong counts) and bottom-row cells indexed past the end of the list
    (IndexError).  Both now carry the same row guards the corner checks
    already used.
    """
    neighbs = 0
    cPlace = cGrid.index(c)
    # Convert the pixel position back to cell coordinates.
    x = c.pos[0]/CELL_SIZE
    y = c.pos[1]/CELL_SIZE
    squarePerRow = COLUMNS
    if x > 0:  # a column exists to the left
        if cGrid[cPlace - 1].alive:  # Left
            neighbs += 1
        if y > 0 and cGrid[cPlace - (squarePerRow + 1)].alive:  # Top Left
            neighbs += 1
        if y < ROWS - 1 and cGrid[cPlace + (squarePerRow - 1)].alive:  # Bottom Left
            neighbs += 1
    if y > 0 and cGrid[cPlace - squarePerRow].alive:  # Top (guard added)
        neighbs += 1
    if x < COLUMNS - 1:  # a column exists to the right
        if cGrid[cPlace + 1].alive:  # Right
            neighbs += 1
        if y > 0 and cGrid[cPlace - (squarePerRow - 1)].alive:  # Top Right
            neighbs += 1
        if y < ROWS - 1 and cGrid[cPlace + (squarePerRow + 1)].alive:  # Bottom Right
            neighbs += 1
    if y < ROWS - 1 and cGrid[cPlace + squarePerRow].alive:  # Bottom (guard added)
        neighbs += 1
    return neighbs
def runSimulation(win, cGrid):
    """Run generations until the window is clicked.

    Each generation takes two passes: pass 1 marks the cells whose state
    must change (so every rule sees the *current* board), pass 2 applies
    the marks and redraws.
    """
    while win.checkMouse() is None:
        # Pass 1: decide, but do not yet apply, the next generation.
        for cell in cGrid:
            liveNeighbours = getNeighbs(cell, cGrid)
            if cell.alive:
                # Under-/over-population kills; 2 or 3 neighbours survive.
                if liveNeighbours < 2 or liveNeighbours > 3:
                    cell.flipNextGen = True
            elif liveNeighbours == 3:
                # Exactly three neighbours births a new cell.
                cell.flipNextGen = True
        # NOTE(review): `time` is not imported in this file; it appears to
        # arrive via `from Graphics import *` — confirm.
        time.sleep(0.05)
        # Pass 2: flip the marked cells and redraw them.
        for cell in cGrid:
            if cell.flipNextGen:
                cell.switch()
                cell.flipNextGen = False
                cell.draw(win)
def main():
    """Entry point: build the empty board, let the user seed live cells by
    clicking, then run the simulation.

    Space stops the click-input phase; clicking anywhere during the
    simulation ends it.
    """
    win, grid = showEmptyGrid()
    grid = inputToGrid(win, grid)
    runSimulation(win, grid)
main()
| 29.508929 | 111 | 0.500454 | 437 | 0.132224 | 0 | 0 | 0 | 0 | 0 | 0 | 469 | 0.141906 |
0d5c7cacb296792d24de984ae8cecf8793347e8c | 679 | py | Python | projecteuler/projectEuler69.py | qingfengxia/python-projecteuler | a2cba042fe7256364f6a5fa55df805a87da9a301 | [
"BSD-2-Clause-FreeBSD"
] | null | null | null | projecteuler/projectEuler69.py | qingfengxia/python-projecteuler | a2cba042fe7256364f6a5fa55df805a87da9a301 | [
"BSD-2-Clause-FreeBSD"
] | null | null | null | projecteuler/projectEuler69.py | qingfengxia/python-projecteuler | a2cba042fe7256364f6a5fa55df805a87da9a301 | [
"BSD-2-Clause-FreeBSD"
] | null | null | null | # -*- coding: utf-8 -*-
from __future__ import print_function, unicode_literals, absolute_import, division
"""
Find the value of n ≤ 1,000,000 for which n/φ(n) is a maximum.
"""
from factorization import primefactorize, totient
def problem69():
    """Project Euler 69: print the n <= 1,000,000 maximising n/phi(n).

    Brute force over totients.  (The closed-form answer is simply the
    largest primorial under the limit: 2*3*5*7*11*13*17 = 510510.)
    """
    limit = 10 ** 6
    best_ratio = 2.0
    best_n = 2
    for n in range(3, limit + 1):
        ratio = float(n) / totient(n)
        if ratio > best_ratio:
            best_ratio = ratio
            best_n = n
    print("maximum ratio is found at n=:", best_n)
if __name__ == "__main__":
#test()
problem69() | 25.148148 | 103 | 0.569956 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 269 | 0.394428 |
0d5d262e5b19e0ba4bdabadc738b737c68da0c82 | 5,018 | py | Python | games/pathfinder.py | wnormandin/resources | 43be223b0c66e944985357a6d23891b551ac2937 | [
"MIT"
] | null | null | null | games/pathfinder.py | wnormandin/resources | 43be223b0c66e944985357a6d23891b551ac2937 | [
"MIT"
] | null | null | null | games/pathfinder.py | wnormandin/resources | 43be223b0c66e944985357a6d23891b551ac2937 | [
"MIT"
] | null | null | null | #!/usr/bin/env python2.7
# -*- coding: utf-8 -*-
import collections
import heapq
class Queue:
    """FIFO queue backed by collections.deque()."""

    def __init__(self):
        self.elements = collections.deque()

    def empty(self):
        """Return True when no elements are queued."""
        return not len(self.elements)

    def put(self, x):
        """Enqueue *x* at the back."""
        self.elements.append(x)

    def get(self):
        """Dequeue and return the front element."""
        return self.elements.popleft()
class PriorityQueue:
    """Min-priority queue backed by a binary heap (heapq)."""

    def __init__(self):
        self.elements = []

    def empty(self):
        """Return True when the heap is empty."""
        return not self.elements

    def put(self, item, priority):
        """Insert *item*; lower *priority* values pop first."""
        heapq.heappush(self.elements, (priority, item))

    def get(self):
        """Pop and return the item with the lowest priority."""
        (priority, item) = heapq.heappop(self.elements)
        return item
class Pathfinder:
    """Grid pathfinding via A*, breadth-first, or greedy best-first search.

    Usage: construct with an algorithm constant, assign ``g`` (an object
    exposing ``neighbor(id)`` and ``cost(a, b)``), ``start`` and ``dest``,
    then call ``execute()``.

    Bug fixes relative to the original:
      * The A* method was named ``a_star``, which (being defined later in
        the class body) silently replaced the ``a_star = 0`` algorithm
        constant, so ``self.alg == Pathfinder.a_star`` could never be true
        and the default algorithm always returned False.  The method is
        now ``a_star_search``.
      * ``bf_search`` called the undefined names ``graph.neighbors`` and
        bare ``heuristic``, and passed a priority to the one-argument FIFO
        ``Queue.put``; plain BFS needs no heuristic at all.
    """

    # Algorithm selector constants.
    a_star = 0    # A* algorithm (default)
    b_first = 1   # Breadth first
    gb_first = 2  # Greedy best-first

    def __init__(self,alg=0):
        self.early_exit = True   # stop as soon as the goal is dequeued
        self.impediments = []
        self.alg = alg
        self.g = None            # graph to search (assigned by caller)
        self.start = None        # start coordinate
        self.dest = None         # goal coordinate

    def execute(self):
        """Run the configured search and return its result, or False when
        ``alg`` matches no known algorithm."""
        assert self.g is not None, 'Graph (Pathfinder.g) not initialized!'
        assert self.start is not None, 'Start point not specified!'
        assert self.dest is not None, 'End point not specified!'
        if self.alg == Pathfinder.a_star:
            results = self.a_star_search(self.g, self.start, self.dest)
        elif self.alg == Pathfinder.b_first:
            results = self.bf_search(self.g, self.start, self.dest)
        elif self.alg == Pathfinder.gb_first:
            results = self.gbf_search(self.g, self.start, self.dest)
        else:
            results = False
        return results

    def gbf_search(self,graph,start,goal):
        """Greedy Best-First search: always expand the node the heuristic
        says is closest to the goal.  Returns the came_from map."""
        frontier = PriorityQueue()
        frontier.put(start, 0)
        came_from = {}
        came_from[start] = None
        while not frontier.empty():
            current = frontier.get()
            if current == goal and self.early_exit:
                break
            for next in graph.neighbor(current):
                if next not in came_from:
                    priority = self.heuristic(goal, next)
                    frontier.put(next, priority)
                    came_from[next] = current
        return came_from

    def bf_search(self,graph,start,goal):
        """Breadth-First search.  Returns a visited map (node -> True)."""
        frontier = Queue()
        frontier.put(start)
        came_from = {}
        came_from[start] = True
        while not frontier.empty():
            current = frontier.get()
            # Early exit point, optional for breadth-first searching (faster).
            if current == goal and self.early_exit:
                break
            for next in graph.neighbor(current):   # was graph.neighbors (undefined)
                if next not in came_from:
                    frontier.put(next)             # plain FIFO: no priority
                    came_from[next] = True
        return came_from

    def a_star_search(self,graph,start,goal):
        """A*: expand by cost-so-far plus heuristic.
        Returns (came_from, cost_so_far)."""
        frontier = PriorityQueue()
        frontier.put(start, 0)
        came_from = {}
        cost_so_far = {}
        came_from[start] = None
        cost_so_far[start] = 0
        while not frontier.empty():
            current = frontier.get()
            if current == goal and self.early_exit:
                break
            for next in graph.neighbor(current):
                new_cost = cost_so_far[current] + graph.cost(current, next)
                if next not in cost_so_far or new_cost < cost_so_far[next]:
                    cost_so_far[next] = new_cost
                    priority = new_cost + self.heuristic(goal, next)
                    frontier.put(next, priority)
                    came_from[next] = current
        return came_from, cost_so_far

    def heuristic(self,a,b):
        """Manhattan distance between grid coordinates *a* and *b*."""
        (x1, y1) = a
        (x2, y2) = b
        return abs(x1 - x2) + abs(y1 - y2)
class GraphGrid:
    """Rectangular grid graph; nodes are (x, y) coordinate tuples."""

    def __init__(self,grid):
        # Infer dimensions from the nested-list grid.
        self.dim_y = len(grid)
        self.dim_x = len(grid[0])
        self.impassable = None  # caller must assign a list of blocked coords

    def in_bounds(self, id):
        """True when *id* lies inside the grid."""
        (x, y) = id
        return 0 <= x < self.dim_x and 0 <= y < self.dim_y

    def passable(self, id):
        """True when *id* is not a blocked tile."""
        assert self.impassable is not None, 'Not assigned! grid.passable'
        return id not in self.impassable

    def neighbor(self, id):
        """Iterate the in-bounds, passable 4-neighbours of *id*.

        Alternating the candidate order by tile parity yields
        nicer-looking paths.
        """
        (x, y) = id
        candidates = [(x + 1, y), (x, y - 1), (x - 1, y), (x, y + 1)]
        if (x + y) % 2 == 0:
            candidates.reverse()
        candidates = filter(self.in_bounds, candidates)
        return filter(self.passable, candidates)

    def cost(self,start,end):
        """Uniform movement cost between adjacent tiles."""
        return 1
| 27.571429 | 74 | 0.567955 | 4,927 | 0.981865 | 0 | 0 | 0 | 0 | 0 | 0 | 763 | 0.152053 |
0d5d3731a59994ba117b8a32c3775bfdf7b49b71 | 1,259 | py | Python | 7. Trees/binary_euler_tour.py | vivek28111992/data_structure_and_algorithm_in_python_practice | 16cb3ba5d02049352b40482de647acaad4b3b44a | [
"MIT"
] | null | null | null | 7. Trees/binary_euler_tour.py | vivek28111992/data_structure_and_algorithm_in_python_practice | 16cb3ba5d02049352b40482de647acaad4b3b44a | [
"MIT"
] | null | null | null | 7. Trees/binary_euler_tour.py | vivek28111992/data_structure_and_algorithm_in_python_practice | 16cb3ba5d02049352b40482de647acaad4b3b44a | [
"MIT"
] | null | null | null | # Binary Euler Tour
# A Binary Euler Tour base class providing a specialized tour for binary tree.
from eulerTour import EulerTour
class BinaryEulerTour(EulerTour):
    """Abstract base class for performing an Euler tour of a binary tree.

    This version includes an additional _hook_invisit that is called after
    the tour of the left subtree (if any), yet before the tour of the
    right subtree (if any).

    Note: the right child is always assigned index 1 in path, even when
    there is no left sibling.
    """

    def _tour(self, p, d, path):
        """Tour the subtree rooted at position *p* (at depth *d*, reached
        via *path* from the root), invoking the pre/in/post visit hooks."""
        results = [None, None]               # will update with results of recursions
        # Bug fix: was self._hook._previsit, which looked up a nonexistent
        # _hook attribute instead of calling the inherited hook method
        # (cf. the method-style _hook_invisit/_hook_postvisit calls below).
        self._hook_previsit(p, d, path)      # "pre visit" for p
        if self._tree.left(p) is not None:   # consider left child
            path.append(0)
            results[0] = self._tour(self._tree.left(p), d + 1, path)
            path.pop()
        self._hook_invisit(p, d, path)       # "in visit" for p
        if self._tree.right(p) is not None:  # consider right child
            path.append(1)
            results[1] = self._tour(self._tree.right(p), d + 1, path)
            path.pop()
        answer = self._hook_postvisit(p, d, path, results)
        return answer

    def _hook_invisit(self, p, d, path):
        """Visit hook between the left and right subtrees; no-op by default."""
        pass
| 41.966667 | 164 | 0.625894 | 1,125 | 0.893566 | 0 | 0 | 0 | 0 | 0 | 0 | 541 | 0.429706 |
0d5d6ad9d06bd923dd1bb90a10f3405d61fda119 | 200 | py | Python | wallet/admin.py | curanetwork/curwallet | fef5896a6d4b9f99c03d71ab0385655cdadf9a6e | [
"MIT"
] | 5 | 2018-11-26T16:35:40.000Z | 2019-01-14T02:35:47.000Z | wallet/admin.py | curanetwork/curwallet | fef5896a6d4b9f99c03d71ab0385655cdadf9a6e | [
"MIT"
] | null | null | null | wallet/admin.py | curanetwork/curwallet | fef5896a6d4b9f99c03d71ab0385655cdadf9a6e | [
"MIT"
] | null | null | null | from django.contrib import admin
from base.conf import settings
# Brand the Django admin UI with the configured ICO token name.
admin.site.site_title = f'{settings.ICO_TOKEN_NAME} Wallet'
admin.site.site_header = f'{settings.ICO_TOKEN_NAME} Wallet Administration' | 40 | 75 | 0.825 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 85 | 0.425 |
0d5d8e4687fb579fa2049d6dbbb448c87e9ea36b | 462 | py | Python | shoppinglist/models.py | christiankuhl/foodplanner | 20c4a577849bf0ba9304f82f43c307552e846bf2 | [
"MIT"
] | null | null | null | shoppinglist/models.py | christiankuhl/foodplanner | 20c4a577849bf0ba9304f82f43c307552e846bf2 | [
"MIT"
] | null | null | null | shoppinglist/models.py | christiankuhl/foodplanner | 20c4a577849bf0ba9304f82f43c307552e846bf2 | [
"MIT"
] | null | null | null | from django.db import models
# Create your models here.
class Ingredient(models.Model):
    """A single shopping-list ingredient entry, tied to an account member
    and (optionally) a planned meal/date."""
    # Owning account and the household member the entry belongs to.
    account = models.CharField(max_length=255)
    member = models.CharField(max_length=255)
    # Optional back-reference to the meal this ingredient was planned for.
    ref_meal = models.CharField(max_length=255,blank=True)
    # NOTE(review): blank=True without null=True on a DateField means an
    # empty form value cannot actually be stored — confirm intended.
    ref_date = models.DateField(blank=True)
    ingredient = models.CharField(max_length=255)
    created = models.DateTimeField(auto_now_add=True)
    # Presumably marks whether the ingredient is already on hand — confirm
    # against the shoppinglist views.
    ingredient_there = models.BooleanField()
| 38.5 | 62 | 0.714286 | 404 | 0.874459 | 0 | 0 | 0 | 0 | 0 | 0 | 26 | 0.056277 |