import pandas
import numpy as np
import matplotlib.pyplot as plt
from sklearn.model_selection import train_test_split
from sklearn.linear_model import LinearRegression
from sklearn.metrics import mean_squared_error
# Read data
games = pandas.read_csv("game.csv")
print(games.columns)
print(games.shape)
plt.hist(games["average_rating"])
plt.show()
# Inspect the first row of games with a zero score, and of games with scores greater than 0.
# print(games[games["average_rating"] == 0].iloc[0])
# print(games[games["average_rating"] > 0].iloc[0])
# Remove any rows without user reviews.
games = games[games["users_rated"] > 0]
# Remove any rows with missing values.
games = games.dropna(axis=0)
print(games.corr()["average_rating"])
# Get all the columns from the dataframe.
columns = games.columns.tolist()
# Filter the columns to remove ones we don't want.
columns = [c for c in columns if c not in ["bayes_average_rating", "average_rating", "type", "name"]]
# Store the variable we'll be predicting on.
target = "average_rating"
# Generate the training set. Set random_state to be able to replicate results.
train = games.sample(frac=0.9, random_state=1)
# Select anything not in the training set and put it in the testing set.
test = games.loc[~games.index.isin(train.index)]
# Print the shapes of both sets.
print(train.shape)
print(test.shape)
model = LinearRegression()
# Fit the model to the training data.
model.fit(train[columns], train[target])
# Generate our predictions for the test set.
predictions = model.predict(test[columns])
# Plot predicted vs. actual ratings on the test set.
fig, ax = plt.subplots(figsize=(12, 8))
ax.scatter(test[target], predictions, label='Test data')
ax.legend(loc=2)
ax.set_xlabel('Actual rating')
ax.set_ylabel('Predicted rating')
ax.set_title('Predicted vs. Actual Average Rating')
plt.show()
# Compute error between our test predictions and the actual values.
print(test["average_rating"])
print(predictions)
print('Mean squared error:', mean_squared_error(test[target], predictions))
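# Note: train_test_split (imported above but otherwise unused) can produce a
# comparable 90/10 split in one call; a minimal sketch using the names defined above:
X_train, X_test, y_train, y_test = train_test_split(
    games[columns], games[target], test_size=0.1, random_state=1)
print(X_train.shape, X_test.shape)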
|
import komand
from .schema import ReadIncidentInput, ReadIncidentOutput, Input, Output, Component
# Custom imports below
from komand.exceptions import PluginException
class ReadIncident(komand.Action):
def __init__(self):
super(self.__class__, self).__init__(
name='read_incident',
description=Component.DESCRIPTION,
input=ReadIncidentInput(),
output=ReadIncidentOutput())
def run(self, params={}):
url = f'{self.connection.incident_url}/{params.get(Input.SYSTEM_ID)}'
method = "get"
response = self.connection.request.make_request(url, method)
fields = params.get(Input.FILTERING_FIELDS).split(",")
filtered_incident = {}
try:
for field in fields:
filtered_incident[field] = response["resource"]["result"].get(field, "")
except KeyError as e:
raise PluginException(preset=PluginException.Preset.UNKNOWN,
                      data=response) from e
return {
Output.FILTERED_INCIDENT: filtered_incident
}
|
# Copyright ClusterHQ Inc. See LICENSE file for details.
from __future__ import absolute_import
from functools import partial
from characteristic import attributes
from eliot import write_failure, Message, MessageType, Field
from effect import TypeDispatcher, ComposedDispatcher
from txeffect import (
make_twisted_dispatcher,
perform,
deferred_performer
)
from twisted.conch.endpoints import (
SSHCommandClientEndpoint,
# https://twistedmatrix.com/trac/ticket/7861
_NewConnectionHelper,
# https://twistedmatrix.com/trac/ticket/7862
_ReadFile, ConsoleUI,
_CommandChannel,
)
from twisted.conch.ssh.common import NS
import twisted.conch.ssh.session as session
from twisted.conch.client.knownhosts import KnownHostsFile
from twisted.internet.defer import Deferred, inlineCallbacks, CancelledError
from twisted.internet.endpoints import UNIXClientEndpoint, connectProtocol
from twisted.internet.error import ConnectionDone
from twisted.protocols.basic import LineOnlyReceiver
from twisted.python.filepath import FilePath
import os
from ...common import loop_until, timeout
from ._model import (
Run, Sudo, Put, Comment, RunRemotely, perform_comment, perform_put,
perform_sudo)
from .._effect import dispatcher as base_dispatcher
RUN_OUTPUT_MESSAGE = MessageType(
message_type="flocker.provision.ssh:run:output",
fields=[
Field.for_types(u"line", [bytes], u"The output."),
],
description=u"A line of command output.",
)
def extReceived(self, ext_type, data):
from twisted.conch.ssh.connection import EXTENDED_DATA_STDERR
if ext_type == EXTENDED_DATA_STDERR:
self.dataReceived(data)
@attributes([
"deferred",
"context",
])
class CommandProtocol(LineOnlyReceiver, object):
"""
Protocol that logs the lines of a remote command.
:ivar Deferred deferred: Deferred to fire when the command finishes
If the command finished successfully, will fire with ``None``.
Otherwise, errbacks with the reason.
:ivar Message context: The eliot message context to log.
"""
delimiter = b'\n'
def connectionMade(self):
self.transport.disconnecting = False
# SSHCommandClientEndpoint doesn't support capturing stderr.
# We patch the SSHChannel to interleave it.
# https://twistedmatrix.com/trac/ticket/7893
self.transport.extReceived = partial(extReceived, self)
def connectionLost(self, reason):
if reason.check(ConnectionDone):
self.deferred.callback(None)
else:
self.deferred.errback(reason)
def lineReceived(self, line):
self.context.bind(
message_type="flocker.provision.ssh:run:output",
line=line,
).write()
class CommandChannelWithTTY(_CommandChannel):
"""
CentOS/RHEL wants us to have a pty in order to run commands with sudo.
Create a pty that won't be used when creating the channel.
"""
def channelOpen(self, ignored):
"""
Create a pty by sending a pty-req to the server
"""
term = 'xterm'
winSize = (25, 80, 0, 0)
ptyReqData = session.packRequest_pty_req(term, winSize, '')
self.conn.sendRequest(self, 'pty-req', ptyReqData)
command = self.conn.sendRequest(
self, 'exec', NS(self._command), wantReply=True)
command.addCallbacks(self._execSuccess, self._execFailure)
class SSHCommandClientEndpointWithTTY(SSHCommandClientEndpoint):
"""
Subclass that spawns a TTY when connecting.
"""
def _executeCommand(self, connection, protocolFactory):
"""
Given a secured SSH connection, try to execute a command in a new
channel created on it and associate the result with a protocol from the
given factory.
@param connection: See L{SSHCommandClientEndpoint.existingConnection}'s
C{connection} parameter.
@param protocolFactory: See L{SSHCommandClientEndpoint.connect}'s
C{protocolFactory} parameter.
@return: See L{SSHCommandClientEndpoint.connect}'s return value.
"""
def disconnectOnFailure(passthrough):
# Close the connection immediately in case of cancellation, since
# that implies user wants it gone immediately (e.g. a timeout):
immediate = passthrough.check(CancelledError)
self._creator.cleanupConnection(connection, immediate)
return passthrough
commandConnected = Deferred()
commandConnected.addErrback(disconnectOnFailure)
channel = CommandChannelWithTTY(
self._creator, self._command, protocolFactory, commandConnected)
connection.openChannel(channel)
return commandConnected
def get_ssh_dispatcher(connection, context):
"""
:param Message context: The eliot message context to log.
:param connection: The SSH connection run commands on.
"""
@deferred_performer
def perform_run(dispatcher, intent):
context.bind(
message_type="flocker.provision.ssh:run",
command=intent.log_command_filter(intent.command),
).write()
endpoint = SSHCommandClientEndpointWithTTY.existingConnection(
connection, intent.command)
d = Deferred()
connectProtocol(endpoint, CommandProtocol(
deferred=d, context=context))
return d
return TypeDispatcher({
Run: perform_run,
Sudo: perform_sudo,
Put: perform_put,
Comment: perform_comment,
})
def get_connection_helper(reactor, address, username, port):
"""
Get a :class:`twisted.conch.endpoints._ISSHConnectionCreator` to connect to
the given remote.
:param reactor: Reactor to connect with.
:param bytes address: The address of the remote host to connect to.
:param bytes username: The user to connect as.
:param int port: The port of the ssh server to connect to.
:return _ISSHConnectionCreator:
"""
try:
agentEndpoint = UNIXClientEndpoint(
reactor, os.environ["SSH_AUTH_SOCK"])
except KeyError:
agentEndpoint = None
return _NewConnectionHelper(
reactor, address, port, None, username,
keys=None,
password=None,
agentEndpoint=agentEndpoint,
knownHosts=KnownHostsFile.fromPath(FilePath("/dev/null")),
ui=ConsoleUI(lambda: _ReadFile(b"yes")))
@deferred_performer
@inlineCallbacks
def perform_run_remotely(reactor, base_dispatcher, intent):
connection_helper = get_connection_helper(
reactor,
username=intent.username, address=intent.address, port=intent.port)
context = Message.new(
username=intent.username, address=intent.address, port=intent.port)
def connect():
connection = connection_helper.secureConnection()
connection.addErrback(write_failure)
timeout(reactor, connection, 30)
return connection
connection = yield loop_until(reactor, connect)
dispatcher = ComposedDispatcher([
get_ssh_dispatcher(
connection=connection,
context=context,
),
base_dispatcher,
])
yield perform(dispatcher, intent.commands)
# Work around https://twistedmatrix.com/trac/ticket/8138 by reaching deep
# into a different layer and closing a leaked connection.
if (connection.transport and
connection.transport.instance and
connection.transport.instance.agent):
connection.transport.instance.agent.transport.loseConnection()
# Set the agent to None as the agent is unusable and cleaned up at this
# point.
connection.transport.instance.agent = None
yield connection_helper.cleanupConnection(
connection, False)
def make_dispatcher(reactor):
return ComposedDispatcher([
TypeDispatcher({
RunRemotely: partial(perform_run_remotely, reactor),
}),
make_twisted_dispatcher(reactor),
base_dispatcher,
])
|
#!/bin/python3
import os, requests, zipfile, tempfile
t = tempfile.TemporaryFile()
print("Zipping lfs to temp ...")
def zipdir(path):
    ziph = zipfile.ZipFile(t, 'w', zipfile.ZIP_DEFLATED)
    # ziph is zipfile handle
    for root, dirs, files in os.walk(path):
        for file in files:
            ziph.write(os.path.join(root, file))
    # Close the archive so the central directory is written to the temp file.
    ziph.close()
zipdir(os.path.join(os.path.dirname(__file__), "lfs"))
t.seek(0)
print("Zipped.")
print("Uploading zip to blog.ellisons.org.uk ...")
resp = requests.post("https://blog.ellisons.org.uk/luac", files={'userfile': ("lfs.zip", t)})
print("{} {}".format(resp.status_code, resp.reason))
if resp.status_code == 200:
print("Success! Writing lfs.img ...")
with open("lfs.img", 'wb') as f:
f.write(resp.content)
|
import requests
from bs4 import BeautifulSoup
liste = []
url = "https://www.webtekno.com/"
veri = requests.get(url)
html = veri.content
parcala = BeautifulSoup(html, "html.parser")
for i in parcala.find_all("span",{"class":"content-timeline--underline"}):
liste.append(i.text)
for x in liste:
print(x)
|
import PySide2
from PySide2 import QtCore, QtWidgets, QtGui
class EmbeddedWidget(QtWidgets.QWidget):
def mousePressEvent(self, event:PySide2.QtGui.QMouseEvent):
print("EmbeddedWidget mousePress", event)
return super(EmbeddedWidget, self).mousePressEvent(event)
class EmbeddedChild(QtWidgets.QLabel):
def __init__(self, parent=None):
super(EmbeddedChild, self).__init__(parent=parent)
self.setText("Widget")
def mousePressEvent(self, event:PySide2.QtGui.QMouseEvent):
print("EmbeddedChild mousePress", event)
return super(EmbeddedChild, self).mousePressEvent(event)
class MyProxyWidget(QtWidgets.QGraphicsProxyWidget):
def mousePressEvent(self, event:PySide2.QtWidgets.QGraphicsSceneMouseEvent):
print("Proxy mousePress", event)
return super(MyProxyWidget, self).mousePressEvent(event)
class MyGraphicsRect(QtWidgets.QGraphicsRectItem):
def mousePressEvent(self, event:PySide2.QtWidgets.QGraphicsSceneMouseEvent):
print("Rect mousePress", event)
return super(MyGraphicsRect, self).mousePressEvent(event)
class MyGraphicsScene(QtWidgets.QGraphicsScene):
def mousePressEvent(self, event:PySide2.QtWidgets.QGraphicsSceneMouseEvent):
print("Scene mousePress", event)
return super(MyGraphicsScene, self).mousePressEvent(event)
class MyGraphicsView(QtWidgets.QGraphicsView):
def mousePressEvent(self, event:PySide2.QtGui.QMouseEvent):
print("View mousePress", event)
return super(MyGraphicsView, self).mousePressEvent(event)
class MainWindow(QtWidgets.QMainWindow):
def __init__(self):
super(MainWindow, self).__init__()
self.view = MyGraphicsView(self)
self.view.setObjectName("view")
self.scene = MyGraphicsScene(self)
self.view.setScene(self.scene)
self.view.setBackgroundBrush(QtGui.QBrush(
QtCore.Qt.red, QtCore.Qt.SolidPattern))
# set up embedded widgets
rect = MyGraphicsRect()
proxy = MyProxyWidget(parent=rect)
proxy.setObjectName("proxy")
#parent = EmbeddedWidget(parent=proxy)
#parent = EmbeddedWidget()
#parent = EmbeddedWidget(parent=self) # works
# parent = EmbeddedWidget(parent=self.scene) # fails
parent = EmbeddedWidget(parent=self.view)
parent.setObjectName("parent")
child = EmbeddedChild(parent=parent)
child.setObjectName("child")
proxy.setWidget(parent)
self.scene.addItem(rect)
self.scene.addItem(proxy)
self.setCentralWidget(self.view)
def mousePressEvent(self, event:PySide2.QtGui.QMouseEvent):
print("MainWindow mousePress", event)
pos = QtGui.QCursor.pos()
ws = QtWidgets.QApplication.instance().widgetAt(pos)#type:QtWidgets.QWidget
print("ws", ws)
if ws is not None:
print(ws.objectName())
print(ws.parent())
#print(type(ws))
print("")
print("---")
print("")
return super(MainWindow, self).mousePressEvent(event)
"""
When clicking on EmbeddedChild, event flow is thus:
View mousePress <PySide2.QtGui.QMouseEvent object at 0x000001F2FFD71448>
Scene mousePress <PySide2.QtWidgets.QGraphicsSceneMouseEvent(GraphicsSceneMousePress, LeftButton, pos=0,0, scenePos=19,9, screenPos=692,302) at 0x000001F2FFD71548>
Proxy mousePress <PySide2.QtWidgets.QGraphicsSceneMouseEvent(GraphicsSceneMousePress, LeftButton, pos=19,9, scenePos=19,9, screenPos=692,302) at 0x000001F2FFD71548>
EmbeddedChild mousePress <PySide2.QtGui.QMouseEvent object at 0x000001F2FFD71588>
EmbeddedWidget mousePress <PySide2.QtGui.QMouseEvent object at 0x000001F2FFD71588>
MainWindow mousePress <PySide2.QtGui.QMouseEvent object at 0x000001F2FFD71448>
Compare to flow when clicking only on the QGraphicsView:
View mousePress <PySide2.QtGui.QMouseEvent object at 0x00000200DB050208>
Scene mousePress <PySide2.QtWidgets.QGraphicsSceneMouseEvent(GraphicsSceneMousePress, LeftButton, pos=0,0, scenePos=60,7, screenPos=733,300) at 0x00000200DB050288>
MainWindow mousePress <PySide2.QtGui.QMouseEvent object at 0x00000200DB050208>
How should the View and Scene know where the event originates from?
How should they know if it is a click event on graphics, or on a widget?
FIXED with call to scene.addItem(proxy)
"""
def show():
import sys
app = QtWidgets.QApplication(sys.argv)
w = MainWindow()
w.show()
sys.exit(app.exec_())
if __name__ == '__main__':
show()
|
import gym
from gym.wrappers import Monitor
import os
import sys
import random
import numpy as np
import matplotlib.pyplot as plt
class Control:
    # ----- Hyperparameters (class-level defaults) -----
    total_episodes = 50000  # Total episodes
    learning_rate = 0.8     # Learning rate
    max_steps = 2000        # Max steps per episode
    gamma = 0.99            # Discount factor
    epsilon = 0.9           # Minimum epsilon for epsilon-greedy policies
    env = ""
    monitor_path = ""       # Path for recording monitor videos
    record = False          # Whether to record videos with Monitor
def __init__(self,total_episodes,learning_rate,max_steps,gamma,env,epsilon,path):
self.monitor_path = os.path.join(path, "monitor")
self.total_episodes = total_episodes
self.learning_rate = learning_rate
self.max_steps = max_steps
self.gamma = gamma
self.epsilon = epsilon
if not os.path.exists(self.monitor_path):
os.makedirs(self.monitor_path)
self.env = gym.make(env)
def greedy_policy_action(self, q_table, state, i_episode):
    # Epsilon decays geometrically per episode, floored at self.epsilon
    # (closed form of repeatedly applying eps = max(self.epsilon, eps * decay)).
    decay = 0.9999
    eps = max(self.epsilon, 0.9 * decay ** i_episode)
    if random.uniform(0, 1) < eps:
        return self.env.action_space.sample()
    else:
        return np.argmax(q_table[state], axis=0)
def results(self,q_table,rewards):
print(q_table)
plt.plot(rewards)
plt.show()
self.env.render()
def SARSA(self):
"""
SARSA using epsilon greedy policy.
env: OpenAI Gym Environment
"""
# q- value table
state_size = self.env.observation_space.n
action_space_size = self.env.action_space.n
q_table = np.zeros([state_size, action_space_size])
count_table = np.zeros([state_size, action_space_size])
total_reward = []
# Record a video every 1000 episodes (Monitor expects a callable here,
# not a boolean flag flipped after construction).
self.env = Monitor(self.env,
                   directory=self.monitor_path,
                   resume=True,
                   video_callable=lambda episode_id: episode_id % 1000 == 0)
for i_episode in range(self.total_episodes):
reward_per_episode = 0
state = self.env.reset()
step = 0
done = False
if i_episode % 1000 == 0:
    print("\rEpisode {}/{}.".format(i_episode, self.total_episodes))
    sys.stdout.flush()
action = self.greedy_policy_action(q_table,state,i_episode)
while not done:
new_state, reward, done, info = self.env.step(action)
reward_per_episode += reward
next_action = self.greedy_policy_action(q_table,new_state,i_episode)
q_table[state][action] += self.learning_rate*(reward + self.gamma*q_table[new_state][next_action] - q_table[state][action])
state = new_state
action = next_action
total_reward.append(reward_per_episode)
self.results(q_table,total_reward)
def monte_carlo_on_policy_control(self):
"""
Monte Carlo Control using Epsilon-Greedy policies.
env: OpenAI gym environment.
"""
# q- value table
state_size = self.env.observation_space.n
action_space_size = self.env.action_space.n
q_table = np.zeros([state_size, action_space_size])
count_table = np.zeros([state_size, action_space_size])
total_reward = []
for i_episode in range(self.total_episodes):
state = self.env.reset()
step = 0
done = False
if i_episode % 1000 == 0:
print("\rEpisode {}/{}.".format(i_episode, self.total_episodes))
sys.stdout.flush()
#generate an episode
episode = []
for steps in range(self.max_steps):
if done:
break
#epsilon_greedy_policy
action = self.greedy_policy_action(q_table,state,i_episode)
new_state, reward, done, info = self.env.step(action)
episode.append([state, action, reward])
state = new_state
#track the total reward of the generated episode
total_reward.append(sum(step[2] for step in episode))
#update the Q-values
for i in range(state_size):
for j in range(action_space_size):
returns_ = 0 #total reward obtained after this state
state_id = -1 #state id of the state being processed
count = 0 #number of times the state-action pair (i, j) is visited
start_counting_reward = False #count rewards from this state
#for each state-action pair, search the episode for its first occurrence and accumulate discounted returns from there.
for k in range(len(episode)):
if episode[k][0] == i and episode[k][1] == j:
count_table[i][j] +=1
count+=1
start_counting_reward = True
if count == 1:
state_id = k
if start_counting_reward:
returns_ += np.power(self.gamma,k - state_id)*episode[k][2]
if count> 0:
q_table[i][j]+=(1/count_table[i][j])*(returns_ - q_table[i][j])
del episode
self.results(q_table, total_reward)
path = ""
env = Control(100000,0.8,5000,0.99,"Taxi-v2",0.1,path)
# train_monte_carlo(env, value_table)
env.SARSA()
|
import telepot
from keys import telegram_api_key, telegram_bot_url
bot = telepot.Bot(telegram_api_key)
bot.setWebhook(telegram_bot_url)
|
from django.views import generic
from django.conf import settings
from mnm.instances.models import Instance
class Index(generic.TemplateView):
template_name = 'pages/home.html'
def get_context_data(self):
context = super().get_context_data()
context['biggest_instances'] = Instance.objects.public().order_by('-users')[:10]
return context
|
#https://codeforces.com/problemset/problem/112/A
s1 = input().lower()
s2 = input().lower()
ans = 0
for i in range(len(s1)):
    if s1[i] > s2[i]:
        ans = 1
        break
    elif s1[i] < s2[i]:
        ans = -1
        break
print(ans)
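# For reference, the sample tests from the linked problem statement:
#   aaaa / aaaA       -> 0
#   abs / Abz         -> -1
#   abcdefg / AbCdEfF -> 1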
|
from .. import utils
from . import NbGraderPreprocessor
from nbconvert.exporters.exporter import ResourcesDict
from nbformat.notebooknode import NotebookNode
from typing import Tuple
class ComputeChecksums(NbGraderPreprocessor):
"""A preprocessor to compute checksums of grade cells."""
def preprocess_cell(self,
cell: NotebookNode,
resources: ResourcesDict,
cell_index: int
) -> Tuple[NotebookNode, ResourcesDict]:
# compute checksums of grade cell and solution cells
if utils.is_grade(cell) or utils.is_solution(cell) or utils.is_locked(cell):
checksum = utils.compute_checksum(cell)
cell.metadata.nbgrader['checksum'] = checksum
cell.metadata.nbgrader['cell_type'] = cell.cell_type
if utils.is_grade(cell) or utils.is_solution(cell):
self.log.debug(
"Checksum for %s cell '%s' is %s",
cell.metadata.nbgrader['cell_type'],
cell.metadata.nbgrader['grade_id'],
checksum)
return cell, resources
|
from typing import Any, List, Optional
import phonenumbers
from django.core.exceptions import ValidationError as DjangoValidationError
from django.core.validators import URLValidator, validate_email
from django.db import transaction
from django.utils.translation import gettext as _
from rest_framework import serializers
from hacktheback.core.serializers import ValidationMixin
from hacktheback.forms import utils
from hacktheback.forms.models import (
Answer,
AnswerFile,
AnswerOption,
Form,
FormResponse,
HackathonApplicant,
Question,
QuestionOption,
)
from hacktheback.rest.account.serializers import UserSerializer
class AnswerOptionSerializer(serializers.ModelSerializer):
class Meta:
model = AnswerOption
fields = (
"id",
"option",
)
class AnswerSerializer(serializers.ModelSerializer, ValidationMixin):
answer_options = AnswerOptionSerializer(
many=True, required=False, allow_null=True
)
field_error_messages = {
"answer_is_required": ("answer", _("An answer is required.")),
"invalid_phone_number": (
"answer",
_("The answer provided is not a valid phone number."),
),
"invalid_email": (
"answer",
_("The answer provided is not a valid email address."),
),
"invalid_http_url": (
"answer",
_(
"The answer provided is not a valid url with an HTTP or "
"HTTPS scheme."
),
),
"invalid_file": (
"answer",
_(
"The answer provided is not a valid identifier for an "
"uploaded file for the associated question."
),
),
"only_answer_field": (
"answer_options",
_("Only the answer field can contain the answer."),
),
"only_answer_options_field": (
"answer",
_("Only answer_options field can contain the answer."),
),
"answer_options_for_same_option": (
"answer_options",
_(
"There shouldn't be multiple answer options to the same "
"option."
),
),
"more_than_one_answer_option": (
"answer_options",
_(
"There cannot be more than one selected option for this "
"answer."
),
),
"invalid_answer_option": (
"answer_options",
_("The selected option(s) are not valid choices."),
),
}
default_error_messages = {
"answer_for_invalid_question": _(
"The answer is not for a valid question in the form."
)
}
class Meta:
model = Answer
fields = (
"id",
"question",
"answer",
"answer_options",
)
def _get_form_question_instances(self) -> Optional[List[Question]]:
if self.root and hasattr(self.root, "form_question_instances"):
return self.root.form_question_instances
if self.context.get("form_response", None) is not None:
form_response = self.context.get("form_response")
if isinstance(form_response, FormResponse):
return list(Question.objects.filter(form=form_response.form))
raise Exception(
"To use this serializer, please provide a FormResponse object in "
"the context with `form_response` as the key."
)
def _run_validation_for_non_option_type(
self, data: Any, question: Question
) -> None:
"""
This validates multiple requirements for an answer whose
corresponding question is of a non-option type:
- The `answer_options` field must be null
- The 'answer` field must have at least one character if the
corresponding question has `required` set to True
- The `answer` field is in the correct format, depending on the
type of question the answer corresponds to
"""
# Check that the `answer` field is filled if the question is required,
# and also check that the `answer_options` field is null
if data.get("answer_options") is not None:
self.fail_for_field("only_answer_field")
if question.required and (
data.get("answer") is None or data.get("answer").strip() == ""
):
self.fail_for_field("answer_is_required")
# Validate if the `answer` field is in the correct format
if question.type == Question.QuestionType.PHONE:
try:
pn = phonenumbers.parse(data.get("answer"))
if not phonenumbers.is_valid_number(pn):
self.fail_for_field("invalid_phone_number")
except phonenumbers.NumberParseException:
self.fail_for_field("invalid_phone_number")
elif question.type == Question.QuestionType.EMAIL:
try:
validate_email(data.get("answer"))
except DjangoValidationError:
self.fail_for_field("invalid_email")
elif question.type == Question.QuestionType.HTTP_URL:
try:
validate_url = URLValidator(schemes=["http", "https"])
validate_url(data.get("answer"))
except DjangoValidationError:
self.fail_for_field("invalid_http_url")
elif question.type in [
Question.QuestionType.PDF_FILE,
Question.QuestionType.IMAGE_FILE,
]:
try:
af = AnswerFile.objects.get(id=data.get("answer"))
if af.question.id != question.id:
self.fail_for_field("invalid_file")
except AnswerFile.DoesNotExist:
self.fail_for_field("invalid_file")
def _run_validation_for_option_type(
self, data: Any, question: Question
) -> None:
"""
This validates multiple requirements for an answer whose
corresponding question is of an option type:
- The `answer` field must be null and that the `answer_options`
field is not null
- The `answer_options` field must have at least one selected option
if the corresponding question has `required` set to True
- Each selected option in the `answer_options` field must only
correspond to one option
- The `answer_options` field must have at most one selected option
if the option type is solo (i.e. radio or select)
- The `answer_options` must contain valid answers for options
for the corresponding question
"""
# Validate that `answer` field is null and that the `answer_options`
# field is not null
if (
data.get("answer") is not None
or data.get("answer_options") is None
):
self.fail_for_field("only_answer_options_field")
# Validate that at least one option is selected if the question
# is required
if question.required and len(data.get("answer_options")) == 0:
self.fail_for_field("answer_is_required")
# Validate that no two answer options refer to the same option and
# that each selected option is a valid option for the question
answered_options = set()
for answer_option in data.get("answer_options"):
option: QuestionOption = answer_option["option"]
if option in answered_options:
self.fail_for_field("answer_options_for_same_option")
answered_options.add(option)
if option.question != question:
self.fail_for_field("invalid_answer_option")
# Validate that there can only be one answer option for solo option
# types
if (
question.type in Question.SOLO_OPTION_TYPES
and len(data.get("answer_options")) > 1
):
self.fail_for_field("more_than_one_answer_option")
def validate(self, data: Any) -> Any:
"""
This validates multiple requirements for an answer depending on the
corresponding question's type. This also validates that the answer
is for the valid question in the form.
"""
question: Question = data.get("question")
if question.type in Question.NON_OPTION_TYPES:
self._run_validation_for_non_option_type(data, question)
elif question.type in Question.OPTION_TYPES:
self._run_validation_for_option_type(data, question)
# Validate that the answer is for the right question
form_questions = self._get_form_question_instances()
if form_questions is not None and question not in form_questions:
self.fail("answer_for_invalid_question")
return data
def create(self, data: Any) -> Answer:
"""
Create a new :model: `forms.Answer` object and associated :model:
`forms.AnswerOption` objects.
"""
with transaction.atomic():
answer_options = None
if data.get("answer_options"):
answer_options = data.pop("answer_options")
# Format data in `answer` field before it's placed inside the
# database.
question: Question = data.get("question")
form_response = self.context.get("form_response")
data["answer"] = utils.format_answer(
data.get("answer"), question.type
)
answer_obj = Answer.objects.create(response=form_response, **data)
if answer_options:
for answer_option in answer_options:
AnswerOption.objects.create(
answer=answer_obj, **answer_option
)
return answer_obj
def update(self, instance: Answer, validated_data: Any) -> Answer:
"""
Update a :model: `forms.Answer` object and create associated :model:
`forms.AnswerOption` objects. Delete past associated :model:
`forms.AnswerOption` objects as well.
"""
with transaction.atomic():
question: Question = validated_data.get("question")
answer_options = validated_data.get("answer_options", None)
if question.type in Question.NON_OPTION_TYPES:
instance.answer = validated_data.get("answer", instance.answer)
instance.save()
else:
# Delete all past answer options
AnswerOption.objects.filter(answer=instance).delete()
# Create new answer options
for answer_option in answer_options:
AnswerOption.objects.create(
answer=instance, **answer_option
)
return instance
class HackathonApplicantSerializer(serializers.ModelSerializer):
class Meta:
model = HackathonApplicant
fields = ("status",)
class FormResponseSerializer(serializers.ModelSerializer, ValidationMixin):
applicant = HackathonApplicantSerializer(read_only=True)
answers = AnswerSerializer(many=True, read_only=True)
class Meta:
model = FormResponse
fields = (
"id",
"form",
"user",
"is_draft",
"answers",
"created_at",
"updated_at",
"applicant",
)
read_only_fields = ("user",)
class HackerApplicationResponseSerializer(FormResponseSerializer):
answers = AnswerSerializer(many=True, required=True)
field_error_messages = {
"answers_for_same_question": (
"answers",
_(
"There should not be multiple answers to the same question "
"`{question}`."
),
),
"missing_questions": (
"answers",
_("Not all required questions have been answered: {questions}"),
),
"invalid_question_in_form": {
"answers",
_("This is not a valid question in the form: {question}"),
},
}
class Meta(FormResponseSerializer.Meta):
pass
def __init__(self, *args, **kwargs):
self._form = None
self._form_questions = None
super().__init__(*args, **kwargs)
@property
def form_instance(self) -> Optional[Form]:
"""
Returns the model: `forms.Form` associated with the response for
this serializer.
"""
if self._form is not None:
return self._form
if self.initial_data and self.initial_data.get("form"):
try:
self._form = Form.objects.get(id=self.initial_data.get("form"))
return self._form
except Form.DoesNotExist:
pass
return None
@property
def form_question_instances(
self,
) -> Optional[List[Question]]:
"""
Returns a list of :model: `forms.Question` for the form associated with
the response for this serializer.
"""
if self._form_questions is not None:
return self._form_questions
if self.initial_data and self.initial_data.get("form"):
try:
self._form_questions = list(
Question.objects.filter(
form__id=self.initial_data.get("form")
)
)
return self._form_questions
except Form.DoesNotExist:
pass
return []
def validate(self, data: Any):
"""
Validate that:
- there aren't any duplicate answers
- if is_draft is False, that all answers to required questions
are provided in the response
"""
answers: List[Any] = data.get("answers")
answered_questions = list()
required_questions = list(
Question.objects.filter(form=data.get("form"), required=True)
)
# Validate that there aren't any duplicate answers
for answer in answers:
question: Question = answer.get("question")
if question in answered_questions:
self.fail_for_field(
"answers_for_same_question", **{"question": question.label}
)
answered_questions.append(question)
# If `is_draft` is set to False, validate that all answers to
# required questions are provided in the response
if not data.get("is_draft"):
missing_questions: List[Question] = utils.get_missing_questions(
required_questions, answered_questions
)
if len(missing_questions) > 0:
self.fail_for_field(
"missing_questions",
**{
"questions": "; ".join(
str(q) for q in missing_questions
)
},
)
return data
def create(self, validated_data: Any) -> FormResponse:
"""
Create a new :model: `forms.FormResponse` object, associated :model:
`forms.Answer` objects and associated :model: `forms.AnswerOption`
objects.
"""
answers = validated_data.pop("answers")
user = self.context["request"].user
with transaction.atomic():
form_response_obj = FormResponse.objects.create(
user=user, **validated_data
)
for answer in answers:
answer_options = None
if "answer_options" in answer.keys():
answer_options = answer.pop("answer_options")
# Format data in `answer` field before it's placed inside the
# database.
question: Question = answer.get("question")
answer["answer"] = utils.format_answer(
answer.get("answer"), question.type
)
answer_obj = Answer.objects.create(
response=form_response_obj, **answer
)
if answer_options:
for answer_option in answer_options:
AnswerOption.objects.create(
answer=answer_obj, **answer_option
)
if form_response_obj.form.type == Form.FormType.HACKER_APPLICATION:
if validated_data.get("is_draft"):
HackathonApplicant.objects.create(
application=form_response_obj,
status=HackathonApplicant.Status.APPLYING,
)
else:
HackathonApplicant.objects.create(
application=form_response_obj,
status=HackathonApplicant.Status.APPLIED,
)
return form_response_obj
def update(
self, instance: FormResponse, validated_data: Any
) -> FormResponse:
"""
This serializer cannot handle updates.
"""
raise Exception("This has not been implemented.")
class HackerApplicationResponseAdminSerializer(FormResponseSerializer):
"""
This contains the `applicant` associated object which should only be shown
to admin users.
"""
user = UserSerializer(read_only=True)
class Meta(FormResponseSerializer.Meta):
fields = FormResponseSerializer.Meta.fields + (
"admin_notes",
"user",
)
read_only_fields = FormResponseSerializer.Meta.fields
class HackerApplicationBatchStatusUpdateSerializer(serializers.Serializer):
status = serializers.ChoiceField(
choices=HackathonApplicant.Status,
required=True,
help_text="The status to update to.",
)
responses = serializers.PrimaryKeyRelatedField(
queryset=FormResponse.objects.all(),
many=True,
required=True,
allow_null=False,
allow_empty=False,
help_text="The ids of the hacker application responses to update.",
)
class HackerApplicationApplicantStatusSerializer(serializers.Serializer):
status = serializers.ChoiceField(choices=HackathonApplicant.Status.choices)
count = serializers.IntegerField()
class HackerApplicationOverviewSerializer(serializers.Serializer):
overview = HackerApplicationApplicantStatusSerializer(many=True)
__all__ = [
"HackerApplicationResponseSerializer",
"HackerApplicationResponseAdminSerializer",
"FormResponseSerializer",
"HackerApplicationBatchStatusUpdateSerializer",
"HackerApplicationOverviewSerializer",
"AnswerSerializer",
]
|
from .sac_trps import Agent
|
import numpy as np
import geopandas as gpd
from shapely.geometry import mapping
from shapely.geometry.polygon import Polygon
from shapely.geometry.multipolygon import MultiPolygon
def Geo2Array(geo, skip=0):
    all_coords = mapping(geo)["coordinates"]
    array = np.array(all_coords)
    result = list(array[0])
    if skip != 0:
        result = result[0::skip]
    return result
def MultiPolygon2Polygon(data):
outdf = gpd.GeoDataFrame(columns=data.columns)
for idx, row in data.iterrows():
if type(row.geometry) == Polygon:
outdf = outdf.append(row,ignore_index=True)
if type(row.geometry) == MultiPolygon:
multdf = gpd.GeoDataFrame(columns=data.columns)
recs = len(row.geometry)
multdf = multdf.append([row]*recs,ignore_index=True)
for geom in range(recs):
multdf.loc[geom,'geometry'] = row.geometry[geom]
outdf = outdf.append(multdf,ignore_index=True)
return outdf
def minValue(data, index):
return min(data, key = lambda t: t[index])
def maxValue(data, index):
return max(data, key = lambda t: t[index])
def PointInPolygon(point, polygon):
minX = minValue(polygon, 0)[0]
minY = minValue(polygon, 1)[1]
maxX = maxValue(polygon, 0)[0]
maxY = maxValue(polygon, 1)[1]
x = point[0]
y = point[1]
if x > maxX or x < minX or y > maxY or y < minY:
return False
i = 0
j = len(polygon) - 1
inside = False
while i < len(polygon):
if x==polygon[i][0] and y==polygon[i][1]:
return True
if ((polygon[i][1] > y) != (polygon[j][1] > y)) and (x <= (polygon[j][0] - polygon[i][0]) * (y - polygon[i][1]) / (polygon[j][1] - polygon[i][1]) + polygon[i][0]):
    inside = not inside
j = i
i += 1
return inside
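# Minimal usage sketch of the ray-casting test above, on a hypothetical square:
square = [(0, 0), (4, 0), (4, 4), (0, 4)]
print(PointInPolygon((2, 2), square))  # True: interior point
print(PointInPolygon((5, 2), square))  # False: outside the bounding box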
|
from sak_cowsay.cowsay import cowsay
def test_cowsay_default() -> None:
# GIVEN.
ref = """\
_____________
< Hello world >
-------------
\\ ^__^
\\ (oo)\\_______
(__)\\ )\\/\\
||----w |
|| ||
"""
# WHEN.
ret = cowsay()
# THEN
assert ret == ref
def test_cowsay_foo() -> None:
# GIVEN.
ref = """\
_____________
< foo >
-------------
\\ ^__^
\\ (oo)\\_______
(__)\\ )\\/\\
||----w |
|| ||
"""
# WHEN.
ret = cowsay("foo")
# THEN
assert ret == ref
|
# coding=utf-8
# @Time : 2020/12/3 17:31
# @Auto : zzf-jeff
'''
ONNX model inference.
Used to check that the ONNX output equals the torch output.
'''
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import onnxruntime
import numpy as np
from torch import nn
import torch
class ONNXModel(nn.Module):
def __init__(self, onnx_path):
"""Onnx Model inference function
Args:
onnx_path: onnx model path
"""
super().__init__()
self.onnx_session = onnxruntime.InferenceSession(onnx_path)
self.input_name = self.get_input_name(self.onnx_session)
self.output_name = self.get_output_name(self.onnx_session)
def __del__(self):
del self.onnx_session
del self.input_name
del self.output_name
def to_numpy(self, data):
if not isinstance(data, np.ndarray):
if data.requires_grad:
data = data.detach().cpu().numpy()
else:
data = data.cpu().numpy()
return data
def get_output_name(self, onnx_session):
""" 根据onnx session获取输出name
output_name = onnx_session.get_outputs()[0].name
:param onnx_session:
:return:
"""
output_name = []
for node in onnx_session.get_outputs():
output_name.append(node.name)
return output_name
def get_input_name(self, onnx_session):
"""根据onnx session获取输入name
input_name = onnx_session.get_inputs()[0].name
:param onnx_session:
:return:
"""
input_name = []
for node in onnx_session.get_inputs():
input_name.append(node.name)
return input_name
def get_input_feed(self, input_name, image_numpy):
""" 组装需要的输入格式
input_feed={self.input_name: image_numpy}
:param input_name:
:param image_numpy:
:return:
"""
input_feed = {}
for name in input_name:
input_feed[name] = image_numpy
return input_feed
def forward(self, image):
input_feed = self.get_input_feed(self.input_name, self.to_numpy(image))
outputs = self.onnx_session.run(self.output_name, input_feed=input_feed)
# keep output
return [torch.from_numpy(out) for out in outputs]
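# Minimal usage sketch (hypothetical path and input shape; `torch_model` stands in
# for the original network being checked):
#   onnx_model = ONNXModel("model.onnx")
#   dummy = torch.randn(1, 3, 224, 224)
#   onnx_out = onnx_model(dummy)[0]
#   torch.testing.assert_allclose(torch_model(dummy), onnx_out, rtol=1e-3, atol=1e-5)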
|
# ------------------------------------------------------------------------------
# CodeHawk Java Analyzer
# Author: Henny Sipma
# ------------------------------------------------------------------------------
# The MIT License (MIT)
#
# Copyright (c) 2016-2020 Kestrel Technology LLC
# Copyright (c) 2021 Andrew McGraw
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# ------------------------------------------------------------------------------
import chj.util.fileutil as UF
import chj.util.IndexedTable as IT
from chj.util.IndexedTable import (
IndexedTable,
IndexedTableValue,
IndexedTableSuperclass
)
import chj.index.Taint as T
import chj.index.TaintDictionaryRecord as TD
import xml.etree.ElementTree as ET
from typing import Any, Callable, Dict, cast, List, Optional, Union, Tuple, TYPE_CHECKING
if TYPE_CHECKING:
from collections.abc import ValuesView
from chj.index.DataDictionary import DataDictionary
class TaintDictionary(object):
def __init__(self, jd: "DataDictionary", xnode: ET.Element):
self.jd = jd # DataDictionary
self.string_table: IT.IndexedTable[T.TStringConstant] = IT.IndexedTable('string-table')
self.symbol_table: IT.IndexedTable[T.TSymbol] = IT.IndexedTable('symbol-table')
self.variable_table: IT.IndexedTable[T.TVariable] = IT.IndexedTable('variable-table')
self.method_target_table: IT.IndexedTable[T.TMethodTarget] = IT.IndexedTable('method-target-table')
self.taint_origin_table: IT.IndexedTable[T.TaintBase] = IT.IndexedTable('taint-origin-table')
self.taint_origin_list_table: IT.IndexedTable[T.TaintOriginList] = IT.IndexedTable('taint-origin-list-table')
self.tainted_variable_table: IT.IndexedTable[T.TaintedVariable] = IT.IndexedTable('tainted-variable-table')
self.tainted_variable_ids_table: IT.IndexedTable[T.TaintedVariableIds] = IT.IndexedTable('tainted-variable-ids-table')
self.taint_node_type_table: IT.IndexedTable[T.TaintNodeBase] = IT.IndexedTable('taint-node-type-table')
self.tables: List[Tuple[IT.IndexedTableSuperclass, Callable[[ET.Element], None]]] = [
(self.string_table, self._read_xml_string_table),
(self.symbol_table, self._read_xml_symbol_table),
(self.variable_table, self._read_xml_variable_table),
(self.method_target_table, self._read_xml_method_target_table),
(self.taint_origin_table, self._read_xml_taint_origin_table),
(self.taint_origin_list_table, self._read_xml_taint_origin_list_table),
(self.tainted_variable_table, self._read_xml_tainted_variable_table),
(self.tainted_variable_ids_table, self._read_xml_tainted_variable_ids_table),
(self.taint_node_type_table, self._read_xml_taint_node_type_table)
]
self.initialize(xnode)
def get_string(self, ix: int) -> T.TStringConstant:
return self.string_table.retrieve(ix)
def get_symbol(self, ix: int) -> T.TSymbol:
return self.symbol_table.retrieve(ix)
def get_variable(self, ix: int) -> T.TVariable:
return self.variable_table.retrieve(ix)
def get_method_target(self, ix: int) -> T.TMethodTarget:
return self.method_target_table.retrieve(ix)
def get_taint_origin(self, ix: int) -> T.TaintBase:
return self.taint_origin_table.retrieve(ix)
def get_taint_origin_list(self, ix: int) -> T.TaintOriginList:
return self.taint_origin_list_table.retrieve(ix)
def get_tainted_variable(self, ix: int) -> T.TaintedVariable:
return self.tainted_variable_table.retrieve(ix)
def get_tainted_variable_ids(self, ix: int) -> T.TaintedVariableIds:
return self.tainted_variable_ids_table.retrieve(ix)
def get_taint_origins(self) -> List[T.TaintBase]:
return self.taint_origin_table.values()
def get_taint_node_type(self, ix: int) -> T.TaintNodeBase:
return self.taint_node_type_table.retrieve(ix)
def iter_taint_node_types(self, f: Callable[[int, T.TaintNodeBase], None]) -> None:
self.taint_node_type_table.iter(f)
def iter_var_taint_node_types(self,f: Callable[[int, T.VariableTaintNode], None]) -> None:
def g(i: Any, n: T.TaintNodeBase) -> None:
if n.is_var():
n = cast(T.VariableTaintNode, n)
f(i,n)
else: pass
self.iter_taint_node_types(g)
def read_xml_tainted_variable_ids(self,
node:ET.Element,
tag: str='itvids') -> T.TaintedVariableIds:
return self.get_tainted_variable_ids(UF.safe_get(node, tag, tag + ' missing from tainted variable ids in xml', int))
def write_xml(self, node: ET.Element) -> None:
def f(n: ET.Element, r: Any) -> None:
    r.write_xml(n)
for (t,_) in self.tables:
tnode = ET.Element(t.name)
cast(IndexedTable[IndexedTableValue], t).write_xml(tnode,f)
node.append(tnode)
def __str__(self) -> str:
lines = []
for (t,_) in self.tables:
if t.size() > 0:
lines.append(str(t))
return '\n'.join(lines)
# ----------------------- Initialize dictionary from file ------------------
def initialize(self, xnode: Optional[ET.Element], force: bool=False) -> None:
if xnode is None: return
for (t,f) in self.tables:
t.reset()
f(UF.safe_find(xnode, t.name, t.name + ' missing from xml'))
def _read_xml_string_table(self, txnode: ET.Element) -> None:
def get_value(node: ET.Element) -> T.TStringConstant:
rep = IT.get_rep(node)
args = (self,) + rep
return T.TStringConstant(*args)
self.string_table.read_xml(txnode,'n',get_value)
def _read_xml_symbol_table(self, txnode: ET.Element) -> None:
def get_value(node: ET.Element) -> T.TSymbol:
rep = IT.get_rep(node)
args = (self,) + rep
return T.TSymbol(*args)
self.symbol_table.read_xml(txnode,'n',get_value)
def _read_xml_variable_table(self, txnode: ET.Element) -> None:
def get_value(node: ET.Element) -> T.TVariable:
rep = IT.get_rep(node)
args = (self,) + rep
return T.TVariable(*args)
self.variable_table.read_xml(txnode,'n',get_value)
def _read_xml_method_target_table(self, txnode: ET.Element) -> None:
def get_value(node: ET.Element) -> T.TMethodTarget:
rep = IT.get_rep(node)
args = (self,) + rep
return T.TMethodTarget(*args)
self.method_target_table.read_xml(txnode,'n',get_value)
def _read_xml_taint_origin_table(self, txnode: ET.Element) -> None:
def get_value(node: ET.Element) -> T.TaintBase:
return TD.construct_t_dictionary_record(*((self,) + IT.get_rep(node)), T.TaintBase)
self.taint_origin_table.read_xml(txnode,'n',get_value)
def _read_xml_taint_origin_list_table(self, txnode: ET.Element) -> None:
def get_value(node: ET.Element) -> T.TaintOriginList:
rep = IT.get_rep(node)
args = (self,) + rep
return T.TaintOriginList(*args)
self.taint_origin_list_table.read_xml(txnode,'n',get_value)
def _read_xml_tainted_variable_table(self, txnode:ET.Element) -> None:
def get_value(node: ET.Element) -> T.TaintedVariable:
rep = IT.get_rep(node)
args = (self,) + rep
return T.TaintedVariable(*args)
self.tainted_variable_table.read_xml(txnode,'n',get_value)
def _read_xml_tainted_variable_ids_table(self, txnode: ET.Element) -> None:
def get_value(node: ET.Element) -> T.TaintedVariableIds:
rep = IT.get_rep(node)
args = (self,) + rep
return T.TaintedVariableIds(*args)
self.tainted_variable_ids_table.read_xml(txnode,'n',get_value)
def _read_xml_taint_node_type_table(self, txnode: ET.Element) -> None:
def get_value(node: ET.Element) -> T.TaintNodeBase:
return TD.construct_t_dictionary_record(*((self,) + IT.get_rep(node)), T.TaintNodeBase)
self.taint_node_type_table.read_xml(txnode,'n',get_value)
|
"""
A pytorch implementation for cvpr17 paper "Deep Video Deblurring for Hand-held Cameras"
Author: Mingdeng Cao
"""
import torch
import torch.nn as nn
import torch.nn.functional as F
from ...build import BACKBONE_REGISTRY
@BACKBONE_REGISTRY.register()
class DBN(nn.Module):
def __init__(self, num_frames, in_channels, inner_channels):
super(DBN, self).__init__()
self.num_frames = num_frames
self.in_channels = in_channels
self.inner_channels = inner_channels
self.eps = 1e-3
# define the model blocks
# F0
self.F0 = nn.Sequential(
nn.Conv2d(self.num_frames * self.in_channels, self.inner_channels, kernel_size=5, stride=1, padding=2),
nn.BatchNorm2d(self.inner_channels, self.eps),
)
# down-sampling stage1
self.D1 = nn.Sequential(
nn.Conv2d(self.inner_channels, self.inner_channels, kernel_size=3, stride=2, padding=1),
nn.BatchNorm2d(self.inner_channels, self.eps)
)
self.F1_1 = nn.Sequential(
nn.Conv2d(self.inner_channels, self.inner_channels*2, kernel_size=3, stride=1, padding=1),
nn.BatchNorm2d(self.inner_channels*2, self.eps),
)
self.F1_2 = nn.Sequential(
nn.Conv2d(self.inner_channels*2, self.inner_channels*2, kernel_size=3, stride=1, padding=1),
nn.BatchNorm2d(self.inner_channels*2, self.eps),
)
# down-sampling stage2
self.D2 = nn.Sequential(
nn.Conv2d(self.inner_channels*2, self.inner_channels*4, kernel_size=3, stride=2, padding=1),
nn.BatchNorm2d(self.inner_channels*4, self.eps),
)
self.F2_1 = nn.Sequential(
nn.Conv2d(self.inner_channels*4, self.inner_channels*4, kernel_size=3, stride=1, padding=1),
nn.BatchNorm2d(self.inner_channels*4, self.eps),
)
self.F2_2 = nn.Sequential(
nn.Conv2d(self.inner_channels*4, self.inner_channels*4, kernel_size=3, stride=1, padding=1),
nn.BatchNorm2d(self.inner_channels*4, self.eps),
)
self.F2_3 = nn.Sequential(
nn.Conv2d(self.inner_channels*4, self.inner_channels*4, kernel_size=3, stride=1, padding=1),
nn.BatchNorm2d(self.inner_channels*4, self.eps),
)
# down-sampling stage3
self.D3 = nn.Sequential(
nn.Conv2d(self.inner_channels*4, self.inner_channels*8, kernel_size=3, stride=2, padding=1),
nn.BatchNorm2d(self.inner_channels*8, self.eps),
)
self.F3_1 = nn.Sequential(
nn.Conv2d(self.inner_channels*8, self.inner_channels*8, kernel_size=3, stride=1, padding=1),
nn.BatchNorm2d(self.inner_channels*8, self.eps),
)
self.F3_2 = nn.Sequential(
nn.Conv2d(self.inner_channels*8, self.inner_channels*8, kernel_size=3, stride=1, padding=1),
nn.BatchNorm2d(self.inner_channels*8, self.eps),
)
self.F3_3 = nn.Sequential(
nn.Conv2d(self.inner_channels*8, self.inner_channels*8, kernel_size=3, stride=1, padding=1),
nn.BatchNorm2d(self.inner_channels*8, self.eps),
)
# up-sampling stage1
self.U1 = nn.Sequential(
nn.ConvTranspose2d(self.inner_channels*8, self.inner_channels*4, kernel_size=4, stride=2, padding=1),
nn.BatchNorm2d(self.inner_channels*4, self.eps)
)
self.F4_1 = nn.Sequential(
nn.Conv2d(self.inner_channels*4, self.inner_channels*4, kernel_size=3, stride=1, padding=1),
nn.BatchNorm2d(self.inner_channels*4, self.eps),
)
self.F4_2 = nn.Sequential(
nn.Conv2d(self.inner_channels*4, self.inner_channels*4, kernel_size=3, stride=1, padding=1),
nn.BatchNorm2d(self.inner_channels*4, self.eps)
)
self.F4_3 = nn.Sequential(
nn.Conv2d(self.inner_channels*4, self.inner_channels*4, kernel_size=3, stride=1, padding=1),
nn.BatchNorm2d(self.inner_channels*4, self.eps)
)
# up-sampling stage2
self.U2 = nn.Sequential(
nn.ConvTranspose2d(self.inner_channels*4, self.inner_channels*2, kernel_size=4, stride=2, padding=1),
nn.BatchNorm2d(self.inner_channels*2, self.eps)
)
self.F5_1 = nn.Sequential(
nn.Conv2d(self.inner_channels*2, self.inner_channels*2, kernel_size=3, stride=1, padding=1),
nn.BatchNorm2d(self.inner_channels*2, self.eps)
)
self.F5_2 = nn.Sequential(
nn.Conv2d(self.inner_channels*2, self.inner_channels, kernel_size=3, stride=1, padding=1),
nn.BatchNorm2d(self.inner_channels, self.eps)
)
# up-sampling stage3
self.U3 = nn.Sequential(
nn.ConvTranspose2d(self.inner_channels, self.inner_channels, kernel_size=4, stride=2, padding=1),
nn.BatchNorm2d(self.inner_channels, self.eps)
)
self.F6_1 = nn.Sequential(
nn.Conv2d(self.inner_channels, self.in_channels*self.num_frames, kernel_size=3, stride=1, padding=1),
nn.BatchNorm2d(self.in_channels*self.num_frames, self.eps)
)
self.F6_2 = nn.Sequential(
nn.Conv2d(self.num_frames*self.in_channels, 3, kernel_size=3, stride=1, padding=1),
nn.BatchNorm2d(3, self.eps)
)
def init_parameters(self):
pass
def forward(self, x: torch.Tensor):
assert x.dim() == 5, "input tensor should be 5-dimensional: (batch, frames, channels, height, width)"
b, n, c, h, w = x.shape
central_frame = x[:, self.num_frames // 2]
x = x.reshape(b, -1, h, w)
assert n == self.num_frames and c == self.in_channels, "inputs should be {} frames and {} channels".format(self.num_frames, self.in_channels)
f0 = F.relu(self.F0(x))
f1 = F.relu(self.D1(f0))
f1 = F.relu(self.F1_1(f1))
f1 = F.relu(self.F1_2(f1))
f2 = F.relu(self.D2(f1))
f2 = F.relu(self.F2_1(f2))
f2 = F.relu(self.F2_2(f2))
f2 = F.relu(self.F2_3(f2))
f3 = F.relu(self.D3(f2))
f3 = F.relu(self.F3_1(f3))
f3 = F.relu(self.F3_2(f3))
f3 = F.relu(self.F3_3(f3))
f4 = F.relu(self.U1(f3))
f4 = F.relu(f4 + f2)
f4 = F.relu(self.F4_1(f4))
f4 = F.relu(self.F4_2(f4))
f4 = F.relu(self.F4_3(f4))
f5 = F.relu(self.U2(f4))
f5 = F.relu(f5 + f1)
f5 = F.relu(self.F5_1(f5))
f5 = F.relu(self.F5_2(f5))
f6 = F.relu(self.U3(f5))
f6 = F.relu(f6 + f0)
f6 = F.relu(self.F6_1(f6))
f6 = self.F6_2(f6) + central_frame
return f6
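# Minimal usage sketch (the sizes here are illustrative, not from the paper's config):
#   net = DBN(num_frames=5, in_channels=3, inner_channels=64)
#   frames = torch.randn(2, 5, 3, 128, 128)   # (batch, frames, channels, H, W)
#   deblurred = net(frames)                   # -> (2, 3, 128, 128)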
|
def f():
b = 1
#a = 1 + 3 + 5 + 1.1 + b
return b
a = 1 + f() + '111'
|
def reverse_array(lista):
"""
-> Function to reverse the values of a list in place
:param lista: the list being passed in
:return: Returns nothing; it does the whole swap and runs it right here
"""
print(f'Normal = {lista}')
x = 0
y = len(lista) - 1  # -1 here because len() returns the count of items, and since I use it as an index I need to subtract one: indexing starts at 0, so otherwise it would skip the last value, remember??
while x < y:
temp = lista[x]
lista[x] = lista[y]
lista[y] = temp
x += 1
y -= 1
print(f'Reversed = {lista}')
help(reverse_array)
input()
lista = [3, 5, 7, 2, 3, 5, 4, 0]
reverse_array(lista)
#! Other ways to get the reversed values of a list, but creating a new list
# Method 1 of getting a reversed list
# listareversa = []
# numbers = list(range(9))
# print(numbers)
# for i in range(numbers[-1], -1, -1):
# listareversa.append(i)
# print(listareversa)
###### Method 2 ######
# numbers = list(range(9))
# print(numbers)
# newList = numbers[::-1]
# print(newList)
###### Method 3 ######
# numbers = list(range(9))
# novalista = list(reversed(numbers))
# print(numbers, novalista)
###### Method 4 ######
# numbers = list(range(9))
# # ! This is interesting, first time I'm seeing this form of assignment
# # It adds the values one by one into that list, just reversed
# newList = [num for num in reversed(numbers)]
# print(numbers)
# print(newList)
|
"""
MongoDB is very easy to setup. There are no configuration options, just mention
it:
.. sourcecode:: yaml
deploy:
- mongodb
One glitch remains: by default, the MongoDB port 27017 is open to the
network. Maybe add ``bind_ip = 127.0.0.1`` to ``/etc/mongodb.conf``? I have
no idea how insecure it is, but it seems to be an open door.
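A minimal sketch of that change (assuming the ini-style config file the
Debian/Ubuntu package ships at ``/etc/mongodb.conf``):
.. sourcecode:: ini
    # Listen on loopback only
    bind_ip = 127.0.0.1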
"""
from fabric.api import sudo
from . import Package
def deploy(settings):
def start():
# Start the service - e.g. it's stopped on travis by default.
sudo('/etc/init.d/mongodb start')
return Package('mongodb'), Package('curl'), start
|
from graph_common import *
def build(graph:Graph):
deps = []
added = [False] * len(graph.nodes)
for i in range(len(graph.nodes)):
error = resolve_dependencies(graph.nodes[i], deps, added)
if error == -1:
return "Projects have a cycle"
return ".".join([str(c) for c in deps])
def resolve_dependencies(node:GraphNode, deps, added, visited = None):
if visited is None:
visited = set()
if node.value in visited:
return -1
if added[node.value]:
return
visited.add(node.value)
for n in node.adjacent:
    # Propagate cycle detection up the recursion.
    if resolve_dependencies(n, deps, added, visited) == -1:
        return -1
deps.append(node.value)
added[node.value] = True
if __name__ == "__main__":
graph = Graph()
#graph.addVertexes(["a","b","c","d", "e", "f"])
#graph.addEdges({"a":["f"], "b":["f"], "c":["d"], "d":["a", "b"]})
graph.addVertexes([0,1,2,3,4,5])
graph.addEdges({0:[5], 1:[5], 2:[3], 3:[0, 1]})
print(build(graph))
|
# coding: utf-8
##############################################################################
# Copyright (C) 2019 Microchip Technology Inc. and its subsidiaries.
#
# Subject to your compliance with these terms, you may use Microchip software
# and any derivatives exclusively with Microchip products. It is your
# responsibility to comply with third party license terms applicable to your
# use of third party software (including open source software) that may
# accompany Microchip software.
#
# THIS SOFTWARE IS SUPPLIED BY MICROCHIP "AS IS". NO WARRANTIES, WHETHER
# EXPRESS, IMPLIED OR STATUTORY, APPLY TO THIS SOFTWARE, INCLUDING ANY IMPLIED
# WARRANTIES OF NON-INFRINGEMENT, MERCHANTABILITY, AND FITNESS FOR A
# PARTICULAR PURPOSE.
#
# IN NO EVENT WILL MICROCHIP BE LIABLE FOR ANY INDIRECT, SPECIAL, PUNITIVE,
# INCIDENTAL OR CONSEQUENTIAL LOSS, DAMAGE, COST OR EXPENSE OF ANY KIND
# WHATSOEVER RELATED TO THE SOFTWARE, HOWEVER CAUSED, EVEN IF MICROCHIP HAS
# BEEN ADVISED OF THE POSSIBILITY OR THE DAMAGES ARE FORESEEABLE. TO THE
# FULLEST EXTENT ALLOWED BY LAW, MICROCHIP'S TOTAL LIABILITY ON ALL CLAIMS IN
# ANY WAY RELATED TO THIS SOFTWARE WILL NOT EXCEED THE AMOUNT OF FEES, IF ANY,
# THAT YOU HAVE PAID DIRECTLY TO MICROCHIP FOR THIS SOFTWARE.
##############################################################################
import re
def instantiateComponent(comp):
GFX_INTF_EBI_H = comp.createFileSymbol("GFX_INTF_EBI_H", None)
GFX_INTF_EBI_H.setDestPath("gfx/interface/")
GFX_INTF_EBI_H.setSourcePath("../drv_gfx_disp_intf.h")
GFX_INTF_EBI_H.setOutputName("drv_gfx_disp_intf.h")
GFX_INTF_EBI_H.setProjectPath("config/" + Variables.get("__CONFIGURATION_NAME") + "/gfx/interface")
GFX_INTF_EBI_H.setType("HEADER")
GFX_INTF_EBI = comp.createFileSymbol("GFX_INTF_SMC", None)
GFX_INTF_EBI.setDestPath("gfx/interface/parallel_ebi/")
GFX_INTF_EBI.setSourcePath("templates/drv_gfx_disp_intf_parallel_ebi.c.ftl")
GFX_INTF_EBI.setOutputName("drv_gfx_disp_intf_parallel_ebi.c")
GFX_INTF_EBI.setProjectPath("config/" + Variables.get("__CONFIGURATION_NAME") + "/gfx/interface/parallel_ebi")
GFX_INTF_EBI.setMarkup(True)
GFX_INTF_EBI.setType("SOURCE")
### Interface type is required for all interface components. This is queried by the driver to determine
### the display interface supported by the interface component. Valid values are "SPI 4-line", "Parallel 16-bit",
### and "Parallel 8-bit"
InterfaceType = comp.createStringSymbol("InterfaceType", None)
InterfaceType.setLabel("Interface Type")
InterfaceType.setDescription("The interface configuration")
InterfaceType.setDefaultValue("Parallel 16-bit")
InterfaceType.setVisible(False)
### Driver settings menu
DriverSettingsMenu = comp.createMenuSymbol("DriverSettingsMenu", None)
DriverSettingsMenu.setLabel("Driver Settings")
UseSyncBarriers = comp.createBooleanSymbol("UseSyncBarriers", DriverSettingsMenu)
UseSyncBarriers.setLabel("Use Synchronization Barriers")
UseSyncBarriers.setDescription("Use Synchronization Barriers.")
UseSyncBarriers.setDefaultValue(True)
UseSyncBarriers.setVisible(False)
DelayNOPCount = comp.createIntegerSymbol("DelayNOPCount", DriverSettingsMenu)
DelayNOPCount.setLabel("Number of NOP for delay")
DelayNOPCount.setDescription("Number of NOP for delay")
DelayNOPCount.setDefaultValue(12)
###
### Interface settings menu
InterfaceSettingsSMCMenu = comp.createMenuSymbol("InterfaceSettingsSMCMenu", None)
InterfaceSettingsSMCMenu.setLabel("Parallel 8080 Display Interface Settings")
EBIChipSelectIndex = comp.createIntegerSymbol("EBIChipSelectIndex", InterfaceSettingsSMCMenu)
EBIChipSelectIndex.setLabel("EBI Chip Select Index")
EBIChipSelectIndex.setDescription("The chip select index")
EBIChipSelectIndex.setDefaultValue(0)
EBIChipSelectIndex.setMin(0)
EBIChipSelectIndex.setMax(4)
#EBIChipSelectIndex.setVisible(False)
ControlPinsMenu = comp.createMenuSymbol("ControlPinsMenu", InterfaceSettingsSMCMenu)
ControlPinsMenu.setLabel("Control Pin Settings")
ChipSelectControl = comp.createComboSymbol("ChipSelectControl", ControlPinsMenu, ["GPIO", "Peripheral"])
ChipSelectControl.setLabel("CS# Control")
ChipSelectControl.setDescription("Chip Select Control")
ChipSelectControl.setDefaultValue("GPIO")
ChipSelectControl.setReadOnly(True)
DataCommandSelectControl = comp.createComboSymbol("DataCommandSelectControl", ControlPinsMenu, ["GPIO", "Peripheral"])
DataCommandSelectControl.setLabel("D/C# Control")
DataCommandSelectControl.setDescription("Data Command Select Control")
DataCommandSelectControl.setDefaultValue("GPIO")
DataCommandSelectControl.setDependencies(onDataCommandSelectSet, ["DataCommandSelectControl"])
ReadStrobeControl = comp.createComboSymbol("ReadStrobeControl", ControlPinsMenu, ["GPIO", "Peripheral"])
ReadStrobeControl.setLabel("RD# Control")
ReadStrobeControl.setDescription("Read Strobe Control")
ReadStrobeControl.setDefaultValue("GPIO")
WriteStrobeControl = comp.createComboSymbol("WriteStrobeControl", ControlPinsMenu, ["GPIO", "Peripheral"])
WriteStrobeControl.setLabel("WR# Control")
WriteStrobeControl.setDescription("Write Strobe Control")
WriteStrobeControl.setDefaultValue("GPIO")
DCXAddressBit = comp.createIntegerSymbol("DCXAddressBit", DataCommandSelectControl)
DCXAddressBit.setLabel("DCX Address Bit")
DCXAddressBit.setDescription("Address bit used for DCX signal.")
DCXAddressBit.setDefaultValue(1)
DCXAddressBit.setMin(0)
DCXAddressBit.setMax(31)
DCXAddressBit.setVisible(False)
###
def configureEBIComponent(component, ebiComponent, ebiChipSelNum):
print("Parallel_ebi: Connecting EBI_CS" + str(ebiChipSelNum))
ebiComponent.setSymbolValue("CFGEBIC_EBIDAT", 2, 1) #2 = 16-bit
ebiComponent.setSymbolValue("CFGEBIC_EBICSEN" + str(ebiChipSelNum), True, 1)
ebiComponent.setSymbolValue("EBIMSK" + str(ebiChipSelNum) + "_MEMSIZE", 8, 1) #8 = 8MB
ebiComponent.setSymbolValue("EBISMT" + str(ebiChipSelNum) + "_TRC", 0, 1)
ebiComponent.setSymbolValue("EBISMT" + str(ebiChipSelNum) + "_TAS", 3, 1)
ebiComponent.setSymbolValue("EBISMT" + str(ebiChipSelNum) + "_TWR", 3, 1)
ebiComponent.setSymbolValue("EBISMT" + str(ebiChipSelNum) + "_TWP", 2, 1)
ebiComponent.setSymbolValue("EBISMT" + str(ebiChipSelNum) + "_TBTA", 0, 1)
ebiComponent.setSymbolValue("EBISMCON_SMDWIDTH" + str(ebiChipSelNum), 0, 1)
if (component.getSymbolByID("WriteStrobeControl").getValue() == "GPIO"):
print("Clearing EBIWEEN")
ebiComponent.clearSymbolValue("CFGEBIC_EBIWEEN")
else:
print("Setting EBIWEEN")
ebiComponent.setSymbolValue("CFGEBIC_EBIWEEN", True, 1)
component.setSymbolValue("EBIChipSelectIndex", ebiChipSelNum, 1)
def resetEBIComponent(component, ebiComponent, ebiChipSelNum):
print("Parallel_ebi: Disconnecting EBI_CS" + str(ebiChipSelNum))
ebiComponent.clearSymbolValue("CFGEBIC_EBIDAT")
ebiComponent.clearSymbolValue("CFGEBIC_EBICSEN" + str(ebiChipSelNum))
ebiComponent.clearSymbolValue("EBIMSK" + str(ebiChipSelNum) + "_MEMSIZE")
ebiComponent.clearSymbolValue("EBISMT" + str(ebiChipSelNum) + "_TRC")
ebiComponent.clearSymbolValue("EBISMT" + str(ebiChipSelNum) + "_TAS")
ebiComponent.clearSymbolValue("EBISMT" + str(ebiChipSelNum) + "_TWR")
ebiComponent.clearSymbolValue("EBISMT" + str(ebiChipSelNum) + "_TWP")
ebiComponent.clearSymbolValue("EBISMT" + str(ebiChipSelNum) + "_TBTA")
ebiComponent.clearSymbolValue("EBISMCON_SMDWIDTH" + str(ebiChipSelNum))
ebiComponent.clearSymbolValue("CFGEBIC_EBIWEEN")
component.clearSymbolValue("EBIChipSelectIndex")
def onDataCommandSelectSet(sourceSymbol, event):
if (sourceSymbol.getComponent().getSymbolByID("WriteStrobeControl").getValue() == "GPIO"):
sourceSymbol.getComponent().getSymbolByID("DCXAddressBit").setVisible(True)
else:
sourceSymbol.getComponent().getSymbolByID("DCXAddressBit").setVisible(False)
def onInterfaceTypeChanged(sourceSymbol, event):
print("Interface type changed")
def onAttachmentConnected(source, target):
print("dependency Connected = " + str(target['id']))
#### test for EBI dependency
if (source["id"] == "EBI_CS"):
sub = re.search('ebi_cs(.*)', str(target["id"]))
if (sub and sub.group(1)):
configureEBIComponent(source["component"], target["component"], int(sub.group(1)))
def onAttachmentDisconnected(source, target):
if (source["id"] == "EBI_CS"):
sub = re.search('ebi_cs(.*)', str(target["id"]))
if (sub and sub.group(1)):
resetEBIComponent(source["component"], target["component"], int(sub.group(1)))
|
from fhir_helpers.resources.lpr import LPR
from .helpers import get_example_bundle, get_example_bundle_proto
def test_lpr_init() -> None:
lpr_dict = LPR(get_example_bundle())
lpr_proto = LPR(get_example_bundle_proto())
assert lpr_dict.patient
assert len(lpr_dict.observations) == 137
assert len(lpr_dict.medication_requests) == 6
assert len(lpr_dict.procedures) == 41
assert len(lpr_dict.conditions) == 12
assert lpr_proto.patient
assert len(lpr_proto.observations) == 137
assert len(lpr_proto.medication_requests) == 6
assert len(lpr_proto.procedures) == 41
assert len(lpr_proto.conditions) == 12
|
import liraparser
import linear as linear
import graph_plot as gp
import numpy as np
def select_P():
pass
def iteration(P_init, le, lp, steps):
K = None
U = np.zeros(len(lp)*2)
delta=1/steps
count = delta
P = P_init*delta
    s1 = [] # critical sigma
    s2 = [] # equivalent sigma
u = list()
for j in range(steps):
K = linear.matrix_K(le, lp)
        # recompute the K matrix with the updated geometry
U = np.linalg.solve(K, P.transpose())
for i in lp:
i.displ_x = U[2 * (i.num-1)]
i.displ_y = U[2 * i.num - 1]
for elm in le:
elm.add_stress(*elm.define_stress())
for i in lp:
i.summ_displ()
# show_plast(count)
ne = 0
s1.append(le[ne].sigma_1/100)
s2.append(le[ne].sigma_2/100)
u.append(abs(np.sqrt(lp[1].tot_displ_y**2+lp[1].tot_displ_x**2)))
#~ if j in range(87,91):
#~ print(le[0].D,'-', le[0].D_ep)
        print('{:.0%}'.format(count))
count += delta
gp.show_graph_P_u(np.arange(steps),u)
return s1, s2
if __name__ == '__main__':
    print('Starting')
le, lp = liraparser.list_of_elem()
    print('Loaded the element list')
loads = liraparser.list_of_loads()
    print('Loaded the load cases list')
P_init = linear.matrix_P(len(lp), loads)
    print('Building the P matrix')
l_scr, l_se = iteration(P_init, le, lp, 20)
    print('Finished iterating')
gp.graph_sigma(l_se, l_scr)
gp.show_plast(le, lp)
|
from PyQt5 import QtCore
from PyQt5.QtCore import QPointF, QRect
from PyQt5.QtGui import QTextDocument, QTextOption
from PyQt5.QtGui import QPainter, QImage, QColor, QFont
import sys
from PyQt5 import QtGui
from PyQt5.QtCore import Qt
from PyQt5.QtWidgets import QApplication
from epyseg.draw.shapes.rect2d import Rect2D
# log errors
from epyseg.tools.logger import TA_logger
logger = TA_logger()
class TAText2D(Rect2D):
# TODO add bg to it so that it can be drawn
def __init__(self, text=None, doc=None, opacity=1., *args, **kwargs):
if doc is not None and isinstance(doc, QTextDocument):
self.doc = doc
self.doc.setDocumentMargin(0) # important so that the square is properly placed
else:
self.doc = QTextDocument()
self.doc.setDocumentMargin(0) # important so that the square is properly placed
textOption = self.doc.defaultTextOption()
textOption.setWrapMode(QTextOption.NoWrap)
self.doc.setDefaultTextOption(textOption)
if text is not None:
self.doc.setHtml(text)
self.isSet = True
self.doc.adjustSize()
size = self.getSize()
super(TAText2D, self).__init__(0, 0, size.width(), size.height())
self.opacity = opacity
def set_opacity(self, opacity):
self.opacity = opacity
def setText(self, html):
self.doc.setHtml(html)
self.doc.adjustSize()
size = self.getSize()
self.setWidth(size.width(), size.height())
def setDoc(self, doc):
self.doc = doc
self.doc.setDocumentMargin(0)
self.doc.adjustSize()
size = self.getSize()
self.setWidth(size.width(), size.height())
def draw(self, painter):
painter.save()
painter.setOpacity(self.opacity)
painter.translate(self.x(), self.y())
self.doc.drawContents(painter)
painter.restore()
# maybe activate this upon debug
# painter.save()
# painter.setPen(QtCore.Qt.red)
# painter.drawRect(self)
# painter.restore()
def boundingRect(self):
return self
def getSize(self):
return self.doc.size()
def getWidth(self):
return self.boundingRect().width()
def getHeight(self):
return self.boundingRect().height()
def set_P1(self, *args):
if not args:
logger.error("no coordinate set...")
return
if len(args) == 1:
self.moveTo(args[0].x(), args[0].y())
else:
self.moveTo(QPointF(args[0], args[1]))
def get_P1(self):
return QPointF(self.x(), self.y())
def getPlainText(self):
return self.doc.toPlainText()
def getHtmlText(self):
return self.doc.toHtml()
if __name__ == '__main__':
    # this could be a problem ...
app = QApplication(sys.argv)# IMPORTANT KEEP !!!!!!!!!!!
# window = MyWidget()
# window.show()
    # this works because it overrides the default colour of the widget
    # it's perfect and 2000x easier than in Java --> cool
html = '<!DOCTYPE html> <html> <font color=red> <head> <title>Font Face</title> </head> <body> <font face = "Symbol" size = "5">Symbol</font><br /> <font face = "Times New Roman" size = "5">Times New Roman</font><br /> <font face = "Verdana" size = "5">Verdana</font><br /> <font face = "Comic sans MS" size =" 5">Comic Sans MS</font><br /> <font face = "WildWest" size = "5">WildWest</font><br /> <font face = "Bedrock" size = "5">Bedrock</font><br /> </body> </html>'
# html = "<font color=blue size=24>this is a test<sup>2</sup><br></font><font color=green size=12>continued<sub>1</sub><br></font><font color=white size=12>test greek <font face='Symbol' size=32>a</font> another α<font face='Arial' color='Orange'>I am a sentence!</font>"
text = TAText2D(html)
# hexagon.append(QPointF(10, 20))
print(text)
# print(hexagon.translate(10, 20)) # why none ???
# translate and so on can all be saved...
image = QImage('./../data/handCorrection.png')
# image = QImage(QSize(400, 300), QImage.Format_RGB32)
painter = QPainter()
painter.begin(image)
# painter.setOpacity(0.3);
painter.drawImage(0, 0, image)
painter.setPen(QtCore.Qt.blue)
text.opacity = 0.7
painter.translate(10, 20)
painter.setPen(QColor(168, 34, 3))
    text.draw(painter)  # this works; could overload it with SVG
painter.drawRect(text)# try to draw the bounds
# painter.setPen(QtCore.Qt.green)
# painter.setFont(QFont('SansSerif', 50))
painter.setFont(QFont('Decorative', 10))
# painter.drawText(256, 256, "this is a test")
# nothing works it just doesn't draw for unknown reason ????
# painter.drawText(QRect(60,60,256,256), Qt.AlignCenter, "this is a test")
painter.setPen(QtGui.QColor(200, 0, 0))
    # painter.drawText(20, 20, "MetaGenerator") # crashes the app --> why exit(139) ...
    painter.drawText(QRect(60,60,256,256), Qt.AlignCenter, "Text centered in the drawing area")
# painter.drawText(QRect(100, 100, 200, 100), "Text you want to draw...");
print('here')
painter.end()
# image = QImage(QSize(400, 300), QImage::Format_RGB32);
# QPainter
# painter( & image);
# painter.setBrush(QBrush(Qt::green));
# painter.fillRect(QRectF(0, 0, 400, 300), Qt::green);
# painter.fillRect(QRectF(100, 100, 200, 100), Qt::white);
# painter.setPen(QPen(Qt::black));
# painter.save()
# painter.setCompositionMode(QtGui.QPainter.CompositionMode_Clear)
# painter.eraseRect(r)
# painter.restore()
print('saving', './../trash/test_pyQT_draw_text.png')
image.save('./../trash/test_pyQT_draw_text.png', "PNG")
# split text and find bounding rect of the stuff --> so that it is well positioned
# or do everything in svg and just show what's needed ???
    # not bad TODO: make a draw-anything class that draws whatever shape it is given, with colour, transparency, ... parameters
    # pretty much everything works and it runs very fast
sys.exit(0)
|
import json # py2.6+ TODO: add support for other JSON serialization modules
from google.protobuf.descriptor import FieldDescriptor as FD
from functools import partial
class ParseError(Exception): pass
def json2pb(pb, js, useFieldNumber=False):
    ''' populate a protobuf message instance from a dict parsed from JSON '''
for field in pb.DESCRIPTOR.fields:
if useFieldNumber:
key = field.number
else:
key = field.name
if key not in js:
continue
if field.type == FD.TYPE_MESSAGE:
pass
elif field.type in _js2ftype:
ftype = _js2ftype[field.type]
else:
raise ParseError("Field %s.%s of type '%d' is not supported" % (pb.__class__.__name__, field.name, field.type, ))
value = js[key]
if field.label == FD.LABEL_REPEATED:
pb_value = getattr(pb, field.name, None)
for v in value:
if field.type == FD.TYPE_MESSAGE:
json2pb(pb_value.add(), v, useFieldNumber=useFieldNumber)
else:
pb_value.append(ftype(v))
else:
if field.type == FD.TYPE_MESSAGE:
json2pb(getattr(pb, field.name, None), value, useFieldNumber=useFieldNumber)
else:
setattr(pb, field.name, ftype(value))
return pb
def pb2json(pb, useFieldNumber=False):
    ''' convert a protobuf message instance to a JSON-serializable dict '''
js = {}
# fields = pb.DESCRIPTOR.fields #all fields
fields = pb.ListFields() #only filled (including extensions)
for field,value in fields:
if useFieldNumber:
key = field.number
else:
key = field.name
if field.type == FD.TYPE_MESSAGE:
ftype = partial(pb2json, useFieldNumber=useFieldNumber)
elif field.type in _ftype2js:
ftype = _ftype2js[field.type]
else:
raise ParseError("Field %s.%s of type '%d' is not supported" % (pb.__class__.__name__, field.name, field.type, ))
if field.label == FD.LABEL_REPEATED:
js_value = []
for v in value:
js_value.append(ftype(v))
else:
js_value = ftype(value)
js[key] = js_value
return js
_ftype2js = {
FD.TYPE_DOUBLE: float,
FD.TYPE_FLOAT: float,
FD.TYPE_INT64: long,
FD.TYPE_UINT64: long,
FD.TYPE_INT32: int,
FD.TYPE_FIXED64: float,
FD.TYPE_FIXED32: float,
FD.TYPE_BOOL: bool,
FD.TYPE_STRING: unicode,
#FD.TYPE_MESSAGE: pb2json, #handled specially
FD.TYPE_BYTES: lambda x: x.encode('string_escape'),
FD.TYPE_UINT32: int,
FD.TYPE_ENUM: int,
FD.TYPE_SFIXED32: float,
FD.TYPE_SFIXED64: float,
FD.TYPE_SINT32: int,
FD.TYPE_SINT64: long,
}
_js2ftype = {
FD.TYPE_DOUBLE: float,
FD.TYPE_FLOAT: float,
FD.TYPE_INT64: long,
FD.TYPE_UINT64: long,
FD.TYPE_INT32: int,
FD.TYPE_FIXED64: float,
FD.TYPE_FIXED32: float,
FD.TYPE_BOOL: bool,
FD.TYPE_STRING: unicode,
# FD.TYPE_MESSAGE: json2pb, #handled specially
FD.TYPE_BYTES: lambda x: x.decode('string_escape'),
FD.TYPE_UINT32: int,
FD.TYPE_ENUM: int,
FD.TYPE_SFIXED32: float,
FD.TYPE_SFIXED64: float,
FD.TYPE_SINT32: int,
FD.TYPE_SINT64: long,
}
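# Hypothetical round-trip (assumes a generated protobuf message class
# `MyMessage` and a JSON string `payload`):
# msg = json2pb(MyMessage(), json.loads(payload))
# payload2 = json.dumps(pb2json(msg))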
|
import prettytable as pt
from pymatgen.util.serialization import pmg_serialize
from monty.json import MSONable
def seconds_to_hms(seconds):
"""
    Converts seconds to the format "h:mm:ss"
Args:
seconds: number of seconds
Returns:
A string representing the seconds with the format "h:mm:ss". An empty string if seconds is None.
"""
if seconds is None:
return ""
m, s = divmod(seconds, 60)
h, m = divmod(m, 60)
return "%d:%02d:%02d" % (h, m, s)
class TimeReport(MSONable):
"""
Report of the time consumed for the whole workflow and for each single step.
Includes both the time required for the calculations as well as the cpu time consumed.
"""
def __init__(self, total_run_time, n_fws, total_cpu_time=None, contributed_cpu_time=0, total_run_time_per_tag=None,
total_cpu_time_per_tag=None, contributed_cpu_time_per_tag=None, worker=None):
self.total_run_time = total_run_time
self.n_fws = n_fws
self.total_cpu_time = total_cpu_time
self.contributed_cpu_time = contributed_cpu_time
self.total_run_time_per_tag = total_run_time_per_tag
self.total_cpu_time_per_tag = total_cpu_time_per_tag
self.contributed_cpu_time_per_tag = contributed_cpu_time_per_tag
self.worker = worker
@pmg_serialize
def as_dict(self):
d = dict(total_run_time=self.total_run_time, n_fws=self.n_fws, total_cpu_time=self.total_cpu_time,
contributed_cpu_time=self.contributed_cpu_time, total_run_time_per_tag=self.total_run_time_per_tag,
total_cpu_time_per_tag=self.total_cpu_time_per_tag,
contributed_cpu_time_per_tag=self.contributed_cpu_time_per_tag,
worker=self.worker)
return d
@classmethod
def from_dict(cls, d):
d = d.copy()
d.pop("@module", None)
d.pop("@class", None)
return cls(**d)
def __str__(self):
s = ''
if self.worker:
s += 'Worker: {}\n'.format(self.worker)
s += 'Total number of fireworks: {}\n\n'.format(self.n_fws)
t = pt.PrettyTable(['task tag', 'run time (h:m:s)', 'cpu time (h:m:s)', 'N cpu time'], float_format="5.3")
t.align['task tag'] = 'l'
if self.total_run_time_per_tag:
for task_tag in self.total_run_time_per_tag.keys():
# t.add_row([task_tag, self.total_run_time_per_tag[task_tag]/3600,
# self.total_cpu_time_per_tag.get(task_tag, 0)/3600,
# self.contributed_cpu_time_per_tag.get(task_tag, 0)])
t.add_row([task_tag, seconds_to_hms(self.total_run_time_per_tag[task_tag]),
seconds_to_hms(self.total_cpu_time_per_tag.get(task_tag, 0)),
self.contributed_cpu_time_per_tag.get(task_tag, 0)])
# t.add_row(['Total', self.total_run_time/3600, self.total_cpu_time/3600, self.contributed_cpu_time])
t.add_row(['Total', seconds_to_hms(self.total_run_time),
seconds_to_hms(self.total_cpu_time), self.contributed_cpu_time])
s += str(t)
return s
|
"""
JADS 2020 Data-Driven Food Value Chain course
Introduction to Sensors
Lunar Lander Micropython Exercise
"""
# ------------------------ Objective of the game -------------------------------------
# this game simulates a lunar landing module (LLM) landing on the moon
# you are the pilot and need to control how much fuel you burn in the
# retro rockets so that your descent speed slows to zero just as your
# altitude above the moon's surface reaches zero. If you impact the moon
# more than 5 m below the surface, or your speed on impact is
# greater than 5 m/s, then you are considered to have crashed.
# Otherwise it is considered to be a 'good' landing.
# If you run out of fuel, the LLM will accelerate towards the moon under gravity.
# ---------------------------- Assignment --------------------------------------------
# Copy paste this code at https://micropython.org/unicorn/
# Then replace the constant burn rate in line 57 with interpolated input from the ADC slider,
# with some delay from a basic moving average filter (for instance with a window of n = 3)
# ----------------------------- Source code ------------------------------------------
import time
import machine
import pyb
# The slider is connected to pin Y4
y4 = machine.Pin('Y4')
# read out slider through analog to digital converter
adc = pyb.ADC(y4)
# slider_out between 0 - 255
slider_out = adc.read()
# set up the game's initial parameters
speed = 30 # speed approaching the moon
fuel = 1500 # how much fuel is left
altitude = 1000 # altitude above moon
gravity = 1.622 # acceleration due to gravity
burn = 0 # initial rate of burning fuel in retrorockets
# while LLM is above the moon's surface,
# calculate flight data and take input from pilot
while altitude > 0:
# calculate how long until LLM will impact moon at current speed (impact)
if speed <= 0:
impact = 1000
else:
impact = altitude / speed
# display flight data
print(
"Altitude={:8.3f} Speed={:6.3f} Fuel={:8.3f} Impact={:6.3f} Previous burn={:6.3f}".format(altitude, speed, fuel,
impact, burn))
# take input from pilot
burn = 5 # <----- replace this line of code with filtered dynamic input from slider
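    # A minimal sketch of the requested replacement (an assumption, not an
    # official solution): map the slider's 0-255 reading onto the 0-50 burn
    # range and smooth it with a 3-sample moving average. Initialise
    # burn_history = [] before the while loop, then:
    # burn_history.append(adc.read() * 50 / 255)    # interpolate 0-255 -> 0-50
    # if len(burn_history) > 3:
    #     burn_history.pop(0)                       # keep the last 3 samples
    # burn = sum(burn_history) / len(burn_history)  # moving average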
# ensure rate of fuel burning is within rocket's capability and doesn't exceed remaining fuel
if burn < 0:
burn = 0
if burn > 50:
burn = 50
if burn > fuel:
burn = fuel
# adjust to change the speed of the game
time.sleep_ms(300)
# calculate new flight data
altitude -= speed
speed += gravity - burn / 10
fuel -= burn
# loop has ended so we must have hit moon's surface
# display final flight data and assess whether it was a crash or a good landing
print("Altitude={:8.3f} Speed={:6.3f} Fuel={:8.3f} Last burn={:6.3f}".format(altitude, speed, fuel, burn))
if altitude < - 5 or speed > 5:
print("You have crashed")
else:
print("You have landed")
|
#
"""tasks to configure machine
"""
import re, os
from .tasks import op_task_python_simple, fstab_template, efi_template, swap_template
#from .estimate import
from ..lib.util import get_triage_logger
from ..lib.grub import grub_set_wce_share
from ..ops.pplan import EFI_NAME
tlog = get_triage_logger()
#
# Generating pxeboot
#
class task_config_pxeboot(op_task_python_simple):
"""Read the pxeboot config file and generate it"""
def __init__(self, description, **kwargs):
super().__init__(description, time_estimate=1, **kwargs)
pass
def run_python(self):
#
self.linuxpart = self.disk.find_partition(self.partition_id)
if self.linuxpart is None:
msg = "Partition with %s does not exist." % str(self.partition_id)
self.set_progress(999, msg)
tlog.debug("part n = %d" % len(self.disk.partitions))
for part in self.disk.partitions:
tlog.debug( str(part))
pass
raise Exception(msg)
self.efi_part = self.disk.find_partition(EFI_NAME)
self.swappart = self.disk.find_partition_by_file_system("swap")
self.mount_dir = self.linuxpart.get_mount_point()
# patch up the restore
fstab = open("%s/etc/fstab" % self.mount_dir, "w")
fstab.write(fstab_template % (self.linuxpart.fs_uuid))
if self.efi_part:
fstab.write(efi_template % self.efi_part.fs_uuid)
pass
if self.swappart:
fstab.write(swap_template % (self.swappart.fs_uuid))
pass
fstab.close()
#
# New hostname
#
if self.newhostname:
# Set up the /etc/hostname
etc_hostname_filename = "%s/etc/hostname" % self.mount_dir
hostname_file = open(etc_hostname_filename, "w")
hostname_file.write("%s\n" % self.newhostname)
hostname_file.close()
# Set up the /etc/hosts file
etc_hosts_filename = "%s/etc/hosts" % self.mount_dir
hosts = open(etc_hosts_filename, "r")
lines = hosts.readlines()
hosts.close()
hosts = open(etc_hosts_filename, "w")
this_host = re.compile(r"127\.0\.1\.1\s+[a-z_A-Z0-9]+\n")
for line in lines:
m = this_host.match(line)
if m:
hosts.write("127.0.1.1\t%s\n" % self.newhostname)
else:
hosts.write(line)
pass
pass
hosts.close()
self.log("Hostname in %s/etc is updated with %s." % (self.mount_dir, self.newhostname))
pass
# Set the wce_share_url to /etc/default/grub
if self.wce_share_url is not None:
grub_file = "%s/etc/default/grub" % self.mount_dir
updated, contents = grub_set_wce_share(grub_file, self.wce_share_url)
if updated:
grub = open(grub_file, "w")
grub.write(contents)
grub.close()
pass
pass
#
# Remove the WCE follow up files
#
for removing in ["%s/var/lib/world-computer-exchange/computer-uuid",
"%s/var/lib/world-computer-exchange/access-timestamp"]:
try:
os.unlink( removing % self.mount_dir)
except:
# No big deal if the file isn't there or failed to remove.
pass
pass
pass
pass
|
#!/usr/bin/env python3
"""Python Essentials
Chapter 12, Script 1
"""
c = float(input("Temperature °C: "))
f = 32+9*c/5
print("C={c:.0f}°, F={f:.0f}°".format(c=c,f=f))
|
import tensorflow as tf
import matplotlib.pyplot as plt
from dlgAttack import *
if __name__ == "__main__":
mnist = tf.keras.datasets.mnist
(x_train, y_train), (x_test, y_test) = mnist.load_data()
x_train, x_test = x_train / 255.0, x_test / 255.0
x_train = x_train[:1]
x_train = x_train.reshape(1, -1)
y_train = y_train[:1]
gt_data = x_train
gt_label = one_hot_labels(y_train)
print("true shape:", gt_data.shape, gt_label.shape)
model = MyModel()
dummy_x, dummy_y = DLG_Attack(
model=model, gt_data=gt_data, gt_label=gt_label, verbose=10)
faker = dummy_x.numpy().reshape(28, 28)
plt.imshow(faker)
plt.show()
|
import os
import requests
from . import base
from .. import utils
DEFAULT_SECTION = 'General'
class Client(base.Client):
options = {
'required': ['name', 'url', 'token'],
'properties': {
'name': {
'description': 'Name of the project.',
'type': 'string',
},
'url': {
'description': 'URL endpoint to the API.',
'type': 'string',
},
'token': {
'description': 'API token for authorization.',
'type': 'string',
},
'verify_ssl': {
                'description': 'Require SSL certificate verification.',
'type': 'boolean',
'default': True,
},
}
}
    def setup(self):
        # 'verify' controls TLS certificate checking and belongs on the
        # request itself, not in the POST payload.
        resp = requests.post(self.options.url, data={
            'token': self.options.token,
            'content': 'metadata',
            'format': 'json',
        }, verify=self.options.verify_ssl)
resp.raise_for_status()
self.metadata = resp.json()
def parse_project(self):
return {
'origins:ident': self.options.name,
'prov:label': self.options.name,
'prov:type': 'Project',
'name': self.options.name,
}
def parse_form(self, project, attrs):
name = attrs['form_name']
return {
'origins:ident': os.path.join(project['origins:ident'], name),
'prov:label': utils.prettify_name(name),
'prov:type': 'Form',
'name': name,
'project': project,
}
def parse_section(self, form, attrs):
name = attrs['section_header'] or DEFAULT_SECTION
stripped_name = utils.strip_html(name)
return {
'origins:ident': os.path.join(form['origins:ident'],
stripped_name),
'prov:label': stripped_name,
'prov:type': 'Section',
'name': attrs['section_header'],
'form': form,
}
def parse_field(self, section, attrs):
        identifier = attrs['identifier'].lower() == 'y'
        required = attrs['required_field'].lower() == 'y'
field = {
'origins:ident': os.path.join(section['origins:ident'],
attrs['field_name']),
'prov:label': utils.strip_html(attrs['field_label']),
'prov:type': 'Field',
'name': attrs['field_name'],
'label': attrs['field_label'],
'note': attrs['field_note'],
'type': attrs['field_type'],
'choices': attrs['select_choices_or_calculations'],
'display_logic': attrs['branching_logic'],
'validation_type': attrs['text_validation_type_or_show_slider_number'], # noqa
'validation_min': attrs['text_validation_min'],
'validation_max': attrs['text_validation_max'],
'identifier': identifier,
'required': required,
'alignment': attrs['custom_alignment'],
'survey_num': attrs['question_number'],
'matrix': attrs['matrix_group_name'],
'section': section,
}
return field
def parse(self):
project = self.parse_project()
self.document.add('entity', project)
form = None
section = None
for attrs in self.metadata:
if not form or attrs['form_name'] != form['name']:
form = self.parse_form(project, attrs)
self.document.add('entity', form)
# Reset section
section = None
            # If an explicit section header is present, switch to that
            # section. Otherwise, if this is the first section of the form,
            # use the default section name.
name = attrs['section_header']
if not section or (name and name != section['name']):
section = self.parse_section(form, attrs)
self.document.add('entity', section)
field = self.parse_field(section, attrs)
self.document.add('entity', field)
|
from .CoreInternalError import InternalAPIError
class InvalidAPIMethod(InternalAPIError):
def __init__(self,method):
super().__init__(
message=f"{method} not allowed for this API",
returnCode=405)
class InvalidMethodForRoute(InternalAPIError):
def __init__(self,method,route):
super().__init__(
message=f"{method} not allowed for this specific route {route}",
returnCode=405)
|
import numpy as np
import logging
from scipy.stats import truncnorm
def selection(Z):
"""
    Characterise selecting the top model from vector Z as a linear
    combination.
    input
    Z      : "Feature vector" with a normal distribution.
    return
    ind_sel: Selected index (the smallest entry of Z).
    A,b    : The linear combination of the selection event Az < b.
    """
    N = np.shape(Z)[0]
    ## Sorted list of Z
    ind_sorted = np.argsort(Z)
    ## Pick the top-ranked (smallest) entry
    ind_sel = ind_sorted[0]
A = np.zeros((N-1,N))
for i in range(N-1):
A[i, ind_sorted[0]] = 1
A[i, ind_sorted[i+1]] = -1
b = np.zeros((N-1))
    assert np.sum(np.matmul(A, Z) > 0) == 0, "Assumption error"
return ind_sel, A, b
def psi_inf(A, b, eta, mu, cov, z):
    """
    Returns the ppf and sf of the truncated normal. The mean, variance,
    and truncation points [a, b] are determined as in Lee et al. 2016.
    """
    l_thres, u_thres = calculate_threshold(z, A, b, eta, cov)
    sigma2 = np.matmul(eta, np.matmul(cov, eta))
    scale = np.sqrt(sigma2)
    mean = np.matmul(eta, mu)
    ppf = lambda x: truncnorm_ppf(x, l_thres, u_thres, loc=mean, scale=scale)
    # truncnorm's a/b cut-offs are in standard units relative to loc and
    # scale, so the mean is subtracted before dividing by the scale.
    sf = lambda x: truncnorm.sf(x, (l_thres - mean) / scale,
                                (u_thres - mean) / scale,
                                loc=mean, scale=scale)
    return ppf, sf
def calculate_threshold(z, A, b, eta, cov):
"""
Calculates the respective threshold for the method PSI_Inf.
"""
etaz = eta.dot(z)
Az = A.dot(z)
Sigma_eta = cov.dot(eta)
deno = Sigma_eta.dot(eta)
alpha = A.dot(Sigma_eta)/deno
assert(np.shape(A)[0] == np.shape(alpha)[0])
pos_alpha_ind = np.argwhere(alpha>0).flatten()
neg_alpha_ind = np.argwhere(alpha<0).flatten()
acc = (b - np.matmul(A,z))/alpha+np.matmul(eta,z)
if (np.shape(neg_alpha_ind)[0] > 0):
l_thres = np.max(acc[neg_alpha_ind])
else:
l_thres = -10.0**10
if (np.shape(pos_alpha_ind)[0] > 0):
u_thres = np.min(acc[pos_alpha_ind])
else:
u_thres= 10**10
return l_thres, u_thres
def test_significance(A, b, eta, mu, cov, z, alpha):
"""
    Compute a p-value with a one-tailed (right-tail) test of the
    truncated normal.
    Returns (reject_h0, p_value).
"""
ppf, sf = psi_inf(A, b, eta, mu, cov, z)
stat = np.matmul(eta,z) ## Test statistic
sigma = np.sqrt(np.matmul(eta,np.matmul(cov,eta)))
    ## If the std dev is NaN or not real, do not reject the hypothesis.
    if np.isnan(sigma) or not np.isreal(sigma):
        logging.warning("Scale is not real or is NaN; not rejecting")
return False, 1.
threshold = ppf(1.-alpha)
pval = sf(stat)
return stat > threshold, pval
def generateEta(ind_sel, n_models):
"""
Generate multiple etas corresponding to testing
within the selected indices.
"""
etas = np.zeros((n_models-1, n_models))
for i in range(n_models-1):
index = i if i < ind_sel else i +1
etas[i,ind_sel] = -1
etas[i,index]=1
return etas
def truncnorm_ppf(x, a, b,loc=0., scale=1.):
"""
    Approximate percentile function of the truncated normal, used particularly
    in the tail regions (where the standard SciPy function may be undefined).
"""
thres = truncnorm.ppf(x,(a-loc)/scale,(b-loc)/scale,loc=loc, scale=scale)
if np.any(np.isnan(thres)) or np.any(np.isinf(thres)):
logging.info("Threshold is Nan using approximations.")
thres = loc+scale*quantile_tn(x,(a-loc)/scale,(b-loc)/scale)
return thres
def quantile_tn(u,a,b,threshold=0.0005):
"""
Approximate quantile function in the tail region
https://www.iro.umontreal.ca/~lecuyer/myftp/papers/truncated-normal-book-chapter.pdf
"""
def q(x, r=10):
"""
Helper function.
"""
acc=0
for i in range(r):
acc = acc + (2*i-1)/((-1)**i*x**(2*i+1))
return 1/x + acc
q_a = q(a)
q_b = q(b)
c =q_a * (1- u) + q_b * u * np.exp((a**2 - b**2)/2)
d_x = 100
z = 1 - u + u * np.exp((a**2 - b**2)/2)
x = np.sqrt(a**2 - 2 * np.log(z))
while d_x > threshold and not np.isnan(d_x):
z = z - x * (z * q(x) - c)
x_new = np.sqrt(a**2 - 2 * np.log(z))
d_x = np.abs(x_new - x)/x
x = x_new
return x
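# A minimal end-to-end sketch (hypothetical data: a standard-normal Z with an
# identity covariance; this demo block is not part of the original module):
if __name__ == "__main__":
    rng = np.random.default_rng(0)
    Z = rng.normal(size=5)
    ind_sel, A, b = selection(Z)    # selection event for the smallest entry
    etas = generateEta(ind_sel, 5)  # contrasts against the other models
    reject, pval = test_significance(A, b, etas[0], np.zeros(5), np.eye(5),
                                     Z, alpha=0.05)
    print(ind_sel, reject, pval)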
|
# -*- Mode: Python -*-
# http://en.wikipedia.org/wiki/ANSI_escape_code
import array
import coro
import math
W = coro.write_stderr
CSI = '\x1B['
at_format = CSI + '%d;%dH%s' # + CSI + '0m'
def at (x, y, ch):
return at_format % (y, x, ch)
import random
class arena:
def __init__ (self, w=150, h=53):
self.w = w
self.h = h
self.data = [ array.array ('c', " " * w) for y in range (h) ]
# put some walls around the outside
for i in range (w):
self.data[0][i] = '='
self.data[-1][i] = '='
for j in range (h):
self.data[j][0] = '|'
self.data[j][-1] = '|'
self.data[0][0] = '+'
self.data[0][-1] = '+'
self.data[-1][0] = '+'
self.data[-1][-1] = '+'
self.worms = []
self.listeners = []
def random_pos (self):
while 1:
x = random.randrange (1, self.w-1)
y = random.randrange (1, self.h-1)
if self[x,y] == ' ':
break
return x, y
def render (self):
return '\n'.join (x.tostring() for x in self.data)
def __getitem__ (self, (x, y)):
return self.data[y][x]
def draw (self, pos, chr='*'):
x, y = pos
if self.data[y][x] in "=|+":
import pdb; pdb.set_trace()
self.data[y][x] = chr
for lx in self.listeners:
lx.draw (pos, chr)
def erase (self, pos):
x, y = pos
self.data[y][x] = ' '
for lx in self.listeners:
lx.erase (pos)
def populate (self, n=5):
for i in range (n):
x, y = self.random_pos()
self.worms.append (worm (self, x, y))
def cull (self, hoffa=False):
removed = []
for worm in self.worms:
if worm.stunned:
W ('culling worm %r\n' % (worm.chr,))
worm.kill (hoffa)
removed.append (worm)
for r in removed:
self.worms.remove (r)
class worm:
counter = 0
# up down left right
movex = [0,0,-1,1]
movey = [-1,1,0,0]
def __init__ (self, arena, x, y, length=10):
self.arena = arena
self.head = x, y
self.tail = []
self.dir = random.randrange (0, 4)
self.length = length
worm.counter += 1
self.chr = '0123456789abcdefghijklmnopqrstuvwxyz'[worm.counter%36]
self.speed = random.randrange (200, 400)
self.exit = False
self.stunned = False
W ('new worm %r @ %d, %d speed=%d\n' % (self.chr, x, y, self.speed))
coro.spawn (self.go)
def go (self):
while not self.exit:
coro.sleep_relative (self.speed / 10000.0)
if random.randrange (0,20) == 10:
if not self.turn():
return
else:
nx, ny = self.update()
while self.arena[(nx,ny)] != ' ':
if not self.turn():
return
nx, ny = self.update()
self.move ((nx, ny))
def update (self):
x, y = self.head
return (
x + self.movex[self.dir],
y + self.movey[self.dir]
)
def turn (self):
while not self.exit:
x, y = self.head
a = self.arena
choices = []
for i in range (4):
nx = x + self.movex[i]
ny = y + self.movey[i]
if a[nx,ny] == ' ':
choices.append (i)
if not choices:
return self.stun()
else:
self.dir = random.choice (choices)
return True
def stun (self):
self.stunned = True
for pos in self.tail:
self.arena.draw (pos, '*')
coro.sleep_relative (5)
self.stunned = False
return not self.exit
def move (self, pos):
self.tail.append (self.head)
self.head = pos
self.arena.draw (pos, self.chr)
if len (self.tail) > self.length:
tpos = self.tail.pop (0)
self.arena.erase (tpos)
def kill (self, hoffa=True):
self.arena.erase (self.head)
for pos in self.tail:
if hoffa:
self.arena.draw (pos, '#')
else:
self.arena.erase (pos)
self.exit = True
self.head = (None, None)
self.tail = []
class terminal:
def __init__ (self, conn):
self.conn = conn
self.conn.send (
'\xff\xfc\x01' # IAC WONT ECHO
'\xff\xfb\x03' # IAC WILL SUPPRESS_GO_AHEAD
'\xff\xfc"' # IAC WONT LINEMODE
)
# turn off the cursor
self.conn.send (CSI + '?25l')
self.fifo = coro.fifo()
self.redraw()
self.t0 = coro.spawn (self.listen)
self.t1 = coro.spawn (self.writer)
self.exit = False
def redraw (self):
self.fifo.push (''.join ([
# clear the screen
CSI + '2J',
# move to home
CSI + '1;1H',
# draw the arena
the_arena.render(),
'\n keys: [q]uit [r]edraw [n]ew [c]ull [l]engthen [h]offa\n',
]))
def draw (self, (x, y), chr):
chr = (
CSI
+ '%dm' % (40+ord(chr)%8,)
+ CSI
+ '%dm' % (30+ord(chr)%7,)
+ chr
+ CSI
+ '0m'
)
self.fifo.push (at (x+1, y+1, chr))
def erase (self, (x, y)):
self.fifo.push (at (x+1, y+1, ' '))
def writer (self):
while not self.exit:
data = self.fifo.pop()
if data is None:
break
else:
self.conn.send (data)
def listen (self):
while not self.exit:
byte = self.conn.recv (1)
if byte == '\xff':
# telnet stuff, dump it
self.conn.recv (2)
elif byte == 'r':
self.redraw()
elif byte == 'n':
the_arena.populate (1)
elif byte == 'c':
the_arena.cull()
elif byte == 'l':
for worm in the_arena.worms:
worm.length += 1
elif byte == 'h':
the_arena.cull (hoffa=True)
elif byte == 'q':
self.exit = True
# turn the cursor back on...
self.conn.send (CSI + '?25h')
the_arena.listeners.remove (self)
self.conn.close()
self.fifo.push (None)
def status():
while 1:
coro.sleep_relative (2)
coro.write_stderr ('%5d worms %5d threads\n' % (len(the_arena.worms), len(coro.all_threads)))
def serve():
s = coro.tcp_sock()
s.bind (('', 9001))
s.listen (5)
coro.write_stderr ('Try "telnet localhost 9001" to watch the worms!\n')
while 1:
c, a = s.accept()
t = terminal (c)
the_arena.listeners.append (t)
if __name__ == '__main__':
import coro.backdoor
the_arena = arena()
the_arena.populate (10)
coro.spawn (status)
coro.spawn (serve)
coro.spawn (coro.backdoor.serve, unix_path='/tmp/xx.bd')
#import coro.profiler
#coro.profiler.go (coro.event_loop)
coro.event_loop()
|
# encoding=utf8
# pylint: disable=too-many-function-args
from NiaPy.tests.test_algorithm import AlgorithmTestCase, MyBenchmark
from NiaPy.algorithms.basic import AntColonyOptimization
class ACOTestCase(AlgorithmTestCase):
def test_parameter_type(self):
d = AntColonyOptimization.typeParameters()
self.assertTrue(d['jumpLength'](0.7))
self.assertFalse(d['jumpLength'](0))
self.assertTrue(d['pheromone'](0.3))
self.assertFalse(d['pheromone'](-0.3))
self.assertTrue(d['evaporation'](0.1))
self.assertFalse(d['evaporation'](3))
self.assertTrue(d['NP'](10))
self.assertFalse(d['NP'](-10))
self.assertFalse(d['NP'](0))
def test_custom_works_fine(self):
aco_custom = AntColonyOptimization(NP=40, jumpLength=0.9, pheromone=0, evaporation=0.1, seed=self.seed)
aco_customc = AntColonyOptimization(NP=40, jumpLength=0.9, pheromone=0, evaporation=0.1, seed=self.seed)
AlgorithmTestCase.algorithm_run_test(self, aco_custom, aco_customc, MyBenchmark())
def test_griewank_works_fine(self):
aco_griewank = AntColonyOptimization(NP=40, jumpLength=0.9, pheromone=0, evaporation=0.1, seed=self.seed)
aco_griewankc = AntColonyOptimization(NP=40, jumpLength=0.9, pheromone=0, evaporation=0.1, seed=self.seed)
AlgorithmTestCase.algorithm_run_test(self, aco_griewank, aco_griewankc)
|
from setuptools import setup
from gecrypt import VERSION
def readme():
with open('README.md') as f:
return f.read()
setup(
name='git-easy-crypt',
version=VERSION,
description='The easy way to encrypt/decrypt private files in the git repo.',
long_description=readme(),
long_description_content_type='text/markdown',
classifiers=[
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.7',
'Topic :: Software Development :: Version Control :: Git',
],
keywords='git encrypt decrypt',
url='https://github.com/taojy123/git-easy-crypt',
author='tao.py',
author_email='taojy123@163.com',
license='MIT',
py_modules=['gecrypt'],
entry_points={
'console_scripts': ['gecrypt=gecrypt:main'],
},
include_package_data=True,
zip_safe=False,
)
|
import ctypes
from ctypes.util import find_library
import errno
from . import _libc
from ._constants import ScmpArch, ScmpCmp
__all__ = (
"ScmpArg",
"scmp_filter_ctx",
"seccomp_arch_add",
"seccomp_arch_native",
"seccomp_export_pfc",
"seccomp_init",
"seccomp_load",
"seccomp_release",
"seccomp_rule_add_array",
"seccomp_rule_add_exact_array",
"seccomp_syscall_resolve_name_arch",
"seccomp_syscall_resolve_num_arch",
)
_lsc_path = find_library("seccomp")
if _lsc_path is None:
raise ImportError("Unable to find library libseccomp")
_lsc = ctypes.CDLL(_lsc_path)
# errcheck functions
def _check_init(result, func, args):
    if not result:
        # seccomp_init(3) returns NULL on failure; a NULL pointer is falsy
        # but never None, so the check must use truthiness. No errno is
        # available here, so EINVAL is reported as a best guess.
        raise OSError(errno.EINVAL, func.__name__, args)
    return result
def _check_success(result, func, args):
if result != 0:
raise OSError(-result, func.__name__, args)
return result
def _check_syscall_resolve_num(result, func, args):
if not result:
raise ValueError(args)
try:
# convert char* to Python bytes, then ASCII text
name = ctypes.string_at(result)
return name.decode("ascii")
finally:
_libc.free(result)
def _check_syscall_resolve_name(result, func, args):
if result == -1: # __NR_SCMP_ERROR
raise ValueError(args)
return result
def _check_arch(result, func, args):
if result == ScmpArch.SCMP_ARCH_NATIVE:
raise OSError(errno.EINVAL, func.__name__, args)
return ScmpArch(result)
# structures
class scmp_filter(ctypes.Structure):
__slots__ = ()
scmp_filter_ctx = ctypes.POINTER(scmp_filter)
class ScmpArg(ctypes.Structure):
"""scmp_arg_cmp"""
__slots__ = ()
_fields_ = [
("arg", ctypes.c_uint),
("op", ctypes.c_int),
("datum_a", ctypes.c_uint64),
("datum_b", ctypes.c_uint64),
]
def __init__(self, arg, op, datum_a, datum_b=0):
if arg < 0 or arg > 5:
raise ValueError("invalid arg '{}'".format(arg))
if op not in ScmpCmp:
raise ValueError("invalid op '{}'".format(op))
# datum_b is only used by SCMP_CMP_MASKED_EQ
super().__init__(arg, op, datum_a, datum_b)
@classmethod
def toarray(cls, *args):
if len(args) > 5:
raise ValueError("too many arguments")
array = (cls * len(args))()
for i, arg in enumerate(args):
array[i] = arg
return array
def __repr__(self):
return (
"{self.__class__.__name__}({self.arg}, ScmpCmp.{op}, "
"{self.datum_a}, {self.datum_b})"
).format(self=self, op=ScmpCmp(self.op)._name_)
# functions
seccomp_init = _lsc.seccomp_init
seccomp_init.argtypes = (ctypes.c_uint32,)
seccomp_init.restype = scmp_filter_ctx
seccomp_init.errcheck = _check_init
seccomp_release = _lsc.seccomp_release
seccomp_release.argtypes = (scmp_filter_ctx,)
seccomp_release.restype = None
seccomp_load = _lsc.seccomp_load
seccomp_load.argtypes = (scmp_filter_ctx,)
seccomp_load.restype = ctypes.c_int
seccomp_load.errcheck = _check_success
seccomp_arch_add = _lsc.seccomp_arch_add
seccomp_arch_add.argtypes = (scmp_filter_ctx, ctypes.c_uint32)
seccomp_arch_add.restype = ctypes.c_int
seccomp_arch_add.errcheck = _check_success
seccomp_arch_native = _lsc.seccomp_arch_native
seccomp_arch_native.argtypes = ()
seccomp_arch_native.restype = ctypes.c_uint32
seccomp_arch_native.errcheck = _check_arch
seccomp_rule_add_array = _lsc.seccomp_rule_add_array
seccomp_rule_add_array.argtypes = (
scmp_filter_ctx,
ctypes.c_uint32,
ctypes.c_int,
ctypes.c_uint,
ctypes.POINTER(ScmpArg),
)
seccomp_rule_add_array.restype = ctypes.c_int
seccomp_rule_add_array.errcheck = _check_success
seccomp_rule_add_exact_array = _lsc.seccomp_rule_add_exact_array
seccomp_rule_add_exact_array.argtypes = (
scmp_filter_ctx,
ctypes.c_uint32,
ctypes.c_int,
ctypes.c_uint,
ctypes.POINTER(ScmpArg),
)
seccomp_rule_add_exact_array.restype = ctypes.c_int
seccomp_rule_add_exact_array.errcheck = _check_success
seccomp_export_pfc = _lsc.seccomp_export_pfc
seccomp_export_pfc.argtypes = (scmp_filter_ctx, ctypes.c_int)
seccomp_export_pfc.restype = ctypes.c_int
seccomp_export_pfc.errcheck = _check_success
seccomp_syscall_resolve_name_arch = _lsc.seccomp_syscall_resolve_name_arch
seccomp_syscall_resolve_name_arch.argtypes = (ctypes.c_uint32, ctypes.c_char_p)
seccomp_syscall_resolve_name_arch.restype = ctypes.c_int
seccomp_syscall_resolve_name_arch.errcheck = _check_syscall_resolve_name
seccomp_syscall_resolve_num_arch = _lsc.seccomp_syscall_resolve_num_arch
seccomp_syscall_resolve_num_arch.argtypes = (ctypes.c_uint32, ctypes.c_int)
# result is allocated on the heap and must be freed, cannot use c_char_p here
seccomp_syscall_resolve_num_arch.restype = ctypes.POINTER(ctypes.c_char)
seccomp_syscall_resolve_num_arch.errcheck = _check_syscall_resolve_num
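# A minimal usage sketch (commented out; assumes SCMP_ACT_ALLOW == 0x7fff0000
# as defined in seccomp.h -- real code should take the action value from a
# constants module rather than hard-coding it):
# ctx = seccomp_init(0x7fff0000)  # default action: allow
# nr = seccomp_syscall_resolve_name_arch(seccomp_arch_native(), b"getpid")
# seccomp_export_pfc(ctx, 2)      # dump the filter rules to stderr (fd 2)
# seccomp_release(ctx)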
|
# Hardware: ESP32
# Required modules:
# picoweb
# pkg_resources (required by picoweb)
# logging (required by picoweb)
# microbmp
# Required actions:
# Set essid and password of your own wifi
import network
from io import BytesIO
import picoweb
import logging
from microbmp import MicroBMP
essid = "YOUR_ESSID"
password = "YOUR_PASSWORD"
def set_img_colour(colour_idx):
for y in range(32):
for x in range(32):
img[x, y] = colour_idx
def connect_wifi(essid, password):
wlan = network.WLAN(network.STA_IF)
wlan.active(True)
if not wlan.isconnected():
print("connecting to network...")
wlan.connect(essid, password)
while not wlan.isconnected():
pass
print("network config:", wlan.ifconfig())
return wlan
app = picoweb.WebApp(__name__)
@app.route("/")
def index(req, resp):
yield from picoweb.start_response(resp)
yield from resp.awrite(b"Dynamic image: <img src='img.bmp'><br />")
@app.route("/img.bmp")
def squares(req, resp):
yield from picoweb.start_response(resp, "image/bmp")
set_img_colour(parameters["colour"])
bf_io = BytesIO()
img.write_io(bf_io)
bf_io.seek(0)
yield from resp.awrite(bf_io.read())
parameters["colour"] = (parameters["colour"] + 1) % 3
parameters = {"colour": 0} # 0:Red, 1:Green, 2:Blue
img = MicroBMP(32, 32, 2) # 2-bit(max 4 colours) 32x32 image
img.palette = [
bytearray([0xFF, 0, 0]),
bytearray([0, 0xFF, 0]),
bytearray([0, 0, 0xFF]),
bytearray([0, 0, 0]),
]
wlan = connect_wifi(essid, password)
logging.basicConfig(level=logging.INFO)
app.run(debug=True, host=wlan.ifconfig()[0])
|
from __future__ import absolute_import
import datetime
import django.test
from django.core.urlresolvers import reverse
from ..models import Topic, Question
class FAQModelTests(django.test.TestCase):
def test_model_save(self):
t = Topic.objects.create(name='t', slug='t')
q = Question.objects.create(
text = "What is your quest?",
answer = "I see the grail!",
topic = t
)
self.assertEqual(q.created_on.date(), datetime.date.today())
self.assertEqual(q.updated_on.date(), datetime.date.today())
self.assertEqual(q.slug, "what-is-your-quest")
def test_model_save_duplicate_slugs(self):
t = Topic.objects.create(name='t', slug='t')
q = Question.objects.create(
text = "What is your quest?",
answer = "I see the grail!",
topic = t
)
q2 = Question.objects.create(
text = "What is your quest?",
answer = "I see the grail!",
topic = t
)
self.assertEqual(q2.slug, 'what-is-your-quest-1')
def test_permalinks(self):
# Perhaps a bit overkill to test, but we missed it initially
t = Topic.objects.create(name='t', slug='t')
q = Question.objects.create(
text = "What is your quest?",
answer = "I see the grail!",
topic = t
)
t_url = t.get_absolute_url()
t_test_url = reverse('faq_topic_detail', args=[t.slug])
q_url = q.get_absolute_url()
q_test_url = reverse('faq_question_detail', args=[t.slug, q.slug])
self.assertEqual(t_url, t_test_url)
self.assertEqual(q_url, q_test_url)
|
import argparse
from onnxruntime.quantization import quantize_qat, QuantType
def parse_args():
    parser = argparse.ArgumentParser(description='Quantize a QAT-trained '
                                     'ONNX model')
parser.add_argument('--model_path', type=str, default=None)
parser.add_argument('--quant_path', type=str, default=None)
args = parser.parse_args()
return args
def main():
args = parse_args()
quantize_qat(args.model_path, args.quant_path)
print("done")
if __name__ == '__main__':
main()
|
class Solution:
def fizzBuzz(self, n: 'int') -> 'List[str]':
store = []
for buzzer in range(1, n + 1):
result = ""
if buzzer % 3 == 0:
result += "Fizz"
if buzzer % 5 == 0:
result += "Buzz"
store.append(result or str(buzzer))
return store
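# Example: Solution().fizzBuzz(5) -> ["1", "2", "Fizz", "4", "Buzz"]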
|
import matplotlib.pyplot as plt
valor_apl=input("Qual o número que será aplicado?\n")
valor_apl=int(valor_apl)
valor_ger=0
valor=[]
valorb=[]
while valor_apl !=1:
if valor_ger==0:
valor.append(valor_apl)
valor_ger+=1
print("valores: ")
if valor_apl%2==0:
print (str(valor_apl))
valor_apl=(valor_apl/2)
valor.append(valor_apl)
else:
print (str(valor_apl))
valor_apl=valor_apl*3+1
valor.append(valor_apl)
valor_ger+=1
print("1.0")
print ("Chegou em 1 após "+str(valor_ger)+" gerações")
for x in range(valor_ger):
valorb.append(x)
plt.scatter(valorb,valor)
plt.show()
|
#!python
# -*- coding: UTF-8 -*-
import logging
from live_recorder import you_live
"""
File: recorder.py
Author: Doby2333
Date Created: 2021/01/24
Last Edited: 2021/01/24
Description: Does the recording thing.
Note: *** Deprecated ***
"""
class Recorder:
def __init__(self, room_id, save_dir='./download'):
"""
        Initialize the Recorder and set its parameters
        :param room_id: ID of the live room to record
        :param save_dir: directory where recordings are saved
"""
self.log = logging.getLogger("main")
self.room_id = int(room_id)
self.args = {
'save_folder': save_dir,
'flv_save_folder': None,
'delete_origin_file': True,
'check_flv': True,
'file_name_format': '{name}-{shortId} 的{liver}直播{startTime}-{endTime}',
'time_format': '%Y%m%d_%H-%M',
'cookies': None,
'debug': False
}
self.recorder = you_live.Recorder.createRecorder('bili', self.room_id, **self.args)
self.recorder.check_flv = True
self.download_thread = None
self.monitor_thread = None
def start(self):
self.log.info("开始录制直播间" + str(self.room_id) + "!")
self.download_thread = you_live.DownloadThread(self.recorder)
self.monitor_thread = you_live.MonitoringThread(self.recorder)
self.download_thread.start()
self.monitor_thread.start()
def stop(self):
self.log.info("结束录制" + str(self.room_id) + "!")
self.recorder.downloadFlag = False
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
###############################################################################
# The MIT License
#
# Copyright (c) 2016 Grigory Chernyshev
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
###############################################################################
import pytest
from yagocd.resources import artifact, job, pipeline, property as prop, stage
class TestJobInstance(object):
PIPELINE_NAME = 'Consumer_Website'
PIPELINE_COUNTER = 31
STAGE_NAME = 'Commit'
STAGE_COUNTER = '1'
JOB_NAME = 'build'
URL = '{server}go/tab/build/detail/Consumer_Website/31/Commit/1/build'
@pytest.fixture()
def job_instance_from_pipeline(self, my_vcr, session_fixture):
with my_vcr.use_cassette("job/job_instance_from_pipeline"):
return pipeline.PipelineManager(session_fixture).find(self.PIPELINE_NAME).history()[0].stages()[0].jobs()[0]
@pytest.fixture()
def job_instance_from_stage(self, my_vcr, session_fixture):
with my_vcr.use_cassette("job/job_instance_from_stage"):
return stage.StageManager(session_fixture).get(
self.PIPELINE_NAME,
self.PIPELINE_COUNTER,
self.STAGE_NAME,
self.STAGE_COUNTER
).jobs()[0]
@pytest.fixture()
def job_instance_from_job_manager(self, my_vcr, session_fixture):
with my_vcr.use_cassette("job/job_instance_from_job_manager"):
return job.JobManager(session_fixture).history(self.PIPELINE_NAME, self.STAGE_NAME, self.JOB_NAME)[0]
@pytest.mark.parametrize("job_fixture_func", [
job_instance_from_pipeline,
job_instance_from_stage,
job_instance_from_job_manager,
])
def test_pipeline_name(self, job_fixture_func, my_vcr, session_fixture):
job_fixture = job_fixture_func(self, my_vcr, session_fixture)
assert job_fixture.pipeline_name == self.PIPELINE_NAME, "Fixture: {}".format(job_fixture_func.__name__)
@pytest.mark.parametrize("job_fixture_func", [
job_instance_from_pipeline,
job_instance_from_stage,
job_instance_from_job_manager,
])
def test_pipeline_counter(self, job_fixture_func, my_vcr, session_fixture):
job_fixture = job_fixture_func(self, my_vcr, session_fixture)
assert job_fixture.pipeline_counter == self.PIPELINE_COUNTER, "Fixture: {}".format(job_fixture_func.__name__)
@pytest.mark.parametrize("job_fixture_func", [
job_instance_from_pipeline,
job_instance_from_stage,
job_instance_from_job_manager,
])
def test_stage_name(self, job_fixture_func, my_vcr, session_fixture):
job_fixture = job_fixture_func(self, my_vcr, session_fixture)
assert job_fixture.stage_name == self.STAGE_NAME, "Fixture: {}".format(job_fixture_func.__name__)
@pytest.mark.parametrize("job_fixture_func", [
job_instance_from_pipeline,
job_instance_from_stage,
job_instance_from_job_manager,
])
def test_stage_counter(self, job_fixture_func, my_vcr, session_fixture):
job_fixture = job_fixture_func(self, my_vcr, session_fixture)
assert str(job_fixture.stage_counter) == self.STAGE_COUNTER, "Fixture: {}".format(job_fixture_func.__name__)
@pytest.mark.parametrize("job_fixture_func", [
job_instance_from_pipeline,
job_instance_from_stage,
job_instance_from_job_manager,
])
def test_url(self, job_fixture_func, my_vcr, session_fixture):
job_fixture = job_fixture_func(self, my_vcr, session_fixture)
assert job_fixture.url == self.URL.format(server=job_fixture._session._options['server']), "Fixture: {}".format(
job_fixture_func.__name__)
@pytest.mark.parametrize("job_fixture_func", [
job_instance_from_pipeline,
job_instance_from_stage,
])
def test_stage_provided(self, job_fixture_func, my_vcr, session_fixture):
job_fixture = job_fixture_func(self, my_vcr, session_fixture)
assert job_fixture.stage is not None, "Fixture: {}".format(job_fixture_func.__name__)
assert isinstance(job_fixture.stage, stage.StageInstance), "Fixture: {}".format(job_fixture_func.__name__)
@pytest.mark.parametrize("job_fixture_func", [
job_instance_from_job_manager,
])
def test_stage_is_empty(self, job_fixture_func, my_vcr, session_fixture):
job_fixture = job_fixture_func(self, my_vcr, session_fixture)
assert job_fixture.stage is None, "Fixture: {}".format(job_fixture_func.__name__)
@pytest.mark.parametrize("job_fixture_func", [
job_instance_from_pipeline,
job_instance_from_stage,
job_instance_from_job_manager,
])
def test_artifact(self, job_fixture_func, my_vcr, session_fixture):
job_fixture = job_fixture_func(self, my_vcr, session_fixture)
assert job_fixture.artifacts is not None, "Fixture: {}".format(job_fixture_func.__name__)
assert isinstance(job_fixture.artifacts, artifact.ArtifactManager), "Fixture: {}".format(
job_fixture_func.__name__)
@pytest.mark.parametrize("job_fixture_func", [
job_instance_from_pipeline,
job_instance_from_stage,
job_instance_from_job_manager,
])
def test_property(self, job_fixture_func, my_vcr, session_fixture):
job_fixture = job_fixture_func(self, my_vcr, session_fixture)
assert job_fixture.properties is not None, "Fixture: {}".format(job_fixture_func.__name__)
assert isinstance(job_fixture.properties, prop.PropertyManager), "Fixture: {}".format(job_fixture_func.__name__)
|
"""Script that summarizes Skyfield test results."""
from pathlib import Path
import pandas as pd
RESULTS_DIR_PATH = Path(
'/Users/harold/Desktop/NFC/Data/Astronomy/Skyfield Test Results')
DIFF_COUNT_FILE_PATH = \
RESULTS_DIR_PATH / 'Twilight Event Difference Counts.csv'
UNMATCHED_EVENTS_FILE_PATH = RESULTS_DIR_PATH / 'Unmatched Twilight Events.csv'
DIFF_COLUMN_NAMES = ['-2 Diffs', '-1 Diffs', '0 Diffs', '1 Diffs', '2 Diffs']
def main():
df = pd.read_csv(DIFF_COUNT_FILE_PATH)
summarize_matched_events(df, 'all twilight')
grid_df = get_grid_data(df)
summarize_events(grid_df, 'lat/lon grid twilight')
ithaca_df = get_ithaca_data(df)
summarize_events(ithaca_df, 'Ithaca twilight')
mpg_ranch_df = get_mpg_ranch_data(df)
summarize_events(mpg_ranch_df, 'MPG Ranch twilight')
def get_grid_data(df):
"""
Prunes dataframe to rows whose longitudes are multiples of 60
degrees and whose years are multiples of 10. This includes only
lat/lon grid locations for which we have USNO data for all eight
types of twilight events.
"""
bools = (df['Longitude'] % 60 == 0) & (df['Year'] % 10 == 0)
return df[bools]
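# A minimal sketch (never called) of how the boolean-mask pruning above
# behaves; the column names mirror the real CSV but the values are made up
# for illustration.
def _demo_grid_filter():
    toy = pd.DataFrame({
        'Longitude': [0, 60, 45, -120],
        'Year': [2000, 2010, 2005, 1990],
    })
    # Keeps rows 0, 1 and 3: their longitudes are multiples of 60 and their
    # years are multiples of 10.
    return get_grid_data(toy)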
def get_ithaca_data(df):
bools = df['Latitude'] == 42.45
return df[bools]
def get_mpg_ranch_data(df):
bools = df['Latitude'] == 46.7
return df[bools]
def summarize_events(df, name):
summarize_matched_events(df, name)
summarize_unmatched_events(df, name)
def summarize_matched_events(df, name):
print(f'Summary of matches of {name} Skyfield and USNO events:')
print()
diff_counts = df[DIFF_COLUMN_NAMES]
counts_by_diff = diff_counts.sum(axis=0).to_numpy()
total_count = counts_by_diff.sum()
print(f'A total of {total_count} events matched.')
print()
    print('Skyfield - USNO matched event counts by time difference:')
    for diff in range(-2, 3):
        print(f'    {diff}: {counts_by_diff[diff + 2]}')
    print()
    print('Difference counts as percentages of matched events:')
    percentages = 100 * counts_by_diff / total_count
    for diff in range(-2, 3):
        print(f'    {diff}: {percentages[diff + 2]}')
print()
within_one_minute = percentages[1:4].sum()
print(
f'{within_one_minute} percent of matched events were within '
f'one minute of each other.')
print(f'{percentages[2]} percent of matched events were the same.')
one_minute = percentages[1] + percentages[3]
print(f'{one_minute} percent of matched events differed by one minute.')
print()
print()
print()
def summarize_unmatched_events(df, name):
matched_count = df[DIFF_COLUMN_NAMES].to_numpy().sum()
print(
f'Summary of unmatched {name} events (for comparison, '
f'{matched_count} events matched):')
print()
group_by = df.groupby(['Latitude', 'Longitude'], as_index=False)
usno_table = group_by['Unmatched USNO Events'].sum().pivot(
'Latitude', 'Longitude')
usno_count = usno_table.to_numpy().sum()
print(
f'A total of {usno_count} USNO events were not matched by '
f'Skyfield events.')
print()
print(usno_table)
print()
sf_table = group_by['Unmatched Skyfield Events'].sum().pivot(
'Latitude', 'Longitude')
sf_count = sf_table.to_numpy().sum()
print(
f'A total of {sf_count} Skyfield events were not matched by '
f'USNO events.')
print()
print(sf_table)
print()
print()
print()
if __name__ == '__main__':
main()
|
from statsmodels import NoseWrapper as Tester
test = Tester().test
from formulatools import handle_formula_data
|
import cv2
import os
import tqdm
import argparse
argparser = argparse.ArgumentParser(description='Help =)')
argparser.add_argument('-i', '--input', help='path to folder to flip')
argparser.add_argument('-o', '--output', help='path to folder to output')
def main(args):
inputFldr = args.input
outputFldr = args.output
if not os.path.exists(outputFldr):
os.mkdir(outputFldr)
print("Directory {} created".format(outputFldr))
else:
print("Directory {} already exists".format(outputFldr))
image_fpaths = []
image_fnames = []
if os.path.isdir(inputFldr):
for inp_file in os.listdir(inputFldr):
image_fpaths += [os.path.join(inputFldr, inp_file)]
image_fnames += [inp_file]
else:
print('--input must be set to directory')
    for i, image_path in enumerate(tqdm.tqdm(image_fpaths)):
        img = cv2.imread(image_path)
        img = cv2.flip(img, 1)
outputFname = os.path.join(outputFldr, image_fnames[i])
print(outputFname)
cv2.imwrite(outputFname, img)
if __name__ == '__main__':
args = argparser.parse_args()
main(args)
|
import actions
def test_start_failure():
assert True
|
import distutils.cmd
import glob
from setuptools import setup
import setuptools.command.build_ext
import shutil
import subprocess
class WafCommand(distutils.cmd.Command):
    user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
class WafConfigureCommand(WafCommand):
def run(self):
subprocess.check_call(
["./waf", "--top=snn", "--out=build", "configure"])
class WafBuildCommand(WafCommand):
def run(self):
subprocess.check_call(
["./waf", "--top=snn", "--out=build", "build"])
for f in glob.glob("build/*.so"):
shutil.copy(f, ".")
class BuildExt(setuptools.command.build_ext.build_ext):
def run(self):
self.run_command("waf_configure")
self.run_command("waf_build")
setup(
name="snn",
cmdclass={
"waf_configure": WafConfigureCommand,
"waf_build": WafBuildCommand,
"build_ext": BuildExt,
},
)
|
from pathlib import Path
# Generic file class
class File:
    def __init__(self, file_path: str):
        """
        Constructor for File object
        :param file_path: full path to the file
        """
self.file_path = file_path
    def exists(self):
        """
        Check if the file specified by file_path exists
        :return: True if the file exists, False otherwise
        :rtype: bool
        """
        return Path(self.file_path).exists()
def read_file(self):
"""
Read the contents of a file, in binary.
:rtype: bytearray
"""
with open(self.file_path, 'rb') as f:
file_stream = f.read()
return file_stream
def write_file(self, file_stream):
"""
Write to a file, in binary
"""
with open(self.file_path, 'wb') as f:
f.write(file_stream)
class EncryptedFile(File):
def __init__(self, file: File):
"""
Constructor for EncryptedFile object
"""
super().__init__(file.file_path)
# Add by default ".enc" extension for the new encrypted file
self.file_path = file.file_path + ".enc"
class DecryptedFile(File):
def __init__(self, file: File):
"""
Constructor for DecryptedFile object
"""
super().__init__(file.file_path)
    def update_file_name(self):
        """
        Check that the file ends with the ".enc" extension; anything else is
        considered invalid by design. If it does end with ".enc", the
        decrypted file name is the original name with that extension removed.
        :return: False if the extension check fails, True otherwise
        :rtype: bool
        """
if not self.file_path.endswith(".enc"):
return False
# Remove file extension
self.file_path = self.file_path[:-4]
return True
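# A minimal usage sketch of the classes above (the path is hypothetical and
# the actual encryption step is outside the scope of this module):
if __name__ == '__main__':
    plain = File('example.bin')           # wraps ./example.bin
    encrypted = EncryptedFile(plain)      # targets ./example.bin.enc
    decrypted = DecryptedFile(encrypted)  # starts from ./example.bin.enc
    assert decrypted.update_file_name()   # strips ".enc" -> ./example.bin
    print(decrypted.file_path)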
|
#! /usr/bin/env python
import multiprocessing, Queue
import time
from log_it import *
#if temporalBAStack is already imported from a higher level script, then
#this import is not needed
#from process_temporal_ba_stack import temporalBAStack
class parallelSceneWorker(multiprocessing.Process):
"""Runs the scene resampling in parallel for a stack of scenes.
"""
def __init__ (self, work_queue, result_queue, stackObject):
# base class initialization
multiprocessing.Process.__init__(self)
# job management stuff
self.work_queue = work_queue
self.result_queue = result_queue
self.stackObject = stackObject
self.kill_received = False
def run(self):
while not self.kill_received:
# get a task
try:
xml_file = self.work_queue.get_nowait()
except Queue.Empty:
break
# process the scene
msg = 'Processing %s ...' % xml_file
logIt (msg, self.stackObject.log_handler)
            status = self.stackObject.sceneResample (xml_file)
if status != SUCCESS:
msg = 'Error resampling the surface reflectance bands in ' \
'the XML file (%s). Processing will terminate.' % xml_file
logIt (msg, self.stackObject.log_handler)
# store the result
self.result_queue.put(status)
class parallelSummaryWorker(multiprocessing.Process):
"""Runs the seasonal summaries in parallel for a temporal stack.
"""
def __init__ (self, work_queue, result_queue, stackObject):
# base class initialization
multiprocessing.Process.__init__(self)
# job management stuff
self.work_queue = work_queue
self.result_queue = result_queue
self.stackObject = stackObject
self.kill_received = False
def run(self):
while not self.kill_received:
# get a task
try:
year = self.work_queue.get_nowait()
except Queue.Empty:
break
            # process the year
            msg = 'Processing year %d ...' % year
            logIt (msg, self.stackObject.log_handler)
            status = self.stackObject.generateYearSeasonalSummaries (year)
if status != SUCCESS:
msg = 'Error processing seasonal summaries for year %d. ' \
'Processing will terminate.' % year
logIt (msg, self.stackObject.log_handler)
# store the result
self.result_queue.put(status)
class parallelMaxWorker(multiprocessing.Process):
"""Runs the annual maximums in parallel for a temporal stack.
"""
def __init__ (self, work_queue, result_queue, stackObject):
# base class initialization
multiprocessing.Process.__init__(self)
# job management stuff
self.work_queue = work_queue
self.result_queue = result_queue
self.stackObject = stackObject
self.kill_received = False
def run(self):
while not self.kill_received:
# get a task
try:
year = self.work_queue.get_nowait()
except Queue.Empty:
break
            # process the year
            msg = 'Processing year %d ...' % year
            logIt (msg, self.stackObject.log_handler)
            status = self.stackObject.generateYearMaximums (year)
if status != SUCCESS:
msg = 'Error processing maximums for year %d. Processing ' \
'will terminate.' % year
logIt (msg, self.stackObject.log_handler)
# store the result
self.result_queue.put(status)
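# A hedged sketch of how these workers are typically wired up by a driver
# script (`stack`, `xml_files` and `num_processors` are hypothetical names;
# the real stack object comes from process_temporal_ba_stack):
#
#     work_queue = multiprocessing.Queue()
#     for xml_file in xml_files:
#         work_queue.put(xml_file)
#     result_queue = multiprocessing.Queue()
#     workers = [parallelSceneWorker(work_queue, result_queue, stack)
#                for _ in range(num_processors)]
#     for worker in workers:
#         worker.start()
#     statuses = [result_queue.get() for _ in xml_files]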
|
from rest_framework import serializers
from question.models import Question, QuestionOption
class QuestionOptionSerializer(serializers.ModelSerializer):
class Meta:
model = QuestionOption
fields = '__all__'
class QuestionSerializer(serializers.ModelSerializer):
option = serializers.StringRelatedField(many=True)
class Meta:
model = Question
fields = ('text', 'type_answer', 'option')
|
import numpy as np
argsorted = lambda x: sorted(range(len(x)), key=x.__getitem__, reverse=True)
def log_probabilities(preferences):
    # Despite its name, this returns normalized probabilities: a softmax
    # over the raw preference scores.
    weights = np.exp(np.array(preferences))
    return weights / weights.sum()
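# For example, log_probabilities([0.0, 0.0]) returns array([0.5, 0.5]);
# larger preferences get exponentially more weight (a softmax).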
class Builder:
def germinate(self, seed):
pass
def build(self, branch):
pass
class Plucker:
def prune(self, branches):
pass
def pluck(self, branches):
pass
def stop(self, branches):
pass
def _select_branches(self, branches, selected_idx):
selected_idx = sorted(selected_idx, reverse=True)
for i in range(len(branches))[::-1]:
if i not in selected_idx:
branches.pop(i)
class TreeSolver:
def __init__(self, builder, plucker):
self.builder = builder
self.plucker = plucker
self.branches = None
def solve(self, seed):
while True:
self.branches = self.builder.germinate(seed) if self.branches is None else self.get_new_branches()
self.plucker.prune(self.branches)
if self.plucker.stop(self.branches):
break
self.plucker.pluck(self.branches)
solution = self.branches
self.branches = None
return solution
def get_new_branches(self):
new_branches = []
for branch in self.branches:
new_branches += self.builder.build(branch)
return new_branches
class BeamPlucker(Plucker):
def __init__(self, beam_size=1, max_length=1, leaf_test=lambda x: 0, to_probabilities=None):
self.beam_size = beam_size
self.max_length = max_length
self.leaf_test = leaf_test
self.to_probabilities = to_probabilities
def prune(self, branches):
self.__beam_branches(branches, self.beam_size)
def pluck(self, branches):
self.__beam_branches(branches)
def stop(self, branches):
length_selected_idx = [i for i in range(len(branches))
if branches[i].size() >= self.max_length]
leaf_selected_idx = [i for i in range(len(branches))
if self.leaf_test(branches[i].nodes[-1])]
selected_idx = list(set(length_selected_idx).union(leaf_selected_idx))
if len(selected_idx) == 0:
return False
self._select_branches(branches, selected_idx)
return True
def __beam_branches(self, branches, beam_size=1):
preferences = [branch.preference for branch in branches]
        if self.to_probabilities is None:
            selected_idx = argsorted(preferences)[:beam_size]
        else:
            selected_idx = list(np.random.choice(np.arange(len(branches)), beam_size, False,
                                                 self.to_probabilities(preferences)))
self._select_branches(branches, selected_idx)
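# A minimal, runnable sketch of the solver (hedged: _DemoBranch stands in for
# whatever node container real builders produce; it only needs the
# `preference`, `nodes` and `size()` interface used above).
class _DemoBranch:
    def __init__(self, nodes, preference):
        self.nodes = nodes
        self.preference = preference
    def size(self):
        return len(self.nodes)
class _DemoBuilder(Builder):
    """Grows binary strings one character at a time, preferring '1's."""
    def germinate(self, seed):
        return [_DemoBranch([seed], 0.0)]
    def build(self, branch):
        return [_DemoBranch(branch.nodes + [ch], branch.preference + (1 if ch == '1' else 0))
                for ch in '01']
if __name__ == '__main__':
    solver = TreeSolver(_DemoBuilder(), BeamPlucker(beam_size=2, max_length=4))
    print([''.join(branch.nodes) for branch in solver.solve('')])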
|
# -*- coding: utf-8 -*-
# Copyright 2020 PyPAL authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import time
from functools import partial
import pickle
import click
import joblib
import numpy as np
import pandas as pd
from lightgbm import LGBMRegressor
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import StandardScaler
from dispersant_screener.ga import FEATURES, predict_gpy_coregionalized, run_ga, regularizer_novelty, _get_average_dist
TIMESTR = time.strftime('%Y%m%d-%H%M%S')
DATADIR = '../data'
df_full_factorial_feat = pd.read_csv(os.path.join(DATADIR, 'new_features_full_random.csv'))[FEATURES].values
a2 = pd.read_csv(os.path.join(DATADIR, 'b1-b21_random_virial_large_new.csv'))['A2_normalized'].values
deltaGMax = pd.read_csv(os.path.join(DATADIR, 'b1-b21_random_virial_large_new.csv'))['A2_normalized'].values
gibbs = pd.read_csv(os.path.join(DATADIR, 'b1-b21_random_deltaG.csv'))['deltaGmin'].values
gibbs_max = pd.read_csv(os.path.join(DATADIR, 'b1-b21_random_virial_large_new.csv'))['deltaGmax'].values
force_max = pd.read_csv(os.path.join(DATADIR, 'b1-b21_random_virial_large_fit2.csv'))['F_repel_max'].values
rg = pd.read_csv(os.path.join(DATADIR, 'rg_results.csv'))['Rg'].values
y = np.hstack([
rg.reshape(-1, 1),
gibbs.reshape(-1, 1) * (-1),
gibbs_max.reshape(-1, 1),
])
assert len(df_full_factorial_feat) == len(a2) == len(gibbs) == len(y)
with open('sweeps3/20201021-235927_dispersant_0.01_0.05_0.05_60-models.pkl', 'rb') as fh:
coregionalized_model = pickle.load(fh)[0]
feat_scaler = StandardScaler()
X = feat_scaler.fit_transform(df_full_factorial_feat)
@click.command('cli')
@click.argument('target', type=int, default=0)
@click.argument('runs', type=int, default=10)
@click.argument('outdir', type=click.Path(), default='.')
@click.option('--all', is_flag=True)
def main(target, runs, outdir, all):
if all:
targets = [0, 1, 2]
else:
targets = [target]
for target in targets:
y_selected = y[:, target]
predict_partial = partial(predict_gpy_coregionalized, model=coregionalized_model, i=target)
regularizer_novelty_partial = partial(regularizer_novelty,
y=y_selected,
X_data=X,
average_dist=_get_average_dist(X))
gas = []
for novelty_penalty_ratio in [0, 0.1, 0.2, 0.5, 1, 2]:
for _ in range(runs):
gas.append(
run_ga( # pylint:disable=invalid-name
predict_partial,
regularizer_novelty_partial,
features=FEATURES,
y_mean=np.median(y_selected),
novelty_pentaly_ratio=novelty_penalty_ratio))
if not os.path.exists(outdir):
os.mkdir(outdir)
joblib.dump(gas, os.path.join(outdir, TIMESTR + '-ga_{}.joblib'.format(target)))
if __name__ == '__main__':
main()
|
from Apartamento import Apartamento
from Torre import Torre
from ListaApartamento import ListaApartamento
from Fila import Fila
def main():
t1 = Torre()
t1.cadastrar(1, "A", "Acesso 123")
t2 = Torre()
t2.cadastrar(2, "B", "Acesso 456")
t3 = Torre()
t3.cadastrar(3, "C", "Acesso 789")
print("------ TORRE ------")
t1.imprimir()
t2.imprimir()
t3.imprimir()
ap1 = Apartamento()
ap1.cadastrar(1, 304, t1)
ap2 = Apartamento()
ap2.cadastrar(2, 305, t1)
ap3 = Apartamento()
ap3.cadastrar(3, 108, t2)
ap4 = Apartamento()
ap4.cadastrar(4, 107, t2)
ap5 = Apartamento()
ap5.cadastrar(5, 206, t3)
print("\n----- APTO -----")
ap1.imprimir()
ap2.imprimir()
ap3.imprimir()
ap4.imprimir()
ap5.imprimir()
#listaApto = ListaApartamento()
#print("\n----- LISTA DE APTO -----")
#listaApto.imprimirListaApto()
#listaApto.adicionarListaApto(ap1)
#listaApto.adicionarListaApto(ap2)
#listaApto.adicionarListaApto(ap3)
#listaApto.adicionarListaApto(ap4)
#listaApto.adicionarListaApto(ap5)
#listaApto.imprimirListaApto()
fila = Fila()
print("\n----- FILA DE VAGA DE ESTACIONAMENTO -----")
fila.removerAptoFilaEspera("E76")
print("\n----- APTO ADD NA FILA DE ESPERA -----")
fila.adicionarAptoFilaEspera(ap1)
fila.adicionarAptoFilaEspera(ap2)
fila.adicionarAptoFilaEspera(ap3)
fila.adicionarAptoFilaEspera(ap4)
fila.adicionarAptoFilaEspera(ap5)
fila.imprimirFilaEspera()
print("\n----- ATUAL ESTADO DOS APTOS -----")
ap1.imprimir()
ap2.imprimir()
ap3.imprimir()
ap4.imprimir()
ap5.imprimir()
print("\n----- REMOVER APTO FILA DE ESPERA -----")
fila.removerAptoFilaEspera("A12")
fila.removerAptoFilaEspera("F56")
fila.imprimirFilaEspera()
print("\n----- ATUAL ESTADO DOS APTOS -----")
ap1.imprimir()
ap2.imprimir()
ap3.imprimir()
ap4.imprimir()
    ap5.imprimir()
if __name__ == '__main__':
    main()
|
from os.path import dirname, join
from my_lib.loader.specific.json_loader import load_json
from my_lib.loader.specific.yaml_loader import load_yaml
from my_lib.loader.txt_loader import load_text
def display_content(file_name: str):
if file_name.endswith('.txt'):
print('Text file:')
txt_content = load_text(file_name)
print(txt_content)
elif file_name.endswith('.json'):
print('Json file:')
json_content = load_json(file_name)
print(json_content)
elif file_name.endswith('.yml'):
        print('Yaml file:')
yaml_content = load_yaml(file_name)
print(yaml_content)
if __name__ == '__main__':
current_path = dirname(__file__)
file_1 = join(current_path, '..', 'data', 'file.txt')
display_content(file_1)
file_2 = join(current_path, '..', 'data', 'file.specific')
display_content(file_2)
|
from matplotlib import pyplot as plt, animation
from random import randint
fig: plt.Figure = plt.figure()
axis: plt.Axes = fig.add_subplot()
data = [randint(1, 10) for _ in range(100)]
def animate(frame):
    global data
    data = [value + randint(-5, 0) for value in data]
    axis.clear()
    for index, value in enumerate(data):
        axis.bar([index], [value])
anim = animation.FuncAnimation(fig, animate, interval=300)
plt.show()
|
import logging
DEFAULT_PARTITIONS = 1
logging.basicConfig(level=logging.INFO, format="%(message)s")
__version__ = "0.0.1"
from .pyroar import Pyroar
from .manipulator import Manipulator
|
# Copyright 2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from keystone.server.flask.request_processing.middleware import url_normalize
from keystone.tests import unit
class FakeApp(object):
"""Fakes a WSGI app URL normalized."""
def __init__(self):
self.env = {}
def __call__(self, env, start_response):
self.env = env
return
class UrlMiddlewareTest(unit.TestCase):
def setUp(self):
super(UrlMiddlewareTest, self).setUp()
self.fake_app = FakeApp()
self.middleware = url_normalize.URLNormalizingMiddleware(self.fake_app)
def test_trailing_slash_normalization(self):
"""Test /v3/auth/tokens & /v3/auth/tokens/ normalized URLs match."""
expected = '/v3/auth/tokens'
no_slash = {'PATH_INFO': expected}
with_slash = {'PATH_INFO': '/v3/auth/tokens/'}
with_many_slash = {'PATH_INFO': '/v3/auth/tokens////'}
# Run with a URL that doesn't need stripping and ensure nothing else is
# added to the environ
self.middleware(no_slash, None)
self.assertEqual(expected, self.fake_app.env['PATH_INFO'])
self.assertEqual(1, len(self.fake_app.env.keys()))
        # Run with a URL that needs a single slash stripped and ensure
        # nothing else is added to the environ
self.middleware(with_slash, None)
self.assertEqual(expected, self.fake_app.env['PATH_INFO'])
self.assertEqual(1, len(self.fake_app.env.keys()))
# Run with a URL that needs multiple slashes stripped and ensure
# nothing else is added to the environ
self.middleware(with_many_slash, None)
self.assertEqual(expected, self.fake_app.env['PATH_INFO'])
self.assertEqual(1, len(self.fake_app.env.keys()))
def test_rewrite_empty_path(self):
"""Test empty path is rewritten to root."""
environ = {'PATH_INFO': ''}
self.middleware(environ, None)
self.assertEqual('/', self.fake_app.env['PATH_INFO'])
self.assertEqual(1, len(self.fake_app.env.keys()))
|
import re
from collections import defaultdict
import textwrap
import copy
###Constants###
LABEL_SECTION = 'Label: '
PIC_TITLE = 'Detected labels for '
END_LABEL = '----------'
###Variables###
maxLevel = 0
oldest = ""
delete_list = defaultdict()
def LoadData(impLabels,text):
print(impLabels)
labels=defaultdict()
textExtracted=" "
for label in impLabels['Labels']:
parents=[]
instances=[]
if(len(label['Parents'])>0):
for parent in label['Parents']:
parents.append(parent['Name'])
if(len(label['Instances'])>0):
for instance in label['Instances']:
instances.append(instance)
labels[label['Name']]={'Confidence': label['Confidence'], 'Parents': parents, 'Instances': label['Instances']}
print(text)
for reading in text['TextDetections']:
        if('ParentId' not in reading):  # keep only top-level text detections (those without a parent)
textExtracted+=reading["DetectedText"] + " "
textExtracted=textExtracted[1:-1]
return labels, textExtracted
def oldestAncestor(labels, label, level):
global oldest
global maxLevel
level += 1
print(level)
print("looking at " + label)
parents = labels[label]['Parents']
if(labels[label]['Confidence']>90):
if (level > maxLevel):
oldest = label
print('deepest so far is :' + label)
maxLevel = level
for parent in parents:
delete_list[parent]=""
oldestAncestor(labels, parent, level)
def location(labels, label):
location =[]
for instance in labels[label]['Instances']:
if("BoundingBox" in instance):
theBox = defaultdict()
theBox["left"] = instance["BoundingBox"]["Left"]
theBox["top"] = instance["BoundingBox"]["Top"]
theBox["right"] = instance["BoundingBox"]["Left"] + instance["BoundingBox"]["Width"]
theBox["bottom"] = instance["BoundingBox"]["Top"] + instance["BoundingBox"]["Height"]
if(theBox["left"]<.33):
if(theBox["top"]<.33):
if(theBox["bottom"]<.33):
if(theBox["right"]<.33):
location.append("near the top left corner")
elif(theBox["right"]>=.33 and theBox["right"]<.66):
location.append("along the top left")
elif(theBox["right"]>=.66):
location.append("across the top")
if(theBox["bottom"]>=.33 and theBox["bottom"]<.66):
if(theBox["right"]<.33):
location.append("near the upper left hand side")
elif(theBox["right"]>=.33 and theBox["right"]<.66):
location.append("near the center left area")
elif(theBox["right"]>=.66):
location.append("across the upper middle")
if(theBox["bottom"]>=.66):
if(theBox["right"]<.33):
location.append("along the left side")
elif(theBox["right"]>=.33 and theBox["right"]<.66):
location.append("near the left side")
elif(theBox["right"]>=.66):
location.append("across the whole image")
if(theBox["top"]>=.33 and theBox["top"]<.66):
if(theBox["bottom"]>=.33 and theBox["bottom"]<.66):
if(theBox["right"]<.33):
location.append("near the very middle of the left")
elif(theBox["right"]>=.33 and theBox["right"]<.66):
location.append("through the middle left area")
elif(theBox["right"]>=.66):
location.append("horizontally across the middle")
if(theBox["bottom"]>=.66):
if(theBox["right"]<.33):
location.append("along the left side")
elif(theBox["right"]>=.33 and theBox["right"]<.66):
location.append("near the lower left side")
elif(theBox["right"]>=.66):
location.append("across the lower middle")
if(theBox["top"]>=.66):
if(theBox["bottom"]>=.66):
if(theBox["right"]<.33):
location.append("near the lower left corner")
elif(theBox["right"]>=.33 and theBox["right"]<.66):
location.append("near the lower left")
elif(theBox["right"]>=.66):
location.append("across the bottom")
elif(theBox["left"]>=.33 and theBox["left"]<.66):
if(theBox["top"]<.33):
if(theBox["bottom"]<.33):
if(theBox["right"]>=.33 and theBox["right"]<.66):
location.append("near the top middle")
elif(theBox["right"]>=.66):
location.append("near the top right side")
if(theBox["bottom"]>=.33 and theBox["bottom"]<.66):
if(theBox["right"]>=.33 and theBox["right"]<.66):
location.append("near the upper center")
elif(theBox["right"]>=.66):
location.append("near the upper right area")
if(theBox["bottom"]>=.66):
if(theBox["right"]>=.33 and theBox["right"]<.66):
location.append("vertically across the middle")
elif(theBox["right"]>=.66):
location.append("near the right side")
if(theBox["top"]>=.33 and theBox["top"]<.66):
if(theBox["bottom"]>=.33 and theBox["bottom"]<.66):
if(theBox["right"]>=.33 and theBox["right"]<.66):
location.append("near the very middle")
elif(theBox["right"]>=.66):
location.append("near the middle right")
if(theBox["bottom"]>=.66):
if(theBox["right"]>=.33 and theBox["right"]<.66):
location.append("near the bottom middle")
elif(theBox["right"]>=.66):
location.append("near the middle right area")
if(theBox["top"]>=.66):
if(theBox["bottom"]>=.66):
if(theBox["right"]>=.33 and theBox["right"]<.66):
location.append("near the bottom center")
elif(theBox["right"]>=.66):
location.append("along center right")
elif(theBox["left"]>=.66):
if(theBox["top"]<.33):
if(theBox["bottom"]<.33):
if(theBox["right"]>=.66):
location.append("near the top right corner")
if(theBox["bottom"]>=.33 and theBox["bottom"]<.66):
if(theBox["right"]>=.66):
location.append("near the top right")
if(theBox["bottom"]>=.66):
if(theBox["right"]>=.66):
location.append("along the right side")
if(theBox["top"]>=.33 and theBox["top"]<.66):
if(theBox["bottom"]>=.33 and theBox["bottom"]<.66):
if(theBox["right"]>=.66):
location.append("near the middle of the right side")
if(theBox["bottom"]>=.66):
if(theBox["right"]>=.66):
location.append("near the lower right")
if(theBox["top"]>=.66):
if(theBox["bottom"]>=.66):
if(theBox["right"]>=.66):
location.append("near the bottom right corner")
i= labels[label]['Instances'].index(instance)
if i > len(location)-1:
print("BAD BOX: ")
print(theBox)
else:
print("GOOD BOX: " + location[i])
print(theBox)
return location
def theCollapse(labels, label) :
print("The quest for: " + label)
global oldest
oldest = ""
level = 0
global maxLevel
maxLevel = -1
if(labels[label]['Confidence']>90):
oldestAncestor(labels, label, level)
else:
delete_list[label]=""
return oldest
def GenerateSummary(labels,textExtracted):
summary=""
# collapse into parent
global delete_list
delete_list = defaultdict()
ref_labels = copy.deepcopy(labels)
for label in ref_labels:
collapsed = theCollapse(ref_labels, label)
print(label + " <---- " + collapsed)
if(len(collapsed)>0):
labels[label]['Parents']=[collapsed]
instances = ref_labels[label]['Instances']
current_big = len(instances)
for parent in ref_labels[label]['Parents']:
if(len(ref_labels[parent]['Instances'])>current_big):
instances=[]
for instance in ref_labels[parent]['Instances']:
instances.append(instance)
labels[label]["Instances"]=instances
for label in delete_list:
print('DELETING: ' + label)
del labels[label]
pretty_parents = defaultdict()
for label in labels:
if(len(labels[label]['Parents'])>0):
if(labels[label]['Parents'][0] != label):
pretty_parents[labels[label]['Parents'][0]]={'Children': []}
                pretty_parents[labels[label]['Parents'][0]]['Instances']=[]
pretty_loners = defaultdict()
for label in labels:
if(len(labels[label]['Parents'])>0):
if(labels[label]['Parents'][0] != label):
pretty_parents[labels[label]['Parents'][0]]['Children'].append(label)
for instance in labels[label]['Instances']:
pretty_parents[labels[label]['Parents'][0]]['Instances'].append(instance)
else:
pretty_loners[label]={'Confidence': labels[label]['Confidence'], 'Instances': labels[label]['Instances']}
else:
pretty_loners[label]={'Confidence': labels[label]['Confidence'], 'Instances': labels[label]['Instances']}
print(labels)
print(pretty_parents)
print(pretty_loners)
for label in pretty_parents:
print("pretty parent: " + label)
ch = label[0]
        if(ch.lower() in 'aeiou'):
            prefix='is an'
        else:
            prefix='is a'
suffix = ''
locs = ""
if(len(pretty_parents[label]['Instances'])>1):
loc = location(pretty_parents,label)
if(len(loc)>0):
if(len(set(loc))>4):
locs="They are located throughout. "
else:
locs="Their locations: "
pretty_loc = []
for it in set(loc):
pretty_loc.append(str(loc.count(it)) + ' ' + it)
locs+= ', '.join(pretty_loc)+'. '
prefix = 'are ' + str(len(loc))
suffix = 's'
kids=' or '.join(pretty_parents[label]['Children'])
summary+= "There "+ prefix +" " + label+ suffix+" in the image. "+ locs
if(len(pretty_parents[label]['Children'])>0):
summary+="Some description of the " + label + suffix+": " + kids+suffix+". "
loner_list=[]
for label in pretty_loners:
print("pretty loner: " + label)
ch = label[0]
        if(ch.lower() in 'aeiou'):
            prefix='an'
        else:
            prefix='a'
suffix = ''
locs = ""
if(len(pretty_loners[label]['Instances'])>1):
loc = location(pretty_loners,label)
if(len(loc)>0):
if(len(set(loc))>4):
locs=" (located throughout"
else:
locs=" (their locations: "
pretty_loc = []
for it in set(loc):
pretty_loc.append(str(loc.count(it)) + ' ' + it)
locs+= ', '.join(pretty_loc)
locs+=')'
prefix = str(len(pretty_loners[label]['Instances']))
suffix = 's'
loner_list.append(prefix +" " + label+ suffix+ locs)
if(len(pretty_loners)>0):
if(len(loner_list)>1):
loners = 'Some other things we saw: ' + ', '.join(loner_list[:-1]) + ', and '+ loner_list[-1] + ". "
else:
loners = 'Another thing we saw: ' + loner_list[0] + ". "
summary+=loners
if textExtracted:
text_str = ', '.join(textExtracted.split('\n'))
summary += 'The image has text, which says, ' + text_str + '.'
# sort into locations
# location_stuff(labels)
# append
return summary
def PrettyPrint(summary):
    pretty = '\n\n\n\nA Summary for the Image:\n'
w = 50
pretty+=textwrap.fill(summary,w)
print(pretty)
def Run(impLabels,text):
lab, ext = LoadData(impLabels,text)
summary = GenerateSummary(lab,ext)
PrettyPrint(summary)
return summary
|
from faked_nfvo.api.validation import parameter_types
uri_type = {'type': 'string',
            'pattern': r'^(https://)([0-9]{1,3}\.){3}[0-9]{1,3}:[0-9]{1,5}'
                       r'/v1/vimPm/files/(.*)(\.gz)$'}
vim_src_type = {
"type": "string",
"enum": ["vpim", "vim"],
}
put_pm = {
'type': 'object',
'properties': {
"Version": parameter_types.vim_pim_version,
"VimId": parameter_types.len_constraint_string(1, 36),
"SrcType": vim_src_type,
"MsgType": {'type': 'string',
'enum': ['vimPmMetrics']},
"FileUri": uri_type,
},
"required": ["Version", "VimId", "SrcType", "MsgType", "FileUri"],
'additionalProperties': False
}
list_history_metrics_response = {
'status_code': [200],
'response_body':{
'type': 'object',
'properties': {
"Version": parameter_types.vim_pim_version,
"VimId": parameter_types.len_constraint_string(1, 36),
"SrcType": vim_src_type,
"MsgType": {'type': 'string',
'enum': ['vimPmHisMetrics']},
"FileUri": {'type': 'array',
'items': uri_type},
},
"required": ["Version", "VimId", "SrcType", "MsgType", "FileUri"],
'additionalProperties': False
}
}
|
from fastbay.fastbay import fbay
|
"""terraform_to_ansible/__init__.py"""
|
from common import *
import params
import util
import parrun
import weightsave
import net_mitral_centric as nmc
def build_complete_model(connection_file):
# distribute
nmc.build_net_round_robin(getmodel(), connection_file)
import distribute
import multisplit_distrib
multisplit_distrib.multisplit_distrib(distribute.getmodel())
# set initial weights
if len(params.initial_weights) > 0:
weightsave.weight_load(params.initial_weights)
    # report NetCon and compartment counts
nc = h.List("NetCon")
nc = int(pc.allreduce(nc.count(),1))
if rank == 0: print "NetCon count = ", nc
nseg = 0
for sec in h.allsec():
nseg += sec.nseg
nseg = int(pc.allreduce(nseg, 1))
if rank == 0: print "Total # compartments = ", nseg
util.show_progress(200)
import custom_params
if custom_params.enableOdorInput:
from odorstim import OdorSequence
odseq = OdorSequence(params.odor_sequence)
if rank == 0: print 'total setup time ', h.startsw()-startsw
h("proc setdt(){}")
h.dt = 1./64. + 1./128.
def run():
parrun.prun(params.tstop)
weightsave.weight_file(params.filename + '.weight.dat')
util.finish()
|
"""Copyright 2021 Province of British Columbia.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from pay_api.models import FeeSchedule, db
from .secured_view import SecuredView
class FeeScheduleConfig(SecuredView):
"""Fee Schedule config."""
column_list = ['corp_type_code', 'filing_type_code', 'fee', 'future_effective_fee', 'priority_fee', 'service_fee']
column_labels = {
'corp_type': 'Corp Type',
'corp_type_code': 'Corp Type',
'filing_type': 'Filing Type',
'filing_type_code': 'Filing Type',
'fee': 'Filing Fee',
'fee_start_date': 'Fee effective start date',
'fee_end_date': 'Fee End Date',
'future_effective_fee': 'Future Effective Fee',
'priority_fee': 'Priority Fee',
'service_fee': 'Service Fee',
'distribution_codes': 'Distribution Code'
}
column_searchable_list = ('corp_type_code', 'filing_type_code')
column_sortable_list = ('corp_type_code',)
column_default_sort = 'corp_type_code'
form_args = {}
form_columns = ['corp_type', 'filing_type', 'fee', 'fee_start_date',
'fee_end_date', 'future_effective_fee', 'priority_fee', 'service_fee',
'distribution_codes']
edit_columns = ['corp_type', 'filing_type', 'fee_start_date',
'fee_end_date', 'priority_fee', 'service_fee',
'distribution_codes']
@staticmethod
def _change_labels(form):
form.fee.label.text = "Filing Fee (Starts with 'EN')"
form.future_effective_fee.label.text = "Future Effective Fee (Starts with 'FUT')"
form.priority_fee.label.text = "Priority Fee (Starts with 'PRI')"
form.service_fee.label.text = "Service Fee (Starts with 'TRF')"
form.distribution_codes.label.text = 'Distribution Code (Mandatory for non-zero fees)'
def edit_form(self, obj=None):
"""Edit form overrides."""
form = super().edit_form(obj)
self._change_labels(form)
return form
def create_form(self, obj=None):
"""Create form overrides."""
form = super().create_form(obj)
self._change_labels(form)
return form
# If this view should be displayed only for special roles, instantiate it as below
FeeScheduleView = FeeScheduleConfig(FeeSchedule, db.session)
|
# -*- coding: utf-8 -*-
from collections import OrderedDict
import six
from django.contrib.admin.filters import (BooleanFieldListFilter, ChoicesFieldListFilter, DateFieldListFilter,
FieldListFilter)
from django.contrib.admin.views import main
from django.db.models import BooleanField, DateField
from ..exceptions import QueryablePropertyError
from ..managers import QueryablePropertiesQuerySetMixin
from ..properties import MappingProperty
from ..utils.internal import get_output_field, QueryPath, resolve_queryable_property
class QueryablePropertyField(object):
    """
    Wrapper class for a queryable property that offers an attribute interface
    similar to Django fields. This allows Django's existing list filter
    implementations to be reused for queryable properties by providing
    objects of this class to list filters.
    """
def __init__(self, model_admin, query_path):
"""
Initialize a new property-to-field interface wrapper for the queryable
property with the given path.
:param model_admin: The admin instance for which a filter based on the
given queryable property should be created.
:type model_admin: django.contrib.admin.options.BaseModelAdmin
:param QueryPath query_path: The query path to the queryable property.
"""
property_ref, lookups = resolve_queryable_property(model_admin.model, query_path)
if not property_ref or lookups:
            raise QueryablePropertyError('The query path must point to a valid queryable property and may not '
                                         'contain lookups/transforms.')
self.output_field = get_output_field(property_ref.get_annotation())
self.model_admin = model_admin
self.property = property_ref.property
self.property_ref = property_ref
self.property_path = query_path
self.null = self.output_field is None or self.output_field.null
self.empty_strings_allowed = self.output_field is None or self.output_field.empty_strings_allowed
def __getattr__(self, item):
        # Pass through attribute accesses to the associated queryable property.
        # This allows accessing properties common to both queryable properties
        # and model fields (e.g. name, verbose_name) without having to copy
        # their values explicitly.
return getattr(self.property, item)
@property
def empty_value_display(self):
"""
Get the value to display in the admin in place of empty values.
:return: The value to display for empty internal values.
:rtype: str
"""
return getattr(main, 'EMPTY_CHANGELIST_VALUE', None) or self.model_admin.get_empty_value_display()
@property
def flatchoices(self):
"""
Build the list filter choices for the associated queryable property
as 2-tuples.
This is an attribute expected by certain list filter classes and is
used for any queryable property that doesn't map to a specialized
filter class, which is why the :class:`ChoicesFieldListFilter` is used
as the last resort when determining filter classes.
:return: The filter choices as 2-tuples containing the internal value
as the first and the display value as the second item.
:rtype: (object, object)
"""
if isinstance(self.property, MappingProperty):
options = OrderedDict((to_value, to_value) for from_value, to_value in self.property.mappings)
options.setdefault(self.property.default, self.empty_value_display)
for value, label in six.iteritems(options):
yield value, label
elif not isinstance(self.output_field, BooleanField):
name = six.text_type(QueryPath(('', 'value')))
queryset = QueryablePropertiesQuerySetMixin.inject_into_object(
self.property_ref.model._default_manager.all())
queryset = queryset.annotate(**{name: self.property_ref.get_annotation()}).order_by(name).distinct()
for value in queryset.values_list(name, flat=True):
yield value, six.text_type(value) if value is not None else self.empty_value_display
def get_filter_creator(self, list_filter_class=None):
"""
Create a callable that can be used to create a list filter object based
on this property.
:param list_filter_class: The list filter class to use. If not given,
a suitable list filter class will be
determined for the associated queryable
property.
:return: A callable to create a list filter object.
:rtype: collections.Callable
"""
list_filter_class = list_filter_class or QueryablePropertyListFilter.get_class(self)
def creator(request, params, model, model_admin):
return list_filter_class(self, request, params, model, model_admin, six.text_type(self.property_path))
return creator
class QueryablePropertyListFilter(FieldListFilter):
"""
    A base list filter class for queryable properties that allows re-using
    Django's filter class registration for queryable properties.
"""
_field_list_filters = []
_take_priority_index = 0
@classmethod
def get_class(cls, field):
"""
Determine a suitable list filter class for the given wrapped queryable
property based on the registered filter classes.
:param QueryablePropertyField field: The wrapped queryable property.
:return: An appropriate list filter class.
"""
for test, list_filter_class in cls._field_list_filters:
if test(field):
return list_filter_class
QueryablePropertyListFilter.register(lambda field: isinstance(field.output_field, BooleanField), BooleanFieldListFilter)
QueryablePropertyListFilter.register(lambda field: isinstance(field.property, MappingProperty), ChoicesFieldListFilter)
QueryablePropertyListFilter.register(lambda field: isinstance(field.output_field, DateField), DateFieldListFilter)
# Use the ChoicesFieldListFilter as the last resort since the general
# implementation of "list all possible values" is implemented in
# `QueryablePropertyField.flatchoices`.
# Django's last resort AllValuesFieldListFilter cannot be used as it performs
# a hardcoded query on its own, which wouldn't work with a queryable property.
QueryablePropertyListFilter.register(lambda field: True, ChoicesFieldListFilter)
|
with open("scripts_rnns_perfection.sh", "w") as fo:
    for seed in range(100):
        for model in ["gru", "lstm"]:
            jobname = "perfection_" + model + "_trainsize_10000_dataset_length5_ninn_withholding_bs_10_patience_5_hidden_296_257_layers_2_seed_" + str(seed)
            if model == "gru":
                fo.write("time python main.py --model gru --dataset_prefix length5_ninn_withholding --batch_size 10 --eval_every 1000 --patience 5 --hidden 296 --vocab_size 10 --n_layers 2 --random_seed " + str(seed) + " --model_name " + jobname + " > logs/" + jobname + ".results" + "\n")
            else:
                fo.write("time python main.py --model lstm --dataset_prefix length5_ninn_withholding --batch_size 10 --eval_every 1000 --patience 5 --hidden 257 --vocab_size 10 --n_layers 2 --random_seed " + str(seed) + " --model_name " + jobname + " > logs/" + jobname + ".results" + "\n")
|
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from peewee import ForeignKeyField
from playhouse.postgres_ext import BinaryJSONField
from .base import BaseModel
from .round import Round
from .concerns.round_related_model import RoundRelatedModel
class Feedback(BaseModel, RoundRelatedModel):
"""Feedback model"""
round = ForeignKeyField(Round, related_name='feedbacks')
polls = BinaryJSONField()
cbs = BinaryJSONField()
povs = BinaryJSONField()
|
from settings import *
import torch.nn as nn
class Discriminator(nn.Module):
def __init__(self, ngpu):
super(Discriminator, self).__init__()
self.ngpu = ngpu
self.main = nn.Sequential(
# input is (nc) x 64 x 64
nn.Conv2d(nc, ndf, 4, 2, 1, bias=False),
nn.LeakyReLU(0.2, inplace=True),
# state size. (ndf) x 32 x 32
nn.Conv2d(ndf, ndf * 2, 4, 2, 1, bias=False),
nn.BatchNorm2d(ndf * 2),
nn.LeakyReLU(0.2, inplace=True),
# state size. (ndf*2) x 16 x 16
nn.Conv2d(ndf * 2, ndf * 4, 4, 2, 1, bias=False),
nn.BatchNorm2d(ndf * 4),
nn.LeakyReLU(0.2, inplace=True),
# state size. (ndf*4) x 8 x 8
nn.Conv2d(ndf * 4, ndf * 8, 4, 2, 1, bias=False),
nn.BatchNorm2d(ndf * 8),
nn.LeakyReLU(0.2, inplace=True),
# state size. (ndf*8) x 4 x 4
nn.Conv2d(ndf * 8, 1, 4, 1, 0, bias=False),
nn.Sigmoid()
)
def forward(self, input):
return self.main(input)
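# A minimal smoke test (hedged: assumes the conventional DCGAN settings,
# e.g. nc=3 input channels and 64x64 images, which `settings` is expected to
# provide via nc/ndf):
if __name__ == '__main__':
    import torch
    netD = Discriminator(ngpu=1)
    fake_batch = torch.randn(4, nc, 64, 64)  # batch of 4 random "images"
    print(netD(fake_batch).shape)  # expected: torch.Size([4, 1, 1, 1])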
|
import math
import unittest
import numpy as np
import fastestimator as fe
from fastestimator.architecture.pytorch import LeNet as LeNet_torch
from fastestimator.architecture.tensorflow import LeNet as LeNet_tf
from fastestimator.backend import get_lr
from fastestimator.dataset.data import mnist
from fastestimator.op.numpyop.univariate import ExpandDims, Minmax
from fastestimator.op.tensorop.loss import CrossEntropy
from fastestimator.op.tensorop.model import ModelOp, UpdateOp
from fastestimator.schedule import ARC
from fastestimator.trace.adapt import LRScheduler
class TestLRScheduler(unittest.TestCase):
@staticmethod
def create_estimator_for_arc(model, use_eval, axis):
train_data, eval_data = mnist.load_data()
pipeline = fe.Pipeline(train_data=train_data,
eval_data=eval_data if use_eval else None,
batch_size=8,
ops=[ExpandDims(inputs="x", outputs="x", axis=axis), Minmax(inputs="x", outputs="x")])
network = fe.Network(ops=[
ModelOp(model=model, inputs="x", outputs="y_pred"),
CrossEntropy(inputs=("y_pred", "y"), outputs="ce"),
UpdateOp(model=model, loss_name="ce")
])
estimator = fe.Estimator(pipeline=pipeline,
network=network,
epochs=2,
traces=LRScheduler(model=model, lr_fn=ARC(1)),
train_steps_per_epoch=10)
return estimator
def test_tf_model_arc_train_eval(self):
model_tf = fe.build(model_fn=LeNet_tf, optimizer_fn="adam")
lr_before = get_lr(model=model_tf)
estimator = self.create_estimator_for_arc(model_tf, use_eval=True, axis=-1)
estimator.fit()
lr_after = get_lr(model=model_tf)
lr_ratio = lr_after / lr_before
increased = math.isclose(lr_ratio, 1.618, rel_tol=1e-5)
constant = math.isclose(lr_ratio, 1.0, rel_tol=1e-5)
decreased = math.isclose(lr_ratio, 0.618, rel_tol=1e-5)
self.assertTrue(increased or constant or decreased)
def test_tf_model_arc_train_only(self):
model_tf = fe.build(model_fn=LeNet_tf, optimizer_fn="adam")
lr_before = get_lr(model=model_tf)
estimator = self.create_estimator_for_arc(model_tf, use_eval=False, axis=-1)
estimator.fit()
lr_after = get_lr(model=model_tf)
lr_ratio = np.round(lr_after / lr_before, 3)
increased = math.isclose(lr_ratio, 1.618, rel_tol=1e-5)
constant = math.isclose(lr_ratio, 1.0, rel_tol=1e-5)
decreased = math.isclose(lr_ratio, 0.618, rel_tol=1e-5)
self.assertTrue(increased or constant or decreased)
def test_torch_model_arc_train_eval(self):
        model_torch = fe.build(model_fn=LeNet_torch, optimizer_fn="adam")
        lr_before = get_lr(model=model_torch)
        estimator = self.create_estimator_for_arc(model_torch, use_eval=True, axis=0)
        estimator.fit()
        lr_after = get_lr(model=model_torch)
        lr_ratio = lr_after / lr_before
increased = math.isclose(lr_ratio, 1.618, rel_tol=1e-5)
constant = math.isclose(lr_ratio, 1.0, rel_tol=1e-5)
decreased = math.isclose(lr_ratio, 0.618, rel_tol=1e-5)
self.assertTrue(increased or constant or decreased)
def test_torch_model_arc_train_only(self):
        model_torch = fe.build(model_fn=LeNet_torch, optimizer_fn="adam")
        lr_before = get_lr(model=model_torch)
        estimator = self.create_estimator_for_arc(model_torch, use_eval=False, axis=0)
        estimator.fit()
        lr_after = get_lr(model=model_torch)
        lr_ratio = lr_after / lr_before
increased = math.isclose(lr_ratio, 1.618, rel_tol=1e-5)
constant = math.isclose(lr_ratio, 1.0, rel_tol=1e-5)
decreased = math.isclose(lr_ratio, 0.618, rel_tol=1e-5)
self.assertTrue(increased or constant or decreased)
|
"""empty message
Revision ID: 4887d7d44d6
Revises: None
Create Date: 2015-10-09 02:53:30.559553
"""
# revision identifiers, used by Alembic.
revision = '4887d7d44d6'
down_revision = None
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.create_table('request',
sa.Column('id', sa.String(length=80), nullable=False),
sa.Column('email', sa.String(length=120), nullable=True),
sa.Column('generation_date', sa.Date(), nullable=True),
sa.PrimaryKeyConstraint('id')
)
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_table('request')
### end Alembic commands ###
|
import datetime
import logging
import pandas as pd
import psycopg2
from psycopg2._psycopg import IntegrityError
from selenium.common.exceptions import StaleElementReferenceException
from database_operations import execute_sql_postgres
from live_betting.bookmaker import Bookmaker
def get_tournament_id(book_id: int, tournament_bookmaker_id: str, tournament_bookmaker_year_id: str) -> tuple:
query = "SELECT tournament_id FROM tournament_bookmaker WHERE bookmaker_id=%s AND tournament_bookmaker_id=%s AND \
tournament_bookmaker_extra_id=%s"
return execute_sql_postgres(query, [book_id, tournament_bookmaker_id, tournament_bookmaker_year_id])
def save_tournament(params: list) -> tuple:
query = f"INSERT INTO tournament (name, sex, type, surface, year) VALUES ({', '.join(['%s'] * 5)}) RETURNING id"
return execute_sql_postgres(query, params, True)
def save_match(params: list) -> tuple:
query = f"INSERT INTO matches (home, away, start_time_utc, tournament_id) VALUES ({', '.join(['%s'] * 4)}) \
RETURNING id"
return execute_sql_postgres(query, params, True)
def save_tournament_bookmaker(params: list) -> tuple:
query = f"INSERT INTO tournament_bookmaker (tournament_id, bookmaker_id, tournament_bookmaker_id, \
tournament_bookmaker_extra_id) VALUES ({', '.join(['%s'] * 4)})"
return execute_sql_postgres(query, params, True)
def save_match_bookmaker(params: list) -> tuple:
query = f"INSERT INTO matches_bookmaker (match_id, bookmaker_id, match_bookmaker_id) VALUES\
({', '.join(['%s'] * 3)})"
return execute_sql_postgres(query, params, True)
def set_inplay(inplay_available: bool, match_bookmaker_id: str) -> tuple:
    query = "UPDATE matches_bookmaker SET inplay_available = %s WHERE match_bookmaker_id = %s"
return execute_sql_postgres(query, [inplay_available, match_bookmaker_id], True)
def get_save_tournaments(book: Bookmaker) -> pd.DataFrame:
tournaments = book.get_tournaments()
logging.debug("------------------------------------------------------\nPrematch")
logging.debug(tournaments)
book_id = book.database_id
year = datetime.datetime.now().year
for i, tournament in tournaments.iterrows():
if tournament['sex'] is None or tournament['type'] is None or tournament['surface'] is None:
continue
params = tournament[['tournament_name', 'sex', 'type', 'surface']].tolist()
params.append(year)
db_returned = save_tournament(params)
if type(db_returned) == IntegrityError or type(db_returned) == psycopg2.errors.UniqueViolation:
if 'duplicate key value violates unique constraint' in db_returned.args[0]:
continue
else:
raise db_returned
elif type(db_returned) == tuple and type(db_returned[0]) == int:
params = [db_returned[0], book_id, tournament.tournament_bookmaker_id,
tournament.tournament_bookmaker_year_id]
save_tournament_bookmaker(params)
logging.debug(f"Processed tournament {tournament.tournament_name}")
else:
raise db_returned
return tournaments
def update_match_start(match: pd.Series, tournament_id: int):
query = "SELECT start_time_utc FROM matches WHERE tournament_id=%s AND home=%s AND away=%s"
params = [tournament_id, match.home, match.away]
db_returned = execute_sql_postgres(query, params)
    if match.start_time_utc != db_returned[0]:
        logging.debug(
            f'Updating starting time. Original time {db_returned[0]}. New time {match.start_time_utc}. Match: {match}')
        query = "UPDATE matches SET start_time_utc = %s WHERE tournament_id=%s AND home=%s AND away=%s"
        time_params = [match.start_time_utc]
        time_params.extend(params)
        execute_sql_postgres(query, time_params, True)
    pass
def save_matches(matches: pd.DataFrame, tournament_id: int, book_id: int):
for i, match in matches.iterrows():
params = match[["home", "away", "start_time_utc"]].tolist()
params.append(tournament_id)
db_returned = save_match(params)
if type(db_returned) == IntegrityError or type(db_returned) == psycopg2.errors.UniqueViolation:
if 'duplicate key value violates unique constraint' in db_returned.args[0]:
update_match_start(match, tournament_id)
continue
else:
raise db_returned
elif type(db_returned) == tuple and type(db_returned[0]) == int:
params = [db_returned[0], book_id, match.bookmaker_matchid]
save_match_bookmaker(params)
else:
raise db_returned
pass
def process_tournaments_save_matches(book: Bookmaker, tournaments: pd.DataFrame):
for i, tournament in tournaments.iterrows():
logging.debug("------------------------------------------------------")
try:
matches = book.get_matches_tournament(tournament)
if len(matches) > 0:
tournament_id = get_tournament_id(book.database_id, tournament.tournament_bookmaker_id,
tournament.tournament_bookmaker_year_id)
if type(tournament_id) != tuple:
continue
save_matches(matches, tournament_id[0], book.database_id)
logging.debug(f"Matches from tournament {tournament.tournament_name} processed and saved.")
except StaleElementReferenceException as error:
            logging.warning(f"Impossible to parse tournament {tournament.tournament_name}. Error: {error}")
continue
pass
def get_matchid(bookmaker_matchid, bookmaker_id):
query = "SELECT match_id FROM matches_bookmaker WHERE bookmaker_id = %s AND match_bookmaker_id = %s"
return execute_sql_postgres(query, [bookmaker_id, bookmaker_matchid])
|
from torchero.models.vision.nn.torchvision import *
|
#!/usr/bin/env python
#-*-*- encoding: utf-8 -*-*-
#
# Copyright (C) 2005 onwards University of Deusto
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution.
#
# This software consists of contributions made by many individuals,
# listed below:
#
# Author: Pablo Orduña <pablo@ordunya.com>
#
from __future__ import print_function, unicode_literals
import sys
sys.path.insert(0, '../../src')
def inner(signals = False, condition = None, event_notifier = None):
def before_shutdown():
print("Stopping servers...")
import signal
import voodoo.gen.launcher as Launcher
import voodoo.rt_debugger as rt_debugger
rt_debugger.launch_debugger()
if condition is not None:
waiters = ( Launcher.ConditionWait(condition), )
elif signals:
waiters = (
Launcher.SignalWait(signal.SIGTERM),
Launcher.SignalWait(signal.SIGINT),
Launcher.RawInputWait("Press <enter> or send a sigterm or a sigint to finish.\n"),
)
else:
waiters = (
Launcher.RawInputWait("Control+C to finish.\n"),
)
event_notifiers = (
Launcher.FileNotifier("_file_notifier", "server started"),
)
if event_notifier:
event_notifiers += ( Launcher.ConditionNotifier(event_notifier),)
launcher = Launcher.Launcher(
'.',
'main_machine',
'main_instance',
waiters,
"logging.configuration.txt",
before_shutdown,
event_notifiers
)
launcher._wait_condition = condition
return launcher
def launch(signals = False):
launcher = inner(signals = signals)
launcher.launch()
return launcher
if __name__ == '__main__':
try:
from werkzeug.serving import run_with_reloader
except ImportError:
print("werkzeug.serving not installed (pip install werkzeug). If you're developing, you'll have to restart the application in every change manually.")
launch(signals = True)
else:
run_with_reloader(launch)
|
import pytest
from django.test import RequestFactory
from django.urls import reverse
from api.models import UserStats, Log
from api.views import bots
from mixer.backend.django import mixer
from api.tests.utils import *
@pytest.fixture(scope='module')
def factory():
return RequestFactory()
@pytest.fixture
def user_stats(db):
return mixer.blend(UserStats)
@pytest.fixture
def logs(db):
return mixer.cycle(20).blend(Log, id_bot=1)
# TODO: find a workaround for queries that hit two different database types
"""
@catch_exception
def test_successful_twitter_bot_logs_request(error_catcher, factory, logs):
path = reverse('twitter_bot_logs', kwargs={'bot_id': 1})
request = factory.get(path)
response = bots.twitter_bot_logs(request, bot_id=1)
assert is_response_successful(response)
@catch_exception
def test_unsuccessfully_twitter_bot_logs_request(error_catcher, factory, db):
entries_per_page = 1
page = 1
path = reverse('twitter_bot_logs', kwargs={'id': 1, 'entries_per_page': entries_per_page, 'page': page})
request = factory.get(path)
response = bots.twitter_bot_logs(request, id=1, entries_per_page=str(entries_per_page), page=str(page))
assert response.status_code == 200 and len(response.data['error_messages']) == 0 and \
len(response.data['success_messages']) > 0 and response.data['data']['entries'] == []
"""
|
from .seq import Seq
def matrix(seq1: Seq, seq2: Seq):
w, h = len(seq1), len(seq2)
Matrix = [[0 for x in range(w + 1)] for y in range(h + 1)]
for i, ibase in enumerate(seq1, 1):
for j, jbase in enumerate(seq2, 1):
if ibase == jbase:
Matrix[j][i] = Matrix[j - 1][i - 1] + 1
else:
Matrix[j][i] = max(Matrix[j - 1][i], Matrix[j][i - 1])
    # drop the padding row and column of zeros
Matrix = [M[1:] for M in Matrix[1:]]
return Matrix
def format_matrix(seq1, seq2, Matrix):
fM = []
fM.append(" " + " ".join([nt for nt in seq1]))
for i, b in enumerate(seq2):
fM.append(b + " " + " ".join([str(s) for s in Matrix[i]]))
return fM
def lcs_traceback(seq1: Seq, seq2: Seq, Matrix):
i, j = len(seq1) - 1, len(seq2) - 1
LCS = []
pos_seq1 = []
pos_seq2 = []
while i > -1 and j > -1:
if seq1[i] == seq2[j]:
LCS.append(seq1[i])
pos_seq1.append(i + 1)
pos_seq2.append(j + 1)
j -= 1
i -= 1
elif Matrix[j][i - 1] == Matrix[j][i]:
i -= 1
else:
j -= 1
LCS = "".join(LCS)[::-1]
return Seq(LCS), pos_seq1[::-1], pos_seq2[::-1]
def lcs(seq1: Seq, seq2: Seq) -> Seq:
"""longest common subsequence of two sequences"""
Matrix = matrix(seq1, seq2)
LCS, *_ = lcs_traceback(seq1, seq2, Matrix)
return LCS
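# Example usage (hedged: assumes Seq wraps a plain string and supports
# len(), indexing and iteration like a normal sequence):
if __name__ == '__main__':
    s1, s2 = Seq('GATTACA'), Seq('GCATGCU')
    print(lcs(s1, s2))  # prints one longest common subsequence, e.g. 'GATC'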
|
#!/usr/bin/env python
# Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License").
# You may not use this file except in compliance with the License.
# A copy of the License is located at
# http://www.apache.org/licenses/LICENSE-2.0
# or in the "license" file accompanying this file. This file is distributed
# on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
# express or implied. See the License for the specific language governing
# permissions and limitations under the License.
"""
Convert the Taurus Test suite XML to Junit XML
"""
# pylint: disable=redefined-builtin
import glob
import shutil
import os
from .reader import get_mon_metrics_list
def get_taurus_options(artifacts_dir, jmeter_path=None):
"""The options for Taurus BZT command"""
options = []
if jmeter_path:
options.append('-o modules.jmeter.path={}'.format(jmeter_path))
options.append('-o settings.artifacts-dir={}'.format(artifacts_dir))
options.append('-o modules.console.disable=true')
options.append('-o settings.env.BASEDIR={}'.format(artifacts_dir))
options_str = ' '.join(options)
return options_str
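# Illustrative result (derived directly from the code above):
#   get_taurus_options('/tmp/artifacts', '/opt/jmeter') ->
#   '-o modules.jmeter.path=/opt/jmeter -o settings.artifacts-dir=/tmp/artifacts '
#   '-o modules.console.disable=true -o settings.env.BASEDIR=/tmp/artifacts'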
def update_taurus_metric_files(suite_artifacts_dir, test_file):
"""
It renames the server and local metric monitoring log files to metrics.csv.
The order of the columns in header of server metric monitoring SALogs file generated by taurus
is not inline with data. So as a work around this function rewrites the header based on order
defined in the test yaml.
"""
metrics_new_file = os.path.join(suite_artifacts_dir, "metrics.csv")
server_metric_file_pattern = os.path.join(suite_artifacts_dir, "SAlogs_*")
metrics_log_file = glob.glob(server_metric_file_pattern)
    if metrics_log_file:
        metrics = get_mon_metrics_list(test_file)
        if metrics:
            # Write the corrected header and the remaining rows straight to
            # the new file; reopening the source file in write mode while it
            # is still open for reading would truncate it mid-copy.
            with open(metrics_log_file[0]) as from_file:
                line = from_file.readline()
                with open(metrics_new_file, mode="w") as to_file:
                    to_file.write(','.join(line.split(',')[0:1] + metrics) + "\n")
                    shutil.copyfileobj(from_file, to_file)
            os.remove(metrics_log_file[0])
        else:
            os.rename(metrics_log_file[0], metrics_new_file)
else:
metrics_log_file = os.path.join(suite_artifacts_dir, "local_monitoring_logs.csv")
if os.path.exists(metrics_log_file):
os.rename(metrics_log_file, metrics_new_file)
|
# Stdlib imports
import os
import pathlib
# Vendor imports
import yaml
# Local imports
from . import helper
_defaultConfig = {"v": 1, "profiles": {}}
def getDefaultConfigPath() -> str:
return os.environ.get("SPGILL_BACKUP_CONFIG", "~/.spgill.backup.yaml")
# Return the config values in the config file
def loadConfigValues(configPath: str = None) -> dict:
updateFile = False
# Resolve the path string to a path object
filePath = pathlib.Path(configPath or getDefaultConfigPath()).expanduser()
# If the config file doesn't already exist, create it
if not filePath.exists():
with filePath.open("w") as handle:
yaml.dump(_defaultConfig, handle)
    # Open and decode the config file
    values = None
    with filePath.open("r") as handle:
        values = yaml.load(handle, Loader=yaml.SafeLoader)
    # Use .get() so that deleting the "v" property suppresses the warning, as
    # the message below suggests
    if values.get("v", _defaultConfig["v"]) < _defaultConfig["v"]:
helper.printWarning(
f'Warning: Config file at "{filePath}" is possibly incompatible with this version of the backup tool. Validate the contents of the config file are compatible and update the "v" property to "{_defaultConfig["v"]}", or delete the "v" property entirely to suppress this warning.'
)
# If the config file needs to be updated, do it now
if updateFile:
with filePath.open("w") as handle:
yaml.dump(values, handle)
# Finally, return the values
return values
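# Minimal usage sketch:
#   values = loadConfigValues()            # honors $SPGILL_BACKUP_CONFIG, else ~/.spgill.backup.yaml
#   profiles = values.get("profiles", {})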
|
"""
Factory Boy Factory Definition
"""
from django.contrib.auth import get_user_model
from factory.django import DjangoModelFactory as Factory
from factory import LazyAttribute, Sequence, SubFactory
model = get_user_model()
class UserFactory(Factory):
"""
Base User Factory
"""
email = LazyAttribute(lambda m: '{0}@example.com'.format(m.first_name))
first_name = Sequence(lambda n: 'User{0}'.format(n))
last_name = 'Smith'
token = Sequence(lambda n: 'Validation{0}'.format(n))
# pylint: disable=R0903
class Meta:
"""
Metaclass Definition
"""
model = model
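# Usage sketch (assumes a configured Django test database, e.g. via a test runner):
#   user = UserFactory()                   # e.g. email 'User0@example.com'
#   users = UserFactory.create_batch(3)    # three users with distinct names/tokens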
|
#!/usr/bin/env python2.6
import lxml.html, re
class InvalidArguments(Exception):
pass
class MetaHeaders:
    def __init__(self, url=None, page=None, name='name', content='content'):
if page:
self.root = lxml.html.document_fromstring(page)
elif url:
self.root = lxml.html.parse(url).getroot()
else:
raise InvalidArguments, "Need a URL or an HTML page"
meta = {}
for m in self.root.cssselect("meta"):
attr=m.attrib
if attr.has_key(name) and attr.has_key(content) and attr[content] != "":
k = attr[name]
v = attr[content].strip()
if not meta.has_key(k):
meta[k] = []
meta[k].append(v)
self.meta = meta
def get_item(self, k):
if self.meta.has_key(k):
return self.meta[k][0]
else:
return None
def get_multi_item(self, k):
if self.meta.has_key(k):
return self.meta[k]
else:
return None
def print_item(self, entry, key):
el = self.get_multi_item(key)
if not el:
return
for e in el:
print "%s\t%s" % (entry, e)
def print_date(self, key):
date = self.get_item(key)
if not date:
return
year = None
month = None
day = None
m = re.search(r'(\d\d\d\d)(?:[-/])(\d+)(?:[-/])(\d+)', date)
if m:
year = m.group(1)
month = m.group(2)
day = m.group(3)
if not year:
m = re.search(r'(\d\d\d\d)(?:[-/])(\d+)', date)
if m:
year = m.group(1)
month = m.group(2)
if not year:
m = re.search(r'(\d\d\d\d)', date)
if m:
year = m.group(1)
m = re.search(r"([a-z]+)", date, re.IGNORECASE)
if m:
            months = {
                'Jan': 1, 'Feb': 2, 'Mar': 3, 'Apr': 4, 'May': 5, 'Jun': 6,
                'Jul': 7, 'Aug': 8, 'Sep': 9, 'Oct': 10, 'Nov': 11, 'Dec': 12,
                'January': 1, 'February': 2, 'March': 3, 'April': 4,
                'June': 6, 'July': 7, 'August': 8,
                'September': 9, 'October': 10, 'November': 11, 'December': 12
            }
            try:
                # print the month name match now and suppress the numeric
                # month that would otherwise be printed below
                print "month\t%s" % months[m.group(1).capitalize()]
                month = None
            except KeyError:
                pass
if year:
print "year\t%s" % year
if month:
print "month\t%s" % month
if day:
print "day\t%s" % day
def test():
url = "http://ieeexplore.ieee.org/xpl/freeabs_all.jsp?arnumber=4755987"
print "getting %s " % url
metaheaders = MetaHeaders(url=url)
for (k,v) in metaheaders.meta.items():
print "%s = %s" % (k,v)
print "===============\nRepeat with manual fetch"
from urllib2 import urlopen
page = urlopen(url).read()
metaheaders = MetaHeaders(page=page)
for (k,v) in metaheaders.meta.items():
print "%s = %s" % (k,v)
if __name__ == '__main__':
test()
|
from PyQt5 import uic, QtWidgets
import mysql.connector
# Database connection
banco = mysql.connector.connect(
    host='127.0.0.1',
    user='root',
    password='',
    database='fornecedores'
)
def do_login():
    # System login: read the credentials before clearing the field
    user = login.lineEdit.text()
    password = login.lineEdit_2.text()
    login.lineEdit.setText('')
def verify_duplicity_email():
    # Check for a duplicate registration
    cursor = banco.cursor()
    query = 'SELECT * FROM usuarios'
    cursor.execute(query)
    all_data = cursor.fetchall()  # All records as a matrix
    email = register.lineEdit_5.text()  # E-mail entered by the user
    print(f'E-mail entered: {email}')
    # Compare against every registered e-mail (column 4)
    for row in all_data:
        email_to_confirm = row[4]
        print(f"Registered e-mail: {email_to_confirm}")
        if email_to_confirm == email:
            print(f"Duplicate e-mail: {email}")
            return False
    print("No duplicates found")
    return True
def do_register():
    # Register a new user
    name = register.lineEdit.text()
    login_name = register.lineEdit_2.text()
    password = register.lineEdit_3.text()
    password_to_confirm = register.lineEdit_4.text()
    email = register.lineEdit_5.text()
    if password == password_to_confirm:
        # Passwords match
        try:
            if '' in (name.strip(), login_name.strip(), password.strip(), email.strip()):
                register.label_2.setText('Please fill in all the fields.')
            else:
                cursor = banco.cursor()
                # Parameterized query prevents SQL injection
                query = "INSERT INTO usuarios (nome, login, senha, email) VALUES (%s, %s, %s, %s)"
                cursor.execute(query, (name, login_name, password, email))
                banco.commit()
                register.label_2.setText('Registration successful!')
                # Clear the fields
                register.lineEdit.setText('')
                register.lineEdit_2.setText('')
                register.lineEdit_3.setText('')
                register.lineEdit_4.setText('')
                register.lineEdit_5.setText('')
        except mysql.connector.Error as erro:
            print('Error registering new user: ', erro)
            register.label_2.setText('Oops... something went wrong.')
    else:
        register.label_2.setText('The passwords do not match.')
def execute_register():
    name = main.lineEdit.text()
    cpf_cnpj = main.lineEdit_2.text()
    apel = main.lineEdit_3.text()
    email = main.lineEdit_4.text()
    tel = main.lineEdit_11.text()
    ender = main.lineEdit_5.text()
    number = main.lineEdit_6.text()
    neigh = main.lineEdit_7.text()
    complement = main.lineEdit_8.text()
    city = main.lineEdit_9.text()
    uf = main.lineEdit_10.text()
    cep = main.lineEdit_12.text()
    if '' in (name, cpf_cnpj, email, tel, ender, number, neigh, city, uf, cep):
        main.label_2.setText('Some fields are missing.')
    else:
        cursor = banco.cursor()
        # Parameterized query prevents SQL injection
        query = ("INSERT INTO fornecedores (nome, CPFouCNPJ, apelido, cep, endereco, numero, "
                 "bairro, complemento, cidade, uf, telefone, email) "
                 "VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)")
        cursor.execute(query, (name, cpf_cnpj, apel, cep, ender, number, neigh, complement, city, uf, tel, email))
        banco.commit()
        main.label_2.setText('Supplier registered!')
        # Clear the fields
        main.lineEdit.setText('')
        main.lineEdit_2.setText('')
        main.lineEdit_3.setText('')
        main.lineEdit_4.setText('')
        main.lineEdit_11.setText('')
        main.lineEdit_5.setText('')
        main.lineEdit_6.setText('')
        main.lineEdit_7.setText('')
        main.lineEdit_8.setText('')
        main.lineEdit_9.setText('')
        main.lineEdit_10.setText('')
        main.lineEdit_12.setText('')
def show_register_list():
    main.close()
    register_list.show()
    cursor = banco.cursor()
    query = "SELECT * FROM fornecedores"
    cursor.execute(query)
    data = cursor.fetchall()
    # Build the table in the form
    register_list.tableWidget.setRowCount(len(data))
    register_list.tableWidget.setColumnCount(13)
    # Insert the data into the table
    for i in range(0, len(data)):  # Number of rows
        for j in range(0, 13):  # Number of columns
            register_list.tableWidget.setItem(i, j, QtWidgets.QTableWidgetItem(str(data[i][j])))
def show_update():
    update.show()
    # Row number selected for editing
    number_line = register_list.tableWidget.currentRow()
    # Reload the selected record into the form for editing
    cursor = banco.cursor()
    query = 'SELECT id FROM fornecedores'
    cursor.execute(query)
    all_data = cursor.fetchall()
    id = all_data[number_line][0]
    # Fetch the full record for that ID (parameterized query)
    cursor = banco.cursor()
    query = 'SELECT * FROM fornecedores WHERE id = %s'
    cursor.execute(query, (id,))
    all_data_by_id = cursor.fetchall()
    print(all_data_by_id)
    # Unpack the column values
name = all_data_by_id[0][1]
cpf_cnpj = all_data_by_id[0][2]
apel = all_data_by_id[0][3]
cep = all_data_by_id[0][4]
ender = all_data_by_id[0][5]
number = all_data_by_id[0][6]
neigh = all_data_by_id[0][7]
complement = all_data_by_id[0][8]
city = all_data_by_id[0][9]
uf = all_data_by_id[0][10]
tel = all_data_by_id[0][11]
email = all_data_by_id[0][12]
    # Populate the fields for editing
update.lineEdit.setText(name)
update.lineEdit_2.setText(str(cpf_cnpj))
update.lineEdit_3.setText(apel)
update.lineEdit_4.setText(str(email))
update.lineEdit_5.setText(ender)
update.lineEdit_6.setText(str(number))
update.lineEdit_7.setText(neigh)
update.lineEdit_8.setText(str(complement))
update.lineEdit_9.setText(city)
update.lineEdit_10.setText(uf)
update.lineEdit_11.setText(str(tel))
update.lineEdit_12.setText(str(cep))
def save():
    # Get the ID of the selected row
    cursor = banco.cursor()
    query = 'SELECT id FROM fornecedores'
    cursor.execute(query)
    all_data = cursor.fetchall()
    number_line = register_list.tableWidget.currentRow()
    id = all_data[number_line][0]
    # Read the edited values from the text boxes
name = update.lineEdit.text()
cpf_cnpj = update.lineEdit_2.text()
apel = update.lineEdit_3.text()
email = update.lineEdit_4.text()
ender = update.lineEdit_5.text()
number = update.lineEdit_6.text()
neigh = update.lineEdit_7.text()
complement = update.lineEdit_8.text()
city = update.lineEdit_9.text()
uf = update.lineEdit_10.text()
tel = update.lineEdit_11.text()
cep = update.lineEdit_12.text()
    cursor = banco.cursor()
    # Parameterized query prevents SQL injection
    query = ("UPDATE fornecedores SET nome = %s, CPFouCNPJ = %s, apelido = %s, cep = %s, "
             "endereco = %s, numero = %s, bairro = %s, complemento = %s, cidade = %s, "
             "uf = %s, telefone = %s, email = %s WHERE id = %s")
    cursor.execute(query, (name, cpf_cnpj, apel, cep, ender, number, neigh, complement, city, uf, tel, email, id))
    banco.commit()
def delete():
    # Get the ID of the selected row
    cursor = banco.cursor()
    query = 'SELECT id FROM fornecedores'
    cursor.execute(query)
    all_data = cursor.fetchall()
    number_line = register_list.tableWidget.currentRow()
    register_list.tableWidget.removeRow(number_line)
    id = all_data[number_line][0]
    cursor = banco.cursor()
    # Parameterized query prevents SQL injection
    query = "DELETE FROM fornecedores WHERE id = %s"
    cursor.execute(query, (id,))
    banco.commit()
    quest_delete.close()
    register_list.show()
# Screen management
def show_do_register():
login.close()
register.show()
def back_to_login():
register.close()
login.show()
def do_register_after_verify():
if verify_duplicity_email():
do_register()
else:
        register.label_2.setText('E-mail already registered.')
def show_main_form():
login.close()
main.show()
def logout():
main.close()
login.show()
def back_to_main():
register_list.close()
update.close()
main.show()
def show_update_form():
register_list.close()
show_update()
def back_to_register_list():
update.close()
quest_delete.close()
register_list.show()
def save_changes():
save()
back_to_register_list()
def quest_to_delete():
quest_delete.show()
app = QtWidgets.QApplication([])
# Load the UI screens
login = uic.loadUi('login.ui')
register = uic.loadUi('register.ui')
main = uic.loadUi('main.ui')
register_list = uic.loadUi('register_list.ui')
update = uic.loadUi('update.ui')
quest_delete = uic.loadUi('quest_delete.ui')
# Signal connections
login.pushButton.clicked.connect(show_main_form)
login.pushButton_2.clicked.connect(show_do_register)  # Opens the registration screen
#______________________________________
register.pushButton_2.clicked.connect(back_to_login)
register.pushButton.clicked.connect(do_register_after_verify)
#______________________________________
main.pushButton_2.clicked.connect(execute_register)
main.pushButton_3.clicked.connect(logout)
main.pushButton_4.clicked.connect(show_register_list)
#______________________________________
register_list.pushButton_2.clicked.connect(back_to_main)
register_list.pushButton_3.clicked.connect(show_update_form)
register_list.pushButton_4.clicked.connect(quest_to_delete)
#_______________________________________
update.pushButton_3.clicked.connect(back_to_main)
update.pushButton_2.clicked.connect(save_changes)
#________________________________________
quest_delete.pushButton_4.clicked.connect(delete)
quest_delete.pushButton_3.clicked.connect(back_to_register_list)
# System start
login.show()
app.exec()
|
# -*- coding: utf-8 -*-
"""
Authentication module.
"""
import flask_login
import requests
from flask import redirect, render_template, request, url_for
from . import AUTH_SERVICE, app, login_manager
from .models import User
@app.route('/signup', methods=['GET', 'POST'])
def signup():
"""
Sign-up page.
:return:
"""
if request.method == 'GET':
return render_template('login.html')
name = request.form['name']
email = request.form['email']
pw = request.form['password']
if len(pw) < 8:
return render_template(
'login.html', signuperror='Password must be at least 8 characters.'
)
elif pw != request.form['confirm-password']:
return render_template(
'login.html', signuperror='Make sure to confirm the password!'
)
r = requests.post(
f'{AUTH_SERVICE}/users',
json={
'name': name,
'email': email,
'pw': pw
}
)
if r.status_code != 201:
return render_template('login.html', signuperror=r.json()['message'])
new_user_doc = r.json()['data']
new_user_obj = User().from_json(new_user_doc)
flask_login.login_user(new_user_obj)
return redirect(url_for('show_movies'))
@app.route('/login', methods=['GET', 'POST'])
def login():
"""
Log-in page.
:return:
"""
if request.method == 'GET':
return render_template('login.html')
email = request.form['email']
pw = request.form['password']
r = requests.get(
f'{AUTH_SERVICE}/user-auth/?email={email}', json={'pw': pw}
)
if r.status_code != 200:
return render_template('login.html', loginerror=r.json()['message'])
user_doc = r.json()['data']
user_obj = User().from_json(user_doc)
flask_login.login_user(user_obj)
return redirect(url_for('show_movies'))
@app.route('/profile')
@flask_login.login_required
def profile():
"""
Profile page.
:return:
"""
return render_template('profile.html')
@app.route('/logout')
@flask_login.login_required
def logout():
"""
Logout page.
:return:
"""
flask_login.logout_user()
return redirect(url_for('show_movies'))
@login_manager.unauthorized_handler
def unauthorized_handler():
"""
Flask-Login unauthorized handler.
When "login_required", this function handles unauthorized users, and
redirect them to appropriate place.
:return:
"""
return render_template('splash_screen.html')
|
import os
from typing import Any, Dict
from jinja2 import Environment, FileSystemLoader
from jinja2.exceptions import TemplateNotFound
from routers import Router
class Bird(Router):
def __init__(self, family: int = 4, **kwargs: Any) -> None:
super().__init__(**kwargs)
self.family = family
self.config = self.render_config(self.build_config(), "bird.conf.j2").strip()
def build_config(self) -> Dict[str, Any]:
if not self.router_id:
raise LookupError("Unable to determine router id")
ipv4_next_hop, ipv6_next_hop = self.multi_hop_gateway
if self.ipv4_multi_hop and not ipv4_next_hop:
raise LookupError("Unable to determine IPv4 next hop for multihop peer")
if self.ipv6_multi_hop and not ipv6_next_hop:
raise LookupError("Unable to determine IPv6 next hop for multihop peer")
return {
"bgp_neighbors": [neighbor._asdict() for neighbor in self.bgp_neighbors],
"meta": {
"router_id": self.router_id,
"family": self.family,
"ipv4_next_hop": ipv4_next_hop if self.ipv4_multi_hop else None,
"ipv6_next_hop": ipv6_next_hop if self.ipv6_multi_hop else None,
},
}
def render_config(self, data: Dict[str, Any], filename: str) -> str:
script_dir = os.path.dirname(__file__)
search_dir = os.path.join(script_dir, "templates")
loader = FileSystemLoader(searchpath=search_dir)
env = Environment(loader=loader)
try:
template = env.get_template(filename)
except TemplateNotFound as e:
raise TemplateNotFound(
"Failed to locate bird's configuration template {}.".format(e.message)
)
return template.render(data=data)
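# Usage sketch (hypothetical arguments; the accepted kwargs are defined by the
# Router base class in the routers module):
#   bird = Bird(family=6, **router_kwargs)
#   print(bird.config)  # rendered bird.conf for this peer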
|
from django.views import generic
# Create your views here.
class Index(generic.TemplateView):
template_name = 'index.html'
class Services(generic.TemplateView):
template_name = 'services.html'
class Prices(generic.TemplateView):
template_name = 'prices.html'
class Fleets(generic.TemplateView):
template_name = 'fleets.html'
class About(generic.TemplateView):
template_name = 'about.html'
class Contact(generic.TemplateView):
template_name = 'contact.html'
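# A minimal urls.py sketch (hypothetical module layout) wiring these views:
#   from django.urls import path
#   from . import views
#
#   urlpatterns = [
#       path('', views.Index.as_view(), name='index'),
#       path('services/', views.Services.as_view(), name='services'),
#       path('prices/', views.Prices.as_view(), name='prices'),
#       path('fleets/', views.Fleets.as_view(), name='fleets'),
#       path('about/', views.About.as_view(), name='about'),
#       path('contact/', views.Contact.as_view(), name='contact'),
#   ]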
|
import serial
import serial.tools.list_ports
import tkinter as tk
from tkinter import messagebox
import threading
import pyautogui
import PIL.Image
import PIL.ImageTk
import cv2
# Global variables
main_window = tk.Tk()
current_option = tk.StringVar(main_window)
btn_text_var = None
arduino_thread = None
window_is_alive = True
ser = None
presentation_started = False
glove_image = None
IMAGES_DIR = "../assets/pics/"
class GloveController:
def __init__(self):
self.fingers = {"index_finger_action": 'LEFT',
"middle_finger_action": 'RIGHT',
"ring_finger_action": 'F5',
"baby_finger_action": None}
print("Starting glove controller")
def transform_img(img_array):
global glove_image, main_window
img_array = cv2.cvtColor(cv2.resize(img_array, (300, 300)), cv2.COLOR_BGR2RGB)
img = PIL.Image.fromarray(img_array)
img_tk = PIL.ImageTk.PhotoImage(img)
return img_tk
FINGER_IMAGES = {
    'I': ("gc_index.png", 'index_finger'),
    'M': ("gc_middle.png", 'middle_finger'),
    'R': ("gc_ring.png", 'ring_finger'),
    'B': ("gc_baby.png", None),
}
def get_finger(index):
    # Map the serial command letter to a finger name and update the glove
    # image shown in the window; returns None for unknown letters.
    global glove_image, main_window
    if index not in FINGER_IMAGES:
        return None
    image_name, finger = FINGER_IMAGES[index]
    tmp_img = transform_img(cv2.imread(IMAGES_DIR + image_name))
    glove_image.configure(image=tmp_img)
    glove_image.image = tmp_img
    main_window.update()
    return finger
def do_action(action, glove_controller):
    global presentation_started
    finger = get_finger(action)
    finger_action = "{}_action".format(finger)
    if finger is not None:
        pyautogui.press(glove_controller.fingers[finger_action])
    if finger == 'ring_finger':
        # Toggle the ring finger between starting (F5) and leaving (ESC) the
        # presentation; the new action applies to the next press.
        if presentation_started:
            glove_controller.fingers['ring_finger_action'] = "F5"
            presentation_started = False
        else:
            glove_controller.fingers['ring_finger_action'] = "ESC"
            presentation_started = True
def kill_arduino_thread():
global window_is_alive, arduino_thread, main_window, ser
print("Exiting")
window_is_alive = False
main_window.destroy()
if ser is not None:
ser.close()
if arduino_thread is not None:
arduino_thread.join()
def start_listening():
global window_is_alive, ser, arduino_thread
if current_option.get() != "Select port":
port_selected = current_option.get().split("-")[0].strip()
try:
ser = serial.Serial(port_selected)
messagebox.showinfo(title="Connected", message="¡Connected successfully!")
btn_text_var.set("Stop listening")
glove_controller = GloveController()
while window_is_alive:
serial_data = (ser.read()).decode('utf-8')
if len(serial_data) > 0:
print("Doing action")
do_action(serial_data, glove_controller)
#ser.close()
print("Good bye")
except Exception as e:
print(e)
messagebox.showerror(title="Error", message="Error. Port is Busy")
btn_text_var.set("Start listening")
def init_thread():
global arduino_thread, ser, window_is_alive
if arduino_thread is None:
arduino_thread = threading.Thread(target=start_listening)
arduino_thread.start()
else:
if ser is not None:
window_is_alive = False
ser.close()
btn_text_var.set("Start listening")
arduino_thread.join()
arduino_thread = None
def get_ports():
ports = serial.tools.list_ports.comports()
ports_desc = []
for port in ports:
device = port.device
description = port.description
ports_desc.append(f"{device} - {description}")
print(f"Device: {device}, Description: {description}")
if len(ports_desc) > 0:
return ports_desc
else:
return None
def display_ports():
options = get_ports()
if options is not None:
select = tk.OptionMenu(main_window, current_option, *options)
select.config(bg="#F1FAEE",font=('Open Sans', 8))
select.place(x=170, y=100, width=300)
def main():
global arduino_thread, window_is_alive, glove_image, btn_text_var
    # Window setup
window_title = "Glove Controller"
width, height = 600, 600
main_window.title(window_title)
main_window.resizable(False, False)
main_window.geometry(f"{width}x{height}")
main_window.config(bg="#F1FAEE")
# Title label
title_label = tk.Label(master=main_window, text="Glove Controller", font=('Open Sans', 32),bg="#F1FAEE")
title_label.place(x=150, y=20)
current_option.set("Select port")
# Button start listening
btn_text_var = tk.StringVar(value="Start listening")
btn_start_listening = tk.Button(master=main_window, textvariable=btn_text_var,
command=init_thread, font=('Open Sans', 16))
btn_start_listening.config(bg="#F1FAEE")
btn_start_listening.place(x=220, y=200, width=150, height=40)
# Initial glove image
tmp_img = transform_img(cv2.imread("../assets/pics/gc.png"))
glove_image = tk.Label(main_window, bg="#F1FAEE")
glove_image.configure(image=tmp_img)
glove_image.place(x=150, y=300)
# Main process
display_ports()
main_window.protocol("WM_DELETE_WINDOW", kill_arduino_thread)
main_window.mainloop()
if __name__ == '__main__':
main()
|
#!/usr/bin/env python
import click
import json
import os
import re
import requests
import sys
from sh import git, ErrorReturnCode
from aeriscloud.ansible import get_organization_list
from aeriscloud.basebox import baseboxes
from aeriscloud.cli.aeris.sync import sync
from aeriscloud.cli.helpers import Command, fatal, warning, \
error, get_input, start_box, move_shell_to
from aeriscloud.cli.prompt import AerisCompletableList, AerisPrompt
from aeriscloud.config import aeriscloud_path, config, basebox_bucket, \
projects_path
from aeriscloud.project import Project
from aeriscloud.services import services as list_services
style = click.style
vgit = git.bake(_out=sys.stdout, _out_bufsize=0, _err_to_out=True)
default_baseboxes = [
'chef/centos-6.6',
'chef/centos-7.0',
'debian/wheezy64',
'debian/jessie64',
'ubuntu/trusty64',
'ubuntu/vivid64'
]
default_basebox = 'chef/centos-7.0'
def _title(title):
click.secho('\n%s\n%s' % (title, '-' * len(title)), fg='cyan')
def _get_git_root():
"""
Retrieve the git directory, or prompt to create one if not found
"""
git_root = None
try:
git_root = str(git('rev-parse', '--show-toplevel')).strip()
except ErrorReturnCode as e:
if e.exit_code != 128:
fatal(e.message, e.exit_code)
if not git_root:
warning('You must be in a git repository directory to '
'initialize a new project.')
if not click.confirm('Do you want to create a new git '
'repository here?', default=True):
fatal('Please run %s' % style('git init', bold=True))
try:
vgit('init')
git_root = os.getcwd()
except ErrorReturnCode as e:
fatal('An error occurred when trying to initialize a '
'new repo.', e.exit_code)
if git_root == aeriscloud_path:
fatal('You cannot init AerisCloud from the AerisCloud directory!')
return git_root
def _ask_general_info(git_root):
_title('General')
# Take the basename of the git root as default name for the application
default_name = os.path.basename(git_root) \
.lower()
app_name = None
app_id = None
    click.secho('\nSelect your %s (only lowercase letters, numbers and '
                'dashes are accepted).' % style('application name', bold=True))
while True:
app_name = get_input('[default: %s] > ' % default_name)
if not app_name:
app_name = default_name
if re.match("^[a-z0-9-]+$", app_name):
break
else:
click.echo('Only lowercase letters and numbers are accepted.')
organization_list = AerisCompletableList(get_organization_list())
click.secho('\nWhich organization contains the playbook you want to '
'use to provision the boxes of this project?')
organization = AerisPrompt('> ', completer=organization_list).get_input()
if not organization:
organization = config.get('config', 'default_organization',
default=None)
click.echo('\nProvide your %s.\n'
'If you don\'t have one, ask your system operators!' %
style('application ID', bold=True))
while True:
app_id = get_input('> ')
if len(app_id) > 0:
try:
app_id = int(app_id)
if app_id > 0:
break
warning('Please enter a valid ID.')
except ValueError:
warning('Please enter a valid ID.')
return app_name, organization, app_id
def _show_service_list(service_list, enabled_services):
"""
    Show the service list, coloring enabled services; return the list of
    non-default services.
    TODO: find a better function name, as it does more than just show services
"""
if not enabled_services:
enabled_services = []
service_complete = {}
for service, service_info in sorted(service_list.iteritems()):
if not service_info['default']:
service_complete[service] = service_info['description']
if service in enabled_services:
click.secho('* %s - %s' % (
style(service, bold=True, fg='green'),
service_info['description']
))
else:
click.secho('* %s - %s' % (
style(service, bold=True),
service_info['description']
))
return service_complete
def _ask_services(organization, enabled_services=None):
"""
Ask the user which services to enable/disable, returns a list of services
"""
service_list = list_services(organization)
if not service_list:
return []
_title('Services')
click.secho('You now will need to select what %s you would like to '
'install, from the following list:' %
style('services', bold=True))
service_complete = _show_service_list(service_list, enabled_services)
selected_services = AerisCompletableList(service_complete.keys(),
service_complete)
if enabled_services:
for service in enabled_services:
selected_services.select(service)
click.echo('''
Which {0} do you wish to use? (autocomplete available)
You can enter {0} on different lines, or several on the same line separated \
by spaces.
'''.format(style('services', bold=True)))
service_cli = AerisPrompt('> ', completer=selected_services)
while True:
if selected_services.selected:
click.secho('''
Current enabled services: %s
Press %s to validate, or add another %s from the list.
You can remove a service from the list by putting a %s before its name.
''' % (
style(','.join(selected_services.selected) or 'None',
fg='cyan'),
style('ENTER', bold=True),
style('service', bold=True),
style('-', bold=True))
)
services_input = service_cli.get_input()
if services_input:
for service in services_input.strip().split(' '):
if not service:
continue
if service in service_list:
selected_services.select(service)
elif service[0] == '-' \
and service[1:] in service_list:
selected_services.unselect(service[1:])
else:
error('''%s was not recognized as a valid service.
Please enter a valid service.''' % service)
else:
break
return selected_services.selected
def _check_atlas_basebox(name):
if name.count('/') != 1:
click.secho('error: a valid basebox name looks like owner/box_name',
fg='red')
return False
res = requests.get('https://atlas.hashicorp.com/api/v1/box/%s' % name)
if res.status_code != 200:
error = json.loads(res.content)
click.secho('error: %s' % ', '.join(error['errors']), fg='red')
return False
return True
def _fix_basebox_url(url):
"""
    Normalize a basebox URL: ensure it has an http scheme and a /meta suffix
"""
if not url.startswith('http'):
url = 'http://%s' % url
if not url.endswith('/meta'):
url += '/meta'
return url
def _ask_basebox():
_title('Basebox')
basebox_list = default_baseboxes
# if we have a custom bucket, also use that
bucket_baseboxes = []
if basebox_bucket():
bucket_baseboxes = list(set([
'%s/%s' % (infra, box[:box.rfind('-')])
for infra, boxes in baseboxes().iteritems()
for box in boxes
]))
basebox_list += bucket_baseboxes
basebox_list.sort()
selected_boxes = AerisCompletableList(basebox_list)
prompt = AerisPrompt('> ', completer=selected_boxes)
click.echo("""
You will now need to select a basebox to use for your project, we have small
selection of boxes available if you press TAB. It is recommended you use a
box that matches your production servers.
You can also enter any box you find on Atlas:
https://atlas.hashicorp.com/boxes/search?provider=virtualbox
If none is entered, %s will be selected as a default.
""" % (click.style(default_basebox, bold=True)))
while True:
basebox = prompt.get_input().strip()
if basebox:
if basebox not in bucket_baseboxes:
if not _check_atlas_basebox(basebox):
continue
return basebox, None
else:
return basebox, _fix_basebox_url(basebox_bucket())
else:
break
return default_basebox, None
def _ask_project_details(git_root, project):
if not project.initialized():
(app_name, organization, app_id) = _ask_general_info(git_root)
services = _ask_services(organization)
basebox, basebox_url = _ask_basebox()
project.set_name(app_name)
project.set_organization(organization)
project.set_id(app_id)
project.add_box({
'basebox': basebox
}, basebox_url)
else:
        if not click.confirm('There is already a project configured in this '
                             'folder, do you want to modify its config?'):
fatal('aborting')
services = _ask_services(project.organization(), project.services())
project.set_services(services)
def _up(box, make):
click.echo('\nStarting box %s\n' %
style(box.name(), bold=True))
if not box.is_running():
start_box(box)
else:
box.vagrant('provision')
if make:
click.echo('\nRunning make %s in your box\n' %
style(make, bold=True))
if box.ssh_shell('make %s' % make) != 0:
fatal('make command failed!')
if sync(box, 'down') is False:
# sync failed, message should already be displayed, exit
sys.exit(1)
@click.command(cls=Command)
@click.option('-u', '--up', is_flag=True)
@click.option('-m', '--make',
metavar='<command>')
@click.option('-b', '--box', 'box_name',
help='Which box to use for make')
@click.argument('folder', required=False)
def cli(up, make, box_name, folder):
"""
Initialize a new AerisCloud project
"""
if not folder:
folderpath = os.getcwd()
else:
folderpath = os.path.join(os.curdir, folder)
relpath = os.path.relpath(folderpath, projects_path())
if relpath[:2] == '..':
warning("""You are trying to create a new project in %s
which is outside of your projects' directory (%s).""" %
(os.path.abspath(folderpath), projects_path()))
folder_in_projectpath = os.path.join(projects_path(), folder)
if click.confirm("Do you want to create it in %s instead?" %
folder_in_projectpath, default=True):
folderpath = folder_in_projectpath
if not os.path.exists(folderpath):
os.makedirs(folderpath)
os.chdir(folderpath)
git_root = _get_git_root()
project = Project(git_root)
_ask_project_details(git_root, project)
click.echo('\nWriting .aeriscloud.yml ... ', nl=False)
project.save()
click.echo('done')
move_shell_to(project.folder())
if up or make:
# Retrieve the proper box
box = project.box(box_name)
_up(box, make)
if __name__ == '__main__':
cli()
|
## =========================================================
## bot_v_bot.py
## ---------------------------------------------------------
import time
from nsl.go import agent
# Import goboard
# There are three versions with different degree of optimizations:
#| from nsl.go import goboard_slow as goboard
from nsl.go import goboard
#| from nsl.go import goboard_fast as goboard
from nsl.go import gotypes
from nsl.go.utils import print_board, print_move, clear_screen
## =========================================================
## Main
## ---------------------------------------------------------
def bot_v_bot(board_size=9, sleep=0.3):
"""A random bot playing against another random bot.
"""
game = goboard.GameState.new_game(board_size)
bots = {
gotypes.Player.black: agent.naive.RandomBot(),
gotypes.Player.white: agent.naive.RandomBot(),
}
while not game.is_over():
# We set a sleep timer to 0.3 seconds so that bot moves aren't
# printed too fast to observe
time.sleep(sleep)
# Before each move we clear the screen. This way the board is
# always printed to the same position on the command line.
#| print(chr(27) + "[2J")
clear_screen()
# Print a header
print('')
print(' nslgo')
print('')
# Print the board
print_board(game.board)
print('')
# Calculate and print the next move
bot_move = bots[game.next_player].select_move(game)
print_move(game.next_player, bot_move)
print('')
# Apply the move
game = game.apply_move(bot_move)
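if __name__ == '__main__':
    # Entry-point sketch (an assumption; the module only defines bot_v_bot):
    # play a 9x9 game between two random bots.
    bot_v_bot()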
## =========================================================
## =========================================================
## fin.
|
def test_convert_bytes32_to_num_overflow(assert_tx_failed, get_contract_with_gas_estimation):
code = """
@public
def test1():
y: bytes32 = 0x1000000000000000000000000000000000000000000000000000000000000000
x: int128 = convert(y, 'int128')
"""
c = get_contract_with_gas_estimation(code)
assert_tx_failed(lambda: c.test1())
def test_convert_address_to_num(assert_compile_failed, get_contract_with_gas_estimation):
code = """
@public
def test2():
x: int128 = convert(msg.sender, 'int128')
"""
assert_compile_failed(lambda: get_contract_with_gas_estimation(code), Exception)
|
"""
Change Static IP for Lightsail VPS automatically and update respective DNS settings
"""
import boto3
import json
from datetime import datetime
from time import sleep
import numpy as np
import os
import logging
### Parameters ###
log_file = r'~/logs/changeStaticIP.log'
debug_level = logging.INFO
vHostedZoneId = '/hostedzone/Z04791493QXIP3UOKM5E2'
vIpHistoryFilename = r'~/logs/static_ip_history.csv'
vIpHistoryFileColumn = 3
# New Application Parameters
vpn_servers = [
{
'instance_name': 'Ubuntu-AU1-2GB',
'static_ip_name': 'StaticIp-AU-Auto',
'DNS_names': ['sanpingshui.com'],
#'DNS_names': ['pxie.info','us.pxie.info'],
'region_name': 'ap-southeast-2'
},
]
def ensure_file_exists(filename):
""" expanduser ~ and convert to current folder if the parent folder does not exist.
filename: Full path, e.g. ~/a.py
"""
newfile = os.path.expanduser(filename)
# if the folder does not exist, convert into current folder
file_folder = os.path.dirname(newfile)
if not os.path.exists(file_folder):
newfile = os.path.basename(newfile)
return newfile
log_file = ensure_file_exists(log_file)
vIpHistoryFilename = ensure_file_exists(vIpHistoryFilename)
# logging
logging.basicConfig(filename=log_file, level=debug_level, filemode='w', format='%(asctime)s ln-%(lineno)d %(levelname)s: %(message)s', datefmt='%Y-%m-%d %H:%M:%S')
log = logging.getLogger('')
# App root path
root_path=os.path.dirname(os.path.realpath(__file__))
# boto3.setup_default_session(region_name='us-west-2')
# lsclient = boto3.client('lightsail')
# lsclient = boto3.client('lightsail', region_name='us-west-2')
rtclient = boto3.client('route53')
def writeFile(filename, strText):
f = open(filename,'w') # nuke or create the file !
f.write(strText)
f.close()
def getStaticIp(vStaticIpName_,lsclient):
static_ip_response = ''
try:
static_ip_response = lsclient.get_static_ip(
# staticIpName = 'StaticIp-Oregon-New'
staticIpName = vStaticIpName_
)
except Exception as ex:
print('Call to get_static_ip failed with exception as below:')
print(str(ex))
#log.info (static_ip_response)
if static_ip_response != '':
if str(static_ip_response['ResponseMetadata']['HTTPStatusCode']) == '200':
log.info ( 'staticIp name: ' + static_ip_response['staticIp']['name'] )
log.info ( 'staticIp ipAddress: ' + static_ip_response['staticIp']['ipAddress'] )
log.info ( 'Attached to: ' + static_ip_response['staticIp']['attachedTo'] )
return static_ip_response['staticIp']['ipAddress']
def getStaticIps(lsclient):
static_ips_response = lsclient.get_static_ips()
print ( 'static_ips_response:\n',static_ips_response )
# Allocate a new static IP
def allocateStaticIp( vStaticIpName_, lsclient ):
allocate_static_ip_resp = ''
try:
allocate_static_ip_resp = lsclient.allocate_static_ip(
staticIpName = vStaticIpName_
)
except Exception as ex:
print('Call to allocate_static_ip failed with exception as below:')
print(str(ex))
# log.info (allocate_static_ip_resp)
if allocate_static_ip_resp != '':
if (str(allocate_static_ip_resp['ResponseMetadata']['HTTPStatusCode']) == '200' and
str(allocate_static_ip_resp['operations'][0]['status']) == 'Succeeded'):
# log.info ( 'region Name: ' + allocate_static_ip_resp['operations'][0]['location']['regionName'] )
log.info ( 'StaticIp is created: ' + allocate_static_ip_resp['operations'][0]['resourceName'] )
# Attach a new static IP
def attachStaticIp( vStaticIpName_, vInstanceName_, lsclient):
attach_static_ip_resp = ''
try:
attach_static_ip_resp = lsclient.attach_static_ip(
staticIpName = vStaticIpName_,
instanceName = vInstanceName_
)
except Exception as ex:
print('Call to attach_static_ip failed with exception as below:')
print(str(ex))
# log.info (attach_static_ip_resp)
if attach_static_ip_resp != '':
if (str(attach_static_ip_resp['ResponseMetadata']['HTTPStatusCode']) == '200' and
str(attach_static_ip_resp['operations'][0]['status']) == 'Succeeded'):
# log.info ( 'region Name: ' + allocate_static_ip_resp['operations'][0]['location']['regionName'] )
log.info ( 'StaticIp is attached to: ' + attach_static_ip_resp['operations'][0]['operationDetails'] )
# Release the old static IP
def releaseStaticIp( vStaticIpName_, lsclient):
release_static_ip_resp = ''
try:
release_static_ip_resp = lsclient.release_static_ip(
staticIpName = vStaticIpName_
)
except Exception as ex:
print('Call to release_static_ip failed with exception as below:')
print(str(ex))
#log.info (release_static_ip_resp)
if release_static_ip_resp != '':
if (str(release_static_ip_resp['ResponseMetadata']['HTTPStatusCode']) == '200' and
str(release_static_ip_resp['operations'][0]['status']) == 'Succeeded'):
log.info ( 'StaticIp is released: ' + release_static_ip_resp['operations'][0]['resourceName'] )
# Change DNS A record
def changeDNS( vHostedZoneId_, vDNS_name_, vIpAddress_):
change_resource_record_sets_resp = ''
try:
change_resource_record_sets_resp = rtclient.change_resource_record_sets(
HostedZoneId = vHostedZoneId_,
ChangeBatch={
'Comment': 'change DNS A record',
'Changes': [
{
'Action': 'UPSERT',
'ResourceRecordSet': {
'Name': vDNS_name_,
'Type': 'A',
'TTL': 300,
'ResourceRecords': [
{
'Value': vIpAddress_
}
]
}
}
]
}
)
except Exception as ex:
print('Call to change_resource_record_sets failed with exception as below:')
print(str(ex))
# log.info (change_resource_record_sets_resp)
if change_resource_record_sets_resp != '':
if (str(change_resource_record_sets_resp['ResponseMetadata']['HTTPStatusCode']) == '200' and
str(change_resource_record_sets_resp['ChangeInfo']['Status']) == 'PENDING'):
log.info ( 'DNS is being updated: ' + vDNS_name_ + ' - ' + vIpAddress_ )
# List DNS A record
def listDNS_A_record( vHostedZoneId_, vSubDomainName_):
vDomainName = vSubDomainName_ + '.'
list_resource_record_sets_resp = ''
try:
list_resource_record_sets_resp = rtclient.list_resource_record_sets(
HostedZoneId = vHostedZoneId_
)
except Exception as ex:
print('Call to list_resource_record_sets failed with exception as below:')
print(str(ex))
# log.info (list_resource_record_sets_resp)
if list_resource_record_sets_resp != '':
if str(list_resource_record_sets_resp['ResponseMetadata']['HTTPStatusCode']) == '200':
for record in list_resource_record_sets_resp['ResourceRecordSets']:
if record['Type'] == 'A' and record['Name'] == vDomainName:
log.info('Checking DNS setting: ' + record['Name'] + ' - ' + record['ResourceRecords'][0]['Value'])
return record['ResourceRecords'][0]['Value']
def isIpAddressExist(vFilename_,vTargetIp_):
# File content format
# 10.1.1.1,2018-10-01_05-00-00
if not os.path.exists(vFilename_):
        log.info(vFilename_ + ' does not exist.')
return False
try:
ip_loadtxt = np.loadtxt(vFilename_,dtype=str, delimiter=',')
#except OSError as ex:
# print('Error loading file with error:\n' + str(ex))
except Exception as ex:
log.error(str(ex))
return False
else:
#log.info(ip_loadtxt.reshape(-1,2))
        # read the IPs from column 0
        for i in ip_loadtxt.reshape(-1,vIpHistoryFileColumn)[:,0]:
#log.info(i)
if i == vTargetIp_:
log.info ('Found matched ip:' + i)
return True
        else: # An empty file will also fall through to here.
            # log.info('No matching ip found.')
return False
def writeIpHistoryFile(vFilename_,vIpAddress_,vTries_):
cur_dt = datetime.now().strftime('%Y-%m-%d_%H-%M-%S')
f = open(vFilename_,"a")
f.write( ','.join( [vIpAddress_, cur_dt, vTries_] ) + '\n')
# f.write(vIpAddress_ + ',' + cur_dt + ',' + vTries_ + '\n')
f.close()
def main():
# cur_dt = datetime.now().strftime('%Y-%m-%d_%H-%M-%S')
max_retry=3
for server in vpn_servers:
vStaticIpName = server['static_ip_name']
vInstanceName = server['instance_name']
vRegionName = server['region_name']
vDNS_names = server['DNS_names']
lsclient = boto3.client('lightsail', region_name=vRegionName )
for i in range(max_retry):
# log.info ('Time: ' + datetime.now().strftime('%Y-%m-%d %H:%M:%S'))
log.info ('======================================')
log.info ('*****Static IP before relocation:*****')
getStaticIp(vStaticIpName,lsclient)
releaseStaticIp(vStaticIpName, lsclient)
sleep(1)
allocateStaticIp(vStaticIpName, lsclient)
attachStaticIp(vStaticIpName, vInstanceName, lsclient)
sleep(1)
log.info ('****Static IP after relocation:*****')
vStaticIP = getStaticIp(vStaticIpName, lsclient)
# Update respective DNS mapping
if (vStaticIP != None and isIpAddressExist(vIpHistoryFilename,vStaticIP) == False):
log.info('Static IP is re-allocated successfully.')
writeIpHistoryFile(vIpHistoryFilename, vStaticIP, str(i+1))
for dns in vDNS_names:
changeDNS( vHostedZoneId, dns, vStaticIP)
sleep(1)
# check
for dns in vDNS_names:
listDNS_A_record( vHostedZoneId, dns)
break
# wait for some time for next loop
sleep(1)
else:
log.error('Failed to get a new static IP in ' + str(max_retry) + ' attempts.' )
if __name__ == '__main__':
main()
|
import zipfile
import os
import glob
import json
from openstudio_server import OpenStudioServerAPI
def mos_exists(path):
    mos = glob.glob(f'{path}/datapoint/*/modelica.mos')
    if mos and len(mos) == 1:
        # exactly one mos file: return it
        return mos[0]
    elif mos and len(mos) > 1:
        raise Exception(f"There is more than one mos file in {path}")
    else:
        # no mos file found
        return None
def process_mos(id, mospath):
print(f'[{id}] Processing MOS: {mospath}')
# read in the results.json to parse the building characteristics
r_json_name = os.path.join(os.path.dirname(mospath), '../results.json')
new_mospath = os.path.join(os.path.dirname(mospath), '../../../../../loads/')
if not os.path.exists(new_mospath):
os.makedirs(new_mospath)
if os.path.exists(r_json_name):
r_json = json.loads(open(r_json_name, 'rt').read())
building_type = r_json['create_doe_prototype_building']['building_type']
climate_zone = r_json['create_doe_prototype_building']['climate_zone'].split('-')[2]
vintage = r_json['create_doe_prototype_building']['template'].replace(' ', '_')
with open(mospath, 'rt') as f:
data = f.read()
data = data.replace("{{BUILDINGTYPE}}", building_type)
data = data.replace("{{CLIMATEZONE}}", climate_zone)
data = data.replace("{{VINTAGE}}", vintage)
data = data.replace("{{SIMID}}", id)
new_file = os.path.join(new_mospath, f'{building_type}-{vintage}-{climate_zone}.mos')
with open(new_file, 'wt') as f2:
f2.write(data)
else:
raise Exception(f'Could not find results.json in dir: {os.path.dirname(mospath)}')
return None
def process_directory(id, path):
    mospath = mos_exists(path)
    if mospath:
        process_mos(id, mospath)
    else:
        raise Exception(f"Could not find a valid MOS file in {path}")
# main block
# get the analysis id out of the pat.json (make sure that you "saved" the pat project after you started simulations)
pat_json = json.loads(open(os.path.join('pat-project', 'pat.json'), 'rt').read())
analysis_id = pat_json['analysisID']
remote_url = pat_json['remoteSettings']['remoteServerURL']
print(f"Remove server is {remote_url}")
protocol, host, port = remote_url.split(':')
host = f"{protocol}:{host}"
port = port.replace('/', '')
oss = OpenStudioServerAPI(host, port)
if not oss.alive():
    raise Exception("The OpenStudio Server host is not up; start it before downloading datapoints")
# use the pat_json to look at all the datapoints
sims = oss.get_analysis_results(analysis_id)
for sim in sims['data']:
if sim['status'] == 'completed' and sim['status_message'] == 'completed normal':
print(f"[{sim['_id']}] Simulation complete with name {sim['name']}")
# check if the data_point has been downloaded
zipfilename = os.path.join('pat-project', 'localResults', sim['_id'], 'data_point.zip')
if os.path.exists(zipfilename):
print(f"[{sim['_id']}] Datapoint already downloaded")
else:
print(f"[{sim['_id']}] Downloading data_point.zip into {os.path.dirname(zipfilename)}")
# verify that the dir exists, PAT truncates after 160 datapoints, so need to manually add
if not os.path.exists(os.path.dirname(zipfilename)):
os.makedirs(os.path.dirname(zipfilename))
oss.download_datapoint_report(sim['_id'], 'data_point.zip', f'./{os.path.dirname(zipfilename)}')
        if not os.path.exists(zipfilename):
            raise Exception(f"{sim['_id']} Could not find the downloaded data_point zipfile; the download may have failed")
# check if the zip has been extracted by looking for the datapoint directory
unzip_path = os.path.join(os.path.dirname(zipfilename), 'datapoint')
if not os.path.exists(unzip_path):
try:
with zipfile.ZipFile(zipfilename, 'r') as zip_ref:
zip_ref.extractall(unzip_path)
except zipfile.BadZipFile:
raise Exception(f"Could not process ZipFile: {unzip_path}")
# now process the directory to cleanup the mos file
process_directory(sim['_id'], os.path.dirname(zipfilename))
|
from alarm import Alarm
from utilities import Days
import unittest
from unittest.mock import patch
class AlarmComposite(object):
def __init__(self):
self.alarms = []
def add_alarm(self, alarm):
self.alarms.append(alarm)
def add_alarms(self, *args):
for alarm in args:
self.alarms.append(alarm)
def remove_alarm(self, alarm):
self.alarms.remove(alarm)
@property
def active(self):
return any(alarm.active for alarm in self.alarms)
@property
def target_days(self):
result = Days(0)
for alarm in self.alarms:
result |= alarm.target_days
return result
@property
def target_hour(self):
return self._find_nearest_alarm().target_hour
@property
def target_minute(self):
return self._find_nearest_alarm().target_minute
@property
def next_day(self):
return self._find_nearest_alarm().next_day
def get_desired_brightness(self):
# return greatest brightness
if len(self.alarms) == 0:
return 0
result = self.alarms[0].get_desired_brightness()
for alarm in self.alarms:
temp = alarm.get_desired_brightness()
if temp > result:
result = temp
return result
@property
def value(self):
# return greatest value
if len(self.alarms) == 0:
return 0
result = self.alarms[0].value
for alarm in self.alarms:
temp = alarm.value
if temp > result:
result = temp
return result
def _find_nearest_alarm(self):
if len(self.alarms) == 0:
raise Exception("Must have at least one alarm")
nearest = self.alarms[0]
for alarm in self.alarms:
if alarm.target_days != Days(0) and alarm.target_datetime < nearest.target_datetime:
nearest = alarm
return nearest
class _TestTargetDaysReturnsOrOfAllDays(unittest.TestCase):
    def test_empty_composite_yields_no_days(self):
composite = AlarmComposite()
self.assertEqual(Days(0), composite.target_days)
def test_single_yields_that(self):
composite = AlarmComposite()
alarm = Alarm()
composite.add_alarm(alarm)
self.assertEqual(Days(0), composite.target_days)
alarm.add_target_day(Days.MONDAY)
self.assertEqual(Days.MONDAY, composite.target_days)
alarm.remove_target_day(Days.MONDAY)
self.assertEqual(Days(0), composite.target_days)
alarm.add_target_day(Days.ALL)
self.assertEqual(Days.ALL, composite.target_days)
alarm.remove_target_day(Days.ALL)
alarm.add_target_day(Days.WEDNESDAY)
self.assertEqual(Days.WEDNESDAY, composite.target_days)
def test_double_yields_or(self):
composite = AlarmComposite()
alarm1 = Alarm()
alarm2 = Alarm()
composite.add_alarms(alarm1, alarm2)
alarm1.add_target_day(Days.MONDAY | Days.TUESDAY)
self.assertEqual(Days.MONDAY | Days.TUESDAY, composite.target_days)
alarm2.add_target_day(Days.MONDAY | Days.FRIDAY)
self.assertEqual(Days.MONDAY | Days.TUESDAY | Days.FRIDAY, composite.target_days)
class _TestTargetHourReturnsNearestHour(unittest.TestCase):
def test_empty_composite_throws(self):
composite = AlarmComposite()
with self.assertRaises(Exception) as context:
composite.target_hour
self.assertTrue("Must have at least one alarm" in str(context.exception))
def test_single_yields_that(self):
composite = AlarmComposite()
alarm = Alarm()
composite.add_alarm(alarm)
self.assertEqual(0, composite.target_hour)
alarm.target_hour = 5
self.assertEqual(5, composite.target_hour)
def test_double_yields_nearest(self):
from datetime import datetime
with patch("utilities.TestableDateTime") as mock_date:
mock_date.now.return_value = datetime(2006, 1, 1, 6, 0) # 6:00, Sunday, Jan 1st, 2006
composite = AlarmComposite()
alarm1 = Alarm()
alarm2 = Alarm()
composite.add_alarms(alarm1, alarm2)
alarm1.add_target_day(Days.ALL)
alarm1.target_hour = 7
alarm2.add_target_day(Days.ALL)
alarm2.target_hour = 8
self.assertEqual(7, composite.target_hour)
def test_double_yields_nearest_two(self):
from datetime import datetime
with patch("utilities.TestableDateTime") as mock_date:
mock_date.now.return_value = datetime(2006, 1, 1, 6, 0) # 6:00, Sunday, Jan 1st, 2006
composite = AlarmComposite()
alarm1 = Alarm()
alarm2 = Alarm()
composite.add_alarms(alarm1, alarm2)
alarm1.add_target_day(Days.MONDAY)
alarm1.target_hour = 7
alarm2.add_target_day(Days.SUNDAY)
alarm2.target_hour = 8
self.assertEqual(8, composite.target_hour)
class _TestTargetMinuteReturnsNearestHour(unittest.TestCase):
def test_empty_composite_throws(self):
composite = AlarmComposite()
with self.assertRaises(Exception) as context:
composite.target_minute
self.assertTrue("Must have at least one alarm" in str(context.exception))
def test_single_yields_that(self):
composite = AlarmComposite()
alarm = Alarm()
composite.add_alarm(alarm)
self.assertEqual(0, composite.target_minute)
alarm.target_minute = 30
self.assertEqual(30, composite.target_minute)
def test_double_yields_nearest(self):
from datetime import datetime
with patch("utilities.TestableDateTime") as mock_date:
mock_date.now.return_value = datetime(2006, 1, 1, 6, 0) # 6:00, Sunday, Jan 1st, 2006
composite = AlarmComposite()
alarm1 = Alarm()
alarm2 = Alarm()
composite.add_alarms(alarm1, alarm2)
alarm1.add_target_day(Days.ALL)
alarm1.target_minute = 15
alarm2.add_target_day(Days.ALL)
alarm2.target_minute = 30
self.assertEqual(15, composite.target_minute)
def test_double_yields_nearest_two(self):
from datetime import datetime
with patch("utilities.TestableDateTime") as mock_date:
mock_date.now.return_value = datetime(2006, 1, 1, 6, 0) # 6:00, Sunday, Jan 1st, 2006
composite = AlarmComposite()
alarm1 = Alarm()
alarm2 = Alarm()
composite.add_alarms(alarm1, alarm2)
alarm1.add_target_day(Days.MONDAY)
alarm1.target_hour = 6
alarm1.target_minute = 0
alarm2.add_target_day(Days.SUNDAY)
alarm2.target_hour = 8
alarm2.target_minute = 30
self.assertEqual(30, composite.target_minute)
class _TestNextDayReturnsNearestNextDay(unittest.TestCase):
    def test_empty_composite_throws(self):
        composite = AlarmComposite()
        with self.assertRaises(Exception) as context:
            composite.next_day
self.assertTrue("Must have at least one alarm" in str(context.exception))
def test_single_yields_that(self):
composite = AlarmComposite()
alarm = Alarm()
composite.add_alarm(alarm)
alarm.add_target_day(Days.MONDAY)
self.assertEqual(Days.MONDAY, composite.next_day)
def test_double_yields_nearest(self):
from datetime import datetime
with patch("utilities.TestableDateTime") as mock_date:
mock_date.now.return_value = datetime(2006, 1, 1, 6, 0) # 6:00, Sunday, Jan 1st, 2006
composite = AlarmComposite()
alarm1 = Alarm()
alarm2 = Alarm()
composite.add_alarms(alarm1, alarm2)
alarm1.add_target_day(Days.FRIDAY)
alarm2.add_target_day(Days.SATURDAY)
self.assertEqual(Days.FRIDAY, composite.next_day)
def test_double_yields_nearest_two(self):
from datetime import datetime
with patch("utilities.TestableDateTime") as mock_date:
mock_date.now.return_value = datetime(2006, 1, 1, 6, 0) # 6:00, Sunday, Jan 1st, 2006
composite = AlarmComposite()
alarm1 = Alarm()
alarm2 = Alarm()
composite.add_alarms(alarm1, alarm2)
alarm1.add_target_day(Days.MONDAY)
alarm1.target_hour = 5
alarm1.target_minute = 0
alarm2.add_target_day(Days.SUNDAY)
alarm2.target_hour = 5
alarm2.target_minute = 0
self.assertEqual(Days.MONDAY, composite.next_day)
class _TestDesiredBrightnessYieldsGreatest(unittest.TestCase):
    def test_empty_composite_yields_zero(self):
composite = AlarmComposite()
self.assertEqual(0, composite.get_desired_brightness())
def test_single_yields_that(self):
from datetime import datetime
with patch("utilities.TestableDateTime") as mock_date:
mock_date.now.return_value = datetime(2006, 1, 1, 6, 0) # 6:00, Sunday, Jan 1st, 2006
composite = AlarmComposite()
alarm = Alarm()
composite.add_alarm(alarm)
alarm.add_target_day(Days.SUNDAY)
alarm.target_hour = 6
alarm.target_minute = 5
self.assertEqual(alarm.get_desired_brightness(), composite.get_desired_brightness())
def test_double_yields_nearest(self):
from datetime import datetime
with patch("utilities.TestableDateTime") as mock_date:
mock_date.now.return_value = datetime(2006, 1, 1, 6, 0) # 6:00, Sunday, Jan 1st, 2006
composite = AlarmComposite()
alarm1 = Alarm()
alarm2 = Alarm()
composite.add_alarms(alarm1, alarm2)
alarm1.add_target_day(Days.FRIDAY)
alarm1.target_hour = 6
alarm1.target_minute = 0
alarm2.add_target_day(Days.SUNDAY)
alarm2.target_hour = 6
alarm2.target_minute = 20
self.assertEqual(alarm2.get_desired_brightness(), composite.get_desired_brightness())
def test_double_yields_nearest_two(self):
from datetime import datetime
with patch("utilities.TestableDateTime") as mock_date:
mock_date.now.return_value = datetime(2006, 1, 1, 6, 0) # 6:00, Sunday, Jan 1st, 2006
composite = AlarmComposite()
alarm1 = Alarm()
alarm2 = Alarm()
composite.add_alarms(alarm1, alarm2)
alarm1.add_target_day(Days.SUNDAY)
alarm1.target_hour = 6
alarm1.target_minute = 15
alarm2.add_target_day(Days.SUNDAY)
alarm2.target_hour = 6
alarm2.target_minute = 30
self.assertEqual(alarm1.get_desired_brightness(), composite.get_desired_brightness())
if __name__ == "__main__":
unittest.main()
|
# static library
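# NOTE: `tool` and `genv` are assumed to be provided by the enclosing build
# framework before this script is evaluated; they are not defined here.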
src_list= [
'function.cpp',
]
task_list= []
env= tool.createTargetEnvironment()
env.setConfig( 'Debug' )
env.refresh()
task_list.append( tool.addLibTask( env, 'test', src_list ) )
env= tool.createTargetEnvironment()
env.setConfig( 'Release' )
env.refresh()
task_list.append( tool.addLibTask( env, 'test', src_list ) )
tool.addGroupTask( genv, 'build', task_list )
tool.addCleanTask( genv, 'clean' )
|
import mrjob
from mrjob.job import MRJob
from utils import serialization, logging
import constants.internal as const
class EngineJob(MRJob):
'''
Base class for all M/R Jobs of this ML engine.
'''
def init(self):
try:
if self.is_initialized:
return
except AttributeError:
# assuming configuration is a dictionary
conf = serialization.load_object('configuration.pkl')
# create attribute for each object in the loaded configuration
# data_handler always has to be present in configuration
            for key, value in conf.items():  # items() works on both Python 2 and 3
setattr(self, key, value)
self.pre_processor = self.data_handler.get_pre_processor()
self.data_processor = self.data_handler.get_data_processor()
self.data_conf = self.data_handler.get_configuration()
# select job configuration according to phase
phase = self.data_handler.get_phase()
if phase == const.PHASE_PRE_PROC:
self.job_conf = self.data_conf.get_pre_proc_conf()
elif phase == const.PHASE_TRAINING:
self.job_conf = self.data_conf.get_training_conf()
elif phase == const.PHASE_VALIDATION:
self.job_conf = self.data_conf.get_validation_conf()
else:
logging.warn('No job conf for current phase: ' + phase)
self.job_conf = None
self.is_initialized = True
def get_data_handler(self):
return getattr(self, const.DATA_HANDLER)
def get_pre_processor(self):
return self.get_data_handler().get_pre_processor()
def get_data_processor(self):
return self.get_data_handler().get_data_processor()
def get_statistics(self):
return self.get_data_handler().get_statistics()
def get_alg_factory(self):
return getattr(self, const.ALG_FACTORY)
def get_validator(self):
return getattr(self, const.VALIDATOR)
def get_trained_alg(self):
return getattr(self, const.TRAINED_ALG)
def steps(self):
return [
self.mr( mapper_init = self.init,
mapper = self.mapper,
reducer_init = self.init,
reducer = self.reducer )]
def input_protocol(self):
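        # This and the accessors below retry after a lazy init() call, since
        # mrjob may ask for protocols before mapper_init has created job_conf.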
try:
input_protocol = self.job_conf.get_input_protocol()
except AttributeError:
self.init()
input_protocol = self.job_conf.get_input_protocol()
return input_protocol()
def internal_protocol(self):
try:
internal_protocol = self.job_conf.get_internal_protocol()
except AttributeError:
self.init()
internal_protocol = self.job_conf.get_internal_protocol()
return internal_protocol()
def output_protocol(self):
try:
output_protocol = self.job_conf.get_output_protocol()
except AttributeError:
self.init()
output_protocol = self.job_conf.get_output_protocol()
return output_protocol()
def hadoop_input_format(self):
try:
input_format = self.job_conf.get_hadoop_input_format()
except AttributeError:
self.init()
input_format = self.job_conf.get_hadoop_input_format()
return input_format
def jobconf(self):
try:
custom_jobconf = self.job_conf.get_job_conf()
except AttributeError:
self.init()
custom_jobconf = self.job_conf.get_job_conf()
orig_jobconf = super(EngineJob, self).jobconf()
return mrjob.conf.combine_dicts(orig_jobconf, custom_jobconf)
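
# A minimal sketch of a concrete job built on EngineJob; the word-count
# mapper/reducer below is an illustrative assumption, not part of the engine:
#
# class WordCountJob(EngineJob):
#     def mapper(self, _, line):
#         for word in line.split():
#             yield word, 1
#
#     def reducer(self, word, counts):
#         yield word, sum(counts)
#
# if __name__ == '__main__':
#     WordCountJob.run()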
|
from typing import List


class Solution:
    def generate(self, numRows: int) -> List[List[int]]:
        # Start each row filled with 1s; row i has i + 1 entries.
        rows = [[1] * (i + 1) for i in range(numRows)]
        # Every interior entry is the sum of the two entries above it.
        for i in range(2, numRows):
            for j in range(1, i):
                rows[i][j] = rows[i - 1][j - 1] + rows[i - 1][j]
        return rows
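
# Illustrative check (hypothetical driver, not part of the LeetCode harness):
# Solution().generate(4) == [[1], [1, 1], [1, 2, 1], [1, 3, 3, 1]]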
|
import sys
import os
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
from lib.BootpayApi import BootpayApi
bootpay = BootpayApi('59bfc738e13f337dbd6ca48a', 'pDc0NwlkEX3aSaHTp/PPL/i8vn5E/CqRChgyEp/gHD0=')
result = bootpay.get_access_token()
if result['status'] == 200:  # 'is' compares identity, not value
print(bootpay.certificate('1234'))
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models
import bcrypt
import re
class UserManager(models.Manager):
def basic_validator(self, postData):
EMAIL_REGEX = re.compile(r'^[a-zA-Z0-9.+_-]+@[a-zA-Z0-9._-]+\.[a-zA-Z]+$')
errors = {}
if len(postData['name']) < 3:
errors['name'] = "Name is required, must be at least 3 characters!"
if len(postData['alias']) < 3:
errors['alias'] = "Alias is required, must be at least 3 characters!"
elif len(User.objects.filter(alias=postData["alias"])):
errors['alias'] = "Alias already in use, try again"
if not EMAIL_REGEX.match(postData['email']):
errors['email'] = "Must be a valid email!"
elif len(User.objects.filter(email=postData["email"])):
errors['email'] = "Email already in use, try again"
if len(postData['password']) < 8:
errors['password'] = "Password must be at least 8 characters!"
if postData['password'] != postData['confirm']:
errors['confirm'] = "Passwords must match!"
if len(postData['dob']) == 0:
errors['dob'] = "Please enter a Date of Birth!"
return errors
class User(models.Model):
name = models.CharField(max_length=255)
alias = models.CharField(max_length=255)
email = models.EmailField()
password = models.CharField(max_length=255)
dob = models.DateField()
created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)
objects = UserManager()
def __str__(self):
return self.name
class Poke(models.Model):
poker_user = models.ForeignKey(User, related_name="poker")
poked_user = models.ForeignKey(User, related_name="poked")
created_at = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(auto_now = True)
objects = models.Manager()
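
# bcrypt is imported above but unused in this snippet; a registration helper
# would presumably hash the password along these lines (illustrative sketch,
# not part of the original manager):
#
# def register(self, postData):  # would live on UserManager
#     hashed = bcrypt.hashpw(postData['password'].encode(), bcrypt.gensalt())
#     return self.create(name=postData['name'], alias=postData['alias'],
#                        email=postData['email'], password=hashed.decode(),
#                        dob=postData['dob'])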
|
from flask import Flask, render_template
from flask_sqlalchemy import SQLAlchemy
from flask_security import Security, SQLAlchemyUserDatastore, \
UserMixin, RoleMixin, login_required, roles_accepted
from film_search import config
# Create app
app = Flask(__name__)
app.config.from_object(config.Config)
# Create database connection object
db = SQLAlchemy(app)
# Define models
roles_users = db.Table('roles_users',
db.Column('user_id', db.Integer(), db.ForeignKey('user.id')),
db.Column('role_id', db.Integer(), db.ForeignKey('role.id')))
class Role(db.Model, RoleMixin):
id = db.Column(db.Integer(), primary_key=True)
name = db.Column(db.String(80), unique=True)
description = db.Column(db.String(255))
class User(db.Model, UserMixin):
id = db.Column(db.Integer, primary_key=True)
email = db.Column(db.String(255), unique=True)
password = db.Column(db.String(255))
active = db.Column(db.Boolean())
confirmed_at = db.Column(db.DateTime())
roles = db.relationship('Role', secondary=roles_users,
backref=db.backref('users', lazy='dynamic'))
    # Columns used by Flask-Security when SECURITY_TRACKABLE is enabled in the config
last_login_at = db.Column(db.DateTime())
current_login_at = db.Column(db.DateTime())
last_login_ip = db.Column(db.String(15))
current_login_ip = db.Column(db.String(15))
login_count = db.Column(db.Integer())
# Setup Flask-Security
user_datastore = SQLAlchemyUserDatastore(db, User, Role)
security = Security(app, user_datastore)
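# Note: the 'admin' role checked in the view below is assumed to already
# exist; it could be created with, e.g., user_datastore.create_role(name='admin')
# inside an application context.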
# Views
@app.route('/')
@login_required
def home():
return render_template('index.html')
@app.route('/admin')
@roles_accepted('admin')
def admin():
    users = User.query.order_by(User.last_login_at)
return render_template('admin.html',users=users)
if __name__ == '__main__':
app.run('0.0.0.0', port="5000")
|
from typing import Any, Dict, Union, Optional
import aiohttp
from .errors import PokemonNotFound, ConnectionError
__all__ = ("HTTPClient",)
BASE_URL = "https://pokeapi.co/api/v2"
class HTTPClient:
"""
Class which deals with all the Requests made to the API.
"""
def __init__(self) -> None:
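        # Note: constructing a ClientSession outside a running event loop is
        # deprecated in recent aiohttp; creating it lazily in an async method
        # would be more robust.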
self.session = aiohttp.ClientSession()
    async def fetch_pokemon_data(
        self, pokemon: Optional[Union[int, str]] = None
    ) -> Optional[Dict[str, Any]]:
"""
Getting a JSON response from API for the Pokémon ID provided
"""
req_url = f"{BASE_URL}/pokemon/{pokemon}"
        try:
            # async with ensures the response is released after the body is read
            async with self.session.get(req_url) as response:
                if response.status == 404:
                    raise PokemonNotFound(pokemon)
                data = await response.json()
        except aiohttp.ClientConnectionError:
            raise ConnectionError()
        return data
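
# A minimal usage sketch (illustrative; assumes Python 3.7+ with asyncio.run):
#
# import asyncio
#
# async def main():
#     client = HTTPClient()
#     data = await client.fetch_pokemon_data("pikachu")
#     print(data["name"])
#     await client.session.close()
#
# asyncio.run(main())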
|