blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 3 281 | content_id stringlengths 40 40 | detected_licenses listlengths 0 57 | license_type stringclasses 2 values | repo_name stringlengths 6 116 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringclasses 313 values | visit_date timestamp[us] | revision_date timestamp[us] | committer_date timestamp[us] | github_id int64 18.2k 668M ⌀ | star_events_count int64 0 102k | fork_events_count int64 0 38.2k | gha_license_id stringclasses 17 values | gha_event_created_at timestamp[us] | gha_created_at timestamp[us] | gha_language stringclasses 107 values | src_encoding stringclasses 20 values | language stringclasses 1 value | is_vendor bool 2 classes | is_generated bool 2 classes | length_bytes int64 4 6.02M | extension stringclasses 78 values | content stringlengths 2 6.02M | authors listlengths 1 1 | author stringlengths 0 175 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
5217d549706a2f597d04b402bbc9bb8c20a0c4b2 | 6567e62b10027b4219692d9f8a1ba9c9c9871cfd | /model/model_dynamodb.py | 1725e0d1101e459b600e1e2cb1b9feb471885052 | [] | no_license | vlames/memoverse | 1356031b5cddf0d73f87aa0abdb161f88d631525 | 2d365bdce2992a3fea38eebc19958c204dda80b7 | refs/heads/master | 2022-11-22T17:42:59.103971 | 2020-07-16T22:58:19 | 2020-07-16T22:58:19 | 279,144,525 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,217 | py | # File: model/model_dynamodb.py
# Description: the file provides the dynamodb backend storage for memoverse
"""
The memoverse data is stored in the dynamodb database and has the following look:
+-----------+-------------------+-------+--------------+
| Reference | Theme | Verse | Fums |
+===========+===================+=======+==============+
| EX.1.22 | State of the Dead | <p... | <script>... |
+-----------+-------------------+-------+--------------+
This can be created with the following command (see implementation of this file):
resource = boto3.resource("dynamodb", region_name="us-east-1")
resource.create_table(TableName, KeySchema, AttributeDefinitions, ProvisionedThroughput)
"""
from datetime import date
from .model_abstract import Model
import boto3
class model(Model):
def __init__(self):
self.resource = boto3.resource("dynamodb", region_name="us-east-1")
self.table = self.resource.Table("memoverse")
# Makes sure the database exists
try:
self.table.load()
# Creates a table if it does not exist
except:
self.resource.create_table(
TableName="memoverse",
KeySchema=[
{
"AttributeName": "reference",
"KeyType": "HASH"
},
{
"AttributeName": "theme",
"KeyType": "RANGE"
}
],
AttributeDefinitions=[
{
"AttributeName": "reference",
"AttributeType": "S"
},
{
"AttributeName": "theme",
"AttributeType": "S"
}
],
ProvisionedThroughput={
"ReadCapacityUnits": 1,
"WriteCapacityUnits": 1
}
)
# Gets verses data from the database
def select(self):
"""
Gets all rows from the database
Each row contains: reference, theme, verse, and fums
:return: list of lists containing all rows of database
:raises: error if the table scan failed
"""
try:
entries = self.table.scan()
except Exception as e:
return([["Failed to get reference", "Failed to get theme", "Failed to get verse", "Failed to get fums"]])
return ([[entry["reference"], entry["theme"], entry["verse"], entry["fums"]] for entry in entries["Items"]])
# Inserts verses data into the database
def insert(self, reference, theme, verse, fums):
"""
Inserts entry into database
:param reference: String
:param theme: String
:param verse: String
:param fums: String
:return: True or False
"""
aVerse = {
"reference" : reference,
"theme" : theme,
"verse" : verse,
"fums" : fums,
}
try:
self.table.put_item(Item=aVerse)
except:
return False
return True | [
"vlames@pdx.edu"
] | vlames@pdx.edu |
45a0abb7cb04b7724c98f4ae1b983fc5ab8ca6ff | bee0e3bd66de44a8fdf5ef111eeee11c78268d86 | /server/edd/notify/tests.py | 0d19754431fdac324ca2b2ce54e53c208cc9a7b0 | [
"BSD-3-Clause",
"BSD-3-Clause-LBNL"
] | permissive | zhwycsz/edd | 7516d56a7263c90c625b6b5c2c58577a36d6ace7 | bdc1d2f8b5e375d3a1254829b9d2b460dd09ca12 | refs/heads/master | 2020-07-10T22:21:52.152045 | 2019-08-15T04:27:53 | 2019-08-15T04:27:53 | 204,384,337 | 1 | 0 | NOASSERTION | 2019-08-26T02:58:01 | 2019-08-26T02:58:00 | null | UTF-8 | Python | false | false | 13,569 | py | # coding: utf-8
import time
from uuid import uuid4
import pytest
from channels.testing import WebsocketCommunicator
from edd import TestCase, asgi
from main.tests import factory
from . import backend
class NotificationTests(TestCase):
@classmethod
def setUpTestData(cls):
super().setUpTestData()
cls.user = factory.UserFactory()
def test_notification_equality(self):
# simple message
n1 = backend.Notification("Testing Notification")
# preped for storage should be same
n2 = n1.prepare()
# another message
n3 = backend.Notification("Testing Notification")
self.assertEqual(len({n1, n2}), 1)
self.assertEqual(len({n1, n2, n3}), 2)
self.assertEqual(n1, n2)
self.assertNotEqual(n1, n3)
def test_basebroker(self):
broker = backend.BaseBroker(self.user)
# all these methods rely on undefined operations
with self.assertRaises(NotImplementedError):
broker.count()
with self.assertRaises(NotImplementedError):
iter(broker)
with self.assertRaises(NotImplementedError):
broker.mark_all_read()
with self.assertRaises(NotImplementedError):
broker.mark_read(None)
with self.assertRaises(NotImplementedError):
broker.notify("Dummy message")
# these should work
groups = broker.group_names()
self.assertEqual(len(groups), 1)
self.assertIn(self.user.username, groups[0])
def test_redisbroker(self):
broker = backend.RedisBroker(self.user)
# initially empty
self.assertEqual(broker.count(), 0)
# count updates after adding message
broker.notify("Dummy message")
self.assertEqual(broker.count(), 1)
# can iterate over messages
ids = {n.uuid for n in broker}
self.assertEqual(len(ids), 1)
# can remove specific messages
marker_uuid = uuid4()
broker.notify("Dummy message 2")
time.sleep(1)
broker.notify("Dummy message 3", uuid=marker_uuid)
time.sleep(1)
broker.notify("Dummy message 4")
self.assertEqual(broker.count(), 4)
for uuid in ids:
broker.mark_read(uuid)
self.assertEqual(broker.count(), 3)
# can remove older messages
broker.mark_all_read(uuid=marker_uuid)
self.assertEqual(broker.count(), 1)
# can remove all messages
broker.mark_all_read()
self.assertEqual(broker.count(), 0)
@pytest.mark.asyncio
async def test_notification_subscribe_no_user():
communicator = WebsocketCommunicator(asgi.application, "/ws/notify/")
try:
connected, subprotocol = await communicator.connect()
# websocket should initially accept the connection
assert not connected
# then as there is no user, and connection must be accepted to verify, disconnect
assert await communicator.receive_nothing() is True
except Exception as e:
raise AssertionError() from e
finally:
await communicator.disconnect()
@pytest.mark.asyncio
async def test_notification_subscribe_empty():
communicator = WebsocketCommunicator(asgi.application, "/ws/notify/")
try:
# force login with fake user
user = factory.UserFactory.build()
communicator.scope["user"] = user
# websocket will allow connection
connected, subprotocol = await communicator.connect()
assert connected
# initial message will have messages and unread count
response = await communicator.receive_json_from()
assert "messages" in response
assert "unread" in response
assert response["messages"] == []
assert response["unread"] == 0
except Exception as e:
raise AssertionError() from e
finally:
await communicator.disconnect()
@pytest.mark.asyncio
async def test_notification_subscribe_with_messages():
communicator = WebsocketCommunicator(asgi.application, "/ws/notify/")
try:
# force login with fake user
user = factory.UserFactory.build()
communicator.scope["user"] = user
# joe is going to help us send some messages to the fake user
joe = backend.RedisBroker(user)
await joe.async_notify("Hello, world!")
# websocket will allow connection
connected, subprotocol = await communicator.connect()
assert connected
# initial message will have messages and unread count
response = await communicator.receive_json_from()
assert "messages" in response
assert "unread" in response
assert len(response["messages"][0]) == 5
assert response["messages"][0][0] == "Hello, world!"
assert response["unread"] == 1
except Exception as e:
raise AssertionError() from e
finally:
await communicator.disconnect()
@pytest.mark.asyncio
async def test_notification_dismiss():
communicator = WebsocketCommunicator(asgi.application, "/ws/notify/")
try:
# force login with fake user
user = factory.UserFactory.build()
communicator.scope["user"] = user
# joe is going to help us send some messages to the fake user
joe = backend.RedisBroker(user)
marker_uuid = uuid4()
await joe.async_notify("Hello, world!", uuid=marker_uuid)
# websocket will allow connection
connected, subprotocol = await communicator.connect()
assert connected
# initial message will have messages and unread count
response = await communicator.receive_json_from()
assert "messages" in response
assert "unread" in response
assert response["unread"] == 1
# joe is now going to dismiss the message sent earlier
await joe.async_mark_read(marker_uuid)
response = await communicator.receive_json_from()
assert "dismiss" in response
assert "unread" in response
assert response["dismiss"] == str(marker_uuid)
assert response["unread"] == 0
except Exception as e:
raise AssertionError() from e
finally:
await communicator.disconnect()
@pytest.mark.asyncio
async def test_notification_dismiss_all():
communicator = WebsocketCommunicator(asgi.application, "/ws/notify/")
try:
# force login with fake user
user = factory.UserFactory.build()
communicator.scope["user"] = user
# joe is going to help us send some messages to the fake user
joe = backend.RedisBroker(user)
await joe.async_notify("Hello, world!")
# websocket will allow connection
connected, subprotocol = await communicator.connect()
assert connected
# initial message will have messages and unread count
response = await communicator.receive_json_from()
assert "messages" in response
assert "unread" in response
assert response["unread"] == 1
# joe is now going to dismiss the message sent earlier
await joe.async_mark_all_read()
response = await communicator.receive_json_from()
assert "dismiss" in response
assert "unread" in response
assert response["unread"] == 0
except Exception as e:
raise AssertionError() from e
finally:
await communicator.disconnect()
@pytest.mark.asyncio
async def test_notification_incoming():
communicator = WebsocketCommunicator(asgi.application, "/ws/notify/")
try:
# force login with fake user
user = factory.UserFactory.build()
communicator.scope["user"] = user
# joe is going to help us send some messages to the fake user
joe = backend.RedisBroker(user)
# websocket will allow connection
connected, subprotocol = await communicator.connect()
assert connected
# nothing in inbox to start
response = await communicator.receive_json_from()
assert "messages" in response
assert "unread" in response
assert response["unread"] == 0
# joe is now going to send a message
await joe.async_notify("Hello, world!")
response = await communicator.receive_json_from()
assert "messages" in response
assert "unread" in response
assert response["messages"][0][0] == "Hello, world!"
assert response["unread"] == 1
except Exception as e:
raise AssertionError() from e
finally:
await communicator.disconnect()
@pytest.mark.asyncio
async def test_notification_send_dismiss():
communicator = WebsocketCommunicator(asgi.application, "/ws/notify/")
try:
# force login with fake user
user = factory.UserFactory.build()
communicator.scope["user"] = user
# joe is going to help us send some messages to the fake user
joe = backend.RedisBroker(user)
marker_uuid = uuid4()
await joe.async_notify("Hello, world!", uuid=marker_uuid)
# websocket will allow connection
connected, subprotocol = await communicator.connect()
assert connected
# initial message will have messages and unread count
response = await communicator.receive_json_from()
assert "messages" in response
assert "unread" in response
assert response["unread"] == 1
# now dismiss the message via our own channel
await communicator.send_json_to({"dismiss": str(marker_uuid)})
response = await communicator.receive_json_from()
assert "dismiss" in response
assert "unread" in response
assert response["dismiss"] == str(marker_uuid)
assert response["unread"] == 0
except Exception as e:
raise AssertionError() from e
finally:
await communicator.disconnect()
@pytest.mark.asyncio
async def test_notification_send_dismiss_older():
communicator = WebsocketCommunicator(asgi.application, "/ws/notify/")
try:
# force login with fake user
user = factory.UserFactory.build()
communicator.scope["user"] = user
# joe is going to help us send some messages to the fake user
joe = backend.RedisBroker(user)
# manually create a bunch of Notification objects so we can control the time
messages = [
backend.Notification(f"{i}", None, None, i, uuid4()) for i in range(10)
]
for m in messages:
joe._store(m)
# arbitrarily pick out the seventh as the one to submit for dismissal
marker_uuid = messages[7].uuid
# websocket will allow connection
connected, subprotocol = await communicator.connect()
assert connected
# initial message will have messages and unread count
response = await communicator.receive_json_from()
assert "messages" in response
assert "unread" in response
assert response["unread"] == 10
# now dismiss the message via our own channel
await communicator.send_json_to({"dismiss_older": str(marker_uuid)})
response = await communicator.receive_json_from()
assert "dismiss" in response
assert "unread" in response
assert response["dismiss"] == str(marker_uuid)
assert response["unread"] == 2
except Exception as e:
raise AssertionError() from e
finally:
await communicator.disconnect()
@pytest.mark.asyncio
async def test_notification_send_reset():
communicator = WebsocketCommunicator(asgi.application, "/ws/notify/")
try:
# force login with fake user
user = factory.UserFactory.build()
communicator.scope["user"] = user
# websocket will allow connection
connected, subprotocol = await communicator.connect()
assert connected
# initial message will have messages and unread count
response = await communicator.receive_json_from()
assert "messages" in response
assert "unread" in response
assert response["messages"] == []
assert response["unread"] == 0
# now reset via our own channel
await communicator.send_json_to({"reset": True})
response = await communicator.receive_json_from()
assert "reset" in response
except Exception as e:
raise AssertionError() from e
finally:
await communicator.disconnect()
@pytest.mark.asyncio
async def test_notification_send_fetch():
communicator = WebsocketCommunicator(asgi.application, "/ws/notify/")
try:
# force login with fake user
user = factory.UserFactory.build()
communicator.scope["user"] = user
# websocket will allow connection
connected, subprotocol = await communicator.connect()
assert connected
# initial message will have messages and unread count
response = await communicator.receive_json_from()
assert "messages" in response
assert "unread" in response
assert response["messages"] == []
assert response["unread"] == 0
# now reset via our own channel
await communicator.send_json_to({"fetch": True})
response = await communicator.receive_json_from()
assert "messages" in response
assert "unread" in response
assert response["messages"] == []
assert response["unread"] == 0
except Exception as e:
raise AssertionError() from e
finally:
await communicator.disconnect()
| [
"WCMorrell@lbl.gov"
] | WCMorrell@lbl.gov |
7dff4b226731cd79302d718ef9315d7ab9f5ed9f | 90b6c2643e41e374d86048da524d69699810907b | /aula6/criador_de_nota_fiscal.py | d7b7c4ef2db7145b4fb8e0bef061a8534d5dfbe4 | [] | no_license | silviosnjr/Python-Design-Patterns-parte1 | 24d4a8b5b8eff3c14788b2e0c272180825157fed | c831736f5e9c20bd68ee5a9578a2f71bdd107aa4 | refs/heads/master | 2023-08-16T08:33:17.625107 | 2021-10-16T17:56:52 | 2021-10-16T17:56:52 | 417,688,036 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,537 | py | # -*- coding: UTF-8 -*-
from nota_fiscal import Nota_fiscal
from datetime import date
class Criador_de_nota_fiscal(object):
def __init__(self):
self.__razao_social = None
self.__cnpj = None
self.__data_de_emissao = None
self.__itens = None
self.__detalhes = None
def com_razao_social(self, razao_social):
self.__razao_social = razao_social
return self
def com_cnpj(self, cnpj):
self.__cnpj = cnpj
return self
def com_data_de_emissao(self, data_de_emissao):
self.__data_de_emissao = data_de_emissao
return self
def com_itens(self, itens):
self.__itens = itens
return self
def com_detalhes(self, detalhes):
self.__detalhes = detalhes
return self
def constroi(self):
if self.__razao_social is None :
raise Exception("Razão Social deve ser preenchido")
if self.__cnpj is None:
raise Exception("CNPJ deve ser preenchido")
if self.__itens is None :
raise Exception("Itens devem ser preenchidos")
if self.__data_de_emissao is None:
self.__data_de_emissao = date.today()
if self.__detalhes is None:
self.__detalhes = ""
return Nota_fiscal(razao_social=self.__razao_social,
cnpj=self.__cnpj,
data_de_emissao=self.__data_de_emissao,
itens=self.__itens,
detalhes=self.__detalhes) | [
"silviosnjr@gmail.com"
] | silviosnjr@gmail.com |
3f59c6edd6e5a5576e24f61b7997b031a064e4d7 | a62c3f0f641c930d74aa4a43e14b0f1e8de71b5f | /pages/views.py | d3ee28ea642f9016e0fb679d2d6d97a165b998b5 | [
"MIT"
] | permissive | ticotheps/scenic-realty-app | b2b02f509cff51d40d88c07fe5afff7c65c73c0c | c91caaee019d4790d444d02067a1a8e83ed554ba | refs/heads/develop | 2020-12-02T09:37:58.467839 | 2020-02-10T18:15:58 | 2020-02-10T18:15:58 | 230,966,666 | 0 | 0 | MIT | 2020-02-10T18:15:59 | 2019-12-30T19:10:19 | CSS | UTF-8 | Python | false | false | 204 | py | from django.shortcuts import render
from django.http import HttpResponse
def index(request):
return render(request, 'pages/index.html')
def about(request):
return render(request, 'pages/about.html') | [
"ticotheps@gmail.com"
] | ticotheps@gmail.com |
4d7b1226c2be4554257e8f7c5caf6d4ec0fc71b2 | 1085f7ea495be42baf5b4384e47419d867a22b20 | /event/tests.py | 7b6b1fb1a6d263d840df6f396fffe2f9cc595f7c | [
"MIT",
"LicenseRef-scancode-warranty-disclaimer"
] | permissive | lorne-luo/venom | 0c1061fdcfa79176be8ae79cc9fb239f59a82254 | bf6379d9a40dd81f1e01cc230a4eae93452c0f18 | refs/heads/master | 2022-12-28T17:13:08.607339 | 2020-10-12T00:21:17 | 2020-10-12T00:21:17 | 301,604,020 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,697 | py | import unittest
import json
from broker.oanda.common.constants import OrderType
from event.event import SignalEvent, SignalAction, Event
from mt4.constants import OrderSide, PERIOD_M5, pip, calculate_price
from strategy.hlhb_trend import HLHBTrendStrategy
class EventTest(unittest.TestCase):
def test_event(self):
open = SignalEvent(SignalAction.OPEN,
HLHBTrendStrategy.name, HLHBTrendStrategy.version,
HLHBTrendStrategy.magic_number,
instrument='EURUSD',
side=OrderSide.BUY,
order_type=OrderType.MARKET,
stop_loss=30,
take_profit=50,
trailing_stop=None,
percent=None)
close = SignalEvent(SignalAction.CLOSE,
HLHBTrendStrategy.name, HLHBTrendStrategy.version,
HLHBTrendStrategy.magic_number,
instrument='EURUSD',
side=OrderSide.BUY,
percent=0.5)
data = json.dumps(open.to_dict())
data2 = json.loads(data)
open2 = Event.from_dict(data2)
self.assertEqual(open.type, open2.type)
for k in open.__dict__.keys():
self.assertEqual(open.__dict__[k], open2.__dict__[k])
data = json.dumps(close.to_dict())
data2 = json.loads(data)
close2 = Event.from_dict(data2)
self.assertEqual(close.type, close2.type)
for k in close.__dict__.keys():
self.assertEqual(close.__dict__[k], close2.__dict__[k])
| [
"lorneluo@leightonobrien.com"
] | lorneluo@leightonobrien.com |
72892bdf8fdf2f6871faa6409f8b2e0b4cb27a32 | 433f7b48e1228f700a4581a9c212208e0214ee91 | /django_cms_example2/wsgi.py | a6953a86dcdd243e3bf42672b2d5dbe13ca59973 | [] | no_license | sysneld1/telecom1 | 13e69972ac016fd3f1cc59a5a4dfb250f4a3a2af | f210edd73098610ec7029b0991e208a11a7e3fb7 | refs/heads/master | 2022-12-17T02:52:56.947608 | 2020-09-15T12:28:45 | 2020-09-15T12:28:45 | 295,719,322 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 415 | py | """
WSGI config for django_cms_example2 project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.0/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'django_cms_example2.settings')
application = get_wsgi_application()
| [
"sysneld1@gmail.com"
] | sysneld1@gmail.com |
bce3a2f07e931ae8a96ce2b494edbef64616f5ea | b21a7616ec39e53c4c5d596c32fc2e6c6f3d5273 | /Reverse/HW-08/10/anrgy.py | ac996f3b9fdbd2b53ba7c92a306be07f0cef4b6f | [] | no_license | vladrus13/ITMO | d34fbd5feee0626c0fe5722b79dd928ee2a3f36a | c4ff564ea5f73e02354c0ae9248fee75df928b4e | refs/heads/master | 2022-02-23T03:13:36.794460 | 2022-02-10T22:24:16 | 2022-02-10T22:24:16 | 177,217,313 | 17 | 9 | null | 2020-08-07T15:06:37 | 2019-03-22T22:33:18 | Java | UTF-8 | Python | false | false | 593 | py | def tracer():
ROOT = '#ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/'
ENTER_FOR_ADDR = '0x80484b1\n'
IF_ADDR = '0x80484ba\n'
file = open('trace.log', 'r')
char = 0
s = file.readline()
while file.readable():
while s != ENTER_FOR_ADDR:
s = file.readline()
char = 0
s = file.readline()
while s != ENTER_FOR_ADDR:
if s == IF_ADDR:
char += 1
s = file.readline()
#print(s)
print(ROOT[char], end="")
if __name__ == '__main__':
print(tracer())
| [
"vladrus13rus@yandex.ru"
] | vladrus13rus@yandex.ru |
5858339fb5fa9dbe8b8188ff43641fdd371396b9 | 1ee10e1d42b59a95a64d860f0477a69b016d1781 | /Lecture_03/Lecture Code/10_Matcher_3_Lexical_Attibutes.py | 00f1d77a02bad808777d7d520f42ccb07444ce0b | [] | no_license | KushalIsmael/NLP | 5564070a573d251d7222dda85b8025ae1f9c3c6f | d4ce567a009e149b0cb1781d3a341d25aa438916 | refs/heads/master | 2023-08-18T14:07:48.646386 | 2021-10-28T19:09:25 | 2021-10-28T19:09:25 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 432 | py | import spacy
from spacy.matcher import Matcher
nlp = spacy.load("en_core_web_sm")
matcher = Matcher(nlp.vocab)
pattern = [{"IS_DIGIT": True}, {"LOWER": "fifa"}, {"LOWER": "world"},
{"LOWER": "cup"}, {"IS_PUNCT": True}]
matcher.add("FIFA", [pattern])
doc = nlp("2018 FIFA World Cup: France won!")
matches = matcher(doc)
for match_id, start, end in matches:
matched_span = doc[start:end]
print(matched_span.text) | [
"amir.h.jafari@okstate.edu"
] | amir.h.jafari@okstate.edu |
5521b133c4c362f2cc6d3ccdcc2029c849e5935b | d28ae1748a2be7aa60926d82f60723392f3bd0b4 | /piece.py | 7046be7cea0bc62e123dd94afef9afaafd5cf915 | [] | no_license | FilipKrzemien/Python-Checkers | 05855845a0ede8fd95b330211d5e8a6875441870 | f7283d1d33a5eff65176dc0d679aa831f3d4e706 | refs/heads/master | 2020-03-21T08:36:31.159249 | 2019-04-14T13:44:31 | 2019-04-14T13:44:31 | 138,354,013 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 724 | py | from square import Square
class Piece(Square):
def __init__(self, position, player):
Square.__init__(self, position)
self.player = player
def move_available(self, other_position, black_turn):
pass
def update_position(self, new_position):
Square.update_position(self, new_position)
def capture_available(self, other_position, black_turn, l, b):
pass
def piece_pressed(self, black_turn):
if black_turn:
if self.player == 'C':
return True
else:
return False
elif not black_turn:
if self.player == 'B':
return True
else:
return False
| [
"filip.krzem@gmail.com"
] | filip.krzem@gmail.com |
0e85466f60bd9a21dc3fc450b1a424e22248fff6 | 9a0a4439610489f391ed99f565de358779048a3f | /Assign3/ib_kpca.py | 0a1713af17bae9b55adb1f5643412b900157c95e | [] | no_license | asyrofist/SMAI-A3-SVM-Kernel-PCA-LDA | 6a5fecba0bcbe828ec8e3df895ac5d7aac8e4e55 | 5675a3ef452840298ff94ccaa631278fbdf8a691 | refs/heads/master | 2021-04-23T18:37:09.598069 | 2016-10-28T12:52:59 | 2016-10-28T12:52:59 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 211 | py | from sklearn.decomposition import KernelPCA
def kpca(X,g,k):
kpca = KernelPCA(k,kernel="rbf", fit_inverse_transform=True, gamma=g)
pcaX = kpca.fit_transform(X)
return (pcaX) | [
"saumya.rawat25@gmail.com"
] | saumya.rawat25@gmail.com |
85115d1212270dde95742797c7074e489bb195c8 | e9c0b70cab39fa771db383fa882436c14ae9aec7 | /pizza_app/migrations/0001_initial.py | ece9b436685209c0100e8865b75f0d5b8d49abde | [
"MIT"
] | permissive | rusrom/django_pizza_project | f4b67b558a6238b58e285f1b9eb38bf1c8cbadf5 | 350862ca49b91f5d5d4e12105846ecc9e4fc15c0 | refs/heads/master | 2020-07-16T05:45:07.229049 | 2019-09-02T14:14:21 | 2019-09-02T14:14:21 | 205,732,229 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,024 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.2 on 2019-09-02 12:28
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='PizzaShop',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=100)),
('phone', models.CharField(max_length=100)),
('address', models.CharField(max_length=100)),
('logo', models.ImageField(upload_to='logo/')),
('owner', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='pizzashop', to=settings.AUTH_USER_MODEL)),
],
),
]
| [
"rusrom@guyfawkes.33mail.com"
] | rusrom@guyfawkes.33mail.com |
f022eab24ec8efff6cd2a71e46e90e334bef79fc | 2e87a0a3f62be689ae3b858eaf1e843632c152f8 | /Turtoise.py | ce8b81787592b8486bd2107bec532b16a8f915d5 | [] | no_license | ThinThinzarHtet/PythonHybridClass | 1dc3ec385d49fab2b4513507699c5587c03eff3b | a9d7b7cb51c1896523b4b2788490959b20532232 | refs/heads/master | 2021-10-25T07:59:56.116757 | 2020-01-19T11:05:35 | 2020-01-19T11:05:35 | 217,832,143 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 281 | py | # from turtle import *
# color('red', 'yellow')
# begin_fill()
# while True:
# forward(200)
# left(170)
# if abs(pos()) < 1:
# break
# end_fill()
# done()
import turtle
triple = turtle.turtle()
for i in range(20):
triple.forward(i * 10)
triple.right(144)
turtle.done() | [
"thinthinzarhtet19@gmail.com"
] | thinthinzarhtet19@gmail.com |
fe71939a7bd2a6c8c874577da3939a3550c854ce | e52af4720714e8d40d22593c815403129d96f630 | /kiwi/util/service_groups.py | 4102bb45721fd2efc7f9009568adb85a10ddb9cb | [
"Apache-2.0"
] | permissive | KiwiNetworkOrg/kiwi-blockchain | 26f143aef0ec5d615fc3ce8a2f5d64d285f3c76d | 90ec3a0fd7b50f5b98b5b0478e28ebbe6f8e8a71 | refs/heads/main | 2023-07-27T03:03:23.966038 | 2021-09-02T07:57:21 | 2021-09-02T07:57:21 | 398,200,002 | 8 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,190 | py | from typing import KeysView, Generator
SERVICES_FOR_GROUP = {
"all": "kiwi_harvester kiwi_timelord_launcher kiwi_timelord kiwi_farmer kiwi_full_node kiwi_wallet".split(),
"node": "kiwi_full_node".split(),
"harvester": "kiwi_harvester".split(),
"farmer": "kiwi_harvester kiwi_farmer kiwi_full_node kiwi_wallet".split(),
"farmer-no-wallet": "kiwi_harvester kiwi_farmer kiwi_full_node".split(),
"farmer-only": "kiwi_farmer".split(),
"timelord": "kiwi_timelord_launcher kiwi_timelord kiwi_full_node".split(),
"timelord-only": "kiwi_timelord".split(),
"timelord-launcher-only": "kiwi_timelord_launcher".split(),
"wallet": "kiwi_wallet kiwi_full_node".split(),
"wallet-only": "kiwi_wallet".split(),
"introducer": "kiwi_introducer".split(),
"simulator": "kiwi_full_node_simulator".split(),
}
def all_groups() -> KeysView[str]:
return SERVICES_FOR_GROUP.keys()
def services_for_groups(groups) -> Generator[str, None, None]:
for group in groups:
for service in SERVICES_FOR_GROUP[group]:
yield service
def validate_service(service: str) -> bool:
return any(service in _ for _ in SERVICES_FOR_GROUP.values())
| [
"jackydu1980@gmail.com"
] | jackydu1980@gmail.com |
23de31fa7213263f9a98e2bd707d3c2d771dd3be | eda36d24a1e6d4f30597ab1e1b2d8e17694f93bd | /weio/tests/test_turbsim.py | 2afe6ac46c1e982c0352cf1e40abbc37dad84357 | [
"MIT"
] | permissive | ebranlard/weio | 31fdab7a8afde9919f66fab942dad309f8d8d0e2 | 50fab087c5dc3e0248bcce578de6e713fa3e9b5f | refs/heads/main | 2023-07-23T19:32:42.548855 | 2022-12-19T08:13:06 | 2022-12-19T08:13:06 | 152,828,434 | 25 | 20 | MIT | 2023-01-13T20:37:29 | 2018-10-13T02:44:25 | Python | UTF-8 | Python | false | false | 1,734 | py | import unittest
import os
import numpy as np
from .helpers_for_test import MyDir, reading_test
try:
from weio.turbsim_file import TurbSimFile
except:
from weio.weio.turbsim_file import TurbSimFile
class Test(unittest.TestCase):
def test_001_read_all(self, DEBUG=True):
reading_test('TurbSim_*.*', TurbSimFile)
def test_TurbSim(self):
# --- Test without tower
F = TurbSimFile(os.path.join(MyDir,'TurbSim_NoTwr.bts'))
F.write( os.path.join(MyDir,'TurbSim_NoTwr_TMP.bts'))
F2= TurbSimFile(os.path.join(MyDir,'TurbSim_NoTwr_TMP.bts'))
os.remove( os.path.join(MyDir,'TurbSim_NoTwr_TMP.bts'))
np.testing.assert_almost_equal(F['u'][0,:,:,:],F2['u'][0,:,:,:],4)
np.testing.assert_almost_equal(F['u'][1,:,:,:],F2['u'][1,:,:,:],4)
np.testing.assert_almost_equal(F['u'][2,:,:,:],F2['u'][2,:,:,:],4)
# --- Test with tower
F = TurbSimFile(os.path.join(MyDir,'TurbSim_WithTwr.bts'))
np.testing.assert_almost_equal(F['u'][2,-1,1,3], 0.508036, 5)
np.testing.assert_almost_equal(F['u'][0, 4,2,0], 7.4867466, 5)
np.testing.assert_almost_equal(F['uTwr'][0, 4, :], [6.1509, 6.4063, 8.9555, 7.6943], 4)
F.write( os.path.join(MyDir,'TurbSim_WithTwr_TMP.bts'))
F2= TurbSimFile(os.path.join(MyDir,'TurbSim_WithTwr_TMP.bts'))
os.remove( os.path.join(MyDir,'TurbSim_WithTwr_TMP.bts'))
np.testing.assert_almost_equal(F['u'][0,:,:,:],F2['u'][0,:,:,:],3)
np.testing.assert_almost_equal(F['u'][1,:,:,:],F2['u'][1,:,:,:],3)
np.testing.assert_almost_equal(F['u'][2,:,:,:],F2['u'][2,:,:,:],3)
if __name__ == '__main__':
# Test().test_000_debug()
unittest.main()
| [
"emmanuel.branlard@nrel.gov"
] | emmanuel.branlard@nrel.gov |
ef4f31488ff1d5936c39d77fc37b29c55734102e | 4500003dcaa3eb92e2b9c6bca8987ec473fb5ec3 | /core/migrations/0006_post_slug.py | db41286dfce7136c7c34e38796bac248d7291c36 | [] | no_license | alikhundmiri/simpleweddingdjango | 0bb2bfc069bac075d759efa96eede55c68595cf4 | 57aa6576df368fde651f7f2b6863f693bbb57756 | refs/heads/master | 2022-12-17T22:36:18.674974 | 2020-06-14T08:10:09 | 2020-06-14T08:10:09 | 239,115,495 | 0 | 0 | null | 2022-12-08T03:51:09 | 2020-02-08T11:01:00 | HTML | UTF-8 | Python | false | false | 866 | py | # Generated by Django 3.0.3 on 2020-03-29 16:37
from django.db import migrations, models
from core.utils import random_string_generator
from django.utils.text import Truncator
from django.utils.text import slugify
def gen_slug(apps, schema_editor):
MyModel = apps.get_model('core', 'Post')
for row in MyModel.objects.all():
if not row.slug:
row.slug = slugify((Truncator(row.title).chars(200) +'-'+ random_string_generator(size=4)))
row.save()
class Migration(migrations.Migration):
dependencies = [
('core', '0005_auto_20200329_2203'),
]
operations = [
migrations.AddField(
model_name='post',
name='slug',
field=models.SlugField(max_length=200, null=True),
),
migrations.RunPython(gen_slug, reverse_code=migrations.RunPython.noop),
]
| [
"salikhundmiri@gmail.com"
] | salikhundmiri@gmail.com |
ff9ee174e245f827cf26c90562405bbd23476e0d | d659887598e3cd40219c2e1b46cf41de28e5bc58 | /generate_mp4.py | 363567b86a0d8e1de74bd032bacd9eafd47e2c0b | [
"MIT"
] | permissive | cvtuge/Extreme-Dark-Video-Enhancement | d3f5ad874bdd2d636b04a344ba70afd8db0084c5 | e0de50428d74a7cec2ee87b63e9fce9860dfd590 | refs/heads/master | 2022-04-08T21:51:40.559183 | 2020-02-02T23:00:45 | 2020-02-02T23:00:45 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,009 | py | #!/usr/bin/env python
# by boyuanwa 3043994708
# ----------------------------------------------------------------
# Written by Boyuan Wang
# Fall 2019
# ----------------------------------------------------------------
# This helper python aims to cnvert from npy file to mp4 file
import os, glob, time
import numpy as np
from skvideo.io import vread, vwrite
# Input list of clip ids and the directory holding the .npy results to convert.
TEST_LIST = 'test_list'
TEST_RESULT_DIR = '0_data/gt_he/'

# get train IDs
with open(TEST_LIST) as f:
    text = f.readlines()
    train_files = text

t0 = time.time()
# First whitespace-separated token of each line is the clip id. Currently
# unused: the id filter inside the loop below is commented out.
ids = [line.strip().split(' ')[0] for line in train_files]
output_files = glob.glob(TEST_RESULT_DIR + '*.npy')
for file in output_files:
    file_name = os.path.basename(file)[:-4]  # strip the '.npy' extension
    # if file_name[:-4] not in ids:
    #     continue
    output = np.load(file)
    out_file = file_name + '.mp4'
    # Encode the loaded frame array as an .mp4 next to the source .npy.
    vwrite(TEST_RESULT_DIR + out_file, output)
    print("Finishing converting from npy to mp4 For:", file_name, "\n")
t1 = time.time()
print ('ALL FINISHED. ({:.3f}s)'.format(t1 - t0)) | [
"wangby511@gmail.com"
] | wangby511@gmail.com |
a172c1be6d6b5a6fe6ce3855677790abab9776c3 | 5a60ad7e8a5efabb9a9ef4a9980ae7bbcf81c393 | /attack_files/leak.py | 67cb4e87a08a939b3bb919efc95ae78cd08abc45 | [] | no_license | agadient/SERVEEZ-CVE | 03cb17b9b35162517b4d8ff7003187479ab8a8c6 | 6507dbf6bed1ae98a7322f15f7238144ea1e1be7 | refs/heads/master | 2020-09-13T10:48:09.714749 | 2019-11-19T17:39:58 | 2019-11-19T17:39:58 | 222,747,827 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 449 | py | #!/usr/bin/python
import socket
from time import sleep
import sys
# Proof-of-concept driver: open several client sockets and replay a payload
# (read from the file named on the command line) against the local service.
a = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
b = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
c = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
d = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
# Attack payload to send.
f = open(sys.argv[1], 'r')
data = f.read()
f.close()
a.connect(('localhost', 42422))
sleep(0.5)
# NOTE(review): b is closed without ever being connected -- presumably to
# perturb the server's connection handling; confirm intent.
b.close()
# NOTE(review): send() with a str only works on Python 2; on Python 3 this
# would need bytes -- confirm the target interpreter.
a.send(data)
print(a.recv(1024))
# NOTE(review): c and d were never connected, so these recv() calls would
# raise on most platforms -- confirm whether that is deliberate.
c.recv(1024)
d.recv(1024)
| [
"agadient@mit.edu"
] | agadient@mit.edu |
365a794605a103aa3c437ccaaab439b00243cb55 | 03fc2bef8d43fb62f1063c3c70edee938d87a36e | /Reinforcement-learning-agent-law/game/create_game.py | 641f2dba9f5c8d5960a4439eeaa65f82065dc29a | [
"MIT"
] | permissive | driesdenouter/Reinforcement-learning-agent-that-has-learned-the-law | 7fc79c5724984e8544376bb33e8246e595c7146b | e4f39a0f6827bf25148aa9fd4f6e15f2a8f4bf6f | refs/heads/master | 2023-01-28T15:37:00.450840 | 2020-12-05T16:53:09 | 2020-12-05T16:53:09 | 262,281,384 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,953 | py | import random
import math
import numpy as np
import pygame
from pygame.color import THECOLORS
import pymunk
from pymunk.vec2d import Vec2d
from pymunk.pygame_util import draw
# PyGame init
# Window dimensions of the simulation surface (also used as arena bounds).
width = 1000
height = 700
pygame.init()
screen = pygame.display.set_mode((width, height))
clock = pygame.time.Clock()
# Turn off alpha since we don't use it.
screen.set_alpha(None)
# Showing sensors slows things down.
show_sensors = False
class GameState:
    """Pymunk/pygame driving world used as an RL environment.

    Holds the car (the agent), three moving circular obstacles, a wandering
    "cat", and red boundary walls. ``frame_step`` applies one discrete
    action, advances the physics, and returns ``(reward, state)``.
    """

    def __init__(self):
        # Global-ish.
        self.crashed = False

        # Physics stuff.
        self.space = pymunk.Space()
        self.space.gravity = pymunk.Vec2d(0., 0.)

        # Create the car.
        self.create_car(100, 100, 0.5)

        # Record steps.
        self.num_steps = 0

        # Create walls (drawn red so the sonar treats them as obstacles).
        static = [
            pymunk.Segment(
                self.space.static_body,
                (0, 1), (0, height), 1),
            pymunk.Segment(
                self.space.static_body,
                (1, height), (width, height), 1),
            pymunk.Segment(
                self.space.static_body,
                (width-1, height), (width-1, 1), 1),
            pymunk.Segment(
                self.space.static_body,
                (1, 1), (width, 1), 1)
        ]
        for s in static:
            s.friction = 1.
            s.group = 1
            s.collision_type = 1
            s.color = THECOLORS['red']
        self.space.add(static)

        # Create some obstacles, semi-randomly.
        # We'll create three and they'll move around to prevent over-fitting.
        self.obstacles = []
        self.obstacles.append(self.create_obstacle(200, 350, 100))
        self.obstacles.append(self.create_obstacle(700, 200, 125))
        self.obstacles.append(self.create_obstacle(600, 600, 35))

        # Create a cat.
        self.create_cat()

    def create_obstacle(self, x, y, r):
        """Add an (effectively immovable) blue circle of radius r at (x, y).

        Returns the pymunk body so the caller can steer it later.
        """
        c_body = pymunk.Body(pymunk.inf, pymunk.inf)
        c_shape = pymunk.Circle(c_body, r)
        c_shape.elasticity = 1.0
        c_body.position = x, y
        c_shape.color = THECOLORS["blue"]
        self.space.add(c_body, c_shape)
        return c_body

    def create_cat(self):
        """Add the orange "cat" body that wanders around the arena."""
        inertia = pymunk.moment_for_circle(1, 0, 14, (0, 0))
        self.cat_body = pymunk.Body(1, inertia)
        self.cat_body.position = 50, height - 100
        self.cat_shape = pymunk.Circle(self.cat_body, 30)
        self.cat_shape.color = THECOLORS["orange"]
        self.cat_shape.elasticity = 1.0
        self.cat_shape.angle = 0.5
        # NOTE(review): this direction vector is computed but never used here;
        # the cat's motion is set in move_cat() -- confirm it can be dropped.
        direction = Vec2d(1, 0).rotated(self.cat_body.angle)
        self.space.add(self.cat_body, self.cat_shape)

    def create_car(self, x, y, r):
        """Add the green car body at (x, y) with initial heading r (radians)."""
        inertia = pymunk.moment_for_circle(1, 0, 14, (0, 0))
        self.car_body = pymunk.Body(1, inertia)
        self.car_body.position = x, y
        self.car_shape = pymunk.Circle(self.car_body, 25)
        self.car_shape.color = THECOLORS["green"]
        self.car_shape.elasticity = 1.0
        self.car_body.angle = r
        # Throttle state; translated into an actual velocity in frame_step().
        self.velocity_changer = 40
        driving_direction = Vec2d(1, 0).rotated(self.car_body.angle)
        self.car_body.apply_impulse(driving_direction)
        self.space.add(self.car_body, self.car_shape)

        # Define the possible actions for agent to take.
        # Each entry is [steering, throttle]: steering 0=left, 1=right,
        # 2=straight; throttle 0=slow down, 1=speed up.
        self.action_memory = {
            0: [0,0],
            1: [0,1],
            2: [1,0],
            3: [1,1],
            4: [2,0],
            5: [2,1],
        }

    def frame_step(self, action):
        """Apply one discrete action, step physics, return (reward, state).

        ``state`` is a (1, 4) array: three normalized sonar readings plus the
        (normalized) velocity state.
        """
        current_action = self.action_memory[action]
        angle = current_action[0]
        speed = current_action[1]

        minN = 3 # Let speed never get over or under a specific value.
        maxN = 50
        self.velocity_changer = max(minN, self.velocity_changer)
        self.velocity_changer = min(maxN, self.velocity_changer)

        if angle == 0: # Turn left.
            self.car_body.angle -= .2
        elif angle == 1: # Turn right.
            self.car_body.angle += .2
        if speed == 0: # Slow down.
            self.velocity_changer -= 1
        elif speed == 1: # Speed up.
            self.velocity_changer += 1

        # Move obstacles.
        if self.num_steps % 100 == 0:
            self.move_obstacles()
        # Move cat.
        if self.num_steps % 5 == 0:
            self.move_cat()

        # Speed agent.
        driving_direction = Vec2d(1, 0).rotated(self.car_body.angle)
        self.car_body.velocity = (50 + ((self.velocity_changer) * 0.005)) * driving_direction

        # Draw screen slows down the training, so only draw screen in final frames training and playing.
        if self.num_steps < 1490000:
            draw_screen = False
            # Update the screen and stuff.
            screen.fill(THECOLORS["black"])
            draw(screen, self.space)
            self.space.step(1./10)
            if draw_screen:
                pygame.display.flip()
            clock.tick()
        else:
            draw_screen = True
            # Update the screen and stuff.
            screen.fill(THECOLORS["black"])
            draw(screen, self.space)
            self.space.step(1./10)
            if draw_screen:
                pygame.display.flip()
            clock.tick()
            for evt in pygame.event.get():
                if evt.type == pygame.QUIT:
                    pygame.quit()
                    # NOTE(review): ``sys`` is never imported in this module,
                    # so this exit path would raise NameError -- confirm.
                    sys.exit()

        # Get the current location and the readings of sonar arms and velocity as state.
        x, y = self.car_body.position
        readings = self.get_sonar_readings(x, y, self.car_body.angle)
        # Normalization also rescales the velocity entry with the same formula.
        normalized_readings = [(x-20.0)/20.0 for x in readings]
        state = np.array([normalized_readings])

        # Set the reward
        if self.car_is_crashed(readings):
            # Car crashed when any reading of sonar arms == 1.
            self.crashed = True
            reward = -1000
            self.recover_from_crash(driving_direction)
        elif self.speed_is_violated():
            # Set low reward if the speedlimit is violated.
            coef_velo_change = 1.3
            reward = -50 - int(self.velocity_changer ** coef_velo_change)
            self.num_steps += 1
        elif self.speed_within_limits():
            # Reward is based on the readings (lower reading is better) and the velocity coefficient (higher velocity is better).
            intercept_reward = -5
            coef_velo_change = 1.738495
            coef_sum_readings = 1.393518
            reward = intercept_reward + int(self.velocity_changer ** coef_velo_change) + int(self.sum_readings(readings[0:3]) ** coef_sum_readings)
            self.num_steps += 1

        return reward, state

    def speed_within_limits(self):
        """True when the throttle state is inside the legal 0..30 band."""
        # Set the allowed speed.
        if self.velocity_changer in range(0, 31):
            return True
        else:
            return False

    def speed_is_violated(self):
        """True when the throttle state exceeds the speed limit."""
        # Set the speed limit.
        speed_limit = 30
        if self.velocity_changer > speed_limit:
            return True
        else:
            return False

    def move_obstacles(self):
        """Give each obstacle a small random velocity (anti-overfitting)."""
        # Randomly move obstacles around.
        for obstacle in self.obstacles:
            speed = random.randint(1, 5)
            direction = Vec2d(1, 0).rotated(self.car_body.angle + random.randint(-2, 2))
            obstacle.velocity = speed * direction

    def move_cat(self):
        #randomly move cat.
        speed = random.randint(20, 75)
        self.cat_body.angle -= random.randint(-1, 1)
        direction = Vec2d(1, 0).rotated(self.cat_body.angle)
        self.cat_body.velocity = speed * direction

    def car_is_crashed(self, readings):
        """A sonar reading of 1 means something is touching the car."""
        if readings[0] == 1 or readings[1] == 1 or readings[2] == 1:
            return True
        else:
            return False

    def recover_from_crash(self, driving_direction):
        """Back the car up and spin it a little so it can resume driving."""
        # We hit something, so recover.
        while self.crashed:
            # Go backwards.
            self.car_body.velocity = -100 * driving_direction
            self.crashed = False
            for i in range(10):
                self.car_body.angle += .2 # Turn a little.

    def sum_readings(self, readings):
        """Sum the given sonar readings (used as part of the reward)."""
        # Sum the number of non-zero readings.
        tot = 0
        for i in readings:
            tot += i
        return tot

    def get_sonar_readings(self, x, y, angle):
        """Return [left, middle, right, velocity] sensor readings."""
        readings = []
        # Make our arms. All three share the same flat point list; that is
        # safe because get_arm_distance only reads it.
        arm_left = self.make_sonar_arm(x, y)
        arm_middle = arm_left
        arm_right = arm_left

        # Rotate them and get readings.
        readings.append(self.get_arm_distance(arm_left, x, y, angle, 0.75))
        readings.append(self.get_arm_distance(arm_middle, x, y, angle, 0))
        readings.append(self.get_arm_distance(arm_right, x, y, angle, -0.75))
        readings.append(self.velocity_changer)

        if show_sensors:
            pygame.display.update()

        return readings

    def get_arm_distance(self, arm, x, y, angle, offset):
        """Walk along one sonar arm; return the index of the first obstacle hit."""
        # Used to count the distance.
        i = 0

        # Look at each point and see if we've hit something.
        for point in arm:
            i += 1

            # Move the point to the right spot.
            rotated_p = self.get_rotated_point(
                x, y, point[0], point[1], angle + offset
            )

            # Check if we've hit something. Return the current i (distance)
            # if we did.
            if rotated_p[0] <= 0 or rotated_p[1] <= 0 \
                    or rotated_p[0] >= width or rotated_p[1] >= height:
                return i # Sensor is off the screen.
            else:
                # Anything that is not background-black counts as an obstacle.
                obs = screen.get_at(rotated_p)
                if self.get_track_or_not(obs) != 0:
                    return i

            if show_sensors:
                pygame.draw.circle(screen, (255, 255, 255), (rotated_p), 2)

        # Return the distance for the arm.
        return i

    def make_sonar_arm(self, x, y):
        """Build a flat list of 39 sample points extending to the right of (x, y)."""
        spread = 10 # Default spread.
        distance = 20 # Gap before first sensor.
        arm_points = []
        # Make an arm. We build it flat because we'll rotate it about the center later.
        for i in range(1, 40):
            arm_points.append((distance + x + (spread * i), y))
        return arm_points

    def get_rotated_point(self, x_1, y_1, x_2, y_2, radians):
        """Rotate (x_2, y_2) around (x_1, y_1) and convert to screen coordinates."""
        # Rotate x_2, y_2 around x_1, y_1 by angle.
        x_change = (x_2 - x_1) * math.cos(radians) + \
            (y_2 - y_1) * math.sin(radians)
        y_change = (y_1 - y_2) * math.cos(radians) - \
            (x_1 - x_2) * math.sin(radians)
        new_x = x_change + x_1
        # Flip y because pygame's origin is the top-left corner.
        new_y = height - (y_change + y_1)
        return int(new_x), int(new_y)

    def get_track_or_not(self, reading):
        """Return 0 for background-black pixels, 1 for anything else."""
        if reading == THECOLORS['black']:
            return 0
        else:
            return 1
if __name__ == "__main__":
    # Manual smoke run: drive the simulation forever with random actions.
    # NOTE(review): only actions 0-2 of the 6 defined in action_memory are
    # sampled here -- confirm whether the full range was intended.
    game_state = GameState()
    while True:
        game_state.frame_step((random.randint(0, 2)))
| [
"noreply@github.com"
] | noreply@github.com |
dbc3af99f4b28f4c0b2f38df4b3669c55981fec5 | 95e25faf8756a5f8f01be106e21449250c35af79 | /project/_auth/migrations/0022_auto_20210515_0557.py | bdab54653b2f88bfe5bd1b8353d87aa678b5805b | [] | no_license | nazkeyramazan/django2021 | ac2d918821b35672f242f8d4656d2e8698c0e7f9 | f63a1760b0a7a13721087863a191aeb282b9f077 | refs/heads/master | 2023-05-14T18:55:40.787793 | 2021-05-15T01:38:07 | 2021-05-15T01:38:07 | 342,715,437 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 445 | py | # Generated by Django 3.1.7 on 2021-05-14 23:57
from django.db import migrations, models
class Migration(migrations.Migration):
    """Re-declare MainUser.role choices/ordering; default stays 'customer' (1)."""

    dependencies = [
        ('_auth', '0021_auto_20210515_0410'),
    ]

    operations = [
        migrations.AlterField(
            model_name='mainuser',
            name='role',
            # 1=customer (default), 2=manager, 3=admin.
            field=models.SmallIntegerField(choices=[(2, 'manager'), (3, 'admin'), (1, 'customer')], default=1),
        ),
    ]
| [
"nazkeyramazan@gmail.com"
] | nazkeyramazan@gmail.com |
8ca8ffe69257d8c7e79065fd4e815b66d6a14d52 | 353b324a088d32053b165c0ab3e622dfe524a09d | /linked_class.py | d443e0ccc1b8c57d8bebbc603a1881305788c7b5 | [] | no_license | nurlanorazkeldiyev/seminar3 | 0fbb92d5456ab23e3f76825462cad38a652cec4e | dc9945907e3baf50847359c02ca36c5f72b3523f | refs/heads/master | 2020-04-25T00:07:20.552808 | 2019-03-01T11:24:07 | 2019-03-01T11:24:07 | 172,367,796 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 605 | py | from linked_list import Mylist
class Queue:
    """FIFO queue built on top of the project's ``Mylist`` linked list."""

    def __init__(self):
        # Underlying storage; enqueue adds nodes, dequeue removes the oldest.
        self.my_queue = Mylist()

    def length(self):
        """Return the number of stored items."""
        return self.my_queue.__len__()

    def isEmpty(self):
        """Return True when the queue holds no items."""
        if self.length() == 0:
            return True
        return False

    def dequeue(self):
        """Remove and return the item at the front of the queue.

        Implemented by walking to the last element yielded by the list's
        iterator and removing it. NOTE(review): whether that element is the
        oldest depends on Mylist's iteration/add order -- confirm against
        linked_list.py. On an empty queue, prints a message and returns "".
        """
        if not self.isEmpty():
            count = 0
            for elem in self.my_queue:
                count += 1
                if count == self.length():
                    data = elem
            self.my_queue.remove(data)
            return data
        print("The stack contains no items!")
        return ""

    def enqueue(self, item):
        """Wrap ``item`` in a node and append it to the backing list."""
        # NOTE(review): ListNode is never imported in this module (only
        # Mylist is), so this line would raise NameError -- confirm whether
        # ``from linked_list import ListNode`` is missing.
        elem = ListNode(item)
        self.my_queue.add(elem)
| [
"nurlan_o.s@mail.ru"
] | nurlan_o.s@mail.ru |
0d3df4c7aa3aec5676639d9ff04e3a255dd4bd91 | b0bbe465952beab8c9634320662e1a60dbce567f | /app/api/agent/__init__.py | 1a69d0ec7e34a9447ae490f085bda28cdd502f45 | [] | no_license | pawanpal1848/smart-pid-api | eb84b36aaef314aaf099bc9dcd2552a02fea10ab | e26b4da1a09b9314961b07acb0859f29a694f1a5 | refs/heads/master | 2023-08-09T11:14:16.098429 | 2021-09-07T12:59:44 | 2021-09-07T12:59:44 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,724 | py | from app.model.buffer import Buffer, get_buffer_used_size
from app.model.response import ValuedResponse, Response, ExceptionResponse
from app.model.agent import load_agent_settings, save_agent_settings, get_loaded_agent
import tensorflow as tf
import numpy as np
# Module-level singleton: the agent restored from persisted settings/weights.
agent = get_loaded_agent()
def remember(body: list):
    """Append the posted experience rows to the replay buffer.

    Returns a plain success payload, or ``(error payload, 500)`` on failure.
    """
    try:
        Buffer().store(body)
        message = "added {} line(s) to buffer".format(len(body))
        return Response(message).__dict__
    except Exception as error:
        return ExceptionResponse(error).__dict__, 500
def set_weights(body):
    """Replace the actor's weights and echo back the weights now in effect."""
    global agent
    try:
        agent.set_actor_weights(body)
        current = agent.get_actor_weights()
        return ValuedResponse("updated actor weights", current).__dict__
    except Exception as error:
        return ExceptionResponse(error).__dict__, 500
def get_weights():
    """Return both the live actor weights and the target-actor weights."""
    global agent
    payload = {}
    payload["actor"] = agent.get_actor_weights(False)
    payload["target_actor"] = agent.get_actor_weights(True)
    return payload
def set_settings(body):
    """Persist new agent settings and apply them to the live agent.

    If saving reports that a reload is required (structural change), the
    module-level ``agent`` singleton is rebuilt from disk. Returns the stored
    settings payload, or ``(error payload, 500)`` on failure.
    """
    global agent
    try:
        need_reload = save_agent_settings(body)
        agent.set_settings(body)
        if need_reload:
            # Structural settings changed: replace the singleton entirely.
            agent = get_loaded_agent()
        return ValuedResponse("Updated agent settings", load_agent_settings()).__dict__
    except Exception as e:
        return ExceptionResponse(e).__dict__, 500
def get_settings():
    """Return the live agent's current settings."""
    global agent
    return agent.get_settings()
def save(is_best_actor=False):
    """Persist the agent's model checkpoints.

    When ``is_best_actor`` is true the snapshot is stored as the best actor
    seen so far. Returns a success payload, or ``(error payload, 500)``.
    """
    global agent
    try:
        agent.save_models(is_best_actor)
        payload = Response("Successfully saved models").__dict__
        return payload
    except Exception as error:
        return ExceptionResponse(error).__dict__, 500
def learn(train_actor=True):
    """Run one learning step if the replay buffer holds any experience.

    Returns the updated actor weights on success, a 400 payload when the
    buffer is empty, or ``(error payload, 500)`` on failure.
    """
    global agent
    try:
        buffer_used_size = get_buffer_used_size()
        if buffer_used_size > 0:
            agent.learn(train_actor=train_actor)
            return agent.get_actor_weights()
        return Response("can't learn without experience", status="bad query").__dict__, 400
    except Exception as e:
        return ExceptionResponse(e).__dict__, 500
def test_actor(body):
    """Run the actor network on raw state rows and return its outputs as lists.

    Only the columns flagged by ``agent.used_states`` are fed to the network.
    NOTE(review): assumes ``body`` is a 2-D numeric list -- confirm with callers.
    """
    global agent
    try:
        data = np.array(body)
        state = tf.convert_to_tensor(data[:, agent.used_states], dtype=tf.float32)
        return agent.actor(state, False).numpy().tolist()
    except Exception as e:
        return ExceptionResponse(e).__dict__, 500
def test_critic(body):
    """Run the critic on raw rows; the last column is the action, the rest state.

    Returns the critic outputs as nested lists, or ``(error payload, 500)``.
    """
    global agent
    try:
        data = np.array(body)
        # used_states is extended with False so the trailing action column is
        # excluded from the state selection.
        states = tf.convert_to_tensor(data[:, np.concatenate((agent.used_states, [False]))], dtype=tf.float32)
        actions = tf.convert_to_tensor(data[:, -1:], dtype=tf.float32)
        return agent.critic(states, actions, training=False).numpy().tolist()
    except Exception as e:
        return ExceptionResponse(e).__dict__, 500
| [
"mathias.tiberghien@gmail.com"
] | mathias.tiberghien@gmail.com |
b5450b3f9c338676f9ab05092e450396a19672b0 | 5d5f6ba3bdcb52b4750a5f28afa8a1a1019bfc9e | /django/extras/djangoForms/djangoFormApp/models.py | 464d6d00fd8196fb2c75dbf55badc599443656b0 | [] | no_license | eDiazGtz/pythonLearning | 06e96f2f5a6e48ac314cb815cf9fbf65d0b7c2c8 | 57d7b2292cf5d9769cce9adf765962c3c0930d6c | refs/heads/master | 2023-06-18T02:16:09.293375 | 2021-05-03T18:09:52 | 2021-05-03T18:09:52 | 335,090,531 | 0 | 0 | null | 2021-05-03T18:09:53 | 2021-02-01T21:35:24 | Python | UTF-8 | Python | false | false | 758 | py | from django.db import models
# Create your models here.
class UserManager(models.Manager):
    """Manager carrying form-style validation helpers for User records."""

    def createValidator(self, postData):
        """Validate posted signup fields.

        Returns a dict mapping each failing field name to its error message;
        an empty dict means the data passed every check.
        """
        errors = {}
        if not postData['firstName']:
            errors["firstName"] = "First Name should be at least 1 character"
        if not postData['lastName']:
            errors["lastName"] = "Last Name should be at least 1 character"
        if len(postData['email']) > 50:
            errors["email"] = "Email max length 50 Characters"
        return errors
class User(models.Model):
    """Signup record validated by UserManager.createValidator.

    NOTE(review): the password is stored in a plain CharField here -- confirm
    that hashing happens in the view layer before save.
    """
    firstName = models.CharField(max_length=17)
    lastName = models.CharField(max_length=20)
    email = models.CharField(max_length=50)
    password = models.CharField(max_length=100)
objects = UserManager() | [
"ediaz-gutierrez@hotmail.com"
] | ediaz-gutierrez@hotmail.com |
ce6904d70f47be5003b15f558dcc9f8b0b56a0fe | 4863991fc679b178106cd8091ac6c61923fdf8e6 | /03-Python/3/Activities/03-Stu_HobbyBook/Unsolved/HobbyBook_Unsolved.py | afffec60f4fdf29ba6c71dc8da33fbcf265bb2ad | [] | no_license | HeyMikeMarshall/GWARL-Data | 0b22bea61f7cf103cd7da8119bf2138d74a665f1 | cfa61c1eb8db933f4c096fcc0fe79a01e82539e4 | refs/heads/master | 2020-04-27T21:46:49.156111 | 2019-04-13T14:27:57 | 2019-04-13T14:27:57 | 174,711,542 | 0 | 0 | null | 2019-03-21T23:03:24 | 2019-03-09T15:30:36 | null | UTF-8 | Python | false | false | 437 | py |
# One person's "hobby book" entry: identity, hobbies, and wake-up time for
# each day of the week (times kept as HHMM strings).
me_hobbybook = {
    "name": "Michael Marshall",
    "age": "32",
    "hobbies": ["hiking", "gaming", "diy"],
    "wakeuptime": {
        "Monday": "0600",
        "Tuesday": "0600",
        "Wednesday": "0600",
        "Thursday": "0600",
        "Friday": "0600",
        "Saturday": "0800",
        "Sunday": "1100",
    },
}
print(f"My name is {me_hobbybook['name']}. One of my hobbies is {me_hobbybook['hobbies'][0]}.") | [
"mjmarshall@gmail.com"
] | mjmarshall@gmail.com |
5cf1960c834e90ad7fb7225ef4c412aa36982ac7 | 7737714964065404309fbb3c256bb57333bb3954 | /DiSTopStudy/test/default_cfg.py | 9b368ca9df3556a6bb5b0a8c3e1361a12b0f4744 | [] | no_license | pastika/SusyAnalysis | 4d14dc5b364334a018819deaea7fbdc166c7b070 | 6edc5107908e205db47c1b08d3dd9c1c5d32f420 | refs/heads/master | 2020-05-20T05:35:49.581731 | 2015-01-12T15:39:45 | 2015-01-12T15:39:45 | 28,870,414 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,271 | py | import FWCore.ParameterSet.Config as cms
#J..
import commands
import FWCore.Utilities.FileUtils as FileUtils
from FWCore.ParameterSet.VarParsing import VarParsing

process = cms.Process("Analysis")

# Command-line option: path to a text file listing the input ROOT files.
options = VarParsing('Analysis')
options.register('fileList',
                 '',
                 VarParsing.multiplicity.singleton,
                 VarParsing.varType.string,
                 "File List File")
options.parseArguments()
#..J

#===================== Message Logger =============================
process.load("FWCore.MessageLogger.MessageLogger_cfi")
process.MessageLogger.categories.append('PATSummaryTables')
process.MessageLogger.cerr.PATSummaryTables = cms.untracked.PSet(
    limit = cms.untracked.int32(10),
    reportEvery = cms.untracked.int32(1)
)
process.options = cms.untracked.PSet(
    wantSummary = cms.untracked.bool(True)
)
process.MessageLogger.cerr.FwkReport.reportEvery = 10000

# Check for ny duplicates
#process.source.duplicateCheckMode = cms.untracked.string('noDuplicateCheck')

process.load("Configuration.StandardSequences.Geometry_cff")
process.load("Configuration.StandardSequences.MagneticField_cff")
process.load("Configuration.StandardSequences.FrontierConditions_GlobalTag_cff")
process.load("SimGeneral.HepPDTESSource.pythiapdt_cfi")

# Conditions: the data global tag overrides the MC one when runningOnMC is False.
runningOnMC = False
process.GlobalTag.globaltag = "START52_V11C::All"
if runningOnMC == False:
    process.GlobalTag.globaltag = "GR_R_52_V9D::All"

#J process.source = cms.Source("PoolSource", fileNames = cms.untracked.vstring( options.inputFiles ) )
#J (s, o) = commands.getstatusoutput('cat /uscms/home/javiert/work/CMSSW_5_2_5/src/SusyAnalysis/LostLepton/test/fileList')
pepe = cms.untracked.string(options.fileList)
print("")
print("Reading file list from: ")
print(options.fileList)
print("")
# Build the PoolSource from the file list named on the command line.
mylist = FileUtils.loadListFromFile(options.fileList)
readFiles = cms.untracked.vstring(*mylist)
process.source = cms.Source("PoolSource", fileNames = cms.untracked.vstring(readFiles))
#J process.maxEvents = cms.untracked.PSet( input = cms.untracked.int32(100) )
process.source.skipEvents = cms.untracked.uint32(0)

#=============== configure cleaning sequence ================================
# Each RA2 filter re-reads a boolean decision stored in the PAT tuple.
process.load("SandBox.Skims.filterBoolean_cfi")
process.RA2_HBHENoiseFilterRA2 = process.booleanFilter.clone()
process.RA2_HBHENoiseFilterRA2.ResultSource = cms.InputTag("HBHENoiseFilterRA2","HBHENoiseFilterResult","PAT")
process.RA2_beamHaloFilter = process.booleanFilter.clone()
process.RA2_beamHaloFilter.ResultSource = cms.InputTag("beamHaloFilter")
process.RA2_eeNoiseFilter = process.booleanFilter.clone()
process.RA2_eeNoiseFilter.ResultSource = cms.InputTag("eeNoiseFilter")
process.RA2_trackingFailureFilter = process.booleanFilter.clone()
process.RA2_trackingFailureFilter.ResultSource = cms.InputTag("trackingFailureFilter")
process.RA2_inconsistentMuons = process.booleanFilter.clone()
process.RA2_inconsistentMuons.ResultSource = cms.InputTag("inconsistentMuons")
process.RA2_greedyMuons = process.booleanFilter.clone()
process.RA2_greedyMuons.ResultSource = cms.InputTag("greedyMuons")
process.RA2_EcalTPFilter = process.booleanFilter.clone()
process.RA2_EcalTPFilter.ResultSource = cms.InputTag("ra2EcalTPFilter")
process.RA2_EcalBEFilter = process.booleanFilter.clone()
process.RA2_EcalBEFilter.ResultSource = cms.InputTag("ra2EcalBEFilter")
process.HcalLaserEventFilter = process.booleanFilter.clone()
process.HcalLaserEventFilter.ResultSource = cms.InputTag("hcalLaserEventFilter")
process.EEBadScFilter = process.booleanFilter.clone()
process.EEBadScFilter.ResultSource = cms.InputTag("eeBadScFilter")

process.cleaningSeq = cms.Sequence(
    process.RA2_HBHENoiseFilterRA2
    * process.RA2_beamHaloFilter
    * process.RA2_eeNoiseFilter
    * process.RA2_trackingFailureFilter
    * process.RA2_inconsistentMuons
    * process.RA2_greedyMuons
    * process.RA2_EcalTPFilter
    * process.RA2_EcalBEFilter
    #* process.HcalLaserEventFilter
    #* process.EEBadScFilter
)

# Ntuple maker: flat tree with jets, MET, and loose muon/electron collections.
process.lostLeptonTree = cms.EDAnalyzer("LostLeptonTree",
    Debug = cms.bool(False),
    VertexSource = cms.InputTag('goodVertices'),
    DoPUReweight = cms.bool(True),
    PUWeigthSource = cms.InputTag("puWeight"),
    PFMetSource = cms.InputTag("patMETsPF"),
    JetAllSource = cms.InputTag("patJetsPF"),
    #bTagName = cms.string ("trackCountingHighEffBJetTags"),
    bTagName = cms.string("combinedSecondaryVertexBJetTags"),
    MHTSource = cms.InputTag("mhtPFchs"),
    HTSource = cms.InputTag("htPFchs"),
    MuonVetoSrc = cms.InputTag("patMuonsPFIDIso"),
    EleVetoSrc = cms.InputTag("patElectronsIDIso"),
    SaveAllMuons = cms.bool(True),
    MuonSource = cms.InputTag('patMuonsPF'),
    MinMuPt = cms.double(5.0),
    MaxMuEta = cms.double(2.4),
    MaxMuD0 = cms.double(0.2),
    MaxMuDz = cms.double(0.5),
    MaxMuRelIso = cms.double(0.20),
    PFCandidateSrc = cms.InputTag("pfNoPileUpIsoPF"),
    #ElePFSrc = cms.InputTag("patElectronsPFIDIso"),
    SaveAllElectrons = cms.bool(True),
    ElectronSource = cms.InputTag('gsfElectrons'),
    ConversionsSource = cms.InputTag("allConversions"),
    IsoValInputTags = cms.VInputTag(cms.InputTag('elPFIsoValueCharged03PFIdPFIso'),
                                    cms.InputTag('elPFIsoValueGamma03PFIdPFIso'),
                                    cms.InputTag('elPFIsoValueNeutral03PFIdPFIso')),
    RhoIsoSrcEle = cms.InputTag("kt6PFJetsForIsolation", "rho"),
    BeamSpotSource = cms.InputTag("offlineBeamSpot"),
    MinElePt = cms.double(5.0)
)

#J process.load('SandBox.Utilities.puWeightProducer_cfi')
#J process.puWeight.PileUpModeForMC = "Summer12_PUS7"
#J process.puWeight.DataPileUpHistFile = "SandBox/Utilities/data/DataPileupHistogram_RA2Summer12_190456-196531_8TeV_PromptReco_WOLowPU_pixelcorr.root"

# an example sequence to create skimmed susypat-tuples
process.analysisSeq = cms.Sequence(
    process.lostLeptonTree
)

##============ configure output module configuration ========================
process.TFileService = cms.Service("TFileService",
    fileName = cms.string(options.outputFile)
)

#process.load("SandBox.Skims.RA2Selection_cff")
#process.load("SandBox.Skims.RA2HT_cff")
#process.load("SandBox.Skims.RA2MHT_cff")
#process.load("SandBox.Skims.RA2Jets_cff")
#process.preselectionSeq = cms.Sequence(
#    #process.countJetsAK5PFPt50Eta25 *
#    process.htPFFilter *
#    process.mhtPFFilter
#)

# NOTE(review): the cleaning sequence and PU weighting are defined above but
# not scheduled -- only the bare analysis sequence runs. Confirm this is intended.
#process.ppf = cms.Path(process.cleaningSeq * process.puWeight * process.preselectionSeq * process.analysisSeq)
#process.ppf = cms.Path(process.cleaningSeq * process.preselectionSeq * process.analysisSeq)
#J process.ppf = cms.Path(process.cleaningSeq * process.puWeight * process.analysisSeq)
process.ppf = cms.Path( process.analysisSeq )

###-- Dump config ------------------------------------------------------------
##file = open('SusyPAT_RA2414_cfg.py','w')
##file.write(str(process.dumpPython()))
##file.close()
| [
"pastika@cern.ch"
] | pastika@cern.ch |
b1c10929ca27cebfc8f32d5fa3e33f13d3744bd3 | c251401a04faee549a5255745dc976c2be8e24b9 | /work_orders/permissions.py | 15b4821acb2a82a375b098f4d93f2ef74b862691 | [] | no_license | fengo4142/aero-django-backend | a43a3526b570730fd9d519b8e890e550ff9f9f3c | 53167b52b68b30eef6a10edea47888ba0ad71a4e | refs/heads/master | 2022-11-11T10:01:50.534513 | 2020-06-24T15:40:11 | 2020-06-24T15:40:11 | 274,699,313 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,948 | py | import logging
from rest_framework.permissions import BasePermission
from work_orders.models import WorkOrderForm
# Shared application logger; handlers/levels are configured elsewhere.
logger = logging.getLogger('backend')
# *****************************************************************************
# ***************************** WORK ORDERS *******************************
# *****************************************************************************
class CanCreateWorkOrders(BasePermission):
    """Grants POST access to users holding the ``add_workorder`` permission."""

    def has_permission(self, request, view):
        user = request.user
        # Anonymous or missing users are rejected outright.
        if user is None or not user.is_authenticated:
            return False
        if request.method != 'POST':
            return False
        profile = user.aerosimple_user
        return bool(profile and profile.has_permission("add_workorder"))
class CanViewWorkOrders(BasePermission):
    """Grants GET access to work order lists and details."""

    def has_permission(self, request, view):
        user = request.user
        # Anonymous or missing users are rejected outright.
        if user is None or not user.is_authenticated:
            return False
        if request.method != 'GET':
            return False
        profile = user.aerosimple_user
        return bool(profile and profile.has_permission("view_workorder"))
class CanFillMaintenanceForm(BasePermission):
    """Allows POSTing a maintenance form.

    Beyond the two model permissions, the user must either hold the role or
    appear in the user list assigned to the airport's maintenance form.
    """

    def has_permission(self, request, view):
        if request.user is None or not request.user.is_authenticated:
            return False
        # Look up the work order form configured for the user's airport.
        woform = WorkOrderForm.objects.get(
            airport__id=request.user.aerosimple_user.airport_id)
        role = woform.maintenance_form.assigned_role
        users = woform.maintenance_form.assigned_users
        has_role = role in request.user.aerosimple_user.roles.all()
        is_assigned = request.user.aerosimple_user in users.all()
        if (request.method == 'POST' and request.user.aerosimple_user
                and request.user.aerosimple_user.has_permission("add_maintenance")
                and request.user.aerosimple_user.has_permission("view_workorder")
                and (has_role or is_assigned)):
            return True
        return False
class CanFillOperationsForm(BasePermission):
    """Allows POSTing an operations form.

    Mirrors CanFillMaintenanceForm but checks the operations form's assigned
    role/users and the ``add_operations`` permission.
    """

    def has_permission(self, request, view):
        if request.user is None or not request.user.is_authenticated:
            return False
        # Look up the work order form configured for the user's airport.
        woform = WorkOrderForm.objects.get(
            airport__id=request.user.aerosimple_user.airport_id)
        role = woform.operations_form.assigned_role
        users = woform.operations_form.assigned_users
        has_role = role in request.user.aerosimple_user.roles.all()
        is_assigned = request.user.aerosimple_user in users.all()
        if (request.method == 'POST' and request.user.aerosimple_user
                and request.user.aerosimple_user.has_permission("add_operations")
                and request.user.aerosimple_user.has_permission("view_workorder")
                and (has_role or is_assigned)):
            return True
        return False
class CanEditWorkOrderSchema(BasePermission):
    """Allows POSTing new work order schema instances."""

    def has_permission(self, request, view):
        # Anonymous or missing users are rejected outright.
        if request.user is None or not request.user.is_authenticated:
            return False
        # if (request.method == 'POST' and request.user.has_perm(
        #         "work_orders.add_workorderschema")):
        #     return True
        if (request.method == 'POST' and request.user.aerosimple_user and \
                request.user.aerosimple_user.has_permission("add_workorderschema")):
            return True
return False | [
"fengo4142@gmail.com"
] | fengo4142@gmail.com |
1d1e5c80adae2a85e36764be6c6786ca13998bc7 | 3a771b72dae1aae406b94726bcbcf73915577b18 | /q38.py | 0a85a5450c76b409276bf18b448122f28c6bc171 | [] | no_license | SHANK885/Python-Basic-Programs | 4fcb29280412baa63ffd33efba56d9f59770c9dc | 157f0f871b31c4523b6873ce5dfe0d6e26a6dc61 | refs/heads/master | 2021-07-18T18:24:10.455282 | 2018-11-19T07:02:27 | 2018-11-19T07:02:27 | 138,009,231 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 294 | py | '''
Define a function which can generate a list where the values are square of numbers between 1 and 20 (both included).
Then the function needs to print the last 5 elements in the list.
'''
def lis(lower, upper):
    """Print (and return) the last five squares of lower..upper inclusive.

    The module docstring asks for the *squares* of the numbers, but the
    original implementation appended the raw numbers; this fixes the value
    appended to ``i ** 2``. The printed tail is also returned so callers and
    tests can inspect it (existing callers that ignored the previous ``None``
    return are unaffected).
    """
    squares = [i ** 2 for i in range(lower, upper + 1)]
    last_five = squares[-5:]
    print(last_five)
    return last_five
lis(1,20) | [
"shashankshekhar885@gmail.com"
] | shashankshekhar885@gmail.com |
5b91c207f77e5910ac31b71ac9a0edf50cc7a5c1 | 5201e9f33a961cbfcc7b3fc5efc9bf9e57298b7b | /UIelems.py | 3ccd01343cc037be781546cf339be2d7f99b32e4 | [] | no_license | EveryoneHATEme/simple-photo-editor | 1f7ada2138ef8e665e7753cbb4b188e5e00a8084 | 57d55d800cb83218788a1e82bb0112abeee6aabf | refs/heads/master | 2020-09-08T05:10:15.412928 | 2019-11-11T16:40:18 | 2019-11-11T16:40:18 | 221,024,792 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 19,644 | py | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'untitled.ui'
#
# Created by: PyQt5 UI code generator 5.13.0
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
from draw import FilterHandler, BlurHandler, default_image
class ClickableLabel(QtWidgets.QLabel):
    """QLabel that emits a ``clicked`` signal on every mouse press."""

    # Custom signal fired before the default QLabel press handling runs.
    clicked = QtCore.pyqtSignal()

    def __init__(self, parent):
        super().__init__(parent)

    def mousePressEvent(self, event):
        # Emit first, then delegate to QLabel's normal press handling.
        self.clicked.emit()
        QtWidgets.QLabel.mousePressEvent(self, event)
class Ui_MainWindow(object):
def setupUi(self, MainWindow):
MainWindow.setObjectName("MainWindow")
MainWindow.resize(809, 600)
MainWindow.setMinimumSize(QtCore.QSize(700, 600))
self.centralwidget = QtWidgets.QWidget(MainWindow)
self.centralwidget.setObjectName("centralwidget")
self.verticalLayout_6 = QtWidgets.QVBoxLayout(self.centralwidget)
self.verticalLayout_6.setObjectName("verticalLayout_6")
self.image_label = QtWidgets.QLabel(self.centralwidget)
self.image_label.setMinimumSize(QtCore.QSize(0, 350))
self.image_label.setText("")
self.image_label.setAlignment(QtCore.Qt.AlignCenter)
self.image_label.setObjectName("image_label")
self.verticalLayout_6.addWidget(self.image_label)
self.menu_tab = QtWidgets.QTabWidget(self.centralwidget)
self.menu_tab.setFocusPolicy(QtCore.Qt.NoFocus)
self.menu_tab.setElideMode(QtCore.Qt.ElideNone)
self.menu_tab.setObjectName("menu_tab")
self.adjusting_tab = QtWidgets.QWidget()
self.adjusting_tab.setObjectName("adjusting_tab")
self.verticalLayout_7 = QtWidgets.QVBoxLayout(self.adjusting_tab)
self.verticalLayout_7.setObjectName("verticalLayout_7")
self.brightness_label = QtWidgets.QLabel(self.adjusting_tab)
self.brightness_label.setAlignment(QtCore.Qt.AlignCenter)
self.brightness_label.setObjectName("brightness_label")
self.verticalLayout_7.addWidget(self.brightness_label)
self.brightness_slider = QtWidgets.QSlider(self.adjusting_tab)
self.brightness_slider.setMaximum(100)
self.brightness_slider.setProperty("value", 50)
self.brightness_slider.setOrientation(QtCore.Qt.Horizontal)
self.brightness_slider.setObjectName("brightness_slider")
self.verticalLayout_7.addWidget(self.brightness_slider)
self.contrast_label = QtWidgets.QLabel(self.adjusting_tab)
self.contrast_label.setAlignment(QtCore.Qt.AlignCenter)
self.contrast_label.setObjectName("contrast_label")
self.verticalLayout_7.addWidget(self.contrast_label)
self.contrast_slider = QtWidgets.QSlider(self.adjusting_tab)
self.contrast_slider.setMaximum(100)
self.contrast_slider.setProperty("value", 50)
self.contrast_slider.setOrientation(QtCore.Qt.Horizontal)
self.contrast_slider.setObjectName("contrast_slider")
self.verticalLayout_7.addWidget(self.contrast_slider)
self.sharpness_label = QtWidgets.QLabel(self.adjusting_tab)
self.sharpness_label.setAlignment(QtCore.Qt.AlignCenter)
self.sharpness_label.setObjectName("sharpness_label")
self.verticalLayout_7.addWidget(self.sharpness_label)
self.sharpness_slider = QtWidgets.QSlider(self.adjusting_tab)
self.sharpness_slider.setMaximum(100)
self.sharpness_slider.setProperty("value", 50)
self.sharpness_slider.setOrientation(QtCore.Qt.Horizontal)
self.sharpness_slider.setObjectName("sharpness_slider")
self.verticalLayout_7.addWidget(self.sharpness_slider)
self.menu_tab.addTab(self.adjusting_tab, "")
self.crop_tab = QtWidgets.QWidget()
self.crop_tab.setObjectName("crop_tab")
self.horizontalLayout = QtWidgets.QHBoxLayout(self.crop_tab)
self.horizontalLayout.setObjectName("horizontalLayout")
self.verticalLayout_4 = QtWidgets.QVBoxLayout()
self.verticalLayout_4.setObjectName("verticalLayout_4")
self.crop_top_label = QtWidgets.QLabel(self.crop_tab)
self.crop_top_label.setLayoutDirection(QtCore.Qt.LeftToRight)
self.crop_top_label.setAlignment(QtCore.Qt.AlignCenter)
self.crop_top_label.setObjectName("crop_top_label")
self.verticalLayout_4.addWidget(self.crop_top_label)
self.crop_top_slider = QtWidgets.QSlider(self.crop_tab)
self.crop_top_slider.setMinimum(1)
self.crop_top_slider.setMaximum(100)
self.crop_top_slider.setProperty("value", 100)
self.crop_top_slider.setOrientation(QtCore.Qt.Horizontal)
self.crop_top_slider.setObjectName("crop_top_slider")
self.verticalLayout_4.addWidget(self.crop_top_slider)
self.crop_bottom_label = QtWidgets.QLabel(self.crop_tab)
self.crop_bottom_label.setAlignment(QtCore.Qt.AlignCenter)
self.crop_bottom_label.setObjectName("crop_bottom_label")
self.verticalLayout_4.addWidget(self.crop_bottom_label)
self.crop_bottom_slider = QtWidgets.QSlider(self.crop_tab)
self.crop_bottom_slider.setMinimum(1)
self.crop_bottom_slider.setMaximum(100)
self.crop_bottom_slider.setProperty("value", 100)
self.crop_bottom_slider.setOrientation(QtCore.Qt.Horizontal)
self.crop_bottom_slider.setObjectName("crop_bottom_slider")
self.verticalLayout_4.addWidget(self.crop_bottom_slider)
self.horizontalLayout.addLayout(self.verticalLayout_4)
self.verticalLayout_5 = QtWidgets.QVBoxLayout()
self.verticalLayout_5.setObjectName("verticalLayout_5")
self.crop_left_label = QtWidgets.QLabel(self.crop_tab)
self.crop_left_label.setAlignment(QtCore.Qt.AlignCenter)
self.crop_left_label.setObjectName("crop_left_label")
self.verticalLayout_5.addWidget(self.crop_left_label)
self.crop_left_slider = QtWidgets.QSlider(self.crop_tab)
self.crop_left_slider.setMinimum(1)
self.crop_left_slider.setMaximum(100)
self.crop_left_slider.setProperty("value", 100)
self.crop_left_slider.setOrientation(QtCore.Qt.Horizontal)
self.crop_left_slider.setObjectName("crop_left_slider")
self.verticalLayout_5.addWidget(self.crop_left_slider)
self.crop_right_label = QtWidgets.QLabel(self.crop_tab)
self.crop_right_label.setAlignment(QtCore.Qt.AlignCenter)
self.crop_right_label.setObjectName("crop_right_label")
self.verticalLayout_5.addWidget(self.crop_right_label)
self.crop_right_slider = QtWidgets.QSlider(self.crop_tab)
self.crop_right_slider.setMinimum(1)
self.crop_right_slider.setMaximum(100)
self.crop_right_slider.setProperty("value", 100)
self.crop_right_slider.setOrientation(QtCore.Qt.Horizontal)
self.crop_right_slider.setObjectName("crop_right_slider")
self.verticalLayout_5.addWidget(self.crop_right_slider)
self.horizontalLayout.addLayout(self.verticalLayout_5)
self.menu_tab.addTab(self.crop_tab, "")
self.rotate_tab = QtWidgets.QWidget()
self.rotate_tab.setObjectName("rotate_tab")
self.horizontalLayout_2 = QtWidgets.QHBoxLayout(self.rotate_tab)
self.horizontalLayout_2.setObjectName("horizontalLayout_2")
self.verticalLayout = QtWidgets.QVBoxLayout()
self.verticalLayout.setObjectName("verticalLayout")
self.flip_horizontal_button = QtWidgets.QPushButton(self.rotate_tab)
self.flip_horizontal_button.setObjectName("flip_horizontal_button")
self.verticalLayout.addWidget(self.flip_horizontal_button)
self.flip_vertical_button = QtWidgets.QPushButton(self.rotate_tab)
self.flip_vertical_button.setObjectName("flip_vertical_button")
self.verticalLayout.addWidget(self.flip_vertical_button)
self.horizontalLayout_2.addLayout(self.verticalLayout)
self.verticalLayout_2 = QtWidgets.QVBoxLayout()
self.verticalLayout_2.setObjectName("verticalLayout_2")
self.rotate_plus90_button = QtWidgets.QPushButton(self.rotate_tab)
self.rotate_plus90_button.setObjectName("rotate_plus90_button")
self.verticalLayout_2.addWidget(self.rotate_plus90_button)
self.rotate_minus90_button = QtWidgets.QPushButton(self.rotate_tab)
self.rotate_minus90_button.setObjectName("rotate_minus90_button")
self.verticalLayout_2.addWidget(self.rotate_minus90_button)
self.horizontalLayout_2.addLayout(self.verticalLayout_2)
self.menu_tab.addTab(self.rotate_tab, "")
self.filters_tab = QtWidgets.QWidget()
self.filters_tab.setObjectName("filters_tab")
self.horizontalLayout_4 = QtWidgets.QHBoxLayout(self.filters_tab)
self.horizontalLayout_4.setObjectName("horizontalLayout_4")
self.default_fliter_label = ClickableLabel(self.filters_tab)
self.default_fliter_label.setMaximumSize(QtCore.QSize(150, 150))
self.default_fliter_label.setStyleSheet("border: 1px solid blue")
self.default_fliter_label.setText("")
self.default_fliter_label.setObjectName("default_fliter_label")
self.default_fliter_label.func = default_image
self.default_fliter_label.flag = False
self.default_fliter_label.name = 'По_умолчанию'
self.horizontalLayout_4.addWidget(self.default_fliter_label)
self.black_white_filter_label = ClickableLabel(self.filters_tab)
self.black_white_filter_label.setMaximumSize(QtCore.QSize(150, 150))
self.black_white_filter_label.setStyleSheet("border: 1px solid gray")
self.black_white_filter_label.setText("")
self.black_white_filter_label.setObjectName("black_white_filter_label")
self.black_white_filter_label.func = FilterHandler.black_white
self.black_white_filter_label.flag = False
self.black_white_filter_label.name = 'Черно-белый'
self.horizontalLayout_4.addWidget(self.black_white_filter_label)
self.negative_filter_label = ClickableLabel(self.filters_tab)
self.negative_filter_label.setMaximumSize(QtCore.QSize(150, 150))
self.negative_filter_label.setStyleSheet("border: 1px solid gray")
self.negative_filter_label.setText("")
self.negative_filter_label.setObjectName("negative_filter_label")
self.negative_filter_label.func = FilterHandler.negative
self.negative_filter_label.flag = False
self.negative_filter_label.name = 'Негатив'
self.horizontalLayout_4.addWidget(self.negative_filter_label)
self.sepia_filter_label = ClickableLabel(self.filters_tab)
self.sepia_filter_label.setMaximumSize(QtCore.QSize(150, 150))
self.sepia_filter_label.setStyleSheet("border: 1px solid gray")
self.sepia_filter_label.setText("")
self.sepia_filter_label.setObjectName("sepia_filter_label")
self.sepia_filter_label.func = FilterHandler.sepia
self.sepia_filter_label.flag = False
self.sepia_filter_label.name = 'Сепия'
self.horizontalLayout_4.addWidget(self.sepia_filter_label)
self.menu_tab.addTab(self.filters_tab, "")
self.blur_tab = QtWidgets.QWidget()
self.blur_tab.setObjectName("blur_tab")
self.horizontalLayout_3 = QtWidgets.QHBoxLayout(self.blur_tab)
self.horizontalLayout_3.setObjectName("horizontalLayout_3")
self.default_blur_label = ClickableLabel(self.blur_tab)
self.default_blur_label.setMinimumSize(QtCore.QSize(0, 0))
self.default_blur_label.setMaximumSize(QtCore.QSize(150, 150))
self.default_blur_label.setStyleSheet("border: 1px solid blue")
self.default_blur_label.setText("")
self.default_blur_label.setObjectName("default_blur_label")
self.default_blur_label.func = default_image
self.default_blur_label.flag = False
self.default_blur_label.name = 'По_умолчанию'
self.horizontalLayout_3.addWidget(self.default_blur_label)
self.vertical_blur_label = ClickableLabel(self.blur_tab)
self.vertical_blur_label.setMinimumSize(QtCore.QSize(0, 0))
self.vertical_blur_label.setMaximumSize(QtCore.QSize(150, 150))
self.vertical_blur_label.setStyleSheet("border: 1px solid gray")
self.vertical_blur_label.setText("")
self.vertical_blur_label.setObjectName("vertical_blur_label")
self.vertical_blur_label.func = BlurHandler.vertical_blur
self.vertical_blur_label.flag = False
self.vertical_blur_label.name = 'Вертикальное размытие'
self.horizontalLayout_3.addWidget(self.vertical_blur_label)
self.horizontal_blur_label = ClickableLabel(self.blur_tab)
self.horizontal_blur_label.setMinimumSize(QtCore.QSize(0, 0))
self.horizontal_blur_label.setMaximumSize(QtCore.QSize(150, 150))
self.horizontal_blur_label.setStyleSheet("border: 1px solid gray")
self.horizontal_blur_label.setText("")
self.horizontal_blur_label.setObjectName("horizontal_blur_label")
self.horizontal_blur_label.func = BlurHandler.horizontal_blur
self.horizontal_blur_label.flag = False
self.horizontal_blur_label.name = 'Горизонтальное размытие'
self.horizontalLayout_3.addWidget(self.horizontal_blur_label)
self.menu_tab.addTab(self.blur_tab, "")
self.verticalLayout_6.addWidget(self.menu_tab)
MainWindow.setCentralWidget(self.centralwidget)
self.menubar = QtWidgets.QMenuBar(MainWindow)
self.menubar.setGeometry(QtCore.QRect(0, 0, 609, 21))
self.menubar.setObjectName("menubar")
self.menuFile = QtWidgets.QMenu(self.menubar)
self.menuFile.setObjectName("menuFile")
self.menuEdit = QtWidgets.QMenu(self.menubar)
self.menuEdit.setObjectName("menuEdit")
MainWindow.setMenuBar(self.menubar)
self.statusbar = QtWidgets.QStatusBar(MainWindow)
self.statusbar.setObjectName("statusbar")
MainWindow.setStatusBar(self.statusbar)
self.actionOpen = QtWidgets.QAction(MainWindow)
self.actionOpen.setObjectName("actionOpen")
self.actionSave = QtWidgets.QAction(MainWindow)
self.actionSave.setObjectName("actionSave")
self.actionAddFilter = QtWidgets.QAction(MainWindow)
self.actionAddFilter.setObjectName("actionAddFilter")
self.actionReset = QtWidgets.QAction(MainWindow)
self.actionReset.setObjectName("actionReset")
self.actionUndo = QtWidgets.QAction(MainWindow)
self.actionUndo.setObjectName("actionUndo")
self.actionRedo = QtWidgets.QAction(MainWindow)
self.actionRedo.setObjectName("actionRedo")
self.menuFile.addAction(self.actionOpen)
self.menuFile.addAction(self.actionSave)
self.menuEdit.addAction(self.actionUndo)
self.menuEdit.addAction(self.actionRedo)
self.menuEdit.addAction(self.actionAddFilter)
self.menuEdit.addAction(self.actionReset)
self.menubar.addAction(self.menuFile.menuAction())
self.menubar.addAction(self.menuEdit.menuAction())
self.crop_sliders = [self.crop_left_slider, self.crop_right_slider, self.crop_top_slider,
self.crop_bottom_slider]
self.adjusting_sliders = [self.brightness_slider, self.contrast_slider, self.sharpness_slider]
self.filter_labels = [self.default_fliter_label,
self.sepia_filter_label, self.negative_filter_label, self.black_white_filter_label]
self.blur_labels = [self.default_blur_label, self.vertical_blur_label, self.horizontal_blur_label]
self.retranslateUi(MainWindow)
self.menu_tab.setCurrentIndex(0)
QtCore.QMetaObject.connectSlotsByName(MainWindow)
    def retranslateUi(self, MainWindow):
        """Assign all user-visible (Russian) strings via Qt's translation layer.

        Generated by Qt Designer; called once from setupUi.
        """
        _translate = QtCore.QCoreApplication.translate
        MainWindow.setWindowTitle(_translate("MainWindow", "Simple Photo Editor"))
        self.brightness_label.setText(_translate("MainWindow", "Яркость"))
        self.contrast_label.setText(_translate("MainWindow", "Контраст"))
        self.sharpness_label.setText(_translate("MainWindow", "Резкость"))
        self.menu_tab.setTabText(self.menu_tab.indexOf(self.adjusting_tab), _translate("MainWindow", "Регулировка"))
        self.crop_top_label.setText(_translate("MainWindow", "Верхняя сторона:"))
        self.crop_bottom_label.setText(_translate("MainWindow", "Нижняя сторона:"))
        self.crop_left_label.setText(_translate("MainWindow", "Левая сторона:"))
        self.crop_right_label.setText(_translate("MainWindow", "Правая сторона:"))
        self.menu_tab.setTabText(self.menu_tab.indexOf(self.crop_tab), _translate("MainWindow", "Обрезать"))
        self.flip_horizontal_button.setText(_translate("MainWindow", "Развернуть горизонтально"))
        self.flip_vertical_button.setText(_translate("MainWindow", "Развернуть вертикально"))
        self.rotate_plus90_button.setText(_translate("MainWindow", "Повернуть на +90°"))
        self.rotate_minus90_button.setText(_translate("MainWindow", "Повернуть на -90°"))
        self.menu_tab.setTabText(self.menu_tab.indexOf(self.rotate_tab), _translate("MainWindow", "Повернуть"))
        self.menu_tab.setTabText(self.menu_tab.indexOf(self.filters_tab), _translate("MainWindow", "Фильтры"))
        self.menu_tab.setTabText(self.menu_tab.indexOf(self.blur_tab), _translate("MainWindow", "Размытие"))
        self.menuFile.setTitle(_translate("MainWindow", "Файл"))
        self.menuEdit.setTitle(_translate("MainWindow", "Правка"))
        self.actionOpen.setText(_translate("MainWindow", "Открыть"))
        self.actionOpen.setShortcut(_translate("MainWindow", "Ctrl+O"))
        self.actionSave.setText(_translate("MainWindow", "Сохранить"))
        self.actionSave.setShortcut(_translate("MainWindow", "Ctrl+S"))
        self.actionAddFilter.setText(_translate("MainWindow", "Добавить фильтр"))
        self.actionReset.setText(_translate("MainWindow", "Сброс"))
        self.actionReset.setShortcut(_translate("MainWindow", "Ctrl+R"))
        self.actionUndo.setText(_translate("MainWindow", "Шаг назад"))
        self.actionUndo.setShortcut(_translate("MainWindow", "Ctrl+Z"))
        self.actionRedo.setText(_translate("MainWindow", "Шаг вперед"))
        self.actionRedo.setShortcut(_translate("MainWindow", "Ctrl+Shift+Z"))
def add_filter_label(self, func):
exec(f'self.{func.__name__}_label = ClickableLabel(self.filters_tab)')
eval(f'self.{func.__name__}_label.setMaximumSize(QtCore.QSize(150, 150))')
eval(f'self.{func.__name__}_label.setStyleSheet("border: 1px solid gray")')
eval(f'self.{func.__name__}_label.setText("")')
eval(f'self.{func.__name__}_label.setObjectName("self.{func.__name__}_label")')
eval(f'self.horizontalLayout_4.addWidget(self.{func.__name__}_label)')
return eval(f'self.{func.__name__}_label')
| [
"noreply@github.com"
] | noreply@github.com |
89e4b0cc8cc580454793178a3e90e399b693f848 | 1cd853babf022779f3392eb9e1781f952d4f2c07 | /proposal.py | 45a17c9f760c1ab2575741bea87304eb7b516340 | [
"Apache-2.0"
] | permissive | ksrhamdi/proCon3 | 84b53027305f609267393701b49f3e7efade9097 | f0d214651dae5cbdbd4f7ff881269fb1cc5501ad | refs/heads/master | 2022-11-10T06:58:07.931219 | 2020-06-03T18:01:10 | 2020-06-03T18:01:10 | 276,995,886 | 0 | 0 | Apache-2.0 | 2020-07-03T22:15:11 | 2020-07-03T22:15:10 | null | UTF-8 | Python | false | false | 5,885 | py | # Import external modules.
from google.appengine.api import memcache
from google.appengine.ext import ndb
import logging
import random
import time
# Import local modules.
from configuration import const as conf
from constants import Constants
const = Constants()
const.MAX_RETRY = 3  # datastore transaction retry count
const.MIN_REAGGREGATE_DELAY_SEC = 60  # minimum delay between vote re-aggregation runs
# Parent key: RequestForProposals? No, use KeyProperty instead.
class Proposal(ndb.Model):
    """Datastore record for one proposal, with denormalized pro/con vote totals."""
    requestId = ndb.StringProperty() # May be null
    title = ndb.StringProperty()
    detail = ndb.StringProperty()
    creator = ndb.StringProperty()
    allowEdit = ndb.BooleanProperty()
    # Epoch seconds when the last vote re-aggregation was started (rate limiter).
    voteAggregateStartTime = ndb.IntegerProperty()
    numPros = ndb.IntegerProperty( default=0 )
    numCons = ndb.IntegerProperty( default=0 )
    netPros = ndb.IntegerProperty( default=0 ) # numPros - numCons, kept queryable for ranking
    # Epoch seconds when numPros/numCons were last recomputed from the shards.
    lastSumUpdateTime = ndb.IntegerProperty( default=0 )
@ndb.transactional( retries=const.MAX_RETRY )
def setEditable( proposalId, editable ):
    """Transactionally set the allowEdit flag on the given Proposal."""
    proposalRecord = Proposal.get_by_id( int(proposalId) )
    proposalRecord.allowEdit = editable
    proposalRecord.put()
#####################################################################################
# Use tasklets for async counting pros/cons per proposal.
# If enough delay since voteAggregateStartTime... updates voteAggregateStartTime and returns flag.
@ndb.transactional( retries=const.MAX_RETRY )
def __setVoteAggStartTime( proposalId ):
    """Claim the right to re-aggregate votes for a proposal.

    Returns True (and stamps voteAggregateStartTime) only if at least
    MIN_REAGGREGATE_DELAY_SEC have passed since the previous run.
    """
    proposalRecord = Proposal.get_by_id( int(proposalId) )
    now = int( time.time() )
    if proposalRecord.voteAggregateStartTime + const.MIN_REAGGREGATE_DELAY_SEC > now:
        return False
    proposalRecord.voteAggregateStartTime = now
    proposalRecord.put()
    return True
# Retrieves all reason vote counts for a proposal, sums their pro/con counts, and updates proposal pro/con counts.
@ndb.tasklet
def __updateVoteAggs( proposalId ):
    # NOTE(review): `Reason` is not imported in this module — confirm it is
    # imported elsewhere before this tasklet runs, otherwise this raises NameError.
    reasons = yield Reason.query( Reason.proposalId==proposalId ).fetch_async() # Async
    numPros = sum( reason.voteCount for reason in reasons if reason.proOrCon == conf.PRO )
    numCons = sum( reason.voteCount for reason in reasons if reason.proOrCon == conf.CON )
    __setNumProsAndCons( proposalId, numPros, numCons ) # Transaction
#####################################################################################
# Use sharded counter to count pros/cons per proposal.
const.NUM_SHARDS = 10  # counter shards per proposal (spreads write contention)
const.SHARD_KEY_TEMPLATE = '{}-{}'  # shard key: "<proposalId>-<shardNumber>"
const.COUNTER_CACHE_SEC = 10  # minimum seconds between recomputing Proposal sums
class ProposalShard( ndb.Model ):
    """One shard of a proposal's pro/con counters (sharded-counter pattern)."""
    requestId = ndb.StringProperty()
    proposalId = ndb.StringProperty()
    numPros = ndb.IntegerProperty( default=0 )
    numCons = ndb.IntegerProperty( default=0 )
@ndb.tasklet
def incrementTasklet( requestId, proposalId, prosInc, consInc ):
    """Increment a random counter shard, then (rate-limited) refresh the
    cached pro/con sums stored on the Proposal record."""
    logging.debug( 'proposal.incrementAsync() proposalId={}'.format(proposalId) )
    yield __incrementShard( requestId, proposalId, prosInc, consInc ) # Pause and wait for async transaction
    # Cache sums in Proposal record, to make top proposals queryable by score.
    # Rate-limit updates to Proposal, by storing last-update time
    now = int( time.time() )
    updateNow = yield __checkAndSetLastSumTime( proposalId, now ) # Pause and wait for async transaction
    logging.debug( 'proposal.incrementAsync() updateNow=' + str(updateNow) )
    if updateNow:
        shardRecords = yield __getProposalShardsAsync( proposalId ) # Pause and wait for async
        numPros = sum( s.numPros for s in shardRecords if s )
        numCons = sum( s.numCons for s in shardRecords if s )
        logging.debug( 'proposal.incrementAsync() numPros=' + str(numPros) + ' numCons=' + str(numCons) )
        yield __setNumProsAndConsAsync( proposalId, numPros, numCons ) # Pause and wait for async transaction
        logging.debug( 'proposal.incrementAsync() __setNumProsAndCons() done' )
@ndb.transactional_async( retries=const.MAX_RETRY )
def __incrementShard( requestId, proposalId, prosInc, consInc ):
    """Add the increments to one randomly chosen shard, creating it on first use."""
    shardNum = random.randint( 0, const.NUM_SHARDS - 1 )
    shardKeyString = const.SHARD_KEY_TEMPLATE.format( proposalId, shardNum )
    shardRec = ProposalShard.get_by_id( shardKeyString )
    if shardRec is None:
        shardRec = ProposalShard( id=shardKeyString, requestId=requestId, proposalId=proposalId )
    shardRec.numPros += prosInc
    shardRec.numCons += consInc
    shardRec.put()
@ndb.transactional_async( retries=const.MAX_RETRY )
def __checkAndSetLastSumTime( proposalId, now ):
    """Return True and stamp lastSumUpdateTime if the cached sums are stale
    (older than COUNTER_CACHE_SEC); otherwise return False."""
    logging.debug( 'proposal.__checkAndSetLastSumTime() proposalId={}'.format(proposalId) )
    proposalRecord = Proposal.get_by_id( int(proposalId) )
    logging.debug( 'proposal.__checkAndSetLastSumTime() proposalRecord={}'.format(proposalRecord) )
    if proposalRecord.lastSumUpdateTime + const.COUNTER_CACHE_SEC < now:
        proposalRecord.lastSumUpdateTime = now
        proposalRecord.put()
        return True
    else:
        return False
def __getProposalShardsAsync( proposalId ):
    """Kick off async fetches for all NUM_SHARDS shard records of a proposal.

    Returns the list of futures from get_multi_async; missing shards yield None.
    """
    shardKeyStrings = [ const.SHARD_KEY_TEMPLATE.format(proposalId, s) for s in range(const.NUM_SHARDS) ]
    logging.debug( 'proposal.__getProposalShardsAsync() shardKeyStrings=' + str(shardKeyStrings) )
    shardKeys = [ ndb.Key(ProposalShard, s) for s in shardKeyStrings ]
    return ndb.get_multi_async( shardKeys )
@ndb.transactional_async( retries=const.MAX_RETRY )
def __setNumProsAndConsAsync( proposalId, numPros, numCons ):
    """Async-transactional wrapper around __setNumProsAndConsImp."""
    __setNumProsAndConsImp( proposalId, numPros, numCons )
@ndb.transactional( retries=const.MAX_RETRY )
def __setNumProsAndCons( proposalId, numPros, numCons ):
    """Synchronous-transactional wrapper around __setNumProsAndConsImp."""
    __setNumProsAndConsImp( proposalId, numPros, numCons )
def __setNumProsAndConsImp( proposalId, numPros, numCons ):
    """Store the pro/con totals (and their difference) on the Proposal record.

    Must be called inside a transaction; see the two wrappers above.
    """
    proposalRecord = Proposal.get_by_id( int(proposalId) )
    proposalRecord.numPros = numPros
    proposalRecord.numCons = numCons
    proposalRecord.netPros = numPros - numCons
    proposalRecord.put()
| [
"you@example.com"
] | you@example.com |
cb403497b8afbd364fbe73234f0890db03722801 | 04b1799fca40a9a4fafa3540d0e9abf1e32fcdc5 | /test_files/configs/dummy_dataset_config.py | 7833c62628c083136e6e1ffed4054068865fa46a | [] | no_license | selcouthlyBlue/bi_lstm_ocr | 0afe71e181b2c2a2a930f821c8b8c1c4557a0f22 | 6c3c61380546a896d12c4dd87eb277ad020565b2 | refs/heads/master | 2021-08-14T15:28:49.751921 | 2017-11-16T04:16:34 | 2017-11-16T04:16:34 | 105,123,935 | 3 | 6 | null | null | null | null | UTF-8 | Python | false | false | 233 | py | class DatasetConfig:
def __init__(self):
self.labels_file = "C:/Users/asus.11/Documents/bi_lstm_ocr/test_files/dummy_labels_file.txt"
self.data_dir = "C:/Users/asus.11/Documents/bi_lstm_ocr/test_files/dummy_data/" | [
"jvgonzalvo@up.edu.ph"
] | jvgonzalvo@up.edu.ph |
ad8bc92067a56e68d2d6a41e02f85a5fc6f954e0 | 1d9a6406c859fda186f520bb4472c551fc572c7b | /src/hopla/cli/groupcmds/hatch.py | e3c85019653f241bbc5b6a5ab861095a0e1e838d | [
"Apache-2.0"
] | permissive | rickie/hopla | af21b794ce6719d402721550e1ee4091790410b6 | 24a422194e42c03d5877dc167b2b07147326a595 | refs/heads/main | 2023-08-13T17:33:03.612293 | 2021-10-12T12:13:25 | 2021-10-12T12:13:25 | 408,538,704 | 0 | 0 | Apache-2.0 | 2021-09-20T17:30:15 | 2021-09-20T17:30:15 | null | UTF-8 | Python | false | false | 873 | py | #!/usr/bin/env python3
"""
The module with CLI code that handles the `hopla hatch` GROUP command.
"""
import sys
from typing import NoReturn
import click
import requests
from hopla.hoplalib.hatchery.hatchcontroller import HatchRequester
# Top-level Click group; concrete hatch subcommands are registered on it
# elsewhere in the package (presumably via @hatch.command — confirm).
@click.group()
def hatch():
    """GROUP for hatching eggs."""
def hatch_egg(*, egg_name: str, potion_name: str) -> NoReturn:
    """
    Hatch an egg by performing an API request and echo the result to the
    terminal.
    """
    requester = HatchRequester(egg_name=egg_name,
                               hatch_potion_name=potion_name)
    response: requests.Response = requester.post_hatch_egg_request()
    payload: dict = response.json()
    # Failure path first: report the API error and exit non-zero.
    if payload["success"] is not True:
        click.echo(f"{payload['error']}: {payload['message']}")
        sys.exit(1)
    click.echo(f"Successfully hatched a {egg_name}-{potion_name}.")
    sys.exit(0)
| [
"31448155+melvio@users.noreply.github.com"
] | 31448155+melvio@users.noreply.github.com |
3106b4bc8e71a298aca6998c29c4550feecf1a1e | 53fab060fa262e5d5026e0807d93c75fb81e67b9 | /backup/user_057/ch136_2020_04_01_12_32_08_786356.py | 4df310a8d6f0a4bc15990114f812d772610cae60 | [] | no_license | gabriellaec/desoft-analise-exercicios | b77c6999424c5ce7e44086a12589a0ad43d6adca | 01940ab0897aa6005764fc220b900e4d6161d36b | refs/heads/main | 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 54 | py | dinheiro = 10
# Fixed syntax error: .format() must be called on the string, inside print().
print('dinheiro: {0}'.format(dinheiro))
| [
"you@example.com"
] | you@example.com |
96a67941448fd56f8f1010061052ed22f03db22f | 9aa80d470054cbe3dd87be5db83e05bcbff2c3e9 | /License-Plate-Recognition-master/predict.py | e51bf9a637e8af3678e7e7f11fd621d140416e28 | [
"MIT"
] | permissive | fog-dong/test | 9c28cf902f40288e082a958e42c8fabd16ea386b | fbf38d97bb5750b08097178fa532b24e47f1bfb9 | refs/heads/master | 2023-02-16T03:17:40.087509 | 2020-12-28T17:32:33 | 2020-12-28T17:32:33 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 18,351 | py | import cv2
import numpy as np
from numpy.linalg import norm
import sys
import os
import json
SZ = 20 # side length (px) of the square character images used for SVM training
MAX_WIDTH = 1000 # maximum width of the original input photo
Min_Area = 2000 # minimum contour area accepted as a plate candidate (name says Min; usage is `> Min_Area`)
PROVINCE_START = 1000 # label offset so province-character classes don't collide with ord() codes of letters/digits
def imreadex(filename):
    """Read an image file into a BGR array.

    Goes through np.fromfile + cv2.imdecode instead of cv2.imread so that
    paths with non-ASCII characters load correctly.
    """
    raw = np.fromfile(filename, dtype=np.uint8)
    return cv2.imdecode(raw, cv2.IMREAD_COLOR)
def point_limit(point):
    """Clamp both coordinates of a mutable [x, y] point to be non-negative, in place."""
    for axis in (0, 1):
        if point[axis] < 0:
            point[axis] = 0
def find_waves(threshold, histogram):
    """Find wave peaks in a projection histogram, for splitting characters.

    Returns a list of (start, end) index pairs for runs of bins above
    ``threshold``. Quirks preserved from the original: a dip shorter than
    3 bins does not close a peak, and a peak still open at the right edge
    is kept only if it is wider than 4 bins.
    """
    peaks = []
    climb_start = -1
    inside = False
    if histogram[0] > threshold:
        climb_start = 0
        inside = True
    for pos, level in enumerate(histogram):
        if inside and level < threshold:
            if pos - climb_start > 2:
                inside = False
                peaks.append((climb_start, pos))
        elif not inside and level >= threshold:
            inside = True
            climb_start = pos
    # `pos` intentionally leaks from the loop, matching the original logic.
    if inside and climb_start != -1 and pos - climb_start > 4:
        peaks.append((climb_start, pos))
    return peaks
def seperate_card(img, waves):
    """Split the plate image into per-character strips.

    Each (start, end) pair in ``waves`` selects a column range of ``img``;
    the matching vertical slices are returned in order.
    """
    return [img[:, left:right] for left, right in waves]
def deskew(img):
    """Straighten a character image using its image moments (from the OpenCV SVM sample)."""
    moments = cv2.moments(img)
    if abs(moments['mu02']) < 1e-2:
        # No measurable vertical skew: return an untouched copy.
        return img.copy()
    skew = moments['mu11'] / moments['mu02']
    shear = np.float32([[1, skew, -0.5 * SZ * skew], [0, 1, 0]])
    return cv2.warpAffine(img, shear, (SZ, SZ), flags=cv2.WARP_INVERSE_MAP | cv2.INTER_LINEAR)
def preprocess_hog(digits):
    """Compute a HOG feature vector per character image (from the OpenCV SVM sample).

    For each image: Sobel gradients -> magnitude/angle -> 16-bin orientation
    histograms over the four 10x10 quadrants -> concatenated and normalized
    with the Hellinger-kernel trick.

    Fix: renamed local `bin` to `bins` — it shadowed the builtin bin().
    """
    samples = []
    for img in digits:
        gx = cv2.Sobel(img, cv2.CV_32F, 1, 0)
        gy = cv2.Sobel(img, cv2.CV_32F, 0, 1)
        mag, ang = cv2.cartToPolar(gx, gy)
        bin_n = 16
        bins = np.int32(bin_n*ang/(2*np.pi))
        bin_cells = bins[:10,:10], bins[10:,:10], bins[:10,10:], bins[10:,10:]
        mag_cells = mag[:10,:10], mag[10:,:10], mag[:10,10:], mag[10:,10:]
        hists = [np.bincount(b.ravel(), m.ravel(), bin_n) for b, m in zip(bin_cells, mag_cells)]
        hist = np.hstack(hists)
        # transform to Hellinger kernel (L1-normalize, sqrt, then L2-normalize)
        eps = 1e-7
        hist /= hist.sum() + eps
        hist = np.sqrt(hist)
        hist /= norm(hist) + eps
        samples.append(hist)
    return np.float32(samples)
# Flat list of (training-folder name, province abbreviation character) pairs;
# not guaranteed to include every province.
provinces = [
"zh_cuan", "川",
"zh_e", "鄂",
"zh_gan", "赣",
"zh_gan1", "甘",
"zh_gui", "贵",
"zh_gui1", "桂",
"zh_hei", "黑",
"zh_hu", "沪",
"zh_ji", "冀",
"zh_jin", "津",
"zh_jing", "京",
"zh_jl", "吉",
"zh_liao", "辽",
"zh_lu", "鲁",
"zh_meng", "蒙",
"zh_min", "闽",
"zh_ning", "宁",
"zh_qing", "靑",
"zh_qiong", "琼",
"zh_shan", "陕",
"zh_su", "苏",
"zh_sx", "晋",
"zh_wan", "皖",
"zh_xiang", "湘",
"zh_xin", "新",
"zh_yu", "豫",
"zh_yu1", "渝",
"zh_yue", "粤",
"zh_yun", "云",
"zh_zang", "藏",
"zh_zhe", "浙"
]
class StatModel(object):
    """Base persistence wrapper around an OpenCV ML model stored in ``self.model``."""
    def load(self, fn):
        # cv2 ml models return a new instance from load(); rebind it.
        self.model = self.model.load(fn)
    def save(self, fn):
        self.model.save(fn)
class SVM(StatModel):
    """Thin wrapper over cv2.ml's C-SVC SVM with an RBF kernel."""
    def __init__(self, C = 1, gamma = 0.5):
        self.model = cv2.ml.SVM_create()
        self.model.setGamma(gamma)
        self.model.setC(C)
        self.model.setKernel(cv2.ml.SVM_RBF)
        self.model.setType(cv2.ml.SVM_C_SVC)
    # Train the SVM: one feature row per sample.
    def train(self, samples, responses):
        self.model.train(samples, cv2.ml.ROW_SAMPLE, responses)
    # Character recognition: return the flat array of predicted labels.
    def predict(self, samples):
        r = self.model.predict(samples)
        return r[1].ravel()
class CardPredictor:
def __init__(self):
#车牌识别的部分参数保存在js中,便于根据图片分辨率做调整
f = open('config.js')
j = json.load(f)
for c in j["config"]:
if c["open"]:
self.cfg = c.copy()
break
else:
raise RuntimeError('没有设置有效配置参数')
    def __del__(self):
        # Persist trained models to disk when the predictor is garbage-collected.
        self.save_traindata()
    def train_svm(self):
        """Load (or train from image folders) the two SVM character classifiers.

        `self.model` recognizes letters/digits (labels are ord() codes);
        `self.modelchinese` recognizes province characters (labels are offsets
        into the `provinces` list plus PROVINCE_START). Saved models are reused
        from svm.dat / svmchinese.dat when present.
        NOTE(review): the "train\\chars2" paths are Windows-style — confirm
        on other platforms.
        """
        # Recognizes English letters and digits.
        self.model = SVM(C=1, gamma=0.5)
        # Recognizes Chinese province characters.
        self.modelchinese = SVM(C=1, gamma=0.5)
        if os.path.exists("svm.dat"):
            self.model.load("svm.dat")
        else:
            chars_train = []
            chars_label = []
            for root, dirs, files in os.walk("train\\chars2"):
                # Only single-character folder names (one folder per class).
                if len(os.path.basename(root)) > 1:
                    continue
                root_int = ord(os.path.basename(root))
                for filename in files:
                    filepath = os.path.join(root,filename)
                    digit_img = cv2.imread(filepath)
                    digit_img = cv2.cvtColor(digit_img, cv2.COLOR_BGR2GRAY)
                    chars_train.append(digit_img)
                    #chars_label.append(1)
                    chars_label.append(root_int)
            chars_train = list(map(deskew, chars_train))
            chars_train = preprocess_hog(chars_train)
            #chars_train = chars_train.reshape(-1, 20, 20).astype(np.float32)
            chars_label = np.array(chars_label)
            self.model.train(chars_train, chars_label)
        if os.path.exists("svmchinese.dat"):
            self.modelchinese.load("svmchinese.dat")
        else:
            chars_train = []
            chars_label = []
            for root, dirs, files in os.walk("train\\charsChinese"):
                if not os.path.basename(root).startswith("zh_"):
                    continue
                pinyin = os.path.basename(root)
                index = provinces.index(pinyin) + PROVINCE_START + 1 # +1 skips the pinyin entry to its paired Chinese character
                for filename in files:
                    filepath = os.path.join(root,filename)
                    digit_img = cv2.imread(filepath)
                    digit_img = cv2.cvtColor(digit_img, cv2.COLOR_BGR2GRAY)
                    chars_train.append(digit_img)
                    #chars_label.append(1)
                    chars_label.append(index)
            chars_train = list(map(deskew, chars_train))
            chars_train = preprocess_hog(chars_train)
            #chars_train = chars_train.reshape(-1, 20, 20).astype(np.float32)
            chars_label = np.array(chars_label)
            print(chars_train.shape)
            self.modelchinese.train(chars_train, chars_label)
    def save_traindata(self):
        """Write the trained SVM models to disk unless the files already exist."""
        if not os.path.exists("svm.dat"):
            self.model.save("svm.dat")
        if not os.path.exists("svmchinese.dat"):
            self.modelchinese.save("svmchinese.dat")
    def accurate_place(self, card_img_hsv, limit1, limit2, color):
        """Tighten the plate bounding box inside an HSV crop by color scanning.

        A pixel "matches" when its hue is in (limit1, limit2] and it is
        sufficiently saturated/bright (S > 34, V > 46). Rows/columns with
        enough matching pixels define the refined box.
        Returns (xl, xr, yh, yl): left/right column and bottom/top row bounds.
        """
        row_num, col_num = card_img_hsv.shape[:2]
        xl = col_num
        xr = 0
        yh = 0
        yl = row_num
        #col_num_limit = self.cfg["col_num_limit"]
        row_num_limit = self.cfg["row_num_limit"]
        col_num_limit = col_num * 0.8 if color != "green" else col_num * 0.5# green plates have a gradient, so accept fewer matches
        # Pass 1: find the top/bottom rows with enough matching pixels.
        for i in range(row_num):
            count = 0
            for j in range(col_num):
                H = card_img_hsv.item(i, j, 0)
                S = card_img_hsv.item(i, j, 1)
                V = card_img_hsv.item(i, j, 2)
                if limit1 < H <= limit2 and 34 < S and 46 < V:
                    count += 1
            if count > col_num_limit:
                if yl > i:
                    yl = i
                if yh < i:
                    yh = i
        # Pass 2: find the left/right columns the same way.
        for j in range(col_num):
            count = 0
            for i in range(row_num):
                H = card_img_hsv.item(i, j, 0)
                S = card_img_hsv.item(i, j, 1)
                V = card_img_hsv.item(i, j, 2)
                if limit1 < H <= limit2 and 34 < S and 46 < V:
                    count += 1
            if count > row_num - row_num_limit:
                if xl > j:
                    xl = j
                if xr < j:
                    xr = j
        return xl, xr, yh, yl
    def predict(self, car_pic, resize_rate=1):
        """Locate a license plate in *car_pic* and recognize its characters.

        ``car_pic`` may be a file path (str) or an already-loaded image.
        Returns ``(predict_result, roi, card_color)``: the recognized
        characters, the cropped plate image, and the detected plate color.
        Only blue, green and yellow plates are recognized.
        """
        if type(car_pic) == type(""):
            img = imreadex(car_pic)
        else:
            img = car_pic
        pic_hight, pic_width = img.shape[:2]
        if resize_rate != 1:
            img = cv2.resize(img, (int(pic_width*resize_rate), int(pic_hight*resize_rate)), interpolation=cv2.INTER_AREA)
            pic_hight, pic_width = img.shape[:2]
        print("h,w:", pic_hight, pic_width)
        blur = self.cfg["blur"]
        # Gaussian denoising
        if blur > 0:
            img = cv2.GaussianBlur(img, (blur, blur), 0)  # kernel size tuned to the picture resolution
        oldimg = img
        img = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
        #equ = cv2.equalizeHist(img)
        #img = np.hstack((img, equ))
        # Remove image regions that cannot be a plate
        kernel = np.ones((20, 20), np.uint8)
        img_opening = cv2.morphologyEx(img, cv2.MORPH_OPEN, kernel)
        img_opening = cv2.addWeighted(img, 1, img_opening, -1, 0);
        # Find image edges
        ret, img_thresh = cv2.threshold(img_opening, 0, 255, cv2.THRESH_BINARY + cv2.THRESH_OTSU)
        img_edge = cv2.Canny(img_thresh, 100, 200)
        # Use morphological opening and closing so the edges merge into connected blobs
        kernel = np.ones((self.cfg["morphologyr"], self.cfg["morphologyc"]), np.uint8)
        img_edge1 = cv2.morphologyEx(img_edge, cv2.MORPH_CLOSE, kernel)
        img_edge2 = cv2.morphologyEx(img_edge1, cv2.MORPH_OPEN, kernel)
        # Find the rectangular regions formed by the edges; the plate is inside one of them
        contours, hierarchy = cv2.findContours(img_edge2, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
        contours = [cnt for cnt in contours if cv2.contourArea(cnt) > Min_Area]
        print('len(contours)', len(contours))
        # Rule out, one by one, rectangles that cannot be a plate
        car_contours = []
        for cnt in contours:
            rect = cv2.minAreaRect(cnt)
            area_width, area_height = rect[1]
            if area_width < area_height:
                area_width, area_height = area_height, area_width
            wh_ratio = area_width / area_height
            #print(wh_ratio)
            # A plate's width/height ratio is between 2 and 5.5; discard everything else
            if wh_ratio > 2 and wh_ratio < 5.5:
                car_contours.append(rect)
                box = cv2.boxPoints(rect)
                box = np.int0(box)
                #oldimg = cv2.drawContours(oldimg, [box], 0, (0, 0, 255), 2)
                #cv2.imshow("edge4", oldimg)
                #cv2.waitKey(0)
        print(len(car_contours))
        print("精确定位")
        card_imgs = []
        # Candidate rectangles may be tilted and must be deskewed before color analysis
        for rect in car_contours:
            if rect[2] > -1 and rect[2] < 1:  # force a small angle so left/high/right/low resolve correctly
                angle = 1
            else:
                angle = rect[2]
            rect = (rect[0], (rect[1][0]+5, rect[1][1]+5), angle)  # enlarge the box so the plate border is not clipped
            box = cv2.boxPoints(rect)
            heigth_point = right_point = [0, 0]
            left_point = low_point = [pic_width, pic_hight]
            for point in box:
                if left_point[0] > point[0]:
                    left_point = point
                if low_point[1] > point[1]:
                    low_point = point
                if heigth_point[1] < point[1]:
                    heigth_point = point
                if right_point[0] < point[0]:
                    right_point = point
            if left_point[1] <= right_point[1]:  # positive skew angle
                new_right_point = [right_point[0], heigth_point[1]]
                pts2 = np.float32([left_point, heigth_point, new_right_point])  # only the character height needs to change
                pts1 = np.float32([left_point, heigth_point, right_point])
                M = cv2.getAffineTransform(pts1, pts2)
                dst = cv2.warpAffine(oldimg, M, (pic_width, pic_hight))
                point_limit(new_right_point)
                point_limit(heigth_point)
                point_limit(left_point)
                card_img = dst[int(left_point[1]):int(heigth_point[1]), int(left_point[0]):int(new_right_point[0])]
                card_imgs.append(card_img)
                #cv2.imshow("card", card_img)
                #cv2.waitKey(0)
            elif left_point[1] > right_point[1]:  # negative skew angle
                new_left_point = [left_point[0], heigth_point[1]]
                pts2 = np.float32([new_left_point, heigth_point, right_point])  # only the character height needs to change
                pts1 = np.float32([left_point, heigth_point, right_point])
                M = cv2.getAffineTransform(pts1, pts2)
                dst = cv2.warpAffine(oldimg, M, (pic_width, pic_hight))
                point_limit(right_point)
                point_limit(heigth_point)
                point_limit(new_left_point)
                card_img = dst[int(right_point[1]):int(heigth_point[1]), int(new_left_point[0]):int(right_point[0])]
                card_imgs.append(card_img)
                #cv2.imshow("card", card_img)
                #cv2.waitKey(0)
        # Use color to reject non-plate rectangles; only blue, green and yellow plates are handled
        colors = []
        for card_index,card_img in enumerate(card_imgs):
            green = yello = blue = black = white = 0
            card_img_hsv = cv2.cvtColor(card_img, cv2.COLOR_BGR2HSV)
            # The conversion may fail when the deskewing above produced a bad crop
            if card_img_hsv is None:
                continue
            row_num, col_num= card_img_hsv.shape[:2]
            card_img_count = row_num * col_num
            for i in range(row_num):
                for j in range(col_num):
                    H = card_img_hsv.item(i, j, 0)
                    S = card_img_hsv.item(i, j, 1)
                    V = card_img_hsv.item(i, j, 2)
                    if 11 < H <= 34 and S > 34:  # hue band for yellow
                        yello += 1
                    elif 35 < H <= 99 and S > 34:  # hue band for green
                        green += 1
                    elif 99 < H <= 124 and S > 34:  # hue band for blue
                        blue += 1
                    if 0 < H <180 and 0 < S < 255 and 0 < V < 46:
                        black += 1
                    elif 0 < H <180 and 0 < S < 43 and 221 < V < 225:
                        white += 1
            color = "no"
            limit1 = limit2 = 0
            if yello*2 >= card_img_count:
                color = "yello"
                limit1 = 11
                limit2 = 34  # some images have a color cast toward green
            elif green*2 >= card_img_count:
                color = "green"
                limit1 = 35
                limit2 = 99
            elif blue*2 >= card_img_count:
                color = "blue"
                limit1 = 100
                limit2 = 124  # some images have a color cast toward purple
            elif black + white >= card_img_count*0.7:  # TODO
                color = "bw"
            print(color)
            colors.append(color)
            print(blue, green, yello, black, white, card_img_count)
            #cv2.imshow("color", card_img)
            #cv2.waitKey(0)
            if limit1 == 0:
                continue
            # Above: plate color decided
            # Below: re-localize using the color to shrink non-plate borders
            xl, xr, yh, yl = self.accurate_place(card_img_hsv, limit1, limit2, color)
            if yl == yh and xl == xr:
                continue
            need_accurate = False
            if yl >= yh:
                yl = 0
                yh = row_num
                need_accurate = True
            if xl >= xr:
                xl = 0
                xr = col_num
                need_accurate = True
            card_imgs[card_index] = card_img[yl:yh, xl:xr] if color != "green" or yl < (yh-yl)//4 else card_img[yl-(yh-yl)//4:yh, xl:xr]
            if need_accurate:  # x or y may not have been narrowed; try once more
                card_img = card_imgs[card_index]
                card_img_hsv = cv2.cvtColor(card_img, cv2.COLOR_BGR2HSV)
                xl, xr, yh, yl = self.accurate_place(card_img_hsv, limit1, limit2, color)
                if yl == yh and xl == xr:
                    continue
                if yl >= yh:
                    yl = 0
                    yh = row_num
                if xl >= xr:
                    xl = 0
                    xr = col_num
            card_imgs[card_index] = card_img[yl:yh, xl:xr] if color != "green" or yl < (yh-yl)//4 else card_img[yl-(yh-yl)//4:yh, xl:xr]
        # Above: plate localization
        # Below: recognize the characters on the plate
        predict_result = []
        roi = None
        card_color = None
        for i, color in enumerate(colors):
            if color in ("blue", "yello", "green"):
                card_img = card_imgs[i]
                gray_img = cv2.cvtColor(card_img, cv2.COLOR_BGR2GRAY)
                # Yellow/green plates have dark characters on a light background —
                # the opposite of blue plates — so they must be inverted first
                if color == "green" or color == "yello":
                    gray_img = cv2.bitwise_not(gray_img)
                ret, gray_img = cv2.threshold(gray_img, 0, 255, cv2.THRESH_BINARY + cv2.THRESH_OTSU)
                # Find peaks of the horizontal histogram
                x_histogram = np.sum(gray_img, axis=1)
                x_min = np.min(x_histogram)
                x_average = np.sum(x_histogram)/x_histogram.shape[0]
                x_threshold = (x_min + x_average)/2
                wave_peaks = find_waves(x_threshold, x_histogram)
                if len(wave_peaks) == 0:
                    print("peak less 0:")
                    continue
                # Take the widest horizontal peak as the plate area
                wave = max(wave_peaks, key=lambda x:x[1]-x[0])
                gray_img = gray_img[wave[0]:wave[1]]
                # Find peaks of the vertical histogram
                row_num, col_num= gray_img.shape[:2]
                # Drop one pixel at top/bottom so white borders don't skew the threshold
                gray_img = gray_img[1:row_num-1]
                y_histogram = np.sum(gray_img, axis=0)
                y_min = np.min(y_histogram)
                y_average = np.sum(y_histogram)/y_histogram.shape[0]
                y_threshold = (y_min + y_average)/5  # "U" and "0" need a small threshold or they get split in half
                wave_peaks = find_waves(y_threshold, y_histogram)
                #for wave in wave_peaks:
                #    cv2.line(card_img, pt1=(wave[0], 5), pt2=(wave[1], 5), color=(0, 0, 255), thickness=2)
                # A plate should carry more than 6 characters
                if len(wave_peaks) <= 6:
                    print("peak less 1:", len(wave_peaks))
                    continue
                wave = max(wave_peaks, key=lambda x:x[1]-x[0])
                max_wave_dis = wave[1] - wave[0]
                # Check whether the first peak is just the plate's left border
                if wave_peaks[0][1] - wave_peaks[0][0] < max_wave_dis/3 and wave_peaks[0][0] == 0:
                    wave_peaks.pop(0)
                # Merge the split strokes of the leading Chinese character
                cur_dis = 0
                for i,wave in enumerate(wave_peaks):
                    if wave[1] - wave[0] + cur_dis > max_wave_dis * 0.6:
                        break
                    else:
                        cur_dis += wave[1] - wave[0]
                if i > 0:
                    wave = (wave_peaks[0][0], wave_peaks[i][1])
                    wave_peaks = wave_peaks[i+1:]
                    wave_peaks.insert(0, wave)
                # Remove the separator dot on the plate
                point = wave_peaks[2]
                if point[1] - point[0] < max_wave_dis/3:
                    point_img = gray_img[:,point[0]:point[1]]
                    if np.mean(point_img) < 255/5:
                        wave_peaks.pop(2)
                if len(wave_peaks) <= 6:
                    print("peak less 2:", len(wave_peaks))
                    continue
                part_cards = seperate_card(gray_img, wave_peaks)
                for i, part_card in enumerate(part_cards):
                    # May be a rivet fixing the plate rather than a character
                    if np.mean(part_card) < 255/5:
                        print("a point")
                        continue
                    part_card_old = part_card
                    #w = abs(part_card.shape[1] - SZ)//2
                    w = part_card.shape[1] // 3
                    part_card = cv2.copyMakeBorder(part_card, 0, 0, w, w, cv2.BORDER_CONSTANT, value = [0,0,0])
                    part_card = cv2.resize(part_card, (SZ, SZ), interpolation=cv2.INTER_AREA)
                    #cv2.imshow("part", part_card_old)
                    #cv2.waitKey(0)
                    #cv2.imwrite("u.jpg", part_card)
                    #part_card = deskew(part_card)
                    part_card = preprocess_hog([part_card])
                    if i == 0:
                        resp = self.modelchinese.predict(part_card)
                        charactor = provinces[int(resp[0]) - PROVINCE_START]
                    else:
                        resp = self.model.predict(part_card)
                        charactor = chr(resp[0])
                    # Check whether the trailing "1" is actually the plate's right border
                    if charactor == "1" and i == len(part_cards)-1:
                        if part_card_old.shape[0]/part_card_old.shape[1] >= 8:  # too thin for a real 1 — treat as border
                            print(part_card_old.shape)
                            continue
                    predict_result.append(charactor)
                roi = card_img
                card_color = color
                break
        return predict_result, roi, card_color  # recognized characters, located plate image, plate color
# Ad-hoc smoke test: train the SVM models, then recognize one sample image.
# NOTE(review): this runs at import time; consider guarding it with
# `if __name__ == "__main__":`.
c = CardPredictor()
c.train_svm()
r, roi, color = c.predict("test/bbb.jpg")
print(r)  # recognized plate characters
| [
"1026198824@qq.com"
] | 1026198824@qq.com |
9ebc206e34a9a97f3a229550659068e38d3d3a59 | 77e3ec4741a32a9784e3fd9ac9210694a572f497 | /tests/test_extract.py | 110ebc8c60e0f1314beb97c6f9f547fcc5af5d79 | [
"Apache-2.0"
] | permissive | mrunesson/stockrec | c1b37f54f1070c358c417d2264927d171ec9a38a | 603bd471446f445ceeeaf8ed4de8f45478c0a22d | refs/heads/master | 2023-04-09T07:27:32.749529 | 2023-03-20T15:53:29 | 2023-03-21T15:04:15 | 271,747,267 | 1 | 1 | Apache-2.0 | 2021-01-04T15:14:13 | 2020-06-12T08:23:28 | Python | UTF-8 | Python | false | false | 10,020 | py | import datetime
import unittest
from decimal import Decimal
from stockrec.extract import extract_forecast
from stockrec.model import Forecast, Direction, Signal
class TestSimpleExtractor(unittest.TestCase):
    """Table-driven tests for stockrec.extract.extract_forecast.

    Each ``test_data`` entry pairs a raw Swedish analyst-recommendation
    headline with the Forecast object the extractor is expected to produce,
    covering the various extractor strategies ("simple", "bn", "inled",
    "bloomberg", "no_analyst").
    """
    # (input headline, expected Forecast) pairs.
    test_data = [
        ('Carnegie sänker Thule till behåll (köp), riktkurs 220 kronor.',
         Forecast(extractor='simple',
                  raw='Carnegie sänker Thule till behåll (köp), riktkurs 220 kronor.',
                  date=datetime.date.today(),
                  analyst='Carnegie',
                  change_direction=Direction.LOWER,
                  company='Thule',
                  signal=Signal.HOLD,
                  prev_signal=Signal.BUY,
                  forecast_price=220,
                  currency='SEK'
                  )),
        ('UBS höjer Vale till köp (neutral), riktkurs 12 dollar (13).\xa0',
         Forecast(extractor='simple',
                  raw='UBS höjer Vale till köp (neutral), riktkurs 12 dollar (13).\xa0',
                  date=datetime.date.today(),
                  analyst='UBS',
                  change_direction=Direction.RAISE,
                  company='Vale',
                  signal=Signal.BUY,
                  prev_signal=Signal.NEUTRAL,
                  forecast_price=12,
                  prev_forecast_price=13,
                  currency='USD'
                  )),
        ('Kepler Cheuvreux sänker LVMH till behåll (köp), riktkurs 400 euro.',
         Forecast(extractor='simple',
                  raw='Kepler Cheuvreux sänker LVMH till behåll (köp), riktkurs 400 euro.',
                  date=datetime.date.today(),
                  analyst='Kepler Cheuvreux',
                  change_direction=Direction.LOWER,
                  company='LVMH',
                  signal=Signal.HOLD,
                  prev_signal=Signal.BUY,
                  forecast_price=400,
                  currency='EUR'
                  )),
        ('RBC höjer Zoom till outperform (sector perform), riktkurs 250 dollar.',
         Forecast(extractor='simple',
                  raw='RBC höjer Zoom till outperform (sector perform), riktkurs 250 dollar.',
                  date=datetime.date.today(),
                  analyst='RBC',
                  change_direction=Direction.RAISE,
                  company='Zoom',
                  signal=Signal.OUTPERFORM,
                  prev_signal=Signal.HOLD,
                  forecast_price=250,
                  currency='USD'
                  )),
        ('Morgan Stanley sänker riktkursen för Lundin Energy till 245 kronor (325), upprepar jämvikt - BN',
         Forecast(extractor='bn',
                  raw='Morgan Stanley sänker riktkursen för Lundin Energy till 245 kronor (325), upprepar jämvikt - BN',
                  date=datetime.date.today(),
                  analyst='Morgan Stanley',
                  change_direction=Direction.LOWER,
                  company='Lundin Energy',
                  signal=Signal.HOLD,
                  prev_signal=Signal.HOLD,
                  forecast_price=245,
                  prev_forecast_price=325,
                  currency='SEK'
                  )),
        ('Bank of America Merrill Lynch sänker EQT till underperform (neutral)',
         Forecast(extractor='simple',
                  raw='Bank of America Merrill Lynch sänker EQT till underperform (neutral)',
                  date=datetime.date.today(),
                  analyst='Bank of America Merrill Lynch',
                  change_direction=Direction.LOWER,
                  company='EQT',
                  signal=Signal.UNDERPERFORM,
                  prev_signal=Signal.NEUTRAL,
                  forecast_price=None,
                  prev_forecast_price=None,
                  currency=None
                  )),
        ('Credit Suisse höjer riktkursen för Genmab till 2 300 danska kronor (1 950), upprepar outperform.',
         Forecast(extractor='bn',
                  raw='Credit Suisse höjer riktkursen för Genmab till 2 300 danska kronor (1 950), upprepar outperform.',
                  date=datetime.date.today(),
                  analyst='Credit Suisse',
                  change_direction=Direction.RAISE,
                  company='Genmab',
                  signal=Signal.OUTPERFORM,
                  prev_signal=Signal.OUTPERFORM,
                  forecast_price=2300,
                  prev_forecast_price=1950,
                  currency='DKK'
                  )),
        ('JP Morgan sänker D.R. Horton till neutral (övervikt), riktkurs 59 dollar (42)',
         Forecast(extractor='simple',
                  raw='JP Morgan sänker D.R. Horton till neutral (övervikt), riktkurs 59 dollar (42)',
                  date=datetime.date.today(),
                  analyst='JP Morgan',
                  change_direction=Direction.LOWER,
                  company='D R Horton',
                  signal=Signal.NEUTRAL,
                  prev_signal=Signal.BUY,
                  forecast_price=59,
                  prev_forecast_price=42,
                  currency='USD'
                  )),
        ('Deutsche Bank höjer riktkursen för Boliden till 250 kronor från 235 kronor. Rekommendationen köp upprepas. Det framgår av ett marknadsbrev.',
         Forecast(extractor='bn',
                  raw='Deutsche Bank höjer riktkursen för Boliden till 250 kronor från 235 kronor. Rekommendationen köp upprepas. Det framgår av ett marknadsbrev.',
                  date=datetime.date.today(),
                  analyst='Deutsche Bank',
                  change_direction=Direction.RAISE,
                  company='Boliden',
                  signal=Signal.BUY,
                  prev_signal=Signal.BUY,
                  forecast_price=250,
                  prev_forecast_price=235,
                  currency='SEK'
                  )),
        ('Pareto Securities höjer riktkursen för investmentbolaget Kinnevik till 290 kronor från 262 kronor, enligt en ny analys.',
         Forecast(extractor='bn',
                  raw='Pareto Securities höjer riktkursen för investmentbolaget Kinnevik till 290 kronor från 262 kronor, enligt en ny analys.',
                  date=datetime.date.today(),
                  analyst='Pareto Securities',
                  change_direction=Direction.RAISE,
                  company='investmentbolaget Kinnevik',
                  signal=Signal.UNKNOWN,
                  prev_signal=Signal.UNKNOWN,
                  forecast_price=290,
                  prev_forecast_price=262,
                  currency='SEK'
                  )),
        ('BTIG inleder bevakning på Tripadvisor med rekommendationen neutral.',
         Forecast(extractor='inled',
                  raw='BTIG inleder bevakning på Tripadvisor med rekommendationen neutral.',
                  date=datetime.date.today(),
                  analyst='BTIG',
                  change_direction=Direction.NEW,
                  company='Tripadvisor',
                  signal=Signal.NEUTRAL,
                  prev_signal=Signal.UNKNOWN,
                  forecast_price=None,
                  prev_forecast_price=None,
                  currency=None
                  )),
        ('Goldman Sachs & Co sänker sin rekommendation för Outokumpu till neutral från köp.',
         Forecast(extractor='bloomberg',
                  raw='Goldman Sachs & Co sänker sin rekommendation för Outokumpu till neutral från köp.',
                  date=datetime.date.today(),
                  analyst='Goldman Sachs & Co',
                  change_direction=Direction.LOWER,
                  company='Outokumpu',
                  signal=Signal.NEUTRAL,
                  prev_signal=Signal.BUY,
                  forecast_price=None,
                  prev_forecast_price=None,
                  currency=None
                  )),
        ('Redeye höjer motiverat värde för Systemair till 168 kronor (155).',
         Forecast(extractor='bn',
                  raw='Redeye höjer motiverat värde för Systemair till 168 kronor (155).',
                  date=datetime.date.today(),
                  analyst='Redeye',
                  change_direction=Direction.RAISE,
                  company='Systemair',
                  signal=Signal.UNKNOWN,
                  prev_signal=Signal.UNKNOWN,
                  forecast_price=Decimal(168),
                  prev_forecast_price=Decimal(155),
                  currency='SEK'
                  )),
        ('Redeye höjer sitt motiverade värde i basscenariot för bettingbolaget Enlabs till 30 kronor, från tidigare 29 kronor.',
         Forecast(extractor='bn',
                  raw='Redeye höjer sitt motiverade värde i basscenariot för bettingbolaget Enlabs till 30 kronor, från tidigare 29 kronor.',
                  date=datetime.date.today(),
                  analyst='Redeye',
                  change_direction=Direction.RAISE,
                  company='bettingbolaget Enlabs',
                  signal=Signal.UNKNOWN,
                  prev_signal=Signal.UNKNOWN,
                  forecast_price=Decimal(30),
                  prev_forecast_price=Decimal(29),
                  currency='SEK'
                  )),
        ('Castellum höjs sitt behåll (sälj), med riktkurs 165 kronor (200)',
         Forecast(extractor='no_analyst',
                  raw='Castellum höjs sitt behåll (sälj), med riktkurs 165 kronor (200)',
                  date=datetime.date.today(),
                  analyst=None,
                  change_direction=Direction.RAISE,
                  company='Castellum',
                  signal=Signal.HOLD,
                  prev_signal=Signal.SELL,
                  forecast_price=Decimal(165),
                  prev_forecast_price=Decimal(200),
                  currency='SEK'
                  )),
    ]
    def test_extractor_simple(self):
        """Every headline must extract to exactly its expected Forecast."""
        for s, expected in self.test_data:
            self.assertEqual(expected, extract_forecast(s, date=datetime.date.today()))
| [
"M.Runesson@gmail.com"
] | M.Runesson@gmail.com |
b42d376714e61221c9b1932afe6a308354078de5 | 523fb785bda41e33546c929a5c2de6c93f98b434 | /专题学习/链表/mergeKLists.py | 89db71c8897b4a8abf67d8c47ea987374e83a389 | [] | no_license | lizhe960118/TowardOffer | afd2029f8f9a1e782fe56ca0ff1fa8fb37892d0e | a0608d34c6ed96c9071cc3b9bdf70c95cef8fcbd | refs/heads/master | 2020-04-27T10:33:21.452707 | 2019-05-02T10:47:01 | 2019-05-02T10:47:01 | 174,259,297 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 700 | py | # Definition for singly-linked list.
# class ListNode(object):
# def __init__(self, x):
# self.val = x
# self.next = None
class Solution(object):
    def mergeKLists(self, lists):
        """Merge k sorted linked lists into one sorted linked list.

        Strategy: gather every node value into a plain Python list, sort
        it, then rebuild a fresh chain of ListNode objects behind a
        sentinel head.

        :type lists: List[ListNode]
        :rtype: ListNode
        """
        values = []
        for node in lists:
            cur = node
            while cur:
                values.append(cur.val)
                cur = cur.next
        values.sort()
        sentinel = ListNode(-1)   # dummy head simplifies appending
        tail = sentinel
        for v in values:
            tail.next = ListNode(v)
            tail = tail.next
        return sentinel.next
"2957308424@qq.com"
] | 2957308424@qq.com |
de6ecc44ca37c28e472b8df0d1377818c08bd1ff | 391eafc9a385fc17f15619ac3eb1c248ff88991c | /src/run.py | f029fd667d05ca639c850699079685470c65a18d | [
"MIT"
] | permissive | greatEncoder/vscode | dc6098174898e546ec2211a3b793c3e5ab9b2e4c | fe66e9dbc79cd64afcb8fccfe058a822dc6d2610 | refs/heads/master | 2021-02-09T00:26:49.349847 | 2020-03-16T22:22:26 | 2020-03-16T22:22:26 | 244,215,240 | 0 | 0 | null | 2020-03-01T20:04:00 | 2020-03-01T20:03:59 | null | UTF-8 | Python | false | false | 8,774 | py | import sys
import ast
import bdb
import json
import re
import core
class LoopInfo:
def __init__(self, frame, lineno, indent):
self.frame = frame
self.lineno = lineno
self.indent = indent
self.iter = 0
class Logger(bdb.Bdb):
def __init__(self, lines):
bdb.Bdb.__init__(self)
self.lines = lines
self.time = 0
self.prev_env = None
self.data = {}
self.active_loops = []
# self.exception = False
def data_at(self, l):
if not(l in self.data):
self.data[l] = []
return self.data[l]
def user_line(self, frame):
# print("user_line ============================================")
# print(frame.f_code.co_name)
# print(frame.f_code.co_names)
# print(frame.f_code.co_filename)
# print(frame.f_code.co_firstlineno)
# print(dir(frame.f_code))
# print("lineno")
# print(frame.f_lineno)
# print(frame.__dir__())
# print("globals")
# print(frame.f_globals)
# print("locals")
# print(frame.f_locals)
if frame.f_code.co_name == "<module>" or frame.f_code.co_name == "<listcomp>" or frame.f_code.co_filename != "<string>":
return
# self.exception = False
adjusted_lineno = frame.f_lineno-1
print("---------------------------------------")
print("About to execute: " + self.lines[adjusted_lineno].strip())
self.record_loop_end(frame, adjusted_lineno)
self.record_env(frame, adjusted_lineno)
self.record_loop_begin(frame, adjusted_lineno)
def record_loop_end(self, frame, lineno):
curr_stmt = self.lines[lineno]
if self.prev_env != None and len(self.active_loops) > 0 and self.active_loops[-1].frame is frame:
prev_lineno = self.prev_env["lineno"]
if isinstance(prev_lineno, str):
prev_lineno = int(prev_lineno[1:])
prev_stmt = self.lines[prev_lineno]
loop_indent = self.active_loops[-1].indent
curr_indent = indent(curr_stmt)
if is_return_str(prev_stmt):
while len(self.active_loops) > 0:
self.active_loops[-1].iter += 1
for l in self.stmts_in_loop(self.active_loops[-1].lineno):
self.data_at(l).append(self.create_end_loop_dummy_env())
del self.active_loops[-1]
elif (curr_indent <= loop_indent and lineno != self.active_loops[-1].lineno):
# break statements don't go through the loop header, so we miss
# the last increment in iter, which is why we have to adjust here
if is_break_str(prev_stmt):
self.active_loops[-1].iter += 1
for l in self.stmts_in_loop(self.active_loops[-1].lineno):
self.data_at(l).append(self.create_end_loop_dummy_env())
del self.active_loops[-1]
def record_loop_begin(self, frame, lineno):
for l in self.active_loops:
print("Active loop at line " + str(l.lineno) + ", iter " + str(l.iter))
curr_stmt = self.lines[lineno]
if is_loop_str(curr_stmt):
if len(self.active_loops) > 0 and self.active_loops[-1].lineno == lineno:
self.active_loops[-1].iter += 1
else:
self.active_loops.append(LoopInfo(frame, lineno, indent(curr_stmt)))
for l in self.stmts_in_loop(lineno):
self.data_at(l).append(self.create_begin_loop_dummy_env())
def stmts_in_loop(self, lineno):
result = []
curr_stmt = self.lines[lineno]
loop_indent = indent(curr_stmt)
for l in range(lineno+1, len(self.lines)):
line = self.lines[l]
if line.strip() == "":
continue
if indent(line) <= loop_indent:
break
result.append(l)
return result
def active_loops_iter_str(self):
return ",".join([str(l.iter) for l in self.active_loops])
def active_loops_id_str(self):
return ",".join([str(l.lineno) for l in self.active_loops])
def add_loop_info(self, env):
env["#"] = self.active_loops_iter_str()
env["$"] = self.active_loops_id_str()
def create_begin_loop_dummy_env(self):
env = {"begin_loop":self.active_loops_iter_str()}
self.add_loop_info(env)
return env
def create_end_loop_dummy_env(self):
env = {"end_loop":self.active_loops_iter_str()}
self.add_loop_info(env)
return env
def record_env(self, frame, lineno):
if self.time >= 100:
self.set_quit()
return
env = {}
env["time"] = self.time
self.add_loop_info(env)
self.time = self.time + 1
for k in frame.f_locals:
if k != core.magic_var_name:
env[k] = repr(frame.f_locals[k])
env["lineno"] = lineno
self.data_at(lineno).append(env)
if (self.prev_env != None):
self.prev_env["next_lineno"] = lineno
env["prev_lineno"] = self.prev_env["lineno"]
self.prev_env = env
# def user_exception(self, frame, e):
# self.exception = True
def user_return(self, frame, rv):
# print("user_return ============================================")
# print(frame.f_code.co_name)
# print("lineno")
# print(frame.f_lineno)
# print(frame.__dir__())
# print("globals")
# print(frame.f_globals)
# print("locals")
# print(frame.f_locals)
if frame.f_code.co_name == "<module>" or frame.f_code.co_name == "<listcomp>" or frame.f_code.co_filename != "<string>":
return
# if self.exception:
# if rv == None:
# rv_str = "Exception"
# else:
# rv_str = "Exception(" + repr(rv) + ")"
# else:
# rv_str = repr(rv)
adjusted_lineno = frame.f_lineno-1
print("About to return: " + self.lines[adjusted_lineno].strip())
self.record_env(frame, "R" + str(adjusted_lineno))
#self.data_at("R" + str(adjusted_lineno))[-1]["rv"] = rv_str
self.data_at("R" + str(adjusted_lineno))[-1]["rv"] = repr(rv)
self.record_loop_end(frame, adjusted_lineno)
#self.record_loop_begin(frame, adjusted_lineno)
def pretty_print_data(self):
for k in self.data:
print("** Line " + str(k))
for env in self.data[k]:
print(env)
class WriteCollector(ast.NodeVisitor):
def __init__(self):
ast.NodeVisitor()
self.data = {}
def data_at(self, l):
if not(l in self.data):
self.data[l] = []
return self.data[l]
def record_write(self, lineno, id):
if (id != core.magic_var_name):
self.data_at(lineno-1).append(id)
def visit_Name(self, node):
#print("Name " + node.id + " @ line " + str(node.lineno) + " col " + str(node.col_offset))
if isinstance(node.ctx, ast.Store):
self.record_write(node.lineno, node.id)
def visit_Subscript(self, node):
#print("Subscript " + str(node.ctx) + " " + str(node.value) + " " + str(node.col_offset))
if isinstance(node.ctx, ast.Store):
id = self.find_id(node)
if id == None:
print("Warning: did not find id in subscript")
else:
self.record_write(node.lineno, id)
def find_id(self, node):
if hasattr(node, "id"):
return node.id
if hasattr(node, "value"):
return self.find_id(node.value)
return None
def is_loop_str(str):
return re.search("(for|while).*:", str.strip()) != None
def is_break_str(str):
return re.search("break", str.strip()) != None
def is_return_str(str):
return re.search("return", str.strip()) != None
def indent(str):
return len(str) - len(str.lstrip())
def compute_writes(code):
root = ast.parse(code)
#print(ast.dump(root))
write_collector = WriteCollector()
write_collector.visit(root)
return write_collector.data
def compute_runtime_data(code, lines):
l = Logger(lines)
l.run(code)
return l.data
def main():
if len(sys.argv) != 2:
print("Usage: run <file-name>")
exit(-1)
lines = core.load_code_lines(sys.argv[1])
code = "".join(lines)
print(code)
writes = compute_writes(code)
run_time_data = compute_runtime_data(code, lines)
with open(sys.argv[1] + ".out", "w") as out:
out.write(json.dumps((writes, run_time_data)))
main()
| [
"lerner@cs.ucsd.edu"
] | lerner@cs.ucsd.edu |
76c75743262d244fb0c331f24bf18c1f4b75d7cc | 50c1f7e4a3084ecd0ef72c9b20f8ea218cebe14c | /home/views.py | 3507d6e56d7fcbb516f8c88bcbc6ecadfdad1a86 | [] | no_license | rahulshivan05/Coder | 922240a494207d0fcf1a553d1749eb7c09c6425b | 79340971c4c1ac3123e5a65fc9fb423f87eac972 | refs/heads/main | 2023-02-17T13:50:51.491016 | 2021-01-13T15:55:22 | 2021-01-13T15:55:22 | 329,347,947 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 16,752 | py | from django.core import checks
from django.shortcuts import render, redirect, get_object_or_404
from django.http import HttpResponse, HttpResponseNotFound, Http404
from django.core.exceptions import PermissionDenied
# from django.contrib.postgres.operations import UnaccentExtension
from django.db.models import * # F, Lookup, Field, Transform, IntegerField, FloatField, Transform, CharField, TextField
from django.contrib.auth import get_user_model
from django.core.signals import setting_changed
from django.dispatch import receiver
from django.core.cache import cache
from django.core.cache.utils import make_template_fragment_key
import warnings
from datetime import timedelta
from django.core.signing import TimestampSigner
import pytz
import re
import itertools
# import foobar
from django.template import TemplateDoesNotExist, TemplateSyntaxError
from django.template.backends.base import BaseEngine
from django.template.backends.utils import csrf_input_lazy, csrf_token_lazy
from django import template
from django.template.defaultfilters import stringfilter
from django.utils.safestring import SafeString
from django.utils.html import conditional_escape
from django.utils.safestring import mark_safe
from django.core.files.storage import Storage
from django.contrib.auth.handlers.modwsgi import check_password
from django.core.handlers.wsgi import WSGIHandler
from django.template import Context
from asgiref.sync import sync_to_async
from django.contrib.auth.middleware import RemoteUserMiddleware
from django.core.management.base import BaseCommand, CommandError, no_translations
# import the logging library
import logging
import django
from django.core.signals import request_finished
import django.dispatch
from django.core.checks import Error, register, Tags
# Paginations
from django.core.paginator import Paginator
from django.core import serializers
from django.core.serializers.json import DjangoJSONEncoder
from django.core.serializers import serialize
from django.views.generic import ListView
from django.core.cache import CacheKeyWarning
from django.core.cache.backends.locmem import LocMemCache
from django.views.decorators.vary import vary_on_headers
from django.views.decorators.cache import patch_cache_control
from django.views.decorators.vary import vary_on_cookie
from django.views.decorators.cache import cache_control
from django.views.decorators.cache import never_cache
from django.views.decorators.http import condition
from django.core.signing import Signer
from django.contrib.auth.admin import UserAdmin as BaseUserAdmin
from django.contrib.postgres.aggregates import BoolAnd
from django.contrib.postgres.aggregates import BoolOr
# from django.contrib.postgres.consints import ExclusionConstraint
from django.contrib.postgres.fields import DateTimeRangeField, RangeOperators
from django.contrib.postgres.search import SearchVector
from django.contrib.auth.backends import BaseBackend
from django.contrib.auth.hashers import check_password
from django.contrib.auth.models import User
from django.contrib.auth import authenticate, login, logout
from django.db.models import Count
from home.models import Contact #, MovieView
from blog.models import Post
from django.contrib import messages
from django.contrib.auth.models import Permission
from django.contrib.contenttypes.models import ContentType
from django.contrib.auth.decorators import login_required
from django.contrib.admin.views.decorators import staff_member_required
from .models import Profile
from .forms import ProfileModelForm
from django.core.mail import send_mail
from django.core.mail import EmailMessage
from django.views import View
from django.urls import reverse
from .utils import token_generator
from django.utils import translation
from django.utils.translation import gettext as _
from django.conf import settings
from django.db.models import Q
from django.utils.encoding import force_bytes, force_text, DjangoUnicodeDecodeError
from django.utils.http import urlsafe_base64_encode, urlsafe_base64_decode
from django.contrib.sites.shortcuts import get_current_site
import threading
from pytube import YouTube
from pytube import Playlist
import os
import geopy
from geopy.geocoders import Nominatim
import requests
# import netifaces as nif
import re, uuid
from getmac import get_mac_address as gma
import argparse
from django.db.models import Count
import speedtest
# import requests
import pandas as pd
from bs4 import BeautifulSoup
from ipaddress import ip_address
import socket
import re
import struct
import textwrap
from datetime import datetime, timedelta
from plyer import notification
######### nltk Tutorial ############
import nltk
import nltk.corpus
from nltk.corpus import brown
from nltk.tokenize import word_tokenize
from nltk.probability import FreqDist
fdist = FreqDist()
from nltk.tokenize import blankline_tokenize
from nltk.util import *
from nltk.stem import PorterStemmer
pst = PorterStemmer()
from nltk.stem import LancasterStemmer
lst = LancasterStemmer()
from nltk.stem import SnowballStemmer
sbst = SnowballStemmer('english')
from nltk.stem import wordnet
from nltk.stem import WordNetLemmatizer
word_lem = WordNetLemmatizer()
from nltk.corpus import stopwords
import re
punctuation = re.compile(r'[-.?!,:;()|0-9]')
from nltk import ne_chunk
# allPosts = Post
# Create your views here.
class EmailThread(threading.Thread):
def __init__(self, email):
self.email=email
threading.Thread.__init__(self)
def run(self):
self.email.send()
def home(request):
user_language = {}
translation.activate(user_language)
request.session[translation.LANGUAGE_SESSION_KEY] = user_language
# if translation.LANGUAGE_SESSION_KEY in request.session:
# del request.session[translation.LANGUAGE_SESSION_KEY]
title = _('Homepage')
allPosts = Post.objects.order_by('-views', '-timeStamp')[:3]
context = {'allPosts': allPosts, 'title': title}
return render(request, 'home/index.html', context)
def about(request):
return render(request, 'home/about.html')
def contact(request):
if request.user.is_authenticated:
if request.method == 'POST':
name = request.POST['name']
email = request.POST['email']
phone = request.POST['phone']
content = request.POST['content']
if len(name)<2 or len(email)<3 or len(phone)<10 or len(content)<4:
messages.error(request, "Please fill the form correctly")
notification.notify(
title='Please fill form correctly',
message='Fill the all requirements correctly before submit the form.',
app_icon='C:\\Users\\Rohit\\Desktop\\iCoder-Upgrade\\static\\img\\error.ico',
timeout=5
)
else:
contact = Contact(name=name, email=email, phone=phone, content=content)
contact.save()
messages.success(request, "Your message has been successfully sent")
notification.notify(
title='Your message has been successfully sent',
message='You entered Messages are sent successfully',
app_icon='C:\\Users\\Rohit\\Desktop\\iCoder-Upgrade\\static\\img\\success.ico',
timeout=5
)
return render(request, 'home/contact.html')
else:
messages.error(request, 'Your are not Logged In! Please login to Contact Me')
return render(request, 'home/contact.html')
def search(request, *args, **kwargs):
query = request.GET['query']
tokenize = word_tokenize(query)
# print(tokenize)
############# Not Working in the website ############
# for word in tokenize:
# fdist[word.lower()]+=1
# print(fdist)
# print(fdist(query))
########### END / #########################
Query_blank = blankline_tokenize(query)
# print(len(Query_blank))
quotes_tokens = nltk.word_tokenize(query)
# print(quotes_tokens)
quotes_bigrams = list(nltk.bigrams(quotes_tokens))
# print(quotes_bigrams)
# print(pst.stem(query))
# print(stopwords.words('english'))
# print(len(stopwords.words('english')))
post_punctuation = []
for words in tokenize:
word = punctuation.sub("",words)
if len(word)>0:
post_punctuation.append(word)
# print(post_punctuation)
NE_token = word_tokenize(query)
NE_tags = nltk.pos_tag(NE_token)
NE_NER = ne_chunk(NE_tags)
# print(NE_NER)
new_tokens = nltk.pos_tag(word_tokenize(query))
# print(new_tokens)
token = tokenize, Query_blank
# print(token)
# allPosts = Post.token.filter(title=query)
result = {}
# allPosts = Post.objects.filter(Q(title__icontains=query) | Q(content__icontains=query) | Q(author__icontains=query))
# params = {'allPosts': allPosts, 'query': query}
return render(request, 'home/search.html')
# return HttpResponse("Searching..")
def handleSignup(request):
    """Create a new user account from the signup form (POST only).

    Validates username length/characters and matching passwords, creates the
    user, fires an email thread and redirects home with a flash message.
    """
    if request.method == 'POST':
        # Raw form fields; a missing field raises KeyError (HTTP 500).
        username = request.POST['username']
        fname = request.POST['fname']
        lname = request.POST['lname']
        email = request.POST['email']
        pass1 = request.POST['pass1']
        pass2 = request.POST['pass2']
        # Basic validation: short alphanumeric username, matching passwords.
        if len(username) > 10:
            messages.error(request, "Username must be under 10 characters.")
            return redirect('home')
        if not username.isalnum():
            messages.error(request, "Username should only contain letter and numbers.")
            return redirect('home')
        if pass1 != pass2:
            messages.error(request, "Passwords do not match.")
            return redirect('home')
        # NOTE(review): the duplicate username/email checks below are disabled,
        # so create_user may raise on an existing username — confirm intended.
        # if not User.objects.filter(username=username).exists():
        #     messages.error(request, 'Your Username is Already Taken')
        # if not User.objects.filter(email=email).exists():
        #     messages.error(request, 'Your email is Taken by other')
        myuser = User.objects.create_user(username, email, pass1)
        myuser.first_name = fname
        myuser.last_name = lname
        # myuser.is_active = False
        myuser.save()
        # Email-activation flow kept for reference (currently disabled):
        # uidb64 = urlsafe_base64_encode(force_bytes(myuser.pk))
        # domain = get_current_site(request).domain
        # link=reverse('activate', kwargs={'uidb64': uidb64, 'token': token_generator.make_token(myuser)})
        # email_subject = 'Activate your Account'
        # activate_url = 'http://'+domain+link
        # email_body = 'Hi ' +myuser.username + ' Please use the link to verify your account\n ' + activate_url
        # email = EmailMessage(
        #     email_subject,
        #     email_body,
        #     'noreplay@store.com',
        #     [email],
        # )
        # email.send(fail_silently=False)
        # NOTE(review): `email` here is the plain address string, not an
        # EmailMessage object — verify EmailThread accepts a string.
        EmailThread(email).start()
        # messages.success(request, "Your iCoder account has been created Successfully created")
        messages.success(request, "Your account was successfully created at iCoder")
        return redirect('/')
    else:
        return HttpResponse('404 Page Not Found')
class VerificationView(View):
    """Activate a user account from an emailed uidb64/token link."""

    def get(self, request, uidb64, token):
        try:
            # Decode the base64-encoded primary key and load the user.
            id = force_text(urlsafe_base64_decode(uidb64))
            myuser = User.objects.get(pk=id)
            if not token_generator.check_token(myuser, token):
                messages.error(request, 'Account already activated. Go to login')
                return redirect('/')
            if myuser.is_active:
                # Already active: nothing to do.
                return redirect('/')
            myuser.is_active = True
            myuser.save()
            messages.success(request, 'Account activated Successfully '+myuser.username+' go forward to iCoder')
            return redirect('/')
        except Exception:
            # BUG FIX: the original `except expression as identifier:` referenced
            # undefined names and would itself raise NameError on any failure.
            # Failures (bad token, unknown user, decode error) fall through to
            # the generic response below, as before.
            pass
        return HttpResponse("activation page")
def LoginView(request, *args, **kwargs):
    """Log a user in from the login form; GET just renders the login page."""
    if request.method == 'POST':
        email = request.POST['email']
        password = request.POST['password']
        # NOTE(review): Django's default ModelBackend authenticates by
        # `username`; passing `email=` only works with a custom auth
        # backend — confirm one is configured, otherwise this always
        # returns None.
        user = authenticate(email=email, password=password)
        if user:
            if user.is_active:
                login(request, user)
                messages.success(request, 'Successfully Logged in as '+user.username)
                return redirect('/')
            else:
                messages.error(request, "Account not activate.Please check your email.")
                return redirect('/')
    return render(request, 'home/login_user.html')
# @staff_member_required
def handleLogin(request, *args):
    """Authenticate a user by username/password from the navbar login form.

    POST logs the user in (active accounts only) and redirects home; any
    other method returns a plain 404 response.
    """
    if request.method == 'POST':
        loginusername = request.POST['loginusername']
        loginpass = request.POST['loginpass']
        user = authenticate(username=loginusername, password=loginpass)
        if user:
            if user.is_active:
                login(request, user)
                messages.success(request, 'Successfully Logged in as '+user.username)
                return redirect('/')
            else:
                messages.error(request, "Account not activate.Please check your email.")
        # Failed or inactive logins fall through to a plain redirect home.
        # (The commented-out duplicate branch that reported "invalid
        # credentials" was removed as dead code — consider restoring a
        # user-facing error message for failed authentication.)
        return redirect('/')
    return HttpResponse("404 Page Not Found")
def handleLogout(request):
    """Log the current user out and bounce back to the home page."""
    logout(request)
    messages.success(request, "Successfully Logged Out.")
    return redirect('home')
# def login(request, *args, **kwargs):
# return HttpResponse('Login page')
def my_profile_view(request, *args, **kwargs):
    """Display and update the profile via ProfileModelForm."""
    # NOTE(review): .get() with no filter assumes exactly one Profile row
    # in the database (raises otherwise); the commented variant below
    # scoped the lookup to the logged-in user — confirm which is intended.
    profile = Profile.objects.get()
    # pro = Profile.objects.get(user=request.user)
    form = ProfileModelForm(request.POST or None, request.FILES or None, instance=profile)
    confirm = False
    if request.method == 'POST':
        if form.is_valid():
            form.save()
            confirm = True
    context = {
        'profile': profile,
        'form': form,
        'confirm': confirm,
    }
    return render(request, 'home/profile.html', context)
    # return HttpResponse('Thhis is Profile page')
# Module-level YouTube URL shared by DownYtd_Video/Download_Video/download_complete.
# NOTE(review): module-global state is not safe under concurrent requests.
url = ''
def DownYtd_Video(request):
    """Render the YouTube-downloader landing page (logs the pending URL)."""
    print('url :', url)
    # print('resolution: ',resolutions)
    return render(request, 'home/Down_YTD.html')
def Download_Video(request):
    """Show available mp4 resolutions for the YouTube URL given in ?url=.

    Stores the URL in the module-level `url` global for the follow-up
    download view.  Requires an authenticated user; anonymous users are
    redirected home.
    """
    global url
    url = request.GET.get('url')
    if request.user.is_authenticated:
        try:
            obj = YouTube(url)
            strm_all = obj.streams.filter(progressive=True, file_extension='mp4')
            # Distinct resolutions of progressive mp4 streams, first-seen order.
            resolutions = list(dict.fromkeys(i.resolution for i in strm_all))
            embed_url = url.replace("watch?v=", "embed/")
            return render(request, 'home/download.html', {'rsl': resolutions, 'embed': embed_url, 'url': url})
        except Exception:
            # BUG FIX: was a bare `except:` which also swallowed SystemExit and
            # KeyboardInterrupt.  The unused hard-coded Windows download path
            # was removed as dead code.
            return HttpResponse("<h3>Sorry Invalid Format or Invalid Url ?</h3>")
    else:
        return redirect('home')
def download_complete(request, res):
    """Download the previously-selected video at resolution *res* (POST only).

    Saves into the server user's Downloads directory and flashes a message.
    """
    global url
    if request.user.is_authenticated:
        # NOTE(review): the backslash join assumes a Windows host — confirm
        # the deployment platform (os.path.join would be portable).
        homedir = os.path.expanduser("~")
        dirs = homedir + '\\Downloads'
        print(f'DIRECT: ', f"{dirs}")
        if request.method == "POST":
            # Download the stream matching the requested resolution.
            YouTube(url).streams.get_by_resolution(res).download(dirs)
            messages.success(request, "Download complete!. Please check out video")
            # messages.error(request, "Invalid Things.Please try again?.")
            return redirect('/')
        else:
            messages.error(request, "Invalid Things.Please try again?.")
            return HttpResponse("<h3>Download Not Complete. Please try Again!.</h3>")
    else:
        return redirect('home')
def geolocator(request, *args, **kwargs):
    """Show the geolocation page; anonymous users are bounced home."""
    if not request.user.is_authenticated:
        # Guard clause: only logged-in users may use the geo lookup.
        messages.error(request, "Your are not Logged In!. That's You are not avail the geo location")
        return redirect('home')
    return render(request, 'home/geo.html')
def geo_loc(request, *args, **kwargs):
    """Geocode the submitted ?zipCode= text via Nominatim and render it."""
    text = request.GET['zipCode']
    geolocator = Nominatim(user_agent="geoapiExercises")
    location = geolocator.geocode(text)
    context = {
        'location': location,
        'text': text,
        # NOTE(review): `gma` is not defined in this view; presumably a
        # module-level maps helper — verify it exists at import time.
        'gma': gma
    }
    return render(request, 'home/geo_loc.html', context)
def stringError(request, slug):
    """Catch-all route: render the generic error page for unknown slugs."""
    # return redirect('home')
    return render(request, 'home/error.html')
def speedTest(request, *args, **kwargs):
    """Render the speed-test page.

    NOTE(review): instantiating Speedtest() contacts speedtest.net for its
    server configuration even though `st` is never used below — confirm this
    side effect is wanted on every page view.
    """
    st = speedtest.Speedtest()
    # option = int(request.GET.get('option'))
    # print(type(option))
    # if option == 1:
    # print(type(option))
    # print(st.download())
    return render(request, 'home/speed.html')
def getdata(url):
    """Fetch *url* over HTTP and return the response body as text."""
    response = requests.get(url)
    return response.text
def get_fuel_rate(request, *args, **kwargs):
    """Scrape current petrol prices from goodreturns.in into a DataFrame
    and render them.

    NOTE(review): the slice offsets below ([1:], [:-5], [:-8]) are tuned to
    that site's current table markup and will silently break if it changes.
    """
    htmldata = getdata("https://www.goodreturns.in/petrol-price.html")
    soup = BeautifulSoup(htmldata, 'html.parser')
    mydatastr = ''
    result = []
    # searching all tr in the html data
    # storing as a string
    for table in soup.find_all('tr'):
        mydatastr += table.get_text()
    # set accourding to your required
    mydatastr = mydatastr[1:]
    itemlist = mydatastr.split("\n\n")
    for item in itemlist[:-5]:
        result.append(item.split("\n"))
    # Calling DataFrame constructor on list
    df = pd.DataFrame(result[:-8])
    context = {
        'df': df
    }
    return render(request, 'home/fuel.html', context)
def setcookie(request):
    """Set a salted signed cookie 'name' that expires in two days."""
    response = render(request, 'home/setcookie.html')
    response.set_signed_cookie('name', 'rahul', salt='nm', expires=datetime.utcnow()+timedelta(days=2))
    return response
def getcookie(request, *args, **kwargs):
    """Read back the signed 'name' cookie and render it."""
    signed_name = request.get_signed_cookie('name', salt='nm')
    return render(request, 'home/getcookie.html', {'nm': signed_name})
def delcookie(request):
    """Delete the 'name' cookie and render the confirmation page."""
    response = render(request, 'home/delcookie.html')
    response.delete_cookie('name')
    return response
| [
"rahulshivan05@gmail.com"
] | rahulshivan05@gmail.com |
e41d486baf0f584817240d5dfb4283ad35235fff | a80884040ce1c178274a3068d216f440dd541844 | /tests/operators/test_group_by.py | 148a994d874624aae29cd6aea6bd533dc90abce8 | [
"MIT"
] | permissive | maki-nage/rxsci | a4aae51edc1ef684b55df22e34c11aa1d54ef740 | 915e59ebf593c4b313265bb87cf0e1209ec2ee0f | refs/heads/master | 2023-01-19T14:32:11.638497 | 2023-01-17T08:06:35 | 2023-01-17T08:06:35 | 242,592,973 | 9 | 2 | MIT | 2022-11-08T21:54:16 | 2020-02-23T21:23:56 | Python | UTF-8 | Python | false | false | 4,013 | py | import rx
import rx.operators as ops
import rxsci as rs
from ..utils import on_probe_state_topology
def test_group_by_obs():
    """group_by with an rx.pipe() sub-pipeline: items are routed to one mux
    per key and the merged output preserves the source order."""
    source = [1, 2, 2, 1]
    actual_error = []
    actual_completed = []
    actual_result = []
    mux_actual_result = []
    def on_completed():
        actual_completed.append(True)
    store = rs.state.StoreManager(store_factory=rs.state.MemoryStore)
    rx.from_(source).pipe(
        rs.state.with_store(
            store,
            rx.pipe(
                rs.ops.group_by(
                    lambda i: i,
                    rx.pipe(
                        ops.do_action(mux_actual_result.append),
                    ),
                ))
        ),
    ).subscribe(
        on_next=actual_result.append,
        on_completed=on_completed,
        on_error=actual_error.append,
    )
    assert actual_error == []
    assert actual_completed == [True]
    assert actual_result == source
    # First muxed event is the state-topology probe; then create/next/completed
    # events per key: key 1 -> mux 0, key 2 -> mux 1.
    assert type(mux_actual_result[0]) is rs.state.ProbeStateTopology
    assert mux_actual_result[1:] == [
        rs.OnCreateMux((0 ,(0,)), store),
        rs.OnNextMux((0, (0,)), 1, store),
        rs.OnCreateMux((1, (0,)), store),
        rs.OnNextMux((1, (0,)), 2, store),
        rs.OnNextMux((1, (0,)), 2, store),
        rs.OnNextMux((0, (0,)), 1, store),
        rs.OnCompletedMux((0, (0,)), store),
        rs.OnCompletedMux((1, (0,)), store),
    ]
def test_group_by_list():
    """Same scenario as test_group_by_obs, but the sub-pipeline is given as a
    plain list of operators instead of an rx.pipe()."""
    source = [1, 2, 2, 1]
    actual_error = []
    actual_completed = []
    actual_result = []
    mux_actual_result = []
    def on_completed():
        actual_completed.append(True)
    store = rs.state.StoreManager(store_factory=rs.state.MemoryStore)
    rx.from_(source).pipe(
        rs.state.with_store(
            store,
            rx.pipe(
                rs.ops.group_by(
                    lambda i: i,
                    [
                        ops.do_action(mux_actual_result.append),
                    ],
                ))
        ),
    ).subscribe(
        on_next=actual_result.append,
        on_completed=on_completed,
        on_error=actual_error.append,
    )
    assert actual_error == []
    assert actual_completed == [True]
    assert actual_result == source
    # Expected mux event sequence is identical to the rx.pipe() variant.
    assert type(mux_actual_result[0]) is rs.state.ProbeStateTopology
    assert mux_actual_result[1:] == [
        rs.OnCreateMux((0 ,(0,)), store),
        rs.OnNextMux((0, (0,)), 1, store),
        rs.OnCreateMux((1, (0,)), store),
        rs.OnNextMux((1, (0,)), 2, store),
        rs.OnNextMux((1, (0,)), 2, store),
        rs.OnNextMux((0, (0,)), 1, store),
        rs.OnCompletedMux((0, (0,)), store),
        rs.OnCompletedMux((1, (0,)), store),
    ]
def test_group_by_without_store():
    """group_by must error out when no state store is configured upstream."""
    errors = []
    rx.from_([1, 2, 3, 4]).pipe(
        rs.ops.group_by(
            lambda i: i % 2 == 0,
            pipeline=rx.pipe(
            )
        )
    ).subscribe(on_error=errors.append)
    assert type(errors[0]) is ValueError
def test_forward_topology_probe():
    """The state-topology probe must be forwarded through group_by exactly once."""
    actual_topology_probe = []
    source = [1, 2, 3, 4]
    rx.from_(source).pipe(
        rs.state.with_memory_store(
            rx.pipe(
                rs.ops.group_by(
                    lambda i: i % 2 == 0,
                    pipeline=rx.pipe()
                ),
                on_probe_state_topology(actual_topology_probe.append),
            )
        ),
    ).subscribe()
    assert len(actual_topology_probe) == 1
def test_empty_source():
    """An empty source flows through group_by without emitting any item."""
    source = []
    actual_result = []
    on_completed = []
    actual_error = []
    rx.from_(source).pipe(
        rs.state.with_memory_store(
            rx.pipe(
                rs.ops.group_by(
                    lambda i: i % 2 == 0,
                    pipeline=[]
                ),
            )
        ),
    ).subscribe(
        on_next=actual_result.append,
        on_completed=lambda: on_completed.append(True),
        on_error=actual_error.append,
    )
    assert actual_result == []
| [
"romain.picard@oakbits.com"
] | romain.picard@oakbits.com |
36bc327c7332f51a2ad9a259d48c874153a7cf0b | f27944e59e8c516ba519685f65055d98b1671b21 | /tests/milvus_benchmark/client.py | 4744c108540f163173f574aa93d5675445c30416 | [
"Apache-2.0",
"Zlib",
"BSD-3-Clause",
"LGPL-2.1-only",
"LicenseRef-scancode-public-domain",
"BSD-2-Clause",
"MIT",
"JSON",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | trungtv/milvus | c09239a0fab0cffe035d8fcb363992b670f18dcf | d0d4f4f269325bfd4d8a8d1621ee6a67dbe263a6 | refs/heads/master | 2020-09-01T07:21:03.061054 | 2019-10-30T05:39:45 | 2019-10-30T05:39:45 | 218,906,834 | 1 | 1 | Apache-2.0 | 2019-11-01T03:29:21 | 2019-11-01T03:29:20 | null | UTF-8 | Python | false | false | 7,743 | py | import pdb
import random
import logging
import json
import sys
import time, datetime
from multiprocessing import Process
from milvus import Milvus, IndexType, MetricType
# Shared module logger and default Milvus server endpoint.
logger = logging.getLogger("milvus_benchmark.client")
SERVER_HOST_DEFAULT = "127.0.0.1"
SERVER_PORT_DEFAULT = 19530
def time_wrapper(func):
    """
    Decorator that logs the wall-clock execution time of the decorated function.
    """
    import functools

    @functools.wraps(func)  # FIX: preserve the wrapped function's __name__/__doc__
    def wrapper(*args, **kwargs):
        start = time.time()
        result = func(*args, **kwargs)
        end = time.time()
        # getLogger is idempotent: this is the same instance as the module logger.
        logging.getLogger("milvus_benchmark.client").info(
            "Milvus {} run in {}s".format(func.__name__, round(end - start, 2)))
        return result
    return wrapper
class MilvusClient(object):
    """Thin wrapper around the Milvus SDK bound to a single table."""

    def __init__(self, table_name=None, ip=None, port=None):
        """Connect to the default server, or to ip:port when given."""
        self._milvus = Milvus()
        self._table_name = table_name
        try:
            if not ip:
                self._milvus.connect(
                    host=SERVER_HOST_DEFAULT,
                    port=SERVER_PORT_DEFAULT)
            else:
                self._milvus.connect(
                    host=ip,
                    port=port)
        except Exception as e:
            raise e

    def __str__(self):
        return 'Milvus table %s' % self._table_name

    def check_status(self, status):
        """Raise when a Milvus call returned a non-OK status."""
        if not status.OK():
            logger.error(status.message)
            raise Exception("Status not ok")

    def create_table(self, table_name, dimension, index_file_size, metric_type):
        """Create *table_name*; metric_type is 'l2' or 'ip'."""
        if not self._table_name:
            self._table_name = table_name
        if metric_type == "l2":
            metric_type = MetricType.L2
        elif metric_type == "ip":
            metric_type = MetricType.IP
        else:
            # NOTE(review): unsupported values are only logged; the raw string
            # still reaches the SDK below — confirm that is intended.
            logger.error("Not supported metric_type: %s" % metric_type)
        create_param = {'table_name': table_name,
                        'dimension': dimension,
                        'index_file_size': index_file_size,
                        "metric_type": metric_type}
        status = self._milvus.create_table(create_param)
        self.check_status(status)

    @time_wrapper
    def insert(self, X, ids=None):
        """Add vectors X (optionally with explicit ids); returns (status, ids)."""
        status, result = self._milvus.add_vectors(self._table_name, X, ids)
        self.check_status(status)
        return status, result

    @time_wrapper
    def create_index(self, index_type, nlist):
        """Build an index; index_type is one of flat/ivf_flat/ivf_sq8/mix_nsg/ivf_sq8h."""
        if index_type == "flat":
            index_type = IndexType.FLAT
        elif index_type == "ivf_flat":
            index_type = IndexType.IVFLAT
        elif index_type == "ivf_sq8":
            index_type = IndexType.IVF_SQ8
        elif index_type == "mix_nsg":
            index_type = IndexType.MIX_NSG
        elif index_type == "ivf_sq8h":
            index_type = IndexType.IVF_SQ8H
        index_params = {
            "index_type": index_type,
            "nlist": nlist,
        }
        logger.info("Building index start, table_name: %s, index_params: %s" % (self._table_name, json.dumps(index_params)))
        status = self._milvus.create_index(self._table_name, index=index_params, timeout=6*3600)
        self.check_status(status)

    def describe_index(self):
        return self._milvus.describe_index(self._table_name)

    def drop_index(self):
        logger.info("Drop index: %s" % self._table_name)
        return self._milvus.drop_index(self._table_name)

    @time_wrapper
    def query(self, X, top_k, nprobe):
        """Search the top_k nearest vectors for each query vector in X."""
        status, result = self._milvus.search_vectors(self._table_name, top_k, nprobe, X)
        self.check_status(status)
        return status, result

    def count(self):
        """Row count of the bound table (second element of the SDK reply)."""
        return self._milvus.get_table_row_count(self._table_name)[1]

    def delete(self, timeout=60):
        """Delete the table and poll up to *timeout* seconds until it is empty."""
        logger.info("Start delete table: %s" % self._table_name)
        self._milvus.delete_table(self._table_name)
        i = 0
        while i < timeout:
            if self.count():
                time.sleep(1)
                i = i + 1
                continue
            else:
                break
        # BUG FIX: the original logged "Delete table timeout" when i < timeout,
        # i.e. on the *success* path (early break). Report only a real timeout.
        if i >= timeout:
            logger.error("Delete table timeout")

    def describe(self):
        return self._milvus.describe_table(self._table_name)

    def exists_table(self):
        return self._milvus.has_table(self._table_name)

    @time_wrapper
    def preload_table(self):
        """Load the table into memory (long timeout: preloading is slow)."""
        return self._milvus.preload_table(self._table_name, timeout=3000)
def fit(table_name, X):
    """Insert vectors *X* into *table_name* over a fresh connection, logging the time."""
    milvus = Milvus()
    milvus.connect(host=SERVER_HOST_DEFAULT, port=SERVER_PORT_DEFAULT)
    start = time.time()
    status, ids = milvus.add_vectors(table_name, X)
    end = time.time()
    # BUG FIX: `logger(status, ...)` called the Logger object itself, which
    # raises TypeError; report through logger.info instead.
    logger.info("%s, insert took %ss", status, round(end - start, 2))
def fit_concurrent(table_name, process_num, vectors):
    """Run `fit` in *process_num* parallel worker processes and wait for all."""
    workers = [Process(target=fit, args=(table_name, vectors, ))
               for _ in range(process_num)]
    for worker in workers:
        worker.start()
    for worker in workers:
        worker.join()
if __name__ == "__main__":
    # Ad-hoc smoke test: insert random vectors, build an index, run one query.
    # table_name = "sift_2m_20_128_l2"
    table_name = "test_tset1"
    m = MilvusClient(table_name)
    # m.create_table(table_name, 128, 50, "l2")
    print(m.describe())
    # print(m.count())
    # print(m.describe_index())
    # 5 batches of 10k random 128-dim vectors.
    insert_vectors = [[random.random() for _ in range(128)] for _ in range(10000)]
    for i in range(5):
        m.insert(insert_vectors)
    print(m.create_index("ivf_sq8h", 16384))
    X = [insert_vectors[0]]
    top_k = 10
    nprobe = 10
    print(m.query(X, top_k, nprobe))
    # # # print(m.drop_index())
    # # print(m.describe_index())
    # # sys.exit()
    # # # insert_vectors = [[random.random() for _ in range(128)] for _ in range(100000)]
    # # # for i in range(100):
    # # # m.insert(insert_vectors)
    # # # time.sleep(5)
    # # # print(m.describe_index())
    # # # print(m.drop_index())
    # # m.create_index("ivf_sq8h", 16384)
    # print(m.count())
    # print(m.describe_index())
    # sys.exit()
    # print(m.create_index("ivf_sq8h", 16384))
    # print(m.count())
    # print(m.describe_index())
    # Read a .fvecs vector file as a (n, d) float32 array (memory-mapped).
    import numpy as np
    def mmap_fvecs(fname):
        x = np.memmap(fname, dtype='int32', mode='r')
        d = x[0]
        return x.view('float32').reshape(-1, d + 1)[:, 1:]
    print(mmap_fvecs("/poc/deep1b/deep1B_queries.fvecs"))
    # SIFT_SRC_QUERY_DATA_DIR = '/poc/yuncong/ann_1000m'
    # file_name = SIFT_SRC_QUERY_DATA_DIR+'/'+'query.npy'
    # data = numpy.load(file_name)
    # query_vectors = data[0:2].tolist()
    # print(len(query_vectors))
    # results = m.query(query_vectors, 10, 10)
    # result_ids = []
    # for result in results[1]:
    # tmp = []
    # for item in result:
    # tmp.append(item.id)
    # result_ids.append(tmp)
    # print(result_ids[0][:10])
    # # gt
    # file_name = SIFT_SRC_QUERY_DATA_DIR+"/gnd/"+"idx_1M.ivecs"
    # a = numpy.fromfile(file_name, dtype='int32')
    # d = a[0]
    # true_ids = a.reshape(-1, d + 1)[:, 1:].copy()
    # print(true_ids[:3, :2])
    # print(len(true_ids[0]))
    # import numpy as np
    # import sklearn.preprocessing
    # def mmap_fvecs(fname):
    # x = np.memmap(fname, dtype='int32', mode='r')
    # d = x[0]
    # return x.view('float32').reshape(-1, d + 1)[:, 1:]
    # data = mmap_fvecs("/poc/deep1b/deep1B_queries.fvecs")
    # print(data[0], len(data[0]), len(data))
    # total_size = 10000
    # # total_size = 1000000000
    # file_size = 1000
    # # file_size = 100000
    # file_num = total_size // file_size
    # for i in range(file_num):
    # fname = "/test/milvus/raw_data/deep1b/binary_96_%05d" % i
    # print(fname, i*file_size, (i+1)*file_size)
    # single_data = data[i*file_size : (i+1)*file_size]
    # single_data = sklearn.preprocessing.normalize(single_data, axis=1, norm='l2')
    # np.save(fname, single_data)
| [
"hai.jin@zilliz.com"
] | hai.jin@zilliz.com |
5d95667ea4bdc4bfe5a1025aefdf96ae56266dde | 2bf29863d6030b75136b7d0bfc76ee78cb3125c5 | /vfplot.py | f06402c4db48c4386ddc3eb5a7208196da620862 | [] | no_license | michellekli/visitor-forecasting | 28ec72397e094af1888c9c2f7ffd82f988b21160 | da33a0b0332b643faa8e37c984f008a64eb73c35 | refs/heads/master | 2020-07-02T13:12:52.847545 | 2019-08-15T20:29:45 | 2019-08-15T20:29:45 | 201,531,344 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,907 | py | import numpy as np
import matplotlib.pyplot as plt
import matplotlib.patches as mpatches
import seaborn as sns
import statsmodels.api as sm
import calendar
from pandas.plotting import register_matplotlib_converters
register_matplotlib_converters()
from statsmodels.graphics.tsaplots import plot_acf, plot_pacf
import vfdata as vfd
def plot_config(title=''):
    """Apply the shared figure defaults: slanted x ticks, 16x10 size, title."""
    plt.xticks(rotation=45, horizontalalignment='right')
    current_figure = plt.gcf()
    current_figure.set_size_inches(16, 10)
    current_axes = plt.gca()
    current_axes.set_title(title)
def plot_open_stores_daily(df, stores_subset=None, title=''):
    """Scatter the number of open restaurants per day, coloured by weekday.

    Returns the per-day counts DataFrame used for plotting.
    """
    if stores_subset is None:
        subset_df = df
    else:
        subset_df = df.loc[df['air_store_id'].isin(stores_subset), :]
    # value_counts on visit_date == number of distinct store rows that day.
    subset_counts = subset_df['visit_date'].value_counts().reset_index()
    subset_counts['visit_dow'] = subset_counts['index'].apply(lambda x: x.weekday())
    subset_counts['visit_day_name'] = subset_counts['index'].apply(lambda x: x.day_name())
    subset_counts['color'] = subset_counts['visit_dow'].apply(lambda x: sns.color_palette()[x])
    plt.scatter(subset_counts['index'], subset_counts['visit_date'], c=subset_counts['color'])
    # One legend patch per weekday, matching the palette index used above.
    plt.legend(handles=[mpatches.Patch(color=sns.color_palette()[i], label=calendar.day_name[i]) for i in range(7)])
    plot_config(title)
    ax = plt.gca()
    ax.set_ylabel('Number of Restaurants Open')
    plt.show()
    return subset_counts
def plot_median_visitors_daily(df, stores_subset=None, title=''):
    """Line-plot the daily median visitor count, one hue per weekday name.

    Returns the per-day medians DataFrame used for plotting.
    """
    if stores_subset is None:
        subset_df = df
    else:
        subset_df = df.loc[df['air_store_id'].isin(stores_subset), :]
    subset_counts = subset_df.groupby(by='visit_date')['visitors'].median().reset_index()
    subset_counts['visit_dow'] = subset_counts['visit_date'].apply(lambda x: x.weekday())
    subset_counts['visit_day_name'] = subset_counts['visit_date'].apply(lambda x: x.day_name())
    subset_counts['color'] = subset_counts['visit_dow'].apply(lambda x: sns.color_palette()[x])
    sns.lineplot(x='visit_date', y='visitors', hue='visit_day_name', data=subset_counts)
    plot_config(title)
    plt.show()
    return subset_counts
def plot_store_counts_by(df, by, stores_subset=None, title=''):
    """Bar-plot the number of unique stores per category *by* (descending).

    Returns the counts Series used for plotting.
    """
    if stores_subset is None:
        subset_df = df
    else:
        subset_df = df.loc[df['air_store_id'].isin(stores_subset), :]
    subset_counts = subset_df.groupby(by=by)['air_store_id'].nunique().sort_values(ascending=False)
    sns.barplot(x=subset_counts.index, y=subset_counts, color=sns.color_palette()[0])
    plot_config(title)
    plt.show()
    return subset_counts
def plot_median_visitors_by(df, by, stores_subset=None, title=''):
    """Bar-plot the median visitor count per category *by* (descending).

    Returns the medians Series used for plotting.
    """
    if stores_subset is None:
        subset_df = df
    else:
        subset_df = df.loc[df['air_store_id'].isin(stores_subset), :]
    subset_counts = subset_df.groupby(by=by)['visitors'].median().sort_values(ascending=False)
    sns.barplot(x=subset_counts.index, y=subset_counts, color=sns.color_palette()[0])
    plot_config(title)
    plt.show()
    return subset_counts
def plot_store_counts_comparison(df, by, title=''):
    """Compare store counts per *by* for all stores (bottom, wide panel)
    vs. stores opened before / starting July 2016 (two top panels).

    The top panels are reindexed to the overall ordering so that bars line up.
    """
    nrows = 2
    ncols = 2
    # Bottom panel spanning both columns: all stores.
    ax = plt.subplot2grid((nrows,ncols), (1,0), colspan=ncols)
    subset_counts = df.groupby(by=by)['air_store_id'].nunique().sort_values(ascending=False)
    overall_index = list(subset_counts.index)
    sns.barplot(x=subset_counts.index, y=subset_counts, ax=ax)
    plot_config()
    plt.ylabel('Number of Stores')
    plt.title('All Stores')
    # Top-left panel: stores present before July 2016.
    ax = plt.subplot2grid((nrows,ncols), (0,0))
    subset_df = df.loc[df['air_store_id'].isin(vfd.get_stores_before(df)), :]
    subset_counts = subset_df.groupby(by=by)['air_store_id'].nunique().sort_values(ascending=False).reindex(index=overall_index)
    sns.barplot(x=subset_counts.index, y=subset_counts, ax=ax)
    plot_config()
    plt.xticks([])
    plt.xlabel('')
    plt.ylabel('')
    plt.title('Stores Before July 2016')
    # Top-right panel: stores first appearing July 2016 or later.
    ax = plt.subplot2grid((nrows,ncols), (0,1))
    subset_df = df.loc[df['air_store_id'].isin(vfd.get_stores_starting(df)), :]
    subset_counts = subset_df.groupby(by=by)['air_store_id'].nunique().sort_values(ascending=False).reindex(index=overall_index)
    sns.barplot(x=subset_counts.index, y=subset_counts, ax=ax)
    plot_config()
    plt.xticks([])
    plt.xlabel('')
    plt.ylabel('')
    plt.title('Stores Starting July 2016')
    fig = plt.gcf()
    fig.suptitle(title)
    plt.show()
def plot_median_visitors_comparison(df, by, title=''):
    """Overlay median visitors per *by* for all stores vs. the before/starting
    July 2016 cohorts, on a single axis.

    The two cohort series are horizontally jittered via *offset* so the
    markers do not overlap; both are reindexed to the overall ordering.
    """
    nrows = 1
    ncols = 1
    ax = plt.subplot2grid((nrows,ncols), (0,0))
    subset_counts = df.groupby(by=by)['visitors'].median().sort_values(ascending=False)
    overall_index = list(subset_counts.index)
    sns.scatterplot(x=np.arange(len(overall_index)), y=subset_counts, ax=ax, label='All Stores')
    plot_config()
    plt.xticks(ticks=np.arange(len(overall_index)), labels=overall_index)
    # Shift the "before" cohort slightly left of the overall markers.
    offset = max(1, len(overall_index)//10) / -10
    subset_df = df.loc[df['air_store_id'].isin(vfd.get_stores_before(df)), :]
    subset_counts = subset_df.groupby(by=by)['visitors'].median().sort_values(ascending=False).reindex(index=overall_index)
    sns.scatterplot(x=np.arange(len(overall_index)) + offset, y=subset_counts, ax=ax, label='Before July 2016')
    plot_config()
    # Shift the "starting" cohort slightly right.
    offset = max(1, len(overall_index)//10) / 10
    subset_df = df.loc[df['air_store_id'].isin(vfd.get_stores_starting(df)), :]
    subset_counts = subset_df.groupby(by=by)['visitors'].median().sort_values(ascending=False).reindex(index=overall_index)
    sns.scatterplot(x=np.arange(len(overall_index)) + offset, y=subset_counts, ax=ax, label='Starting July 2016')
    plot_config(title)
    plt.show()
def plot_acf_pacf(df=None, index=None, values=None, lags=25, title='Original values'):
    """Plot a series plus its ACF and PACF (three stacked 16x3 figures).

    Accepts either *df* (uses df.index and df['visitors']) or an explicit
    *index*/*values* pair; when *df* is given it takes precedence.
    """
    if (index is None or values is None) and df is None:
        raise ValueError("Either both or neither of `index` and `values` must "
                         "be specified.")
    if df is not None:
        index = df.index
        values = df['visitors']
    plt.plot(index, values)
    plot_config(title=title)
    fig = plt.gcf()
    fig.set_size_inches(16, 3)
    ax = plt.gca()
    ax.axhline(y=0, color='gray')
    plt.show()
    plot_acf(values, lags=lags)
    fig = plt.gcf()
    fig.set_size_inches(16, 3)
    plt.show()
    plot_pacf(values, lags=lags)
    fig = plt.gcf()
    fig.set_size_inches(16, 3)
    plt.show()
def plot_forecast(forecast, title='', save_path=None):
    """Plot observed/fitted/forecast visitor series with a shaded 95% band.

    *forecast* is expected to contain the columns visit_date, visitors,
    fitted, forecast, 95_lower and 95_upper.  Saves the figure to
    *save_path* when given, then shows it.
    """
    sns.lineplot(x='visit_date', y='visitors', data=forecast, label='observed', alpha=0.9)
    sns.lineplot(x='visit_date', y='fitted', data=forecast, label='fitted', alpha=0.8)
    sns.lineplot(x='visit_date', y='forecast', data=forecast, label='forecast')
    ax = plt.gca()
    # Shade the 95% confidence interval around the forecast.
    ax.fill_between(forecast['visit_date'],
                    forecast['95_lower'],
                    forecast['95_upper'],
                    color='k',
                    alpha=0.2)
    plot_config(title)
    ax.set_ylim(bottom=0)
    ax.set_ylabel('Number of Visitors')
    ax.set_xlabel('Date')
    if save_path is not None:
        plt.savefig(save_path)
    plt.show()
def plot_residuals(index, residuals, title=''):
    """Standard residual diagnostics: histogram+density vs N(0,1), a normal
    Q-Q plot, and (via plot_acf_pacf) the residual series with ACF/PACF."""
    ax = plt.subplot2grid((1, 2), (0, 0))
    sns.distplot(residuals, ax=ax, norm_hist=True)
    # Reference standard-normal density overlaid for comparison.
    sns.distplot(np.random.standard_normal(1000),
                 kde=True, hist=False, norm_hist=True, label='N(0,1)')
    ax.set_title('Histogram plus estimated density')
    ax = plt.subplot2grid((1, 2), (0, 1))
    sm.qqplot(np.array(residuals), fit=True, line='45', ax=ax)
    fig = plt.gcf()
    fig.set_size_inches(16, 7)
    ax.set_title('Normal Q-Q')
    plt.show()
    plot_acf_pacf(index=index, values=residuals, lags=10, title='Standardized residual')
def plot_rolling_window_analysis(analysis, model, cutoff=0):
    """Plot standardized residual diagnostics for the LAST rolling-window fit
    of *model* (rows of *analysis* whose 'model' column matches), skipping
    the first *cutoff* residuals."""
    residuals = analysis.loc[analysis['model'].str.contains(model), 'fitted_residual'].values[-1][cutoff:]
    plot_residuals(range(len(residuals)), residuals/np.std(residuals))
| [
"michelle.li.862@gmail.com"
] | michelle.li.862@gmail.com |
f3a741df2f360ded2a04b6cb0bc198fd067bfbe5 | 69bc75893c707785e5cec300b3e2f1e2381726ab | /info-treel-steps/dev-files/view-hex.py | 4de8df80abe02e3b6d232ffd224c3f636ca7fd33 | [
"MIT"
] | permissive | pierremolinaro/real-time-kernel-teensy-for-students | 2704116e7bf56047568462df2486744585781d71 | e2b153c30537f2cf394da12a2bd337955c3f1b26 | refs/heads/master | 2023-05-12T21:55:38.595726 | 2023-05-02T15:52:04 | 2023-05-02T15:52:04 | 137,371,482 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,622 | py | #!/usr/bin/python3
# -*- coding: utf-8 -*-
#------------------------------------------------------------------------------
# From: https://github.com/msolters/intel-hex-viewer
#------------------------------------------------------------------------------
import sys
def parse_hex_line(line):
    """Decode and print one Intel-HEX record (passed without the leading ':').

    Record layout: BBAAAATT<data...>CC — byte count, 16-bit address,
    record type, data bytes, checksum.  Only data records (type 0) print
    their payload; the checksum is not verified.
    """
    # BUG FIX: the empty-record guard tested the module global `current_line`
    # instead of the `line` parameter; it only worked by accident because the
    # caller happened to pass that same global.
    if len(line) == 0:
        return
    bytecount = int(line[0:2], 16)
    address = int(line[2:6], 16)
    rec_type = int(line[6:8], 16)
    rec_output = str(hex(address)) + '\t(' + str(bytecount) + ')\t'
    if rec_type == 0:
        rec_output += '(data)'
        rec_output += '\t\t' + line[8:(8 + 2 * (bytecount))]
    elif rec_type == 1:
        rec_output += '(end of file)'
    elif rec_type == 2:
        rec_output += '(extended segment address)'
    elif rec_type == 3:
        rec_output += '(start segment address)'
    elif rec_type == 4:
        rec_output += '(extended linear address)'
    elif rec_type == 5:
        rec_output += '(start linear address)'
    print(rec_output)
# (1) Open the Hex File
hex_file_path = sys.argv[1]
print ("Parsing " + hex_file_path)
hex_file = open(hex_file_path, "rb")
# (2) Analyze the hex file line by line
# Records are accumulated byte-by-byte; a ':' marks the START of a new
# record, so the buffer collected so far belongs to the previous record.
current_line = ""
try:
    byte = "1" # initial placeholder
    while byte != "":
        # read(1) returns b"" at EOF, which decodes to "" and ends the loop.
        byte = hex_file.read(1).decode("utf-8")
        if byte == ":":
            # (1) Parse the current line!
            parse_hex_line( current_line )
            # (2) Reset the current line to build the next one!
            current_line = ""
        else:
            current_line += byte
    # Parse the final record accumulated after the last ':'.
    parse_hex_line( current_line )
finally:
    hex_file.close()
| [
"pierre@pcmolinaro.name"
] | pierre@pcmolinaro.name |
27780a5a5eeb3156d1fc17cd2c89f81ea03c6187 | cf2592c342869e9b027794ff985726e53d90e171 | /DFS.py | bac15cb2c4504a3b44c480e762dea082bc42a638 | [] | no_license | Utsabab/Data-Structures- | bb37e9dec89d64e18b2cec52c5bc93265f86093b | d58bdc0db99e9cd71fc99cb1586093c84a25b973 | refs/heads/master | 2021-01-22T04:01:18.522277 | 2017-02-17T19:48:58 | 2017-02-17T19:48:58 | 81,491,695 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 339 | py | G = {}
# Adjacency lists of a small directed graph (G itself is initialised above).
G["A"] = ["B", "C"]
G["B"] = ["C", "D", "E"]
G["C"] = ["D"]
G["D"] = []
G["E"] = []
#DFS:
def printing(root, graph=None):
    """Traverse *graph* depth-first from *root*, printing each node once.

    *graph* defaults to the module-level adjacency list ``G`` (evaluated
    lazily), so existing calls like ``printing("A")`` keep working.
    """
    graph = G if graph is None else graph
    stack = []
    # BUG FIX: `stack += root` iterated the node name, pushing it character
    # by character (broken for multi-character node names); push the node as
    # a single item instead.
    stack.append(root)
    # FIX: mark the root visited so cyclic graphs cannot re-enqueue it.
    searched = [root]
    while stack:
        person = stack.pop()
        print(person)
        for neighbour in graph[person]:
            if neighbour not in searched:
                stack.append(neighbour)
                searched.append(neighbour)
printing("A") | [
"utsab.khakurel@bison.howard.edu"
] | utsab.khakurel@bison.howard.edu |
c53cdeb4d119d5301a3ddabfc5f7a46417bf5ff6 | 8a79b77ab6acfc97b58ae9f987629e8ee00038c8 | /setup.py | 27ade12065d3cf1a86989380f5d11f6ea39a73cf | [] | no_license | Konrad-git-code/Aspp2021-exercises-day4 | d38d394561bc08c08b39e7edeca5997409bf0fbf | 446022871d0cbd75a17b2853c4b19670bd4a2378 | refs/heads/main | 2023-03-08T01:09:04.482587 | 2021-02-20T21:32:34 | 2021-02-20T21:32:34 | 340,119,050 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 120 | py | from distutils.core import setup
from Cython.Build import cythonize

# Build cy_primes.pyx into a C extension module
# (run with: python setup.py build_ext --inplace).
# NOTE(review): distutils is removed in Python 3.12; setuptools also
# provides setup() — confirm the target Python version.
setup(
    ext_modules = cythonize("cy_primes.pyx")
)
| [
"noreply@github.com"
] | noreply@github.com |
6af9434c46be76fce9d56f3ea60f2fca581ad793 | bc0dd74217258c8bdd30e6095dfd7a3edca2dd09 | /assignments/CarND-Vehicle-Detection-P5/f2f.py | c7ddcf21d773d672880881636ee6f76213c48ccd | [] | no_license | akamlani/selfdrivingcar | d645872f4129fcd4c68c3d4967fdd9c784086cc8 | eadd43b4c6d60c71e283b7c43cba61030377eb47 | refs/heads/master | 2020-06-12T10:19:55.748107 | 2017-05-02T18:44:52 | 2017-05-02T18:44:52 | 75,585,494 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,461 | py | import numpy as np
import cv2
from scipy.ndimage.measurements import label
import viz_utils as viz
class Vehicle(object):
    """Bookkeeping for a single tracked vehicle across video frames."""

    def __init__(self):
        # Detection status and frame counters.
        self.detected = False         # seen in the most recent iteration?
        self.n_detections = 0         # total frames this vehicle was seen
        self.n_nondetections = 0      # consecutive frames it was missed
        # Raw pixel coordinates from the latest detection.
        self.xpixels = None
        self.ypixels = None
        # Histories of the last n fitted bounding boxes (x, y, width, height).
        self.recent_xfitted = []
        self.recent_yfitted = []
        self.recent_wfitted = []
        self.recent_hfitted = []
        # Averages over the last n fits.
        self.bestx = None
        self.besty = None
        self.bestw = None
        self.besth = None
class F2FTracker(object):
    """Frame-to-frame tracker that smooths detection heatmaps over a sliding
    window of video frames before labelling and drawing bounding boxes."""

    def __init__(self, dimensions, window_size=10):
        """
        dimensions:  (rows, cols) of the video frames
        window_size: 1 for single image, else window over multiple frames
        """
        self.nframes = 0                 # frames processed so far
        self.window_size = window_size   # length of the smoothing window
        # A single image needs no inter-frame threshold.
        self.threshold = 0 if window_size == 1 else 1
        rows, cols = dimensions
        # Ring buffer of per-frame heatmaps, indexed by frame number modulo window.
        self.heatmap = np.zeros((rows, cols, window_size), dtype=np.float32)

    def process_frame(self, base_img, heatmap_coords):
        """Add one frame's detections; return (annotated image, thresholded
        smoothed heatmap, scipy label result)."""
        # Store the current frame's heatmap in the ring buffer.
        window_idx = self.nframes % self.window_size
        heat_curr = viz.add_heat(base_img, heatmap_coords)
        self.heatmap[:, :, window_idx] = heat_curr
        # Average over however many frames have been seen (up to the window).
        curr_slice = self.heatmap[:, :, :self.nframes + 1]
        item = curr_slice if self.nframes < self.window_size else self.heatmap
        heat_smooth = np.mean(item, axis=2)
        # BUG FIX: the threshold was hard-coded to 1, silently ignoring the
        # self.threshold computed in __init__ (0 for single images).
        heat_thresh = viz.apply_threshold(heat_smooth, threshold=self.threshold)
        # Label connected heat regions and draw their bounding boxes.
        labels = label(heat_thresh)
        draw_img = viz.draw_labeled_bboxes(base_img, labels)
        self.nframes += 1
        return draw_img, heat_thresh, labels
| [
"akamlani@gmail.com"
] | akamlani@gmail.com |
bcb6aaa7bfdf74309d4b17ba49c96fce029d923f | 5e965b37305c3f55583e80aab937f7c7c603198d | /lessons/lesson6.py | 3757612c1db7df13d486ee4e68473343d4e72cbe | [] | no_license | izlatkin/CourseraTask | aaddfc744b2ad5bc70a862962a97700e809d84dc | 3f7015d88d433d2596ec8e94f79ae4bea3d475c1 | refs/heads/master | 2020-05-20T06:06:02.260764 | 2016-09-14T21:51:04 | 2016-09-14T21:51:04 | 68,225,567 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 996 | py | __author__ = 'ilya'
class Animal:
# def __init__(self, name = 'No Name'):
# self._name = name
def __init__(self, **kwargs):
self._attributes = kwargs
def set_attributes(self, key, value):
self._attributes[key] = value
def get_attributes(self, key):
return self._attributes.get(key, None)
def noise(self):
print("Errrrrr")
def move(self):
print("Moving Forward")
def eat(self):
print("Crunch , Crunch")
class Dog(Animal):
def noise(self):
print("Woof, Woof")
# super(Dog, self).noise(Animal)
class Cat(Animal):
def noise(self):
print("Meow")
def tackToMe(Animal):
Animal.noise()
Animal.eat()
#dog = Animal(name = "Puppy")
#dog.noise()
#dog.move()
#dog.eat()
#print(dog.get_attributes('name'))
#dog.set_attributes('Feet', 3)
#print(dog.get_attributes('Feet'))
jake = Dog()
print(jake.noise())
print(jake.move())
sophie = Cat()
an = Animal()
tackToMe(sophie)
| [
"zlatkin_ilya@mail.ru"
] | zlatkin_ilya@mail.ru |
6b49c63ef32c5f55e7862680cebf93ec06f086c9 | 00a381ceb3526e69f3bdef3fe4639e7471d52431 | /CarND-Capstone/ros/src/waypoint_updater/waypoint_updater.py | e7a2e3f37fe1b19fcabd4cc77568d1a9bbc35f72 | [
"MIT"
] | permissive | mskarthik/CarND-Capstone | 1704fd997cd1b52462103d5e6bac1a0061d8f9fb | db6f2711d367eac5289e02c50891403d3b00bb6f | refs/heads/master | 2022-12-06T02:26:39.931001 | 2019-10-31T01:37:10 | 2019-10-31T01:37:10 | 217,895,385 | 0 | 0 | MIT | 2022-11-22T00:23:32 | 2019-10-27T18:07:11 | Makefile | UTF-8 | Python | false | false | 5,734 | py | #!/usr/bin/env python
import numpy as np
import rospy
from geometry_msgs.msg import PoseStamped
from styx_msgs.msg import Lane, Waypoint
from std_msgs.msg import Int32
from scipy.spatial import KDTree
import math
'''
This node will publish waypoints from the car's current position to some `x` distance ahead.
As mentioned in the doc, you should ideally first implement a version which does not care
about traffic lights or obstacles.
Once you have created dbw_node, you will update this node to use the status of traffic lights too.
Please note that our simulator also provides the exact location of traffic lights and their
current status in `/vehicle/traffic_lights` message. You can use this message to build this node
as well as to verify your TL classifier.
TODO (for Yousuf and Aaron): Stopline location for each traffic light.
'''
LOOKAHEAD_WPS = 120 # Number of waypoints we will publish. You can change this number
SPEED = 4
MAX_DECEL = 1.0
class WaypointUpdater(object):
def __init__(self):
rospy.init_node('waypoint_updater')
rospy.Subscriber('/current_pose', PoseStamped, self.pose_cb)
rospy.Subscriber('/base_waypoints', Lane, self.waypoints_cb)
rospy.Subscriber('/traffic_waypoint', Int32, self.traffic_cb)
# TODO: Add a subscriber for /traffic_waypoint and /obstacle_waypoint below
self.final_waypoints_pub = rospy.Publisher('final_waypoints', Lane, queue_size=1)
# TODO: Add other member variables you need below
self.pose = None
self.base_waypoints = None
self.waypoints_2d = None
self.waypoint_tree = None
self.stopline_wp_idx = -1
self.loop()
def loop(self):
rate = rospy.Rate(50)
while not rospy.is_shutdown():
if self.pose and self.base_waypoints :
self.publish_waypoints()
rate.sleep()
def get_closest_waypoint_idx(self):
x = self.pose.pose.position.x
y = self.pose.pose.position.y
closest_idx = self.waypoint_tree.query([x,y],1)[1]
#Check if closest is ahead or behind vehicle
closest_coord = self.waypoints_2d[closest_idx]
prev_coord = self.waypoints_2d[closest_idx-1]
#Equation for hyperplane through closest_coords
cl_vect = np.array(closest_coord)
prev_vect = np.array(prev_coord)
pos_vect = np.array([x,y])
val = np.dot(cl_vect-prev_vect, pos_vect-cl_vect)
#print pos_vect,prev_vect,cl_vect,val,closest_idx
if(val > 0):
closest_idx = (closest_idx + 1)%len(self.waypoints_2d)
return closest_idx
def publish_waypoints(self):
final_lane = self.generate_lane()
self.final_waypoints_pub.publish(final_lane)
def generate_lane(self):
lane = Lane()
closest_idx = self.get_closest_waypoint_idx()
farthest_idx = closest_idx + LOOKAHEAD_WPS
if self.stopline_wp_idx == -1 or (self.stopline_wp_idx >= farthest_idx):
lane.header = self.base_waypoints.header
base_lane_waypoints = self.base_waypoints.waypoints[closest_idx:farthest_idx]
lane.waypoints = base_lane_waypoints
for i in range(len(lane.waypoints)):
self.set_waypoint_velocity(lane.waypoints,i,SPEED)
else:
lane.header = self.base_waypoints.header
base_lane_waypoints = self.base_waypoints.waypoints[closest_idx:farthest_idx]
for i in range(len(lane.waypoints)):
self.set_waypoint_velocity(base_lane_waypoints,i,SPEED)
lane.waypoints = self.decelerate_waypoints(base_lane_waypoints, closest_idx)
return lane
def decelerate_waypoints(self, waypoints, closest_idx):
temp = []
for i, wp in enumerate(waypoints):
p = Waypoint()
p.pose = wp.pose
stop_idx = max(self.stopline_wp_idx - closest_idx - 2,0)
dist = self.distance(waypoints, i, stop_idx)
vel = math.sqrt(2* MAX_DECEL * dist)
if vel < 1.:
vel = 0.
p.twist.twist.linear.x = min(vel, wp.twist.twist.linear.x)
temp.append(p)
return temp
def pose_cb(self, msg):
self.pose = msg
def waypoints_cb(self, waypoints):
if not self.waypoints_2d:
self.waypoints_2d = [[waypoint.pose.pose.position.x,waypoint.pose.pose.position.y] for waypoint in waypoints.waypoints]
self.waypoint_tree = KDTree(self.waypoints_2d)
#print self.waypoints_2d
self.base_waypoints = waypoints
def traffic_cb(self, msg):
# TODO: Callback for /traffic_waypoint message. Implement
self.stopline_wp_idx = msg.data
pass
def obstacle_cb(self, msg):
# TODO: Callback for /obstacle_waypoint message. We will implement it later
pass
def get_waypoint_velocity(self, waypoint):
return waypoint.twist.twist.linear.x
def set_waypoint_velocity(self, waypoints, waypoint, velocity):
waypoints[waypoint].twist.twist.linear.x = velocity
def distance(self, waypoints, wp1, wp2):
dist = 0
dl = lambda a, b: math.sqrt((a.x-b.x)**2 + (a.y-b.y)**2 + (a.z-b.z)**2)
for i in range(wp1, wp2+1):
dist += dl(waypoints[wp1].pose.pose.position, waypoints[i].pose.pose.position)
wp1 = i
return dist
if __name__ == '__main__':
try:
WaypointUpdater()
except rospy.ROSInterruptException:
rospy.logerr('Could not start waypoint updater node.')
| [
"mskarthik@gmail.com"
] | mskarthik@gmail.com |
b1773069faef715be3286dd77a956aa6b95ec95a | 5388cb15c4d3721109935971b5e54c3ea209dbee | /setup.py | e147c81218d98aed25d663906e73507024bd23bb | [] | no_license | Nubbify/KeepingTrack | 098eb841677d3d9c2447e3fff5db1b66a8efa69b | 3de33f6a3d981ba213d561b99f59efe0afd15466 | refs/heads/master | 2021-08-04T15:20:39.356304 | 2019-12-09T05:18:41 | 2019-12-09T05:18:41 | 235,156,152 | 0 | 0 | null | 2021-01-05T19:41:41 | 2020-01-20T17:20:22 | JavaScript | UTF-8 | Python | false | false | 515 | py | from setuptools import find_packages
from setuptools import setup
setup(
name="keepingtrack",
version="0.0.1",
maintainer="Oscar Bautista",
maintainer_email="oscar.v.bautista@gmail.com",
description="A todo list app focusing on nested notes and attachments",
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=["flask"],
extras_require={"tests": ["pytest", "coverage"]},
setup_requires=["pytest-runner"],
tests_require=['pytest']
) | [
"Oscar.V.Bautista@gmail.com"
] | Oscar.V.Bautista@gmail.com |
3cb7b6f1f80cfbb30c21ea153e6a637bbc1effc9 | dd2382f4ee45ea6a5690662e369d6ff96d6c150d | /accounts/urls.py | 5cf8535b130889d650c76e5bd80f8be4127c9080 | [] | no_license | samathaluca/authorisation-lessons | 89693c45274037f380e749ec0ce254eba2d97f0e | 69d10681ade6a8c9c4a7187823c4dca3df3761b4 | refs/heads/master | 2021-05-22T16:11:14.847750 | 2020-04-25T23:13:47 | 2020-04-25T23:13:47 | 252,997,425 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 385 | py | from django.conf.urls import url, include
from accounts.views import logout, login, register
from accounts import url_reset
urlpatterns = [
url(r'^logout/', logout, name="logout"),
url(r'^login/', login, name="login"),
url(r'^register/', register, name="register"),
# url(r'^profile/', user_profile, name="profile"),
url(r'^password-reset/', include(url_reset))
]
| [
"samanthaluca@aol.com"
] | samanthaluca@aol.com |
555ab84accb35fdd7a4be6c3279a0dfd0fda301b | 71acb7214efd91c0d327f6d8958e1798eadb4401 | /locations/spiders/lidl_be.py | 82d333e830b64a9538b85a87b7b5987b418fa8c1 | [
"CC0-1.0",
"MIT"
] | permissive | alltheplaces/alltheplaces | 21b9f8b4ace1352e52ae7b8f8825a930d2cb033e | 1bcbb55cfcf06f2c714465570711f6e83f205c22 | refs/heads/master | 2023-08-30T19:45:35.098658 | 2023-08-30T17:51:54 | 2023-08-30T17:51:54 | 61,166,935 | 453 | 176 | NOASSERTION | 2023-09-14T17:16:40 | 2016-06-15T01:09:18 | Python | UTF-8 | Python | false | false | 1,410 | py | import re
from locations.hours import DAYS_FR, OpeningHours, day_range, sanitise_day
from locations.spiders.lidl_gb import LidlGBSpider
from locations.storefinders.virtualearth import VirtualEarthSpider
class LidlBESpider(VirtualEarthSpider):
name = "lidl_be"
item_attributes = LidlGBSpider.item_attributes
dataset_id = "2be5f76f36e8484e965e84b7ee0cd1b1"
dataset_name = "Filialdaten-BE/Filialdaten-BE"
key = "AvGfUYinH_I7qdNZWDlXTHHysoytHWqkqZpxHBN9Z0Z0YLQup0u6qZoB8uQXUW_p"
def parse_item(self, item, feature, **kwargs):
item["name"] = feature["ShownStoreName"]
oh = OpeningHours()
for day, start_time, end_time in re.findall(
r"(\w+ - \w+|\w+) (\d{2}:\d{2})-(\d{2}:\d{2})",
feature["OpeningTimes"],
):
if "-" in day:
start_day, end_day = day.split("-")
start_day = sanitise_day(start_day, DAYS_FR)
end_day = sanitise_day(end_day, DAYS_FR)
else:
start_day = sanitise_day(day, DAYS_FR)
end_day = None
if start_day and end_day:
for d in day_range(start_day, end_day):
oh.add_range(d, start_time, end_time)
elif start_day:
oh.add_range(start_day, start_time, end_time)
item["opening_hours"] = oh.as_opening_hours()
yield item
| [
"noreply@github.com"
] | noreply@github.com |
6a70f2cc95e497009551b3295f2cf8429c1bcad3 | 1173175cd324cfaa4dd6db9230adc939f4dd88e6 | /09_button.py | 28f0c8a4de9ee2df8410272954ada0217e095be3 | [] | no_license | kmoad/gtk3-tutorial | de59efc6191e5b214423be986539825955dd0139 | 98fbd07fd777d7c0244b64668891eab8d4131fa3 | refs/heads/main | 2023-05-13T11:18:02.324495 | 2021-05-29T01:19:52 | 2021-05-29T01:19:52 | 369,915,134 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,122 | py | import gi
gi.require_version('Gtk', '3.0')
from gi.repository import Gtk
class ButtonWindow(Gtk.Window):
def __init__(self):
Gtk.Window.__init__(self, title='Button Demo')
self.set_border_width(10)
hbox = Gtk.Box(spacing=6)
self.add(hbox)
button = Gtk.Button.new_with_label('Click Me')
button.connect('clicked', self.on_click_me_clicked)
hbox.pack_start(button, True, True, 0)
button = Gtk.Button.new_with_mnemonic("_Open")
button.connect("clicked", self.on_open_clicked)
hbox.pack_start(button, True, True, 0)
button = Gtk.Button.new_with_mnemonic("_Close")
button.connect("clicked", self.on_close_clicked)
hbox.pack_start(button, True, True, 0)
def on_click_me_clicked(self, button):
print('"Click me" button was clicked')
def on_open_clicked(self, button):
print('"Open" button was clicked')
def on_close_clicked(self, button):
print("Closing application")
Gtk.main_quit()
win = ButtonWindow()
win.connect('destroy', Gtk.main_quit)
win.show_all()
Gtk.main() | [
"kyle.moad@gmail.com"
] | kyle.moad@gmail.com |
268b03511df9b1daf0eee6d6ed67b1c79a9b4a19 | f6ebe0c6bc6a044e84875a46e3086ae7cf9f6168 | /domoWebConfigParser.py | 9a6ca5ee02aac0b8e97c548c4696c885c649821a | [] | no_license | Manu-31/domoweb | 6adc7d088fa94d08fe013f3d8d0c212f37ffd814 | 7192039e39b01f6ef1729c409a3934e235bc7cb5 | refs/heads/master | 2021-01-19T11:42:18.368771 | 2018-11-17T16:41:14 | 2018-11-17T16:41:14 | 61,647,468 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,009 | py | # -*- coding: utf-8 -*-
#=============================================================
# A configuraion tool for domoWeb based on python ConfigParser
#=============================================================
import ConfigParser
config = ConfigParser.ConfigParser()
config.optionxform = str
#-------------------------------------------------------------
# Lecture du fichier de configuration
#
# On va chercher la configuration dans les fichiers suivants et dans
# cet ordre
# le fichier fourni en paramètre
# ${HOME}/.domoweb.cfg
# (A FAIRE)
# /etc/domoweb.cfg
#config.read(['/etc/domoweb.cfg', os.path.expanduser('~/.domoweb.cfg')])
#config.read([os.path.expanduser('~/.domoweb.cfg')])
#-------------------------------------------------------------
def configParserInit(configFileName) :
if (configFileName is None) :
configFileName = os.path.expanduser('~/.domoweb.cfg')
print "Reading configuration from '"+configFileName+"'"
config.read(configFileName)
| [
"manu@manu-chaput.net"
] | manu@manu-chaput.net |
dfa39f1519de44a9bc2e2795a2c624fa07e64ccb | c1bdce1c720394a02f4dbd582c4e5bf3d26d234b | /ScoreManagement/ScoreManager.py | 3fdc895a0a2d596413d19ac948c2e93df230415f | [] | no_license | TuanMinh-Vu/LambdaFunctions-Database | 4eaaaa2c226d17901f26567ad7c2d3377e60e355 | df4818924f5d23cbc36ccea6a989fdf667a2bd7f | refs/heads/master | 2022-07-23T19:31:02.718322 | 2020-04-19T10:24:30 | 2020-04-19T10:24:30 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,187 | py | import json
import boto3
def lambda_handler(event, context):
dynamodb = boto3.resource('dynamodb')
scoreTable = dynamodb.Table("Score")
if event['httpMethod'] == 'GET':
if 'queryStringParameters' in event:
params = event['queryStringParameters']
if 'GameName' in params and 'Username' in params:
gameName = params['GameName']
user = params['Username']
key = gameName + ': ' + user
data = scoreTable.get_item(Key={'Key': key})
if 'Item' in data:
message = data['Item']
return send_message(200, str(message))
else:
return send_message(400, 'Cant find ' + user + ' in game ' + gameName)
else:
return send_message(400, 'GameName and Username cant be empty')
else:
return send_message(400, 'Parameters cant be empty')
elif event['httpMethod'] == 'PUT':
if 'body' in event:
receivedData = event['body']
body = json.loads(receivedData)
if 'GameName' in body and 'Username' in body and 'Score' in body:
gameName = body['GameName']
user = body['Username']
key = gameName + ': ' + user
data = scoreTable.get_item(Key={'Key': key})
usersTable = dynamodb.Table('Users')
#check user
mUser = usersTable.get_item(Key={'ID': user})
if not 'Item' in mUser:
return send_message(400, 'Couldnt find user ' + user)
gamesTable = dynamodb.Table('RemoteSettings')
#check game
mGame = gamesTable.get_item(Key={'game_name': gameName})
if not 'Item' in mGame:
return send_message(400, 'Couldnt find game ' + gameName)
if 'Item' in data:
scoreTable.update_item(
Key = {
'Key' : key
},
UpdateExpression = 'SET Score = :val',
ExpressionAttributeValues={
':val': body['Score']
}
)
return send_message(200, 'Updated score succesfully')
else:
newUser = {
'Key' : key,
'Score': body['Score']
}
scoreTable.put_item(
Item = newUser
)
return send_message(200, 'Updated score succesfully')
else:
return send_message(400, 'Username, GameName, Score cant be empty')
else:
return send_message(400, 'Username, GameName, Score cant be empty')
else:
return send_message(400, 'Only GET, PUT request are supported')
def send_message(code, message):
return{
'statusCode' : code,
'body': json.dumps(message)
}
| [
"vuminh99hp@gmail.com"
] | vuminh99hp@gmail.com |
a023a90d0524987f2323283b3ec3458123ea34d6 | 89c682cc1758faa10bceea7280d716a18096e112 | /filterCol.py | 30f58eef6c7bf22035b97f0cbdcab133abbf3914 | [] | no_license | kidlin/tools | be8a3753f08c62e0fbc3caf329035bde34e4b547 | 5a384198ae8178bbf64b76e9f69a5ac2275b71bf | refs/heads/master | 2020-09-22T00:25:29.721749 | 2020-01-12T05:25:22 | 2020-01-12T05:25:22 | 224,986,077 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,496 | py | #!/usr/bin/python
# Software License Agreement (BSD License)
#
# Copyright (c) 2013, Juergen Sturm, TUM
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of TUM nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
# Requirements:
# sudo apt-get install python-argparse
import argparse
import sys
import os
import shutil
def copyfromTXT(target, col, newFile):
f = open(target,'r')
data = f.readlines()
f.close()
name1=[]
for line in data:
list = line.strip('\n').split(' ')
name1.append(list[int(col)])
with open(newFile,'w') as f:
for i in name1:
f.writelines(i+'\r\n')
if __name__ == '__main__':
# parse command line
parser = argparse.ArgumentParser(description='''
This script extracts the expected col from a txt file
''')
parser.add_argument('first_file', help='target file')
parser.add_argument('col', help='the col you want')
parser.add_argument('new_file', help='save to')
args = parser.parse_args()
copyfromTXT(args.first_file, args.col, args.new_file)
| [
"lintp@zju.edu.cn"
] | lintp@zju.edu.cn |
182ca5a1b657ff23de907689d948cd50219592c2 | 7dbd0bcc1a19fc9317102096c4e6848d8d343f43 | /PipelineClusterNew/CrossValidationII.py | c3eefc8be5f8ac6a25f21a19c5a0f5984c80fddf | [] | no_license | ruit/BayesTeller | 4f23ce58e23e5e3f647406db003946d4e8f98756 | e37a87a35ff10369aaeb3e200320734e41bfd762 | refs/heads/master | 2020-04-06T04:22:14.136804 | 2015-08-24T10:28:11 | 2015-08-24T10:28:11 | 22,233,799 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,270 | py | #!/usr/bin/python
# Tian R. <tianremiATgmail.com>
# Nov 4, 2014
# Nov 5, 2014
# Dec 12, 2014
# Randomly mask some patients as test data
import random, math
import sys, re
infile1 = sys.argv[1]
infile2 = sys.argv[2]
totalPatListFile = sys.argv[3]
tempdir = sys.argv[4]
# Sep 19, 2014
# Tian R. compare SNVs freq in normal and tumor, build a pridictive model
# Sep 22, 2014. 1000G SNV freq <= 0.5% as the filtering at the beginning!!!
def GenerateTestList (totalPatListFile):
#totalList is a single column file, containing all patients
totalL=[]
for line in open(totalPatListFile, "r"):
line=line.strip("\n")
totalL.append(line)
halfOne=[]
halfTwo=[]
while len(halfOne) < round(len(totalL) / 2) :
singleP=random.choice(totalL)
if singleP not in halfOne:
halfOne.append(singleP)
for anyP in totalL:
if anyP not in halfOne:
halfTwo.append(anyP)
return [halfOne, halfTwo]
def unique1(seq):
# order preserving
checked = []
for e in seq:
if e not in checked:
checked.append(e)
return checked
def SortedKey2GivenStartEndAsList(input, outfile, testList):
'''
Input file is of two columns.
The first col is mut IDs, mut be sorted. No redundancies.
#100 A_start
#100 P2
#101 void
#102 P3,P4
#102 Z_end
testList=['Patient1','Patient2']
'''
import re
# June 19, 27, 2014
last_zuo=""
current_zuo=""
last_you=""
current_you=""
f=open(input, "r")
out=open(outfile, "w")
for line in f:
array=line.strip("\n").split("\t")
current_zuo=array[0]
current_you=array[1]
#start with "A_start", Oct 21, 2014
if current_you=="A_start":
last_zuo="what"
last_you=current_you
if last_you != "":
#if last_you is not empty
if current_you != "Z_end":
if last_zuo != "what":
if last_zuo=="":
last_zuo=current_zuo
else:
last_zuo=last_zuo+","+current_zuo
last_you=last_you+","+current_you
else:
last_zuo=""
last_you=last_you+","+current_you
else:
patlist=last_you.split(",")
# patlist might contain "void" (empty) elements
newlist=[]
for e in patlist:
fuhe=re.search("Patient", e)
if fuhe and e not in testList:
newlist.append(e)
#print "Patients counted for modeling are " + e
out.write(last_zuo+"\t"+str(len(unique1(newlist)))+"\n")
print "Patients counted for modeling are "+ ";".join(unique1(newlist))
#print "-------------------------------------------------"
#empty the lists
last_zuo=""
last_you=""
f.close()
out.close()
def writeTestPatList(testList, outTestPatFile):
f_out = open(outTestPatFile, "w")
for p in testList:
f_out.write(p+"\n")
f_out.close()
def main():
pairs=GenerateTestList(totalPatListFile)
print pairs
#complete 2CV!!!
SortedKey2GivenStartEndAsList(infile1, infile1+".outA", pairs[0])
SortedKey2GivenStartEndAsList(infile2, infile2+".outA", pairs[0])
writeTestPatList(pairs[0], tempdir+"/testPatList4A.tab") ###!!!! double check Dec 12, 2014
print "-------------------------------------"
SortedKey2GivenStartEndAsList(infile1, infile1+".outB", pairs[1])
SortedKey2GivenStartEndAsList(infile2, infile2+".outB", pairs[1])
writeTestPatList(pairs[1], tempdir+"/testPatList4B.tab")
if __name__=="__main__":
main()
| [
"tianremi@gmail.com"
] | tianremi@gmail.com |
f725442ebab9d20319de0cae94b13354fa60e6a9 | f844fe4eb921da07dd6e93856c27af84752a4ec9 | /tools/pred_prey_experiment_file.py | cc75a2d7b487d69fe6220fc5b6b15b31566d73b9 | [
"MIT"
] | permissive | jcbpyle/FLAMEGPU | 3ef16edd04bb40914db96665c8415c07d51ac6d7 | c472982da0b7d20a61f12c64e6dcf8b1953ef434 | refs/heads/master | 2020-05-20T06:04:25.265348 | 2019-06-04T16:23:52 | 2019-06-04T16:23:52 | 185,419,573 | 0 | 0 | null | 2019-05-07T14:33:32 | 2019-05-07T14:33:32 | null | UTF-8 | Python | false | false | 19,385 | py | # -*- coding: utf-8 -*-
"""
Created on Mon Mar 25 11:53:16 2019
@author: James
"""
# -*- coding: utf-8 -*-
"""
Created on Thu Mar 7 12:49:41 2019
@author: James
"""
from smse_experiment_functions import simulation_ga, batch_simulation, train_regression_surrogate_model, train_classifier_surrogate, surrogate_ga, surrogate_ga_sim_eval
import os
import sys
import random
import datetime
######################################## GLOBALS ###################################
# Working directory; all experiment output folders are created beneath it.
wd = os.getcwd()
# GA population size (mu) and number of offspring per generation (lambda).
MU = 100
LAMBDA = 25
# Number of generations each GA is allowed to run.
GA_GENERATIONS = 196
# Repetition counts for each experiment type (to measure average/variance).
SIMGA_RUNS = 10
SURRGA_RUNS = 100
COMPAREGA_RUNS = 10
# Wall-clock budgets, in minutes, for each experiment stage.
SIMGA_MAXTIME = 10000
BATCH_MAXTIME = 10000
SM_MAXTIME = 6
SURRGA_MAXTIME = 100
COMPAREGA_MAXTIME = 10000
# Maximum number of parameter vectors to batch-simulate for training data.
MAX_BATCH = 50000
# Surrogate-model training dataset sizes and train/test split fractions to sweep.
SM_DATASIZES = [5000,10000,15000,25000,35000,50000]
#SM_DATASIZES = [50000]
SM_TRAINING = [0.05, 0.1, 0.25, 0.5, 0.75]
#Set in progress experimental directory and initilise seeds and time recording csv
BASE_DIRECTORY = wd+"/"
SIMGA = BASE_DIRECTORY+"simulation_ga/"
BATCHSIM = BASE_DIRECTORY+"batch_simulation_data/"
SURR = BASE_DIRECTORY+"surrogate_model_training/"
SURRGA = BASE_DIRECTORY+"surrogate_ga/"
SURRSIM = BASE_DIRECTORY+"surrogate_vs_sim_comparison/"
TEST = BASE_DIRECTORY+"testing/"
directories = [BASE_DIRECTORY,SIMGA,BATCHSIM,SURR,SURRGA,SURRSIM,TEST]
# Ensure every experiment directory exists and has empty seed/time CSV logs.
for d in directories:
    if not os.path.exists(d):
        os.mkdir(d)
    if not os.path.exists(d+"seeds.csv"):
        open(d+"seeds.csv","w").close()
    if not os.path.exists(d+"times.csv"):
        open(d+"times.csv","w").close()
######################################## SIMULATION GA BASELINE EXPERIMENT ##############################################
#Preliminary experiment, simulation ga performance averages
#GA limitation discovery/performance in terms of average and variation graph. Should show tuning towards high quality
def perform_x_continuous_simulation_ga(x,mu,lam,gagen,gat=9999):
    """Baseline experiment: run up to ``x`` simulation GAs (continuous primary
    fitness) to measure average performance and variance.

    Parameters:
        x (int): maximum number of GA repetitions.
        mu (int): GA population size passed to ``simulation_ga``.
        lam (int): GA offspring count passed to ``simulation_ga``.
        gagen (int): generations per GA run.
        gat (int): cumulative wall-clock budget in minutes; the repetition
            loop stops early once this is exceeded (default 9999 ~ unlimited).

    Side effects: creates ``SIMGA/continuous_fitness/`` and appends the RNG
    seed and the total elapsed time to the experiment CSV logs.
    """
    print("Baseline experiment: Simulation GAs (Continuous primary fitness implementation) to ascertain average performance and variance.")
    loc = SIMGA+"/continuous_fitness/"
    if not os.path.exists(loc):
        os.mkdir(loc)
    #Set and record seed (logged in two places for reproducibility)
    seed = random.randrange(sys.maxsize)
    random.seed(seed)
    for seed_log in (BASE_DIRECTORY+"seeds.csv", SIMGA+"seeds.csv"):
        with open(seed_log,"a") as sf:
            sf.write("simulation_ga_cont_fitness,"+str(seed)+"\n")
    #Track experiment time
    initial_time = datetime.datetime.now()
    #Run experiment over x sim GAs, stopping early if the time budget is spent
    for i in range(x):
        simulation_ga(mu,lam,gagen,gat,loc,"continuous")
        print("completed continuous sim GA ",i+1," out of ",x)
        elapsed = datetime.datetime.now()-initial_time
        # BUG FIX: timedelta.seconds ignores whole days; total_seconds() keeps
        # the minute-budget check correct for experiments longer than 24h.
        minutes = int(elapsed.total_seconds()/60)
        if minutes>gat:
            # BUG FIX: i+1 GAs have completed by this point, not i.
            print("Time limit reached, exiting with ",i+1," completed GA")
            break
    # BUG FIX: compute final_time after the loop so x == 0 no longer raises
    # NameError (final_time was previously only bound inside the loop body).
    final_time = datetime.datetime.now()
    simga_time_taken = final_time-initial_time
    #Log time
    for time_log in (BASE_DIRECTORY+"times.csv", SIMGA+"times.csv"):
        with open(time_log,"a") as tf:
            tf.write(str(x)+",simulation_ga_cont_fitness_total_time,"+str(simga_time_taken)+",start,"+str(initial_time)+",finish,"+str(final_time)+"\n")
    print("Continuous sim GA performance experiment complete.\n")
    return
def perform_x_discrete_simulation_ga(x,mu,lam,gagen,gat=9999):
    """Baseline experiment: run up to ``x`` simulation GAs (discrete primary
    fitness) to measure average performance and variance.

    Parameters:
        x (int): maximum number of GA repetitions.
        mu (int): GA population size passed to ``simulation_ga``.
        lam (int): GA offspring count passed to ``simulation_ga``.
        gagen (int): generations per GA run.
        gat (int): cumulative wall-clock budget in minutes; the repetition
            loop stops early once this is exceeded (default 9999 ~ unlimited).

    Side effects: creates ``SIMGA/discrete_fitness/`` and appends the RNG
    seed and the total elapsed time to the experiment CSV logs.
    """
    print("Baseline experiment: Simulation GAs (Discrete primary fitness implementation) to ascertain average performance and variance.")
    loc = SIMGA+"/discrete_fitness/"
    if not os.path.exists(loc):
        os.mkdir(loc)
    #Set and record seed (logged in two places for reproducibility)
    seed = random.randrange(sys.maxsize)
    random.seed(seed)
    for seed_log in (BASE_DIRECTORY+"seeds.csv", SIMGA+"seeds.csv"):
        with open(seed_log,"a") as sf:
            sf.write("simulation_ga_discrete_fitness,"+str(seed)+"\n")
    #Track experiment time
    initial_time = datetime.datetime.now()
    #Run experiment over x sim GAs, stopping early if the time budget is spent
    for i in range(x):
        simulation_ga(mu,lam,gagen,gat,loc,"discrete")
        print("completed discrete sim GA ",i+1," out of ",x)
        elapsed = datetime.datetime.now()-initial_time
        # BUG FIX: timedelta.seconds ignores whole days; total_seconds() keeps
        # the minute-budget check correct for experiments longer than 24h.
        minutes = int(elapsed.total_seconds()/60)
        if minutes>gat:
            # BUG FIX: i+1 GAs have completed by this point, not i.
            print("Time limit reached, exiting with ",i+1," completed GA")
            break
    # BUG FIX: compute final_time after the loop so x == 0 no longer raises
    # NameError (final_time was previously only bound inside the loop body).
    final_time = datetime.datetime.now()
    simga_time_taken = final_time-initial_time
    #Log time
    for time_log in (BASE_DIRECTORY+"times.csv", SIMGA+"times.csv"):
        with open(time_log,"a") as tf:
            tf.write(str(x)+",simulation_ga_discrete_fitness_total_time,"+str(simga_time_taken)+",start,"+str(initial_time)+",finish,"+str(final_time)+"\n")
    print("Discrete sim GA performance experiment complete.\n")
    return
######################################## BATCH SIMULATION SURROGATE TRAINING DATA GENERATION ################################################
# Batch simulate the desired number of paramter vectors
def x_batch_simulations(x, typ, time=9999):
    """Batch-simulate ``x`` parameter vectors to generate surrogate training data.

    Parameters:
        x (int): number of simulations to queue.
        typ (str): experiment type tag used in the seed/time log entries.
        time (int): maximum runtime budget forwarded to ``batch_simulation``
            (default 9999 ~ unlimited).

    Side effects: appends the RNG seed and the elapsed time to the experiment
    CSV logs and writes simulation output under ``BATCHSIM``.
    """
    print("Experiment: Perform required number of simulations in batch to generate training data for surrogate model.")
    # Draw a fresh seed and persist it so the batch can be reproduced.
    seed = random.randrange(sys.maxsize)
    random.seed(seed)
    seed_record = typ+"_batch_sim_data_generation,"+str(seed)+"\n"
    for seed_path in (BASE_DIRECTORY+"seeds.csv", BATCHSIM+"seeds.csv"):
        with open(seed_path,"a") as seed_file:
            seed_file.write(seed_record)
    # Time the whole batch from queueing to completion.
    start_time = datetime.datetime.now()
    batch_simulation(x,time,typ,seed,BATCHSIM)
    end_time = datetime.datetime.now()
    elapsed = end_time-start_time
    time_record = str(x)+","+typ+"_batch_sim_data_generation,"+str(elapsed)+",start,"+str(start_time)+",finish,"+str(end_time)+"\n"
    for time_path in (BASE_DIRECTORY+"times.csv", BATCHSIM+"times.csv"):
        with open(time_path,"a") as time_file:
            time_file.write(time_record)
    print("Completed training data batch simulations.\n")
    return
############################################# TRAIN SURROGATE MODELS ###############################################
# Train surrogate models for as long as time permits. Log and save the best performing models
def train_regression_surrogate_models(data,training,batch_data_loc,loc,time=9999):
    """Train regression surrogate models for each fitness objective in turn.

    Trains primary, secondary and tertiary regression models sequentially,
    each bounded by the same ``time`` budget, and logs how many models of
    each type were trained and how long each stage took.

    Parameters:
        data: training dataset size selector forwarded to the trainer.
        training: train/test split fraction forwarded to the trainer.
        batch_data_loc (str): directory holding the batch simulation data.
        loc (str): output directory for trained models.
        time (int): per-stage runtime budget (default 9999 ~ unlimited).
    """
    print("Experiment: Train surrogate models until time limit is reached, save best performing models.")
    # Draw a fresh seed and persist it so the training run can be reproduced.
    seed = random.randrange(sys.maxsize)
    random.seed(seed)
    seed_record = "regression_surrogate_training,"+str(seed)+"\n"
    for seed_path in (BASE_DIRECTORY+"seeds.csv", SURR+"seeds.csv"):
        with open(seed_path,"a") as seed_file:
            seed_file.write(seed_record)
    start_time = datetime.datetime.now()
    # Each stage starts where the previous one finished; the trainer returns
    # (models_trained, completion_timestamp) for the next stage/logging.
    x, completion_time1 = train_regression_surrogate_model(data,training,start_time,time,batch_data_loc,loc,1)
    y, completion_time2 = train_regression_surrogate_model(data,training,completion_time1,time,batch_data_loc,loc,2)
    z, final_time = train_regression_surrogate_model(data,training,completion_time2,time,batch_data_loc,loc,3)
    time_records = (
        str(x)+",regression_surrogate_training_primary,"+str(completion_time1-start_time)+",start,"+str(start_time)+",finish,"+str(completion_time1)+"\n"
        + str(y)+",regression_surrogate_training_secondary,"+str(completion_time2-completion_time1)+",start,"+str(completion_time1)+",finish,"+str(completion_time2)+"\n"
        + str(z)+",regression_surrogate_training_tertiary,"+str(final_time-completion_time2)+",start,"+str(completion_time2)+",finish,"+str(final_time)+"\n"
    )
    for time_path in (BASE_DIRECTORY+"times.csv", SURR+"times.csv"):
        with open(time_path,"a") as time_file:
            time_file.write(time_records)
    print("Completed training regression surrogate models.\n")
    return
def train_classifier_surrogate_model(data,training,batch_data_loc,loc,time=9999):
    """Train classifier surrogate models within the given time budget.

    Parameters:
        data: training dataset size selector forwarded to the trainer.
        training: train/test split fraction forwarded to the trainer.
        batch_data_loc (str): directory holding the batch simulation data.
        loc (str): output directory for trained models.
        time (int): runtime budget (default 9999 ~ unlimited).

    Side effects: appends the RNG seed, the number of models trained and the
    elapsed time to the experiment CSV logs.
    """
    print("Experiment: Train surrogate models until time limit is reached, save best performing model.")
    # Draw a fresh seed and persist it so the training run can be reproduced.
    seed = random.randrange(sys.maxsize)
    random.seed(seed)
    seed_record = "classifier_surrogate_training,"+str(seed)+"\n"
    for seed_path in (BASE_DIRECTORY+"seeds.csv", SURR+"seeds.csv"):
        with open(seed_path,"a") as seed_file:
            seed_file.write(seed_record)
    start_time = datetime.datetime.now()
    # Trainer returns (models_trained, completion_timestamp).
    x, final_time = train_classifier_surrogate(data,training,start_time,time,batch_data_loc,loc)
    surrogate_training_time_taken = final_time-start_time
    time_record = str(x)+",classifier_surrogate_training,"+str(surrogate_training_time_taken)+",start,"+str(start_time)+",finish,"+str(final_time)+"\n"
    for time_path in (BASE_DIRECTORY+"times.csv", SURR+"times.csv"):
        with open(time_path,"a") as time_file:
            time_file.write(time_record)
    print("Completed training classifier surrogate models.\n")
    return
############################################ SURROGATE GUIDED GA ###############################################
# Perform multiple surrogate guided GA to measure average fitness and variance
def perform_x_continuous_surrogate_ga(models, x, mu, lam, gagen, gat=9999):
    """Run x surrogate-guided GAs (continuous primary fitness) and log timings."""
    out_dir = SURRGA + "/continuous_fitness/"
    if not os.path.exists(out_dir):
        os.mkdir(out_dir)
    print("Surrogate GAs (Continuous primary fitness implementation) to ascertain average performance and variance.")
    # Seed the RNG and record the seed in both log locations.
    seed = random.randrange(sys.maxsize)
    random.seed(seed)
    seed_record = "surrogate_ga_cont_fitness," + str(seed) + "\n"
    for directory in (BASE_DIRECTORY, SURRGA):
        with open(directory + "seeds.csv", "a") as seed_file:
            seed_file.write(seed_record)
    # Time the whole batch of surrogate GA runs.
    initial_time = datetime.datetime.now()
    final_time = surrogate_ga(x, models, mu, lam, gagen, gat, SURR, out_dir, "continuous")
    elapsed = final_time - initial_time
    # Append an identical timing record to both log locations.
    time_record = (str(x) + ",surrogate_ga_cont_fitness_total_time," + str(elapsed) +
                   ",start," + str(initial_time) + ",finish," + str(final_time) + "\n")
    for directory in (BASE_DIRECTORY, SURRGA):
        with open(directory + "times.csv", "a") as time_file:
            time_file.write(time_record)
    print("Continuous surr GA performance experiment complete.\n")
    return
def perform_x_discrete_surrogate_ga(models, x, mu, lam, gagen, gat=9999):
    """Run x surrogate-guided GAs (discrete primary fitness) and log timings."""
    print("Surrogate GAs (Discrete primary fitness implementation) to ascertain average performance and variance.")
    out_dir = SURRGA + "/discrete_fitness/"
    if not os.path.exists(out_dir):
        os.mkdir(out_dir)
    # Seed the RNG and record the seed in both log locations.
    seed = random.randrange(sys.maxsize)
    random.seed(seed)
    seed_record = "surrogate_ga_discrete_fitness," + str(seed) + "\n"
    for directory in (BASE_DIRECTORY, SURRGA):
        with open(directory + "seeds.csv", "a") as seed_file:
            seed_file.write(seed_record)
    # Time the whole batch of surrogate GA runs.
    initial_time = datetime.datetime.now()
    final_time = surrogate_ga(x, models, mu, lam, gagen, gat, SURR, out_dir, "discrete")
    elapsed = final_time - initial_time
    # Append an identical timing record to both log locations.
    time_record = (str(x) + ",surrogate_ga_discrete_fitness_total_time," + str(elapsed) +
                   ",start," + str(initial_time) + ",finish," + str(final_time) + "\n")
    for directory in (BASE_DIRECTORY, SURRGA):
        with open(directory + "times.csv", "a") as time_file:
            time_file.write(time_record)
    print("Discrete surr GA performance experiment complete.\n")
    return
############################################# SURROGATE PREDICTED VS SIMULATION EVALUATED FITNESSES OF GA POP #########################
# Perform multiple surrogate guided GA while evaluating the true fitness of the discovered population by simulation (displays surrogate prediction accuracy/true quality of solutions)
def perform_x_continuous_compare_ga(models, x, mu, lam, gagen, gat=9999):
    """Run x surrogate GAs (continuous fitness) and re-evaluate every discovered
    population by simulation each generation, exposing surrogate prediction
    accuracy versus true solution quality.

    Records the RNG seed and total runtime to the CSV logs under both
    BASE_DIRECTORY and SURRSIM.
    """
    # Fix: corrected "evalauted" -> "evaluated" in the status message.
    print("Surrogate GAs (Continuous primary fitness implementation) with discovered populations evaluated by simulation at each generation.")
    loc = TEST + "/continuous_fitness/"
    if not os.path.exists(loc):
        os.mkdir(loc)
    # Seed the RNG and record the seed in both log locations.
    seed = random.randrange(sys.maxsize)
    random.seed(seed)
    seed_record = "comparison_ga_cont," + str(seed) + "\n"
    for directory in (BASE_DIRECTORY, SURRSIM):
        with open(directory + "seeds.csv", "a") as seed_file:
            seed_file.write(seed_record)
    # Time the batch of comparison GA runs.
    initial_time = datetime.datetime.now()
    final_time = surrogate_ga_sim_eval(x, models, mu, lam, gagen, gat, loc, SURR, "continuous")
    comparison_ga_time_taken = final_time - initial_time
    # Append an identical timing record to both log locations.
    time_record = (str(x) + ",comparison_ga_cont_total_time," + str(comparison_ga_time_taken) +
                   ",start," + str(initial_time) + ",finish," + str(final_time) + "\n")
    for directory in (BASE_DIRECTORY, SURRSIM):
        with open(directory + "times.csv", "a") as time_file:
            time_file.write(time_record)
    print("Continuous comparison GA experiment complete.\n")
    return
def perform_x_discrete_compare_ga(models, x, mu, lam, gagen, gat=9999):
    """Run x surrogate GAs (discrete fitness) and re-evaluate every discovered
    population by simulation each generation, exposing surrogate prediction
    accuracy versus true solution quality.

    Records the RNG seed and total runtime to the CSV logs under both
    BASE_DIRECTORY and SURRSIM.
    """
    # Fix: corrected "evalauted" -> "evaluated" in the status message.
    print("Surrogate GAs (Discrete primary fitness implementation) with discovered populations evaluated by simulation at each generation.")
    loc = TEST + "/discrete_fitness/"
    if not os.path.exists(loc):
        os.mkdir(loc)
    # Seed the RNG and record the seed in both log locations.
    seed = random.randrange(sys.maxsize)
    random.seed(seed)
    seed_record = "comparison_ga_discrete," + str(seed) + "\n"
    for directory in (BASE_DIRECTORY, SURRSIM):
        with open(directory + "seeds.csv", "a") as seed_file:
            seed_file.write(seed_record)
    # Time the batch of comparison GA runs.
    initial_time = datetime.datetime.now()
    final_time = surrogate_ga_sim_eval(x, models, mu, lam, gagen, gat, loc, SURR, "discrete")
    comparison_ga_time_taken = final_time - initial_time
    # Append an identical timing record to both log locations.
    time_record = (str(x) + ",comparison_ga_discrete_total_time," + str(comparison_ga_time_taken) +
                   ",start," + str(initial_time) + ",finish," + str(final_time) + "\n")
    for directory in (BASE_DIRECTORY, SURRSIM):
        with open(directory + "times.csv", "a") as time_file:
            time_file.write(time_record)
    print("Discrete comparison GA experiment complete.\n")
    return
######################################### TEST EXPERIMENTS ##################################################
#perform_x_continuous_simulation_ga(2,2,1,3,5)#WORKS
#perform_x_discrete_simulation_ga(2,2,1,3,5)#WORKS
#x_batch_simulations(500,"continuous",1)#WORKS
#x_batch_simulations(500,"discrete",1)#WORKS
#train_regression_surrogate_models([100,250,500],[0.5,0.75,0.95],BATCHSIM+"/continuous/",SURR,2)#WORKS
#train_classifier_surrogate_model([100,250,500],[0.5,0.75,0.95],BATCHSIM+"/discrete/",SURR,2)#WORKS
#perform_x_continuous_surrogate_ga([100,250,500],250,2,1,3,1)#WORKS
#perform_x_discrete_surrogate_ga([100,250,500],250,2,1,3,1)#WORKS
#perform_x_continuous_compare_ga([100,250,500],2,2,1,3,2)#WORKS
#perform_x_discrete_compare_ga([100,250,500],2,2,1,3,2)#WORKS
######################################### MEDIUM SCALE TEST #################################################
#perform_x_continuous_simulation_ga(5,20,5,16,100)
#perform_x_discrete_simulation_ga(5,20,5,16,100)
#x_batch_simulations(500,"continuous",100)
#x_batch_simulations(500,"discrete",100)
#train_regression_surrogate_models([250,400,500],[0.5,0.75,0.95],BATCHSIM+"/continuous/",SURR,5)
#train_classifier_surrogate_model([250,400,500],[0.5,0.75,0.95],BATCHSIM+"/discrete/",SURR,5)
#perform_x_continuous_surrogate_ga([250,400,500],1000,20,5,16,100)
#perform_x_discrete_surrogate_ga([250,400,500],1000,20,5,16,100)
#perform_x_continuous_compare_ga([250,400,500],2,20,5,16,100)
#perform_x_discrete_compare_ga([250,400,500],2,20,5,16,100)
######################################### RUN EXPERIMENTS ###################################################
# NOTE: the stages below form a pipeline and must run in this order — the batch
# simulations produce the training data the surrogate models consume, and the
# surrogate GAs load the models trained in the previous stage.
# NOTE(review): the live calls below pass BATCHSIM+"continuous/" (no leading
# slash) while the commented test calls used BATCHSIM+"/continuous/" —
# presumably BATCHSIM is slash-terminated; confirm the paths resolve the same.
###### SIMULATION GA #####
perform_x_continuous_simulation_ga(SIMGA_RUNS,MU,LAMBDA,GA_GENERATIONS,SIMGA_MAXTIME)
perform_x_discrete_simulation_ga(SIMGA_RUNS,MU,LAMBDA,GA_GENERATIONS,SIMGA_MAXTIME)
#
#
####### BATCH SIM FOR SURROGATE DATA #####
x_batch_simulations(MAX_BATCH,"continuous",BATCH_MAXTIME)
x_batch_simulations(MAX_BATCH,"discrete",BATCH_MAXTIME)
#
#
###### TRAIN SURROGATE MODELS #####
train_regression_surrogate_models(SM_DATASIZES,SM_TRAINING,BATCHSIM+"continuous/",SURR,SM_MAXTIME)
train_classifier_surrogate_model(SM_DATASIZES,SM_TRAINING,BATCHSIM+"discrete/",SURR,SM_MAXTIME)
#
#
###### SURROGATE GAs #####
perform_x_continuous_surrogate_ga(SM_DATASIZES,SURRGA_RUNS,MU,LAMBDA,GA_GENERATIONS,SURRGA_MAXTIME)
perform_x_discrete_surrogate_ga(SM_DATASIZES,SURRGA_RUNS,MU,LAMBDA,GA_GENERATIONS,SURRGA_MAXTIME)
#
#
###### SURR VS SIM FITNESS COMPARISON #####
perform_x_continuous_compare_ga(SM_DATASIZES,COMPAREGA_RUNS,MU,LAMBDA,GA_GENERATIONS,COMPAREGA_MAXTIME)
perform_x_discrete_compare_ga(SM_DATASIZES,COMPAREGA_RUNS,MU,LAMBDA,GA_GENERATIONS,COMPAREGA_MAXTIME)
| [
"jcbpyle1@sheffield.ac.uk"
] | jcbpyle1@sheffield.ac.uk |
55cda4355fc80172efff1fa6ae695a9b6a2d1a81 | f7411485d2603aa8c2841f88bf5bfb2e1930951e | /FinalProject/FinalProject/test_gamestate.py | bfbc777db516c00a29ede1736212e0ac86c79762 | [] | no_license | Johnspeanut/Computer_science_fundation_course | 156e03e8cf6fcca4ddcbfaa837b8c55f95083045 | 79a13f3152c7e61d8d6cc10da2213a15c8a364e5 | refs/heads/master | 2023-05-13T01:55:10.171165 | 2021-05-31T07:00:31 | 2021-05-31T07:00:31 | 372,412,223 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 934 | py | from gamestate import GameState
def test_init():
    """A fresh GameState has no selected piece and black moves first."""
    state = GameState()
    assert state.selected is None
    assert state.turn == "black"
def test_create_pieces():
    """The board grid is 8x8 and each side starts with 12 pieces."""
    board = GameState()
    assert len(board.pieces) == 8
    assert len(board.pieces[0]) == 8
    assert board.pieces_left_black == 12
    assert board.pieces_left_red == 12
def test_get_piece():
    """get_piece yields 0 for empty squares and the piece object otherwise."""
    board = GameState()
    assert board.get_piece(0, 4) == 0
    assert board.get_piece(0, 3).color == "black"
    assert board.get_piece(0, 0) == 0
def test_turn_change():
    """turn_change toggles whose move it is.

    Fix: the original asserted turn == "black" both before AND after calling
    turn_change(), so the test passed even if the method did nothing.
    Assumes turn_change toggles black <-> red — confirm against
    gamestate.GameState before merging.
    """
    board = GameState()
    assert board.turn == "black"
    board.turn_change()
    assert board.turn == "red"
assert(piece_board.turn == "black")
def test_remove():
    """Removing a piece leaves its square empty (0)."""
    board = GameState()
    target = board.get_piece(0, 3)
    board.remove([target])
    assert board.get_piece(0, 3) == 0
| [
"pengqiong2015fall@hotmail.com"
] | pengqiong2015fall@hotmail.com |
a23d7d31cbfc3aa13e3386a439560ab81cd7ee62 | 67084751337f327092049a61b6d8c630cd3767de | /genderðnicity/genderðnicity.py | a445edda0389876fea99e843f3c5e2a9369470eb | [
"MIT"
] | permissive | habereet/awesomeScripts | 0f0a33d9a35de8b4449c1263db4407beaf0178fa | 2d77f3619314aa7840fed57e0cf451fe5c1a70a9 | refs/heads/master | 2023-01-05T01:29:30.063154 | 2020-10-31T00:36:16 | 2020-10-31T00:36:16 | 303,189,908 | 0 | 0 | MIT | 2020-10-11T18:48:07 | 2020-10-11T18:48:06 | null | UTF-8 | Python | false | false | 367 | py | import requests
def main(url):
    """GET the given URL and return the decoded JSON payload as a dict."""
    response = requests.get(url)
    return response.json()
def print_data(url):
    """Fetch the API response for url and print every field as "key : value"."""
    for key, value in main(url).items():
        print(f"{key} : {value}")
if __name__ == "__main__":
    # Prompt for a name and query the diversitydata.io API with it,
    # printing each returned field.
    full_name = input("Please Enter your full name :")
    url = f"https://api.diversitydata.io/?fullname={full_name}"
    print_data(url)
| [
"noreply@github.com"
] | noreply@github.com |
bbb437e84e29a7a57b6e783426de789e1c3f6ad7 | 4cb288c8b3274b9dc7959ca3bc2d5e4b3bf04618 | /python/ccxt/async_support/bitopro.py | 611f663bd07e3270ce070643f4ab02e0aff6649b | [
"MIT"
] | permissive | yijixiuxin/ccxt | 7537f73148472efc912f3997040e373cabf2ae0c | d71cd424b9d19b82f2234d8be55dacf311e01a31 | refs/heads/master | 2022-10-01T18:39:29.356725 | 2022-09-20T21:28:02 | 2022-09-20T21:28:02 | 168,174,277 | 0 | 0 | MIT | 2019-01-29T15:05:10 | 2019-01-29T15:05:10 | null | UTF-8 | Python | false | false | 62,980 | py | # -*- coding: utf-8 -*-
# PLEASE DO NOT EDIT THIS FILE, IT IS GENERATED AND WILL BE OVERWRITTEN:
# https://github.com/ccxt/ccxt/blob/master/CONTRIBUTING.md#how-to-contribute-code
from ccxt.async_support.base.exchange import Exchange
import hashlib
import math
from ccxt.base.errors import ExchangeError
from ccxt.base.errors import AuthenticationError
from ccxt.base.errors import ArgumentsRequired
from ccxt.base.errors import BadRequest
from ccxt.base.errors import InsufficientFunds
from ccxt.base.errors import InvalidOrder
from ccxt.base.decimal_to_precision import TICK_SIZE
from ccxt.base.precise import Precise
class bitopro(Exchange):
    def describe(self):
        """Exchange metadata consumed by the ccxt base class: identity,
        capability flags, endpoint map, fee schedule, error mapping and options."""
        return self.deep_extend(super(bitopro, self).describe(), {
            'id': 'bitopro',
            'name': 'BitoPro',
            'countries': ['TW'],  # Taiwan
            'version': 'v3',
            'rateLimit': 100,
            'pro': True,
            # capability flags: which unified ccxt methods this exchange implements
            'has': {
                'CORS': None,
                'spot': True,
                'margin': False,
                'swap': False,
                'future': False,
                'option': False,
                'cancelAllOrders': True,
                'cancelOrder': True,
                'cancelOrders': True,
                'createOrder': True,
                'editOrder': False,
                'fetchBalance': True,
                'fetchBorrowRate': False,
                'fetchBorrowRateHistories': False,
                'fetchBorrowRateHistory': False,
                'fetchBorrowRates': False,
                'fetchClosedOrders': True,
                'fetchCurrencies': True,
                'fetchDepositAddress': False,
                'fetchDeposits': True,
                'fetchFundingHistory': False,
                'fetchFundingRate': False,
                'fetchFundingRateHistory': False,
                'fetchFundingRates': False,
                'fetchIndexOHLCV': False,
                'fetchMarginMode': False,
                'fetchMarkets': True,
                'fetchMarkOHLCV': False,
                'fetchMyTrades': True,
                'fetchOHLCV': True,
                'fetchOpenInterestHistory': False,
                'fetchOpenOrders': True,
                'fetchOrder': True,
                'fetchOrderBook': True,
                'fetchOrders': False,
                'fetchOrderTrades': False,
                'fetchPositionMode': False,
                'fetchPositions': False,
                'fetchPremiumIndexOHLCV': False,
                'fetchTicker': True,
                'fetchTickers': True,
                'fetchTime': False,
                'fetchTrades': True,
                'fetchTradingFee': False,
                'fetchTradingFees': True,
                'fetchTransactionFees': False,
                'fetchTransactions': False,
                'fetchTransfer': False,
                'fetchTransfers': False,
                'fetchWithdrawal': True,
                'fetchWithdrawals': True,
                'setLeverage': False,
                'setMarginMode': False,
                'transfer': False,
                'withdraw': True,
            },
            # unified timeframe -> exchange "resolution" parameter
            'timeframes': {
                '1m': '1m',
                '5m': '5m',
                '15m': '15m',
                '30m': '30m',
                '1h': '1h',
                '3h': '3h',
                '6h': '6h',
                '12h': '12h',
                '1d': '1d',
                '1w': '1w',
                '1M': '1M',
            },
            'urls': {
                'logo': 'https://user-images.githubusercontent.com/1294454/158227251-3a92a220-9222-453c-9277-977c6677fe71.jpg',
                'api': {
                    'rest': 'https://api.bitopro.com/v3',
                },
                'www': 'https://www.bitopro.com',
                'doc': [
                    'https://github.com/bitoex/bitopro-offical-api-docs/blob/master/v3-1/rest-1/rest.md',
                ],
                'fees': 'https://www.bitopro.com/fees',
            },
            'requiredCredentials': {
                'apiKey': True,
                'secret': True,
            },
            # implicit API: each path becomes a camelCased method, e.g.
            # publicGetOrderBookPair, privatePostOrdersPair
            'api': {
                'public': {
                    'get': [
                        'order-book/{pair}',
                        'tickers',
                        'tickers/{pair}',
                        'trades/{pair}',
                        'provisioning/currencies',
                        'provisioning/trading-pairs',
                        'provisioning/limitations-and-fees',
                        'trading-history/{pair}',
                    ],
                },
                'private': {
                    'get': [
                        'accounts/balance',
                        'orders/history',
                        'orders/all/{pair}',
                        'orders/trades/{pair}',
                        'orders/{pair}/{orderId}',
                        'wallet/withdraw/{currency}/{serial}',
                        'wallet/withdraw/{currency}/id/{id}',
                        'wallet/depositHistory/{currency}',
                        'wallet/withdrawHistory/{currency}',
                    ],
                    'post': [
                        'orders/{pair}',
                        'orders/batch',
                        'wallet/withdraw/{currency}',
                    ],
                    'put': [
                        'orders',
                    ],
                    'delete': [
                        'orders/{pair}/{id}',
                        'orders/all',
                        'orders/{pair}',
                    ],
                },
            },
            # volume-tiered maker/taker fee schedule (tiers keyed by 30-day TWD volume)
            'fees': {
                'trading': {
                    'tierBased': True,
                    'percentage': True,
                    'maker': self.parse_number('0.001'),
                    'taker': self.parse_number('0.002'),
                    'tiers': {
                        'taker': [
                            [self.parse_number('0'), self.parse_number('0.002')],
                            [self.parse_number('3000000'), self.parse_number('0.00194')],
                            [self.parse_number('5000000'), self.parse_number('0.0015')],
                            [self.parse_number('30000000'), self.parse_number('0.0014')],
                            [self.parse_number('300000000'), self.parse_number('0.0013')],
                            [self.parse_number('550000000'), self.parse_number('0.0012')],
                            [self.parse_number('1300000000'), self.parse_number('0.0011')],
                        ],
                        'maker': [
                            [self.parse_number('0'), self.parse_number('0.001')],
                            [self.parse_number('3000000'), self.parse_number('0.00097')],
                            [self.parse_number('5000000'), self.parse_number('0.0007')],
                            [self.parse_number('30000000'), self.parse_number('0.0006')],
                            [self.parse_number('300000000'), self.parse_number('0.0005')],
                            [self.parse_number('550000000'), self.parse_number('0.0004')],
                            [self.parse_number('1300000000'), self.parse_number('0.0003')],
                        ],
                    },
                },
            },
            'options': {
                # unified network code -> exchange network identifier
                'networks': {
                    'ERC20': 'ERC20',
                    'ETH': 'ERC20',
                    'TRX': 'TRX',
                    'TRC20': 'TRX',
                },
            },
            'precisionMode': TICK_SIZE,
            # exchange error strings mapped onto unified ccxt exception classes
            'exceptions': {
                'exact': {
                    'Unsupported currency.': BadRequest,  # {"error":"Unsupported currency."}
                    'Unsupported order type': BadRequest,  # {"error":"Unsupported order type"}
                    'Invalid body': BadRequest,  # {"error":"Invalid body"}
                    'Invalid Signature': AuthenticationError,  # {"error":"Invalid Signature"}
                    'Address not in whitelist.': BadRequest,
                },
                'broad': {
                    'Invalid amount': InvalidOrder,  # {"error":"Invalid amount 0.0000000001, decimal limit is 8."}
                    'Balance for ': InsufficientFunds,  # {"error":"Balance for eth not enough, only has 0, but ordered 0.01."}
                    'Invalid ': BadRequest,  # {"error":"Invalid price -1."}
                    'Wrong parameter': BadRequest,  # {"error":"Wrong parameter: from"}
                },
            },
            'commonCurrencies': {
            },
        })
async def fetch_currencies(self, params={}):
"""
fetches all available currencies on an exchange
:param dict params: extra parameters specific to the bitopro api endpoint
:returns dict: an associative dictionary of currencies
"""
response = await self.publicGetProvisioningCurrencies(params)
currencies = self.safe_value(response, 'data', [])
#
# {
# "data":[
# {
# "currency":"eth",
# "withdrawFee":"0.007",
# "minWithdraw":"0.001",
# "maxWithdraw":"1000",
# "maxDailyWithdraw":"2000",
# "withdraw":true,
# "deposit":true,
# "depositConfirmation":"12"
# }
# ]
# }
#
result = {}
for i in range(0, len(currencies)):
currency = currencies[i]
currencyId = self.safe_string(currency, 'currency')
code = self.safe_currency_code(currencyId)
deposit = self.safe_value(currency, 'deposit')
withdraw = self.safe_value(currency, 'withdraw')
fee = self.safe_number(currency, 'withdrawFee')
withdrawMin = self.safe_number(currency, 'minWithdraw')
withdrawMax = self.safe_number(currency, 'maxWithdraw')
limits = {
'withdraw': {
'min': withdrawMin,
'max': withdrawMax,
},
'amount': {
'min': None,
'max': None,
},
}
result[code] = {
'id': currencyId,
'code': code,
'info': currency,
'type': None,
'name': None,
'active': deposit and withdraw,
'deposit': deposit,
'withdraw': withdraw,
'fee': fee,
'precision': None,
'limits': limits,
}
return result
    async def fetch_markets(self, params={}):
        """
        retrieves data on all markets for bitopro
        :param dict params: extra parameters specific to the exchange api endpoint
        :returns [dict]: an array of objects representing market data
        """
        response = await self.publicGetProvisioningTradingPairs()
        markets = self.safe_value(response, 'data', [])
        #
        # {
        #     "data":[
        #         {
        #             "pair":"shib_twd",
        #             "base":"shib",
        #             "quote":"twd",
        #             "basePrecision":"8",
        #             "quotePrecision":"6",
        #             "minLimitBaseAmount":"100000",
        #             "maxLimitBaseAmount":"5500000000",
        #             "minMarketBuyQuoteAmount":"1000",
        #             "orderOpenLimit":"200",
        #             "maintain":false,
        #             "orderBookQuotePrecision":"6",
        #             "orderBookQuoteScaleLevel":"5"
        #         }
        #     ]
        # }
        #
        result = []
        for i in range(0, len(markets)):
            market = markets[i]
            # a market under maintenance is reported inactive
            active = not self.safe_value(market, 'maintain')
            id = self.safe_string(market, 'pair')
            uppercaseId = id.upper()
            baseId = self.safe_string(market, 'base')
            quoteId = self.safe_string(market, 'quote')
            base = self.safe_currency_code(baseId)
            quote = self.safe_currency_code(quoteId)
            symbol = base + '/' + quote
            limits = {
                'amount': {
                    'min': self.safe_number(market, 'minLimitBaseAmount'),
                    'max': self.safe_number(market, 'maxLimitBaseAmount'),
                },
                'price': {
                    'min': None,
                    'max': None,
                },
                'cost': {
                    'min': None,
                    'max': None,
                },
                'leverage': {
                    'min': None,
                    'max': None,
                },
            }
            result.append({
                'id': id,
                'uppercaseId': uppercaseId,
                'symbol': symbol,
                'base': base,
                'quote': quote,
                # NOTE(review): baseId/quoteId are set to the unified codes
                # (uppercase), not the raw exchange ids parsed above —
                # presumably intentional since BitoPro ids are just the
                # lowercased codes; confirm before changing.
                'baseId': base,
                'quoteId': quote,
                'settle': None,
                'settleId': None,
                'type': 'spot',
                'spot': True,
                'margin': False,
                'swap': False,
                'future': False,
                'option': False,
                'derivative': False,
                'contract': False,
                'linear': None,
                'inverse': None,
                'contractSize': None,
                'expiry': None,
                'expiryDatetime': None,
                'strike': None,
                'optionType': None,
                'limits': limits,
                'precision': {
                    # exchange reports precision as a number of decimal digits;
                    # convert to a tick size for TICK_SIZE precision mode
                    'price': self.parse_number(self.parse_precision(self.safe_string(market, 'quotePrecision'))),
                    'amount': self.parse_number(self.parse_precision(self.safe_string(market, 'basePrecision'))),
                },
                'active': active,
                'info': market,
            })
        return result
def parse_ticker(self, ticker, market=None):
#
# {
# "pair":"btc_twd",
# "lastPrice":"1182449.00000000",
# "isBuyer":false,
# "priceChange24hr":"-1.99",
# "volume24hr":"9.13089740",
# "high24hr":"1226097.00000000",
# "low24hr":"1181000.00000000"
# }
#
marketId = self.safe_string(ticker, 'pair')
market = self.safe_market(marketId, market)
symbol = self.safe_string(market, 'symbol')
return self.safe_ticker({
'symbol': symbol,
'timestamp': None,
'datetime': None,
'high': self.safe_string(ticker, 'high24hr'),
'low': self.safe_string(ticker, 'low24hr'),
'bid': None,
'bidVolume': None,
'ask': None,
'askVolume': None,
'vwap': None,
'open': None,
'close': self.safe_string(ticker, 'lastPrice'),
'last': self.safe_string(ticker, 'lastPrice'),
'previousClose': None,
'change': None,
'percentage': self.safe_string(ticker, 'priceChange24hr'),
'average': None,
'baseVolume': self.safe_string(ticker, 'volume24hr'),
'quoteVolume': None,
'info': ticker,
}, market)
async def fetch_ticker(self, symbol, params={}):
"""
fetches a price ticker, a statistical calculation with the information calculated over the past 24 hours for a specific market
:param str symbol: unified symbol of the market to fetch the ticker for
:param dict params: extra parameters specific to the bitopro api endpoint
:returns dict: a `ticker structure <https://docs.ccxt.com/en/latest/manual.html#ticker-structure>`
"""
await self.load_markets()
market = self.market(symbol)
request = {
'pair': market['id'],
}
response = await self.publicGetTickersPair(self.extend(request, params))
ticker = self.safe_value(response, 'data', {})
#
# {
# "data":{
# "pair":"btc_twd",
# "lastPrice":"1182449.00000000",
# "isBuyer":false,
# "priceChange24hr":"-1.99",
# "volume24hr":"9.13089740",
# "high24hr":"1226097.00000000",
# "low24hr":"1181000.00000000"
# }
# }
#
return self.parse_ticker(ticker, market)
async def fetch_tickers(self, symbols=None, params={}):
"""
fetches price tickers for multiple markets, statistical calculations with the information calculated over the past 24 hours each market
:param [str]|None symbols: unified symbols of the markets to fetch the ticker for, all market tickers are returned if not assigned
:param dict params: extra parameters specific to the bitopro api endpoint
:returns dict: an array of `ticker structures <https://docs.ccxt.com/en/latest/manual.html#ticker-structure>`
"""
await self.load_markets()
response = await self.publicGetTickers()
tickers = self.safe_value(response, 'data', [])
#
# {
# "data":[
# {
# "pair":"xrp_twd",
# "lastPrice":"21.26110000",
# "isBuyer":false,
# "priceChange24hr":"-6.53",
# "volume24hr":"102846.47084802",
# "high24hr":"23.24460000",
# "low24hr":"21.13730000"
# }
# ]
# }
#
return self.parse_tickers(tickers, symbols)
async def fetch_order_book(self, symbol, limit=None, params={}):
"""
fetches information on open orders with bid(buy) and ask(sell) prices, volumes and other data
:param str symbol: unified symbol of the market to fetch the order book for
:param int|None limit: the maximum amount of order book entries to return
:param dict params: extra parameters specific to the bitopro api endpoint
:returns dict: A dictionary of `order book structures <https://docs.ccxt.com/en/latest/manual.html#order-book-structure>` indexed by market symbols
"""
await self.load_markets()
market = self.market(symbol)
request = {
'pair': market['id'],
}
if limit is not None:
request['limit'] = limit
response = await self.publicGetOrderBookPair(self.extend(request, params))
#
# {
# "bids":[
# {
# "price":"1175271",
# "amount":"0.00022804",
# "count":1,
# "total":"0.00022804"
# }
# ],
# "asks":[
# {
# "price":"1176906",
# "amount":"0.0496",
# "count":1,
# "total":"0.0496"
# }
# ]
# }
#
return self.parse_order_book(response, market['symbol'], None, 'bids', 'asks', 'price', 'amount')
    def parse_trade(self, trade, market):
        """Parse a public (fetchTrades) or private (fetchMyTrades) trade payload
        into a unified trade structure."""
        #
        # fetchTrades
        # {
        #     "timestamp":1644651458,
        #     "price":"1180785.00000000",
        #     "amount":"0.00020000",
        #     "isBuyer":false
        # }
        #
        # fetchMyTrades
        # {
        #     "tradeId":"5685030251",
        #     "orderId":"9669168142",
        #     "price":"11821.8",
        #     "action":"SELL",
        #     "baseAmount":"0.01",
        #     "quoteAmount":"118.218",
        #     "fee":"0.236436",
        #     "feeSymbol":"BNB",
        #     "isTaker":true,
        #     "timestamp":1644905714862,
        #     "createdTimestamp":1644905714862
        # }
        #
        id = self.safe_string(trade, 'tradeId')
        orderId = self.safe_string(trade, 'orderId')
        timestamp = None
        # public trades (no tradeId) report the timestamp in seconds,
        # private trades report it in milliseconds
        if id is None:
            timestamp = self.safe_timestamp(trade, 'timestamp')
        else:
            timestamp = self.safe_integer(trade, 'timestamp')
        marketId = self.safe_string(trade, 'pair')
        market = self.safe_market(marketId, market)
        symbol = self.safe_string(market, 'symbol')
        price = self.safe_string(trade, 'price')
        type = self.safe_string_lower(trade, 'type')
        side = self.safe_string_lower(trade, 'action')
        # public trades have no "action" field; derive the side from "isBuyer"
        if side is None:
            isBuyer = self.safe_value(trade, 'isBuyer')
            if isBuyer:
                side = 'buy'
            else:
                side = 'sell'
        # public trades use "amount", private trades use "baseAmount"
        amount = self.safe_string(trade, 'amount')
        if amount is None:
            amount = self.safe_string(trade, 'baseAmount')
        fee = None
        feeAmount = self.safe_string(trade, 'fee')
        feeSymbol = self.safe_currency_code(self.safe_string(trade, 'feeSymbol'))
        if feeAmount is not None:
            fee = {
                'cost': feeAmount,
                'currency': feeSymbol,
                'rate': None,
            }
        # "isTaker" only appears on private trades; leave takerOrMaker
        # undefined for public ones
        isTaker = self.safe_value(trade, 'isTaker')
        takerOrMaker = None
        if isTaker is not None:
            if isTaker:
                takerOrMaker = 'taker'
            else:
                takerOrMaker = 'maker'
        return self.safe_trade({
            'id': id,
            'info': trade,
            'order': orderId,
            'timestamp': timestamp,
            'datetime': self.iso8601(timestamp),
            'symbol': symbol,
            'takerOrMaker': takerOrMaker,
            'type': type,
            'side': side,
            'price': price,
            'amount': amount,
            'cost': None,
            'fee': fee,
        }, market)
async def fetch_trades(self, symbol, since=None, limit=None, params={}):
"""
get the list of most recent trades for a particular symbol
:param str symbol: unified symbol of the market to fetch trades for
:param int|None since: timestamp in ms of the earliest trade to fetch
:param int|None limit: the maximum amount of trades to fetch
:param dict params: extra parameters specific to the bitopro api endpoint
:returns [dict]: a list of `trade structures <https://docs.ccxt.com/en/latest/manual.html?#public-trades>`
"""
await self.load_markets()
market = self.market(symbol)
request = {
'pair': market['id'],
}
response = await self.publicGetTradesPair(self.extend(request, params))
trades = self.safe_value(response, 'data', [])
#
# {
# "data":[
# {
# "timestamp":1644651458,
# "price":"1180785.00000000",
# "amount":"0.00020000",
# "isBuyer":false
# }
# ]
# }
#
return self.parse_trades(trades, market, since, limit)
async def fetch_trading_fees(self, params={}):
"""
fetch the trading fees for multiple markets
:param dict params: extra parameters specific to the bitopro api endpoint
:returns dict: a dictionary of `fee structures <https://docs.ccxt.com/en/latest/manual.html#fee-structure>` indexed by market symbols
"""
await self.load_markets()
response = await self.publicGetProvisioningLimitationsAndFees(params)
tradingFeeRate = self.safe_value(response, 'tradingFeeRate', {})
first = self.safe_value(tradingFeeRate, 0)
#
# {
# "tradingFeeRate":[
# {
# "rank":0,
# "twdVolumeSymbol":"\u003c",
# "twdVolume":"3000000",
# "bitoAmountSymbol":"\u003c",
# "bitoAmount":"7500",
# "makerFee":"0.001",
# "takerFee":"0.002",
# "makerBitoFee":"0.0008",
# "takerBitoFee":"0.0016"
# }
# ],
# "orderFeesAndLimitations":[
# {
# "pair":"BTC/TWD",
# "minimumOrderAmount":"0.0001",
# "minimumOrderAmountBase":"BTC",
# "minimumOrderNumberOfDigits":"0"
# }
# ],
# "restrictionsOfWithdrawalFees":[
# {
# "currency":"TWD",
# "fee":"15",
# "minimumTradingAmount":"100",
# "maximumTradingAmount":"1000000",
# "dailyCumulativeMaximumAmount":"2000000",
# "remarks":"",
# "protocol":""
# }
# ],
# "cryptocurrencyDepositFeeAndConfirmation":[
# {
# "currency":"TWD",
# "generalDepositFees":"0",
# "blockchainConfirmationRequired":""
# }
# ],
# "ttCheckFeesAndLimitationsLevel1":[
# {
# "currency":"TWD",
# "redeemDailyCumulativeMaximumAmount":"",
# "generateMinimumTradingAmount":"",
# "generateMaximumTradingAmount":"",
# "generateDailyCumulativeMaximumAmount":""
# }
# ],
# "ttCheckFeesAndLimitationsLevel2":[
# {
# "currency":"TWD",
# "redeemDailyCumulativeMaximumAmount":"20000000",
# "generateMinimumTradingAmount":"30",
# "generateMaximumTradingAmount":"10000000",
# "generateDailyCumulativeMaximumAmount":"10000000"
# }
# ]
# }
#
result = {}
maker = self.safe_number(first, 'makerFee')
taker = self.safe_number(first, 'takerFee')
for i in range(0, len(self.symbols)):
symbol = self.symbols[i]
result[symbol] = {
'info': first,
'symbol': symbol,
'maker': maker,
'taker': taker,
'percentage': True,
'tierBased': True,
}
return result
def parse_ohlcv(self, ohlcv, market=None, timeframe='1m', since=None, limit=None):
return [
self.safe_integer(ohlcv, 'timestamp'),
self.safe_number(ohlcv, 'open'),
self.safe_number(ohlcv, 'high'),
self.safe_number(ohlcv, 'low'),
self.safe_number(ohlcv, 'close'),
self.safe_number(ohlcv, 'volume'),
]
    async def fetch_ohlcv(self, symbol, timeframe='5m', since=None, limit=None, params={}):
        """
        fetches historical candlestick data containing the open, high, low, and close price, and the volume of a market
        :param str symbol: unified symbol of the market to fetch OHLCV data for
        :param str timeframe: the length of time each candle represents
        :param int|None since: timestamp in ms of the earliest candle to fetch
        :param int|None limit: the maximum amount of candles to fetch
        :param dict params: extra parameters specific to the bitopro api endpoint
        :returns [[int]]: A list of candles ordered as timestamp, open, high, low, close, volume
        """
        await self.load_markets()
        market = self.market(symbol)
        resolution = self.timeframes[timeframe]
        request = {
            'pair': market['id'],
            'resolution': resolution,
        }
        # we need to have a limit argument because "to" and "from" are required
        if limit is None:
            limit = 500
        timeframeInSeconds = self.parse_timeframe(timeframe)
        alignedSince = None
        if since is None:
            # no start given: request the most recent `limit` candles
            request['to'] = self.seconds()
            request['from'] = request['to'] - (limit * timeframeInSeconds)
        else:
            # align `since` down to a candle boundary so the backfill in
            # insert_missing_candles can step in exact timeframe increments
            timeframeInMilliseconds = timeframeInSeconds * 1000
            alignedSince = int(math.floor(since / timeframeInMilliseconds)) * timeframeInMilliseconds
            request['from'] = int(math.floor(since / 1000))
            request['to'] = self.sum(request['from'], limit * timeframeInSeconds)
        response = await self.publicGetTradingHistoryPair(self.extend(request, params))
        data = self.safe_value(response, 'data', [])
        #
        # {
        #     "data":[
        #         {
        #             "timestamp":1644581100000,
        #             "open":"1214737",
        #             "high":"1215110",
        #             "low":"1214737",
        #             "close":"1215110",
        #             "volume":"0.08423959"
        #         }
        #     ]
        # }
        #
        # the exchange omits zero-volume candles, so backfill the gaps
        sparse = self.parse_ohlcvs(data, market, timeframe, since, limit)
        return self.insert_missing_candles(sparse, timeframeInSeconds, alignedSince, limit)
    def insert_missing_candles(self, candles, distance, since, limit):
        """Fill timestamp gaps in a sparse candle list with synthetic
        zero-volume candles.

        :param [[int]] candles: parsed OHLCV rows, ascending by timestamp
        :param int distance: timeframe length in seconds between candles
        :param int|None since: aligned start timestamp in ms, or None to start
            from the first candle
        :param int limit: maximum number of rows to return
        """
        # the exchange doesn't send zero volume candles so we emulate them instead
        # otherwise sending a limit arg leads to unexpected results
        length = len(candles)
        if length == 0:
            return candles
        result = []
        # copyFrom always holds the most recent emitted row; its close price
        # seeds any synthetic candle inserted for a gap
        copyFrom = candles[0]
        timestamp = None
        if since is None:
            timestamp = copyFrom[0]
        else:
            timestamp = since
        i = 0
        candleLength = len(candles)
        resultLength = 0
        # walk the expected timestamp grid; consume a real candle when it
        # matches, otherwise synthesize a flat candle at the previous close
        while((resultLength < limit) and (i < candleLength)):
            candle = candles[i]
            if candle[0] == timestamp:
                result.append(candle)
                i = self.sum(i, 1)
            else:
                copy = self.array_concat([], copyFrom)
                copy[0] = timestamp
                # set open, high, low to close
                copy[1] = copy[4]
                copy[2] = copy[4]
                copy[3] = copy[4]
                copy[5] = self.parse_number('0')
                result.append(copy)
            timestamp = self.sum(timestamp, distance * 1000)
            resultLength = len(result)
            copyFrom = result[resultLength - 1]
        return result
def parse_balance(self, response):
#
# [{
# "currency":"twd",
# "amount":"0",
# "available":"0",
# "stake":"0",
# "tradable":true
# }]
#
result = {
'info': response,
}
for i in range(0, len(response)):
balance = response[i]
currencyId = self.safe_string(balance, 'currency')
code = self.safe_currency_code(currencyId)
amount = self.safe_string(balance, 'amount')
available = self.safe_string(balance, 'available')
account = {
'free': available,
'total': amount,
}
result[code] = account
return self.safe_balance(result)
async def fetch_balance(self, params={}):
"""
query for balance and get the amount of funds available for trading or funds locked in orders
:param dict params: extra parameters specific to the bitopro api endpoint
:returns dict: a `balance structure <https://docs.ccxt.com/en/latest/manual.html?#balance-structure>`
"""
await self.load_markets()
response = await self.privateGetAccountsBalance(params)
balances = self.safe_value(response, 'data', [])
#
# {
# "data":[
# {
# "currency":"twd",
# "amount":"0",
# "available":"0",
# "stake":"0",
# "tradable":true
# }
# ]
# }
#
return self.parse_balance(balances)
def parse_order_status(self, status):
statuses = {
'-1': 'open',
'0': 'open',
'1': 'open',
'2': 'closed',
'3': 'closed',
'4': 'canceled',
}
return self.safe_string(statuses, status, None)
def parse_order(self, order, market=None):
#
# createOrder
# {
# orderId: '2220595581',
# timestamp: '1644896744886',
# action: 'SELL',
# amount: '0.01',
# price: '15000',
# timeInForce: 'GTC'
# }
#
# fetchOrder
# {
# "id":"8777138788",
# "pair":"bnb_twd",
# "price":"16000",
# "avgExecutionPrice":"0",
# "action":"SELL",
# "type":"LIMIT",
# "timestamp":1644899002598,
# "status":4,
# "originalAmount":"0.01",
# "remainingAmount":"0.01",
# "executedAmount":"0",
# "fee":"0",
# "feeSymbol":"twd",
# "bitoFee":"0",
# "total":"0",
# "seq":"BNBTWD548774666",
# "timeInForce":"GTC",
# "createdTimestamp":1644898944074,
# "updatedTimestamp":1644899002598
# }
#
id = self.safe_string_2(order, 'id', 'orderId')
timestamp = self.safe_integer_2(order, 'timestamp', 'createdTimestamp')
side = self.safe_string(order, 'action')
side = side.lower()
amount = self.safe_string_2(order, 'amount', 'originalAmount')
price = self.safe_string(order, 'price')
marketId = self.safe_string(order, 'pair')
market = self.safe_market(marketId, market, '_')
symbol = self.safe_string(market, 'symbol')
orderStatus = self.safe_string(order, 'status')
status = self.parse_order_status(orderStatus)
type = self.safe_string_lower(order, 'type')
average = self.safe_string(order, 'avgExecutionPrice')
filled = self.safe_string(order, 'executedAmount')
remaining = self.safe_string(order, 'remainingAmount')
timeInForce = self.safe_string(order, 'timeInForce')
fee = None
feeAmount = self.safe_string(order, 'fee')
feeSymbol = self.safe_currency_code(self.safe_string(order, 'feeSymbol'))
if Precise.string_gt(feeAmount, '0'):
fee = {
'currency': feeSymbol,
'cost': feeAmount,
}
return self.safe_order({
'id': id,
'clientOrderId': None,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'lastTradeTimestamp': self.safe_integer(order, 'updatedTimestamp'),
'symbol': symbol,
'type': type,
'timeInForce': timeInForce,
'postOnly': None,
'side': side,
'price': price,
'stopPrice': None,
'amount': amount,
'cost': None,
'average': average,
'filled': filled,
'remaining': remaining,
'status': status,
'fee': fee,
'trades': None,
'info': order,
}, market)
async def create_order(self, symbol, type, side, amount, price=None, params={}):
"""
create a trade order
:param str symbol: unified symbol of the market to create an order in
:param str type: 'market' or 'limit'
:param str side: 'buy' or 'sell'
:param float amount: how much of currency you want to trade in units of base currency
:param float|None price: the price at which the order is to be fullfilled, in units of the quote currency, ignored in market orders
:param dict params: extra parameters specific to the bitopro api endpoint
:returns dict: an `order structure <https://docs.ccxt.com/en/latest/manual.html#order-structure>`
"""
await self.load_markets()
market = self.market(symbol)
request = {
'type': type,
'pair': market['id'],
'action': side,
'amount': self.amount_to_precision(symbol, amount),
'timestamp': self.milliseconds(),
}
orderType = type.upper()
if orderType == 'LIMIT':
request['price'] = self.price_to_precision(symbol, price)
if orderType == 'STOP_LIMIT':
request['price'] = self.price_to_precision(symbol, price)
stopPrice = self.safe_value_2(params, 'triggerPrice', 'stopPrice')
params = self.omit(params, ['triggerPrice', 'stopPrice'])
if stopPrice is None:
raise InvalidOrder(self.id + ' createOrder() requires a stopPrice parameter for ' + orderType + ' orders')
else:
request['stopPrice'] = self.price_to_precision(symbol, stopPrice)
condition = self.safe_string(params, 'condition')
if condition is None:
raise InvalidOrder(self.id + ' createOrder() requires a condition parameter for ' + orderType + ' orders')
else:
request['condition'] = condition
response = await self.privatePostOrdersPair(self.extend(request, params), params)
#
# {
# orderId: '2220595581',
# timestamp: '1644896744886',
# action: 'SELL',
# amount: '0.01',
# price: '15000',
# timeInForce: 'GTC'
# }
#
return self.parse_order(response, market)
async def cancel_order(self, id, symbol=None, params={}):
"""
cancels an open order
:param str id: order id
:param str symbol: unified symbol of the market the order was made in
:param dict params: extra parameters specific to the bitopro api endpoint
:returns dict: An `order structure <https://docs.ccxt.com/en/latest/manual.html#order-structure>`
"""
if symbol is None:
raise ArgumentsRequired(self.id + ' cancelOrder() requires the symbol argument')
await self.load_markets()
market = self.market(symbol)
request = {
'id': id,
'pair': market['id'],
}
response = await self.privateDeleteOrdersPairId(self.extend(request, params))
#
# {
# "orderId":"8777138788",
# "action":"SELL",
# "timestamp":1644899002465,
# "price":"16000",
# "amount":"0.01"
# }
#
return self.parse_order(response, market)
async def cancel_orders(self, ids, symbol=None, params={}):
"""
cancel multiple orders
:param [str] ids: order ids
:param str symbol: unified market symbol
:param dict params: extra parameters specific to the bitopro api endpoint
:returns dict: an list of `order structures <https://docs.ccxt.com/en/latest/manual.html#order-structure>`
"""
if symbol is None:
raise ArgumentsRequired(self.id + ' cancelOrders() requires a symbol argument')
await self.load_markets()
market = self.market(symbol)
id = market['uppercaseId']
request = {}
request[id] = ids
response = await self.privatePutOrders(self.extend(request, params))
#
# {
# "data":{
# "BNB_TWD":[
# "5236347105",
# "359488711"
# ]
# }
# }
#
return response
async def cancel_all_orders(self, symbol=None, params={}):
"""
cancel all open orders
:param str|None symbol: unified market symbol, only orders in the market of self symbol are cancelled when symbol is not None
:param dict params: extra parameters specific to the bitopro api endpoint
:returns [dict]: a list of `order structures <https://docs.ccxt.com/en/latest/manual.html#order-structure>`
"""
await self.load_markets()
request = {
# 'pair': market['id'], # optional
}
# privateDeleteOrdersAll or privateDeleteOrdersPair
method = self.safe_string(self.options, 'privateDeleteOrdersPair', 'privateDeleteOrdersAll')
if symbol is not None:
market = self.market(symbol)
request['pair'] = market['id']
method = 'privateDeleteOrdersPair'
response = await getattr(self, method)(self.extend(request, params))
result = self.safe_value(response, 'data', {})
#
# {
# "data":{
# "BNB_TWD":[
# "9515988421",
# "4639130027"
# ]
# }
# }
#
return result
async def fetch_order(self, id, symbol=None, params={}):
"""
fetches information on an order made by the user
:param str symbol: unified symbol of the market the order was made in
:param dict params: extra parameters specific to the bitopro api endpoint
:returns dict: An `order structure <https://docs.ccxt.com/en/latest/manual.html#order-structure>`
"""
if symbol is None:
raise ArgumentsRequired(self.id + ' fetchOrder() requires the symbol argument')
await self.load_markets()
market = self.market(symbol)
request = {
'orderId': id,
'pair': market['id'],
}
response = await self.privateGetOrdersPairOrderId(self.extend(request, params))
#
# {
# "id":"8777138788",
# "pair":"bnb_twd",
# "price":"16000",
# "avgExecutionPrice":"0",
# "action":"SELL",
# "type":"LIMIT",
# "timestamp":1644899002598,
# "status":4,
# "originalAmount":"0.01",
# "remainingAmount":"0.01",
# "executedAmount":"0",
# "fee":"0",
# "feeSymbol":"twd",
# "bitoFee":"0",
# "total":"0",
# "seq":"BNBTWD548774666",
# "timeInForce":"GTC",
# "createdTimestamp":1644898944074,
# "updatedTimestamp":1644899002598
# }
#
return self.parse_order(response, market)
async def fetch_orders(self, symbol=None, since=None, limit=None, params={}):
"""
fetches information on multiple orders made by the user
:param str symbol: unified market symbol of the market orders were made in
:param int|None since: the earliest time in ms to fetch orders for
:param int|None limit: the maximum number of orde structures to retrieve
:param dict params: extra parameters specific to the bitopro api endpoint
:returns [dict]: a list of `order structures <https://docs.ccxt.com/en/latest/manual.html#order-structure>`
"""
if symbol is None:
raise ArgumentsRequired(self.id + ' fetchOrders() requires the symbol argument')
await self.load_markets()
market = self.market(symbol)
request = {
'pair': market['id'],
# 'startTimestamp': 0,
# 'endTimestamp': 0,
# 'statusKind': '',
# 'orderId': '',
}
if since is not None:
request['startTimestamp'] = since
if limit is not None:
request['limit'] = limit
response = await self.privateGetOrdersAllPair(self.extend(request, params), params)
orders = self.safe_value(response, 'data')
if orders is None:
orders = []
#
# {
# "data":[
# {
# "id":"2220595581",
# "pair":"bnb_twd",
# "price":"15000",
# "avgExecutionPrice":"0",
# "action":"SELL",
# "type":"LIMIT",
# "createdTimestamp":1644896744886,
# "updatedTimestamp":1644898706236,
# "status":4,
# "originalAmount":"0.01",
# "remainingAmount":"0.01",
# "executedAmount":"0",
# "fee":"0",
# "feeSymbol":"twd",
# "bitoFee":"0",
# "total":"0",
# "seq":"BNBTWD8540871774",
# "timeInForce":"GTC"
# }
# ]
# }
#
return self.parse_orders(orders, market, since, limit)
def fetch_open_orders(self, symbol=None, since=None, limit=None, params={}):
request = {
'statusKind': 'OPEN',
}
return self.fetch_orders(symbol, since, limit, self.extend(request, params))
async def fetch_closed_orders(self, symbol=None, since=None, limit=None, params={}):
"""
fetches information on multiple closed orders made by the user
:param str symbol: unified market symbol of the market orders were made in
:param int|None since: the earliest time in ms to fetch orders for
:param int|None limit: the maximum number of orde structures to retrieve
:param dict params: extra parameters specific to the bitopro api endpoint
:returns [dict]: a list of `order structures <https://docs.ccxt.com/en/latest/manual.html#order-structure>`
"""
request = {
'statusKind': 'DONE',
}
return self.fetch_orders(symbol, since, limit, self.extend(request, params))
async def fetch_my_trades(self, symbol=None, since=None, limit=None, params={}):
"""
fetch all trades made by the user
:param str symbol: unified market symbol
:param int|None since: the earliest time in ms to fetch trades for
:param int|None limit: the maximum number of trades structures to retrieve
:param dict params: extra parameters specific to the bitopro api endpoint
:returns [dict]: a list of `trade structures <https://docs.ccxt.com/en/latest/manual.html#trade-structure>`
"""
if symbol is None:
raise ArgumentsRequired(self.id + ' fetchMyTrades() requires the symbol argument')
await self.load_markets()
market = self.market(symbol)
request = {
'pair': market['id'],
}
response = await self.privateGetOrdersTradesPair(self.extend(request, params))
trades = self.safe_value(response, 'data', [])
#
# {
# "data":[
# {
# "tradeId":"5685030251",
# "orderId":"9669168142",
# "price":"11821.8",
# "action":"SELL",
# "baseAmount":"0.01",
# "quoteAmount":"118.218",
# "fee":"0.236436",
# "feeSymbol":"BNB",
# "isTaker":true,
# "timestamp":1644905714862,
# "createdTimestamp":1644905714862
# }
# ]
# }
#
return self.parse_trades(trades, market, since, limit)
def parse_transaction_status(self, status):
states = {
'COMPLETE': 'ok',
'INVALID': 'failed',
'PROCESSING': 'pending',
'WAIT_PROCESS': 'pending',
'FAILED': 'failed',
'EXPIRED': 'failed',
'CANCELLED': 'failed',
'EMAIL_VERIFICATION': 'pending',
'WAIT_CONFIRMATION': 'pending',
}
return self.safe_string(states, status, status)
def parse_transaction(self, transaction, currency=None):
#
# fetchDeposits
# {
# "serial":"20220214X766799",
# "timestamp":"1644833015053",
# "address":"bnb1xml62k5a9dcewgc542fha75fyxdcp0zv8eqfsh",
# "amount":"0.20000000",
# "fee":"0.00000000",
# "total":"0.20000000",
# "status":"COMPLETE",
# "txid":"A3CC4F6828CC752B9F3737F48B5826B9EC2857040CB5141D0CC955F7E53DB6D9",
# "message":"778553959",
# "protocol":"MAIN",
# "id":"2905906537"
# }
#
# fetchWithdrawals or fetchWithdraw
# {
# "serial":"20220215BW14069838",
# "timestamp":"1644907716044",
# "address":"TKrwMaZaGiAvtXCFT41xHuusNcs4LPWS7w",
# "amount":"8.00000000",
# "fee":"2.00000000",
# "total":"10.00000000",
# "status":"COMPLETE",
# "txid":"50bf250c71a582f40cf699fb58bab978437ea9bdf7259ff8072e669aab30c32b",
# "protocol":"TRX",
# "id":"9925310345"
# }
#
# withdraw
# {
# "serial":"20220215BW14069838",
# "currency":"USDT",
# "protocol":"TRX",
# "address":"TKrwMaZaGiAvtXCFT41xHuusNcs4LPWS7w",
# "amount":"8",
# "fee":"2",
# "total":"10"
# }
#
currencyId = self.safe_string(transaction, 'coin')
code = self.safe_currency_code(currencyId, currency)
id = self.safe_string(transaction, 'serial')
txId = self.safe_string(transaction, 'txid')
timestamp = self.safe_integer(transaction, 'timestamp')
amount = self.safe_number(transaction, 'total')
address = self.safe_string(transaction, 'address')
tag = self.safe_string(transaction, 'message')
status = self.safe_string(transaction, 'status')
fee = self.safe_number(transaction, 'fee')
return {
'info': transaction,
'id': id,
'txid': txId,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'network': None,
'addressFrom': None,
'address': address,
'addressTo': address,
'tagFrom': None,
'tag': tag,
'tagTo': tag,
'type': None,
'amount': amount,
'currency': code,
'status': self.parse_transaction_status(status),
'updated': None,
'fee': {
'currency': code,
'cost': fee,
'rate': None,
},
}
async def fetch_deposits(self, code=None, since=None, limit=None, params={}):
"""
fetch all deposits made to an account
:param str code: unified currency code
:param int|None since: the earliest time in ms to fetch deposits for
:param int|None limit: the maximum number of deposits structures to retrieve
:param dict params: extra parameters specific to the bitopro api endpoint
:returns [dict]: a list of `transaction structures <https://docs.ccxt.com/en/latest/manual.html#transaction-structure>`
"""
if code is None:
raise ArgumentsRequired(self.id + ' fetchDeposits() requires the code argument')
await self.load_markets()
currency = self.safe_currency(code)
request = {
'currency': currency['id'],
# 'endTimestamp': 0,
# 'id': '',
# 'statuses': '', # 'ROCESSING,COMPLETE,INVALID,WAIT_PROCESS,CANCELLED,FAILED'
}
if since is not None:
request['startTimestamp'] = since
if limit is not None:
request['limit'] = limit
response = await self.privateGetWalletDepositHistoryCurrency(self.extend(request, params))
result = self.safe_value(response, 'data', [])
#
# {
# "data":[
# {
# "serial":"20220214X766799",
# "timestamp":"1644833015053",
# "address":"bnb1xml62k5a9dcewgc542fha75fyxdcp0zv8eqfsh",
# "amount":"0.20000000",
# "fee":"0.00000000",
# "total":"0.20000000",
# "status":"COMPLETE",
# "txid":"A3CC4F6828CC752B9F3737F48B5826B9EC2857040CB5141D0CC955F7E53DB6D9",
# "message":"778553959",
# "protocol":"MAIN",
# "id":"2905906537"
# }
# ]
# }
#
return self.parse_transactions(result, currency, since, limit, {'type': 'deposit'})
async def fetch_withdrawals(self, code=None, since=None, limit=None, params={}):
"""
fetch all withdrawals made from an account
:param str code: unified currency code
:param int|None since: the earliest time in ms to fetch withdrawals for
:param int|None limit: the maximum number of withdrawals structures to retrieve
:param dict params: extra parameters specific to the bitopro api endpoint
:returns [dict]: a list of `transaction structures <https://docs.ccxt.com/en/latest/manual.html#transaction-structure>`
"""
if code is None:
raise ArgumentsRequired(self.id + ' fetchWithdrawals() requires the code argument')
await self.load_markets()
currency = self.safe_currency(code)
request = {
'currency': currency['id'],
# 'endTimestamp': 0,
# 'id': '',
# 'statuses': '', # 'PROCESSING,COMPLETE,EXPIRED,INVALID,WAIT_PROCESS,WAIT_CONFIRMATION,EMAIL_VERIFICATION,CANCELLED'
}
if since is not None:
request['startTimestamp'] = since
if limit is not None:
request['limit'] = limit
response = await self.privateGetWalletWithdrawHistoryCurrency(self.extend(request, params))
result = self.safe_value(response, 'data', [])
#
# {
# "data":[
# {
# "serial":"20220215BW14069838",
# "timestamp":"1644907716044",
# "address":"TKrwMaZaGiAvtXCFT41xHuusNcs4LPWS7w",
# "amount":"8.00000000",
# "fee":"2.00000000",
# "total":"10.00000000",
# "status":"COMPLETE",
# "txid":"50bf250c71a582f40cf699fb58bab978437ea9bdf7259ff8072e669aab30c32b",
# "protocol":"TRX",
# "id":"9925310345"
# }
# ]
# }
#
return self.parse_transactions(result, currency, since, limit, {'type': 'withdrawal'})
async def fetch_withdrawal(self, id, code=None, params={}):
"""
fetch data on a currency withdrawal via the withdrawal id
:param str id: withdrawal id
:param str code: unified currency code of the currency withdrawn, default is None
:param dict params: extra parameters specific to the bitopro api endpoint
:returns dict: a `transaction structure <https://docs.ccxt.com/en/latest/manual.html#transaction-structure>`
"""
if code is None:
raise ArgumentsRequired(self.id + ' fetchWithdrawal() requires the code argument')
await self.load_markets()
currency = self.safe_currency(code)
request = {
'serial': id,
'currency': currency['id'],
}
response = await self.privateGetWalletWithdrawCurrencySerial(self.extend(request, params))
result = self.safe_value(response, 'data', {})
#
# {
# "data":{
# "serial":"20220215BW14069838",
# "address":"TKrwMaZaGiAvtXCFT41xHuusNcs4LPWS7w",
# "amount":"8.00000000",
# "fee":"2.00000000",
# "total":"10.00000000",
# "status":"COMPLETE",
# "txid":"50bf250c71a582f40cf699fb58bab978437ea9bdf7259ff8072e669aab30c32b",
# "protocol":"TRX",
# "id":"9925310345",
# "timestamp":"1644907716044"
# }
# }
#
return self.parse_transaction(result, currency)
async def withdraw(self, code, amount, address, tag=None, params={}):
"""
make a withdrawal
:param str code: unified currency code
:param float amount: the amount to withdraw
:param str address: the address to withdraw to
:param str|None tag:
:param dict params: extra parameters specific to the bitopro api endpoint
:returns dict: a `transaction structure <https://docs.ccxt.com/en/latest/manual.html#transaction-structure>`
"""
tag, params = self.handle_withdraw_tag_and_params(tag, params)
await self.load_markets()
self.check_address(address)
currency = self.currency(code)
request = {
'currency': currency['id'],
'amount': self.number_to_string(amount),
'address': address,
}
if 'network' in params:
networks = self.safe_value(self.options, 'networks', {})
requestedNetwork = self.safe_string_upper(params, 'network')
params = self.omit(params, ['network'])
networkId = self.safe_string(networks, requestedNetwork)
if networkId is None:
raise ExchangeError(self.id + ' invalid network ' + requestedNetwork)
request['protocol'] = networkId
if tag is not None:
request['message'] = tag
response = await self.privatePostWalletWithdrawCurrency(self.extend(request, params))
result = self.safe_value(response, 'data', {})
#
# {
# "data":{
# "serial":"20220215BW14069838",
# "currency":"USDT",
# "protocol":"TRX",
# "address":"TKrwMaZaGiAvtXCFT41xHuusNcs4LPWS7w",
# "amount":"8",
# "fee":"2",
# "total":"10"
# }
# }
#
return self.parse_transaction(result, currency)
    def sign(self, path, api='public', method='GET', params={}, headers=None, body=None):
        """Build the url, headers and body for a request.

        Public GET requests carry their params in the query string. Private
        requests are authenticated with three headers: the api key, a
        base64-encoded JSON payload, and an HMAC-SHA384 signature of that
        payload computed with the secret. For POST/PUT the payload is the
        request params; for GET/DELETE it is only a nonce, and the params go
        into the query string.
        """
        url = '/' + self.implode_params(path, params)
        # whatever was interpolated into the path must not be sent again
        query = self.omit(params, self.extract_params(path))
        if headers is None:
            headers = {}
        # identifies the client library to the exchange
        headers['X-BITOPRO-API'] = 'ccxt'
        if api == 'private':
            self.check_required_credentials()
            if method == 'POST' or method == 'PUT':
                # the signed payload is the JSON request body itself
                body = self.json(params)
                payload = self.string_to_base64(body)
                signature = self.hmac(payload, self.encode(self.secret), hashlib.sha384)
                headers['X-BITOPRO-APIKEY'] = self.apiKey
                headers['X-BITOPRO-PAYLOAD'] = payload
                headers['X-BITOPRO-SIGNATURE'] = signature
            elif method == 'GET' or method == 'DELETE':
                # params travel in the query string, only a nonce is signed
                if query:
                    url += '?' + self.urlencode(query)
                nonce = self.milliseconds()
                rawData = {
                    'nonce': nonce,
                }
                rawData = self.json(rawData)
                payload = self.string_to_base64(rawData)
                signature = self.hmac(payload, self.encode(self.secret), hashlib.sha384)
                headers['X-BITOPRO-APIKEY'] = self.apiKey
                headers['X-BITOPRO-PAYLOAD'] = payload
                headers['X-BITOPRO-SIGNATURE'] = signature
        elif api == 'public' and method == 'GET':
            if query:
                url += '?' + self.urlencode(query)
        url = self.urls['api']['rest'] + url
        return {'url': url, 'method': method, 'body': body, 'headers': headers}
def handle_errors(self, code, reason, url, method, headers, body, response, requestHeaders, requestBody):
if response is None:
return # fallback to the default error handler
if code >= 200 and code < 300:
return
feedback = self.id + ' ' + body
error = self.safe_string(response, 'error')
self.throw_exactly_matched_exception(self.exceptions['exact'], error, feedback)
self.throw_broadly_matched_exception(self.exceptions['broad'], error, feedback)
raise ExchangeError(feedback) # unknown message
| [
"travis@travis-ci.org"
] | travis@travis-ci.org |
332fdc6a4f568c0b678f79ad6d5b540a4e84634e | a4751b8404a64ad537d82af34d1cd02cdb1c459c | /furkhan.py | 5cd208f2e360db880b080a78ddedd37e56688d7a | [] | no_license | adiatgit/leetcode | 37499e7e85ec9ccd4447c14c92384ad3afc6abe0 | 36682b28ee37a4360951c162570fbcca5a215e07 | refs/heads/main | 2020-12-09T10:20:30.536707 | 2020-10-06T05:09:57 | 2020-10-06T05:09:57 | 233,274,863 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 187 | py | def checkSum(givenList,S):
sum = 0
for i in givenList:
sum=sum+i
if sum == S:
return True
return False
def getCombinations(n,k):
for i in range(k+1):
| [
"sawwalakhea@gmail.com"
] | sawwalakhea@gmail.com |
8b233babbcc3f3efc6de6e4720599c590b22e2d3 | 71fdffc6f4ed975d042073691960e554a2b76be0 | /Opening an Image.py | 7b32c5f0f0308fd5ddff5ce2bf21fd032d29acf4 | [] | no_license | BhavyaShah1234/MyWholeImageProcessingFolder | 1abe4f1f35625daf5b0e532c4e285267cf90719e | fa8af03537c576c1c3661eb57a7346ab0db24f56 | refs/heads/main | 2023-04-05T00:25:24.932163 | 2021-04-08T07:04:04 | 2021-04-08T07:04:04 | 355,788,297 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 363 | py | import cv2
# Load/Read image
img1 = cv2.imread('lena.png', 0)
img2 = cv2.imread('lena.png', 1)
img3 = cv2.imread('lena.png', -1)
# Show Image
cv2.imshow('Output1', img1)
cv2.imshow('Output2', img2)
cv2.imshow('Output3', img3)
# Retain/Delay Image
cv2.waitKey(0)
# On clicking close on image window the program stops running
cv2.destroyAllWindows()
| [
"noreply@github.com"
] | noreply@github.com |
e66b10771ba670c27a4b675fb88874a56e811e81 | 0693fe0e5fbcc6ef775529a28ca891c8ad906a58 | /pdb/pdb_composite.py | 3f4875bdd7d2b9bab890b2df2a3e77b5dbbaa10c | [
"MIT"
] | permissive | shellydeforte/PDB | 81cd4b8b9d3141cab2645b564e74a67bd6f2e7e8 | 287350c0049a10cee10654d093a1d06128f9d7aa | refs/heads/master | 2021-01-10T03:48:20.769031 | 2015-12-15T15:00:04 | 2015-12-15T15:00:04 | 47,725,527 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,952 | py | # -*- coding: utf-8 -*-
from __future__ import (
absolute_import, division, print_function, unicode_literals)
import pandas as pd
from io import open
from Bio import SeqIO
from pdb.lib.data_paths import build_abs_path
from pdb.lib.progress_bar import ProgressBar
def create_pdb_composite(df, ss_dis, uni_folder):
""" Creates a secondary structure composite and outputs a new DataFrame.
1. Goes through the DataFrame row by row, and creates interval_dict.
2. Uses this dictionary to create a composite structure.
3. Creates a new dataframe with PDB_CHAIN, UNIPROT, SEC_STRUCT.
Notes:
Multiple UniProt IDs can be attached to a single PDB_CHAIN.
Therefore, the only unique key for a given ss sequence is
the PDB ID, the PDB chain and the UniProt ID. That's why
the dictionary is keyed with pdb_chain_uni.
Args:
df (DataFrame): A pre-filtered dataframe from pdb_chain_uniprot.tsv.
ss_dis (dictionary): A dictionary extracted from ss_dis.txt.
ss_dis has the following form:
ss_dis[pdb_A] = {
'sequence': '',
'secstr': '',
'disorder': ''
}
uni_folder (Unicode): a directory path to the folder that has single
UniProt fasta files.
Returns:
A new DataFrame with PDB_CHAIN, UNIPROT, SEC_STRUCT.
"""
interval_dict = _create_interval_dict(df)
structure_dict = _create_struct_dict(interval_dict, ss_dis, uni_folder)
df = pd.DataFrame(structure_dict)
return df
def _create_interval_dict(df):
"""Create a dictionary of intervals.
The first interval corresponds with RES_BEG, RES_END, which are the
indexes (starting from 1) for the PDB chain. Note that this is
required because the PDB chain is not always fully composed of the
UniProt entry. The second interval corresponds to SP_BEG, SP_END,
this is the corresponding interval on the full UniProt entry,
starting with an index of 1.
Args:
df (DataFrame): A pre-filtered dataframe from pdb_chain_uniprot.tsv.
Returns:
interval_dict (dictionary): A dictionary in the following form:
{
'11BG_A_Q3E840':
[
[
[1, 124],
[27, 150]
]
]
}
"""
interval_dict = {}
progress = ProgressBar(
len(df.index),
start_msg="Creating interval dictionary..",
end_msg="Done creating interval dictionary."
)
for i, row in df.iterrows():
uni_id = row.SP_PRIMARY
pdb_chain_uni = ''.join([
row.PDB,
'_',
row.CHAIN,
'_',
uni_id
])
intervals = [[row.RES_BEG, row.RES_END], [row.SP_BEG, row.SP_END]]
if pdb_chain_uni in interval_dict.keys():
interval_dict[pdb_chain_uni].append(intervals)
else:
interval_dict[pdb_chain_uni] = [intervals]
progress.inc()
return interval_dict
def _create_struct_dict(interval_dict, ss_dis, uni_folder):
""" Creates the structure dictionary.
Args:
interval_dict (dict): A dictionary in the following form:
{
'11BG_A_Q3E840':
[
[
[1, 124],
[27, 150]
]
]
}
uni_folder (Unicode): A path to the folder that has single
UniProt fasta files.
ss_dis: a dictionary extracted from ss_dis.txt, in the following form:
ss_dis[pdb_A] = {
'sequence': '',
'secstr': '',
'disorder': ''
}
Returns:
A dictionary in the following form:
{
'PDB_CHAIN': [],
'UNIPROT': [],
'SEC_STRUCT': []
}
"""
structure_dict = {'PDB_CHAIN': [], 'SP_PRIMARY': [], 'SEC_STRUCT': []}
for pdb_chain_uni in interval_dict:
pdb_chain = ''.join([
pdb_chain_uni.split('_')[0],
'_',
pdb_chain_uni.split('_')[1]
])
uni_id = pdb_chain_uni.split('_')[2]
uni_fp = build_abs_path(uni_folder, uni_id)
len_uni_seq = len(
(SeqIO.read(open(uni_fp), "fasta")).seq
)
disorder = ss_dis[pdb_chain]['disorder']
ss = ss_dis[pdb_chain]['secstr']
intervals = interval_dict[pdb_chain_uni]
pdb_struct = _create_pdb_struct(intervals, disorder, ss, len_uni_seq)
structure_dict['PDB_CHAIN'].append(pdb_chain)
structure_dict['SP_PRIMARY'].append(uni_id)
structure_dict['SEC_STRUCT'].append(pdb_struct)
return structure_dict
def _create_pdb_struct(intervals, disorder, ss, uni_seq_len):
"""Create PDB structure.
1. Creates a sequence that is all '-'.
2. Iterate through the PDB indexes, add the distance between the
PDB interval and the UniProt interval to get the UniProt index.
3. If there is a ss, substitutes that value. If there is a disorder
value ('X'), substitutes that value. If neither is present,
substitutes a 'P'.
Example:
Given the follow arguments (these don't go together):
disorder = 'XX----------------------'
ss = ' TT EE SS HHHHHHHHHHHT TEEEEEEEE SGGG '
intervals = [[[3, 197], [296, 490]], [[200, 367], [765, 932]]]
uni_seq_len = 963
Something like this is created:
[-,-,-,-,'P','P','X','E','E','H','H','H'...]
And something like this is returned:
'---PPXEEHH'
The return string is the same length as the Uniprot sequence.
Args:
intervals (list): A list of interval information. (Interval
numbering starts at 1, so must be adjusted down 1.)
disorder (Unicode): The missing regions from ss_dis.
ss (Unicode): The secondary structure from ss_dis.
uni_seq_len (int): The length of the UniProt sequence.
Returns:
A string that represents the secondary structure elements.
"""
def interval_map(x):
return x + (interval[1][0] - interval[0][0])
structure = ['-'] * uni_seq_len
for interval in intervals:
for i in range(interval[0][0] - 1, interval[0][1]):
j = interval_map(i)
if ss[i] != ' ':
structure[j] = ss[i]
if disorder[i] != '-':
structure[j] = disorder[i]
if structure[j] == '-':
structure[j] = 'P'
assert len(structure) == uni_seq_len
assert ' ' not in structure
return ''.join(structure)
| [
"mwelcks@gmail.com"
] | mwelcks@gmail.com |
19c2fc6ce6c8cc922ca844bf9f171416384b75a9 | 66596c9d2e97ef2997ca587ee2924210360e6730 | /deeplearnig/redneuronal/probando.py | 4d4bbd6c277be84ae26fa3ea13cc21eabf3ee97e | [] | no_license | rmsg23/android | ec9eb17c0996bd5c1a9eb111ea9ab774aeaaecdf | a4e90e1a21856f2ccd1218a0dbd1c279885e15d5 | refs/heads/master | 2021-01-09T20:38:24.390383 | 2016-06-09T19:22:40 | 2016-06-09T19:22:40 | 60,798,621 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,132 | py | entrada X[i]: 64 datos (matrix de 8*8)
a= [ 0.0 , -0.33501649,-1.09493684, -0.66762355, 0.26875116, 0.7446178 ,
-0.10895844, -0.12502292,-0.05907756, -0.62400926, -0.25504011, 0.00517797,
-1.31330167, 1.12772113, 0.87958306, -0.13043338, -0.04462507, -0.72764628,
1.07165259, -0.51590096,-1.14964846, 0.67687682, 0.678579 , -0.11422184,
-0.03337973, 0.16859065, 0.95451315, 0.03037272, -1.1262881 , 1.09838387,
-0.35755713, -0.04723238, 0.0 , -0.67237227, -0.89629737, -0.0114553,
0.96064411, 0.21400225,-0.82269451, 0.0 , -0.06134367, -0.5312841,
-1.05283456, -0.50129908, 1.01118593, 1.18784054, -0.33635139, -0.08874162,
-0.03543326, -0.40357499,-1.33033057, 0.08817693, -0.26718796, 0.86931494,
-0.14743634, -0.20978513,-0.02359646, -0.29908135, -1.08938309, -0.47766056,
0.84963214, 0.8876023 ,-0.5056698 , -0.19600752]
----------------------------
----------------------------
salida y[i]: 10 datos matrix de
b=[ 5.66565757e-07 , 1.56568143e-08, 1.74980776e-11 , 1.70110339e-07,
3.39088428e-07 , 1.05518023e-09, 3.64836109e-05 , 1.96613900e-10,
9.99948678e-01 , 1.37456697e-05] EL NUMERO PREDICHO ES LA POSISION DONDE ESTA EL VALOR MAS GRANDE EN ESTE CASO : 8 = 9.99948678e-01
ESTE ES EL NUMERO EXTRAIDO DEL SET DE DATOS
(array([ 0., 0., 5., 13., 9., 1., 0., 0., 0., 0., 13.,
15., 10., 15., 5., 0., 0., 3., 15., 2., 0., 11.,
8., 0., 0., 4., 12., 0., 0., 8., 8., 0., 0.,
5., 8., 0., 0., 9., 8., 0., 0., 4., 11., 0.,
1., 12., 7., 0., 0., 2., 14., 5., 10., 12., 0.,
0., 0., 0., 6., 13., 10., 0., 0., 0.]), 64)
ESTE ES EL NUMERO ESCALADO : RANGO (2:-2) usando preprocessing.scale() permite obtener datos con media cero y varianza 1
(array([ 0. , -0.33501649, -0.04308102, 0.27407152, -0.66447751,
-0.84412939, -0.40972392, -0.12502292, -0.05907756, -0.62400926,
0.4829745 , 0.75962245, -0.05842586, 1.12772113, 0.87958306,
-0.13043338, -0.04462507, 0.11144272, 0.89588044, -0.86066632,
-1.14964846, 0.51547187, 1.90596347, -0.11422184, -0.03337973,
0.48648928, 0.46988512, -1.49990136, -1.61406277, 0.07639777,
1.54181413, -0.04723238, 0. , 0.76465553, 0.05263019,
-1.44763006, -1.73666443, 0.04361588, 1.43955804, 0. ,
-0.06134367, 0.8105536 , 0.63011714, -1.12245711, -1.06623158,
0.66096475, 0.81845076, -0.08874162, -0.03543326, 0.74211893,
1.15065212, -0.86867056, 0.11012973, 0.53761116, -0.75743581,
-0.20978513, -0.02359646, -0.29908135, 0.08671869, 0.20829258,
-0.36677122, -1.14664746, -0.5056698 , -0.19600752]), 64)
ESTE NUMERO ES UN: 0
AL PASAR POR LA RED NEURONAL OBTENEMOS
[[ 9.99989938e-01 3.25610035e-16 1.27379652e-08 1.64891900e-08
2.40727638e-07 1.32713485e-06 4.57020418e-09 1.77605854e-06
3.47891165e-10 6.68367283e-06]]
EL VALOR MAS GRANDE ESTA EN LA POSISION 0 CON 9.99989938e-01
LO QUE CONCUERDA CON LO DICHO ANTERIORMENTE DICHO | [
"rmsg23@gmail.com"
] | rmsg23@gmail.com |
36d47cb2d7b2e38504ce9d4283769b55041d8980 | 3f4219ad9dd3462967a3924b6f491db20581b0a2 | /Sentdex-Tutorials/test.py | 94e0c6a771b9f02397e7bd86f8f22bbd83b3e66b | [] | no_license | rezakrimi/Machine-Learning | d81dc741ef9755d62079b58792f01055c50d3dbb | 8c7ad77996d6ba9ac18c6e9913fbe05e0cab7895 | refs/heads/master | 2021-05-14T18:16:53.764565 | 2018-05-08T01:50:33 | 2018-05-08T04:42:06 | 116,067,239 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 489 | py | import pandas as pd
import numpy as np
# df = pd.DataFrame(np.arange(12).reshape(3,4),
# columns=['A', 'B', 'C', 'D'])
#
# print (df.drop([1], 0))
#
# a= [1, 2, 3, 4, 5]
#
# print (a[-3:])
a = np.array([[1,2,3],[1,1,1]])
b = np.array([[1,2],[1,2],[1,2]])
print(np.dot(a, b))
print('completing ')
a = [[[1,2,3],[1]],
[[4, 2, 3], [1]],
[[5, 2, 3], [1]],
[[6, 2, 3], [1]],
[[7, 2, 3], [1]],
[[8, 2, 3], [1]],]
a = np.array(a)
print(a[-3:, 0]) | [
"reza@Rezas-MacBook-Pro.local"
] | reza@Rezas-MacBook-Pro.local |
97eb63dbfe1a1eae8ede3992dbe594c47baec695 | e0ddffd0c355aa635f8823a9b3c2eddfcc7848eb | /links_left.py | bd199b038bdef88998b59936a9bad1044bed5fdf | [] | no_license | quanted/cyan_app | 871f0532a30995efa02121d0696c64900173f867 | 7a71ce647bb66ba39e1e1ea32b4f9b64a2fc5f9d | refs/heads/dev | 2021-01-13T03:54:37.564319 | 2019-10-16T16:23:26 | 2019-10-16T16:23:26 | 78,225,520 | 0 | 0 | null | 2020-04-14T19:15:29 | 2017-01-06T17:35:20 | Python | UTF-8 | Python | false | false | 745 | py | from django.template.loader import render_to_string
from collections import OrderedDict
from django.shortcuts import redirect
# 03ubertext_links_left:
def ordered_list(model=None, page=None):
link_dict = OrderedDict([
('Model', OrderedDict([
('CyAN', 'cyan'),
])
),
('Documentation', OrderedDict([
('API Documentation', '/qedinternal.epa.gov/cyan/rest'),
('Source Code', '/github.com/quanted/qed_cyan')
])
)
])
#return render_to_string('hwbi/03ubertext_links_left_drupal.html', {
return render_to_string('03cyan_links_left_drupal.html', {
'LINK_DICT': link_dict,
'MODEL': model,
'PAGE': page
}) | [
"arignatius@gmail.com"
] | arignatius@gmail.com |
33fcb1655b493726efde46bf4eda9f6a56feed54 | 28f83582f02bfbec4e0ee32924913260093549ee | /fercam/fercam/asgi.py | 0ac3f057463aabe36256add27f5abe9bb2ad4b9f | [] | no_license | fabianomobono/transport | 9b8ce8457d1edc3bb1479e3efb4870ba64060403 | 302821b8423ba4ef5e60971a50521a0a31729cb1 | refs/heads/main | 2023-02-09T11:40:54.875356 | 2021-01-04T15:07:58 | 2021-01-04T15:07:58 | 314,293,625 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 456 | py | import os
from channels.auth import AuthMiddlewareStack
from channels.routing import ProtocolTypeRouter, URLRouter
from django.core.asgi import get_asgi_application
import transport.routing
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "fercam.settings")
application = ProtocolTypeRouter({
"http": get_asgi_application(),
"websocket": AuthMiddlewareStack(
URLRouter(
transport.routing.websocket_urlpatterns
)
),
})
| [
"fabian.omobono@gmail.com"
] | fabian.omobono@gmail.com |
b942e9a5078d289779b1ca2a0a4a0379847ad14e | ec5a786a883c9ea1837b9b615a5f374da69d21bf | /App.py | 8cd16bf8f39d3d65ec47c8b27d4b44cfb91b8130 | [
"Apache-2.0"
] | permissive | zhenleiji/ZPong | b5721240b0047e39014bec5e7c4cf514af599984 | 6a123239b9c92478b303301396064f9f22f536f9 | refs/heads/master | 2021-01-23T04:28:55.550005 | 2017-03-26T01:56:18 | 2017-03-26T02:09:05 | 86,199,197 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,850 | py | import pygame
from model.Ball import Ball
from model.BarRight import BarRight
from model.BarLeft import BarLeft
from model.BarTop import BarTop
from model.BarBottom import BarBottom
class App:
    """Top-level Pong application: owns the pygame window, the game objects
    (four edge bars and a ball), and the 30 FPS event/update/draw loop."""

    def __init__(self):
        # Initialise pygame
        self._running = True
        self._display_surf = None
        self.size_screen = self.weight, self.height = 1024, 764  # NOTE(review): 'weight' presumably means 'width' — confirm
        self.clock = pygame.time.Clock()
        self.background = 255, 255, 255  # RGB fill colour (white)
        # Initialise objects
        # One bar per screen edge, each placed 50 px from its edge.
        self.bars = [BarLeft(self.size_screen, 50), BarRight(self.size_screen, 50), BarTop(self.size_screen, 50),
                     BarBottom(self.size_screen, 50)]
        # Ball starts at the screen centre with velocity (10, 10).
        self.ball = Ball(self.size_screen, [self.weight / 2, self.height / 2], [10, 10])

    def on_init(self):
        """Create the pygame window; returns None implicitly (see on_execute)."""
        pygame.init()
        self._display_surf = pygame.display.set_mode(self.size_screen, pygame.HWSURFACE | pygame.DOUBLEBUF)
        self._running = True

    def on_event(self, event):
        """Dispatch a single pygame event to every bar; QUIT stops the main loop."""
        for bar in self.bars:
            bar.on_event(event)
        if event.type == pygame.QUIT:
            self._running = False

    def on_cleanup(self):
        """Shut pygame down."""
        pygame.quit()

    def on_update(self):
        """Advance one simulation step: resolve bar/ball collisions, then move everything."""
        for bar in self.bars:
            bar.check_collision(self.ball)
            bar.on_update()
        self.ball.on_update()

    def on_draw(self):
        """Redraw the frame: clear the background, draw bars and ball, flip buffers."""
        self._display_surf.fill(self.background)
        for bar in self.bars:
            bar.on_draw(self._display_surf)
        self.ball.on_draw(self._display_surf)
        pygame.display.flip()

    def on_execute(self):
        """Main loop: initialise, then handle events / update / draw at 30 FPS until quit."""
        if self.on_init() == False:
            # NOTE(review): on_init() returns None, so this guard never fires — confirm intent.
            self._running = False

        while (self._running):
            for event in pygame.event.get():
                self.on_event(event)
            self.on_update()
            self.on_draw()
            self.clock.tick(30)  # cap the loop at 30 frames per second
        self.on_cleanup()
| [
"zhenlei.ji@gmail.com"
] | zhenlei.ji@gmail.com |
cbc7fcc8575360d730c1b24ee4619b0094ce1e49 | dacb0592ae5140f38c11de700685d21eb170a794 | /facture/Client.py | 3a9e1048440e97abbf9f18ad7c922f874883bb21 | [] | no_license | carlosegomez/factureProject | 605f27c78b21698d19675079f9aaadecf3579925 | bfe3254182026f33f986c461c44104bce3f331e0 | refs/heads/master | 2021-07-25T07:12:58.157216 | 2020-04-16T13:44:02 | 2020-04-16T13:44:02 | 149,171,293 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 126 | py | from dataclasses import dataclass
@dataclass
class Client:
name: str
last_name: str
phone: str
address: str
| [
"carlos.gomez-cardenas@ansys.com"
] | carlos.gomez-cardenas@ansys.com |
f1f06da392a28869848367fd1c0d1f4047137f9d | 839dc463e9fcbb06d7994995227420f0e107dde0 | /portfolio/portfolio/urls.py | 5b92022b61b448e82c9efd3640420ce376661bd5 | [] | no_license | jmcclena94/django-portfolio | f0508750eb3ba1e4fedfe1c36b90902861066461 | 1deb07035de371320fab2e953c48454f16be914d | refs/heads/master | 2020-12-31T04:42:40.296275 | 2017-04-15T19:02:06 | 2017-04-15T19:02:06 | 58,496,222 | 1 | 0 | null | 2017-04-15T19:02:06 | 2016-05-10T22:00:56 | Python | UTF-8 | Python | false | false | 534 | py | from django.conf.urls import url, include
from django.contrib import admin
from portfolio.views import HomepageView
from portfolio import settings
from django.conf.urls.static import static
urlpatterns = [
url(r'^admin/', admin.site.urls),
url(r'^$', HomepageView.as_view()),
url(r'^', include('projects.urls')),
url(r'^', include('blog.urls')),
url(r'^', include('subscribe.urls')),
]
if settings.DEBUG:
urlpatterns += static(settings.MEDIA_URL,
document_root=settings.MEDIA_ROOT)
| [
"jmcclena94@gmail.com"
] | jmcclena94@gmail.com |
cb1c16ee59fe20890a221136d81fcc1734dc8a2d | 940bdfb1d2014e0fdf8c1d138efb43935446864a | /ayush_crowdbotics_347/settings.py | 24c34f26d61c779f77db6396b510cd90b427c8e0 | [] | no_license | payush/ayush-crowdbotics-347 | f8568a28c0fd328161e9961d1f4ffc73ed1ff3de | 08b235df039628147296a723f18dc976317479db | refs/heads/master | 2020-03-23T19:49:01.171461 | 2018-07-23T11:14:46 | 2018-07-23T11:14:46 | 142,003,672 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,130 | py | """
Django settings for ayush_crowdbotics_347 project.
Generated by 'django-admin startproject' using Django 1.11.5.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.11/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'br8n%%zz9_*mw+%so6e=q21!m$82iugifwit)lyt@s^w207*4w'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.sites'
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'ayush_crowdbotics_347.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'ayush_crowdbotics_347.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.11/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.11/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.11/howto/static-files/
STATIC_URL = '/static/'
import environ
env = environ.Env()
ALLOWED_HOSTS = ['*']
SITE_ID = 1
MIDDLEWARE += ['whitenoise.middleware.WhiteNoiseMiddleware']
DATABASES = {
'default': env.db()
}
AUTHENTICATION_BACKENDS = (
'django.contrib.auth.backends.ModelBackend',
'allauth.account.auth_backends.AuthenticationBackend'
)
STATIC_ROOT = os.path.join(BASE_DIR, "staticfiles")
STATICFILES_DIRS = [
os.path.join(BASE_DIR, 'static')
]
STATICFILES_STORAGE = 'whitenoise.storage.CompressedManifestStaticFilesStorage'
LOCAL_APPS = [
'home',
]
THIRD_PARTY_APPS = [
'rest_framework',
'rest_framework.authtoken',
'bootstrap4',
'allauth',
'allauth.account',
'allauth.socialaccount',
'allauth.socialaccount.providers.google',
]
INSTALLED_APPS += LOCAL_APPS + THIRD_PARTY_APPS
# allauth
ACCOUNT_EMAIL_REQUIRED = True
ACCOUNT_AUTHENTICATION_METHOD = 'email'
ACCOUNT_USERNAME_REQUIRED = False
ACCOUNT_EMAIL_VERIFICATION = None
LOGIN_REDIRECT_URL = '/'
| [
"ayushpuroheet@gmail.com"
] | ayushpuroheet@gmail.com |
149a32f41cf34c3a51f8d317601177f0d4f27b59 | 067573d864754a7ce73014086cd6c9165e2b5ea0 | /scripts/pMSSMtree.cfg.py | a99460e708046205f3dac742f4ace7e7d0d8f716 | [] | no_license | UhhCmsAnalysis/Run2pMSSM | 3f586d8dcbaacd4de2ed908062fe9875b43fef4c | bb6c7c7309108b26ff1d8f2062f712d9b848555a | refs/heads/master | 2020-12-21T08:53:50.884254 | 2020-02-09T20:33:58 | 2020-02-09T20:33:58 | 236,379,543 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,020 | py | FILE = open("scripts/analyses.cfg.py")
exec(FILE)
FILE.close()
#################################
# parameters, masses, etc
#################################
treeCfg = [
[ "params",
{"files":"idata/parameters/params_batch*.txt","base":""}],
[ "moreparams",
{"files":"idata/moreparams/moreparams.txt","base":""}],
[ "fs",
{"files":"idata/fs/fs.txt","base":"fs"}],
[ "lilith",
{"files":"idata/moreparams/lilith.txt"}],
# [ "xsect13",
# {"files":"idata/xsect/xsect_13*txt","base":"","skip_ID":[],"skip_col":"pointName"}],
[ "xsect8",
{"files":"idata/xsect/xsect_8*txt","base":"","skip_ID":[2321,8344,6640],"skip_col":"pointName"}],
[ "xsect7",
{"files":"idata/xsect/xsect_7*txt","base":"","skip_ID":[2321,8344,6640]}],
]
datadir = "idata"
#################################
# likelihoods
#################################
def addLlhd2Cfg(anaList, ext=""):
    """Append one likelihood entry to the module-level treeCfg for every
    (analysis, signal-region) pair in *anaList*."""
    suffix = ext.replace(".", "")
    for ana in anaList:
        name, regions = ana[0], ana[1]
        for region in regions:
            branch = name + region + suffix
            path = datadir + "/" + name + "/llhd" + region + ext + ".txt"
            treeCfg.append([branch + "_llhd", {"files": path, "base": branch}])
addLlhd2Cfg(ana7)
addLlhd2Cfg(ana8)
addLlhd2Cfg(ana13)
addLlhd2Cfg(ana7n8n13)
#################################
# Z-values
#################################
def addZ2Cfg(anaList, ext=""):
    """Append one Z-value entry to the module-level treeCfg for every
    (analysis, signal-region) pair in *anaList*."""
    suffix = ext.replace(".", "_")
    for ana in anaList:
        name, regions = ana[0], ana[1]
        for region in regions:
            branch = name + region + suffix
            path = datadir + "/" + name + "/Z" + region + ext + ".txt"
            treeCfg.append([branch + "_Z", {"files": path, "base": branch}])
#addZ2Cfg(ana7)
#addZ2Cfg(ana8)
#addZ2Cfg(ana7n8)
addZ2Cfg(ana7z)
addZ2Cfg(ana8z)
addZ2Cfg(ana13z)
addZ2Cfg(ana7n8n13z)
addZ2Cfg(ana7n8n13lossyz)
################################
# print
################################
#for entry in treeCfg:
# print entry[0],entry[1]
| [
"samuel.bein@gmail.com"
] | samuel.bein@gmail.com |
95575754503668a4a962079a47a87cd7b7287f22 | 6d338d50ee2195f389d7682c1dde72a812087448 | /algorithm/2D/_mini_radius.py | bc1c91239942f4b8415510fefc4ef1da8e25d23f | [] | no_license | HaopengSun/Poisson-disk-sampling-based-algorithm | 28928949d877a1bda8dff93599e70713f194b41d | 7c926510a9da29c83249fb79a24e818a5b5fe47e | refs/heads/master | 2023-07-04T16:02:28.128156 | 2021-08-17T02:14:36 | 2021-08-17T02:14:36 | 364,426,836 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 212 | py | def particles_mini(maximum_radius, minimum_radius, Circles):
for circle in Circles:
if circle.r >= minimum_radius and circle.r <= maximum_radius:
if circle.r != minimum_radius:
return False
return True | [
"45277284+serendipity-harpoon@users.noreply.github.com"
] | 45277284+serendipity-harpoon@users.noreply.github.com |
6bacb134a528804dff45b812c5ea7e73e151f3ac | 0add7953d3e3ce2df9e8265102be39b758579753 | /built-in/TensorFlow/Research/cv/image_classification/Cars_for_TensorFlow/automl/vega/model_zoo/model_zoo.py | 5deb87cd07e7947c8ec193b4da018690b923ef91 | [
"Apache-2.0",
"MIT"
] | permissive | Huawei-Ascend/modelzoo | ae161c0b4e581f8b62c77251e9204d958c4cf6c4 | df51ed9c1d6dbde1deef63f2a037a369f8554406 | refs/heads/master | 2023-04-08T08:17:40.058206 | 2020-12-07T08:04:57 | 2020-12-07T08:04:57 | 319,219,518 | 1 | 1 | Apache-2.0 | 2023-03-24T22:22:00 | 2020-12-07T06:01:32 | Python | UTF-8 | Python | false | false | 3,377 | py | # -*- coding: utf-8 -*-
# Copyright (C) 2020. Huawei Technologies Co., Ltd. All rights reserved.
# This program is free software; you can redistribute it and/or modify
# it under the terms of the MIT License.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# MIT License for more details.
"""Model zoo."""
import torch
import logging
import os
from vega.model_zoo.torch_vision_model import get_torchvision_model_file
from vega.search_space.networks import NetworkDesc, NetTypes
from vega.core.common import TaskOps
from vega.core.common.general import General
class ModelZoo(object):
    """Model zoo: builds models from descriptions and loads pretrained weights."""

    @classmethod
    def set_location(cls, location):
        """Set model zoo location.

        :param location: model zoo location.
        :type location: str.
        """
        General.model_zoo.model_zoo_path = location

    @classmethod
    def get_model(cls, model_desc=None, model_checkpoint=None):
        """Get model from model zoo.

        :param model_desc: the description of the network.
        :type model_desc: dict or None.
        :param model_checkpoint: path of the model weights file.
        :type model_checkpoint: str or None.
        :return: model.
        :rtype: model.
        """
        try:
            network = NetworkDesc(model_desc)
            model = network.to_model()
        except Exception as e:
            logging.error("Failed to get model, model_desc={}, msg={}".format(
                model_desc, str(e)))
            raise  # bare raise preserves the original traceback ('raise e' rewrote it)
        logging.info("Model was created.")
        logging.debug("model_desc={}".format(model_desc))
        if model_checkpoint is not None:
            logging.info("Load model with weight.")
            model = cls._load_pretrained_model(network, model, model_checkpoint)
            logging.info("Model was loaded.")
        return model

    @classmethod
    def _load_pretrained_model(cls, network, model, model_checkpoint):
        """Load pretrained weights into *model*.

        Falls back to the torchvision checkpoint bundled with the model zoo
        when no checkpoint path is given and the network is a torchvision model.

        :raises FileNotFoundError: if the weights file does not exist.
        """
        if not model_checkpoint and network._model_type == NetTypes.TORCH_VISION_MODEL:
            model_file_name = get_torchvision_model_file(network._model_name)
            full_path = "{}/torchvision_models/checkpoints/{}".format(
                TaskOps().model_zoo_path, model_file_name)
        else:
            full_path = model_checkpoint
        logging.info("load model weights from file.")
        logging.debug("Weights file: {}".format(full_path))
        if not os.path.isfile(full_path):
            # BUG FIX: the original raised a plain string, which itself raises
            # TypeError in Python 3 (exceptions must derive from BaseException).
            raise FileNotFoundError(
                "Pretrained model is not existed, model={}".format(full_path))
        checkpoint = torch.load(full_path)
        model.load_state_dict(checkpoint)
        return model

    @classmethod
    def infer(cls, model, dataloader):
        """Run inference over *dataloader* and collect the logits.

        Moves the model and each batch to GPU, so a CUDA device is required.

        :return: list of per-sample logit tensors.
        """
        model.eval()
        infer_result = []
        with torch.no_grad():
            model.cuda()
            for _, input in enumerate(dataloader):
                if isinstance(input, list):
                    # Dataloaders that yield (data, target) pairs: keep only the data.
                    input = input[0]
                logits = model(input.cuda())
                if isinstance(logits, tuple):
                    # Some networks return auxiliary outputs; keep the primary one.
                    logits = logits[0]
                infer_result.extend(logits)
        return infer_result
| [
"1571856591@qq.com"
] | 1571856591@qq.com |
0343a12712af23f99051af1d1eb45efc8aa04b53 | 5dfa9dfb2d2d604f54de7020aed11642f03f1186 | /SLAC/dark_defects_offline/v0/validator_dark_defects_offline.py | 30eb49152670cb1873fc87d6cdb693baf4218fea | [
"BSD-2-Clause",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | lsst-camera-dh/harnessed-jobs | 49a9a65f3368771ff7b7b22caa94fc8f384681f4 | 352f48b70633b0f0e3faf941198edf1de85f4989 | refs/heads/master | 2021-03-19T16:57:36.199351 | 2019-03-10T21:18:46 | 2019-03-10T21:18:46 | 34,645,042 | 0 | 1 | null | 2018-04-03T23:37:34 | 2015-04-27T03:59:33 | Python | UTF-8 | Python | false | false | 1,050 | py | #!/usr/bin/env python
import lsst.eotest.sensor as sensorTest
import lcatr.schema
import siteUtils
import eotestUtils
# Identify the sensor under test and the dark-pixel mask file produced upstream.
sensor_id = siteUtils.getUnitId()
mask_file = '%s_dark_pixel_mask.fits' % sensor_id
# Stamp the mask file with standard LSST header keywords before registering it.
eotestUtils.addHeaderData(mask_file, LSST_NUM=sensor_id, TESTTYPE='SFLAT_500',
                          DATE=eotestUtils.utc_now_isoformat(),
                          CCD_MANU=siteUtils.getCcdVendor().upper())

results = [lcatr.schema.fileref.make(mask_file)]

# Read per-amplifier dark-defect counts from the eotest results file.
eotest_results = '%s_eotest_results.fits' % sensor_id
data = sensorTest.EOTestResults(eotest_results)
amps = data['AMP']
npixels = data['NUM_DARK_PIXELS']
ncolumns = data['NUM_DARK_COLUMNS']
# One schema entry per amplifier with its dark pixel/column counts.
for amp, npix, ncol in zip(amps, npixels, ncolumns):
    results.append(lcatr.schema.valid(lcatr.schema.get('dark_defects'),
                                      amp=amp,
                                      dark_pixels=npix,
                                      dark_columns=ncol))

# Record software versions, then persist and validate the results file.
results.append(siteUtils.packageVersions())

lcatr.schema.write_file(results)
lcatr.schema.validate_file()
| [
"jchiang@slac.stanford.edu"
] | jchiang@slac.stanford.edu |
46747fbc3c33b336048baf27aad12d4a044b8473 | f0d713996eb095bcdc701f3fab0a8110b8541cbb | /yfooETHj3sHoHTJsv_11.py | 3b80e0b8989222b1ece889e3f7396b901396c028 | [] | no_license | daniel-reich/turbo-robot | feda6c0523bb83ab8954b6d06302bfec5b16ebdf | a7a25c63097674c0a81675eed7e6b763785f1c41 | refs/heads/main | 2023-03-26T01:55:14.210264 | 2021-03-23T16:08:01 | 2021-03-23T16:08:01 | 350,773,815 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 366 | py | """
Create a function that returns `True` when `num1` is equal to `num2`;
otherwise return `False`.
### Examples
is_same_num(4, 8) ➞ False
is_same_num(2, 2) ➞ True
is_same_num(2, "2") ➞ False
### Notes
Don't forget to `return` the result.
"""
def is_same_num(num1, num2):
if num1==num2:
return True
else: return False
| [
"daniel.reich@danielreichs-MacBook-Pro.local"
] | daniel.reich@danielreichs-MacBook-Pro.local |
394530c218df75cb6d4205db5a36ae592e904fac | 768caee28ad4eddc7dd0b70b8f293854b25cf25b | /source/examples/mini-tutoriales/mipaquetepython/mipaquetepython/app.py | a3779dc5aadba97063a4ef9233bb278dbccbfc24 | [] | no_license | macagua/collective.spanishdocumentation | bb904cfaaf2e8151d4af18362f379f03a4351715 | a5638b87329dfe54746179bbf1848dad43d62012 | refs/heads/master | 2021-06-06T04:24:26.467552 | 2012-11-28T14:59:26 | 2012-11-28T14:59:26 | 2,941,330 | 2 | 2 | null | 2018-11-07T17:34:54 | 2011-12-08T16:27:56 | Python | UTF-8 | Python | false | false | 76 | py | var = raw_input("Introduzca alguna frase: ")
print "Usted introdujo: ", var
| [
"leonardocaballero@gmail.com"
] | leonardocaballero@gmail.com |
644841a27ffee6c9768e10bb6b4a3bba985e8167 | 7c01e9df3cd2375b756af2f7f25d5bcd24d65d17 | /save-msg | 56831f6c978d5043418cad7b36299d117a9d33b0 | [] | no_license | Seefooo/manage-commit-msg | 809427568599602b25d8782ee9400fe23982cfd7 | 87885d7706b3d0ef1be66752528c44f0614b474d | refs/heads/master | 2022-09-10T20:58:47.511164 | 2020-02-27T20:03:42 | 2020-02-27T20:03:42 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 679 | #!/usr/bin/env python3
import shutil
import sys
def main():
    """Save the previous commit message, commented out, into .git.

    Reads the commit-message file named by sys.argv[1], drops comment and
    blank lines, prefixes the rest with '# ', adds a header, and writes the
    result to .git/pre-commit-saved-commit-msg (truncating it when empty).
    """
    commented = []
    with open(sys.argv[1]) as src:
        for raw in src:
            # Skip existing comments and blank lines.
            if raw.startswith("#") or not raw.strip():
                continue
            commented.append(f"# {raw}")
    if commented:
        # Blank line + header before the saved message.
        commented.insert(0, "\n")
        commented.insert(1, "# Previous Commit Message:\n")
        if len(commented) > 3:
            commented.insert(3, "\n")  # add a new line after the title
    with open('.git/pre-commit-saved-commit-msg', 'w') as dst:
        dst.writelines(commented)
if __name__ == '__main__':
exit(main())
| [
"ygamal@sciencelogic.com"
] | ygamal@sciencelogic.com | |
e873276993669b26ecf9589aadea1be08a10112f | bc59bc0c84e4c9ecda50c94c5bb36adc31400bf4 | /mlflowx_test.py | 17c194fec785fc740840526b6d324399bb57a26d | [] | no_license | dum3ng/pipeline-test | 6e352cba5de513a7d497c9314b6e04aaae2684f7 | 3928f0f2828853c31c7b3f9362d2289353490791 | refs/heads/master | 2020-08-22T05:10:21.999403 | 2019-10-25T11:37:08 | 2019-10-25T11:37:08 | 216,324,077 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 71 | py | from mlflowx.pipeline import run_pipeline
run_pipeline('.', 'default') | [
"du.mang@malgo.cn"
] | du.mang@malgo.cn |
f7419fb394f5041e57a940c0fad0976acd25ff21 | 2f5d75911722e62c174f80a4d66030a8e0e393fd | /P.4/sensor.py | e2d64b0456dc21fea4b35d32f384bd6f417e8f5a | [] | no_license | AlbertoIbanez/LSE-2015-Alberto | be331e7df94d55fef738d7a0e0b75f95e38ab4e9 | 5c21c547e33733c91be8607aed07199dcb1eb1bd | refs/heads/master | 2016-08-05T05:26:49.613559 | 2015-04-22T13:17:53 | 2015-04-22T13:17:53 | 31,070,520 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,774 | py | import random
import socket
import time
import sys
class sensor:
    """Simulated environment sensor: fakes readings and reports them over TCP."""

    def __init__(self, sensorName, centralNodeIp, sleepTime, placement):
        self.sensorName = sensorName
        self.centralNodeIp = centralNodeIp
        self.sleepTime = sleepTime
        self.placement = placement
        # Last readings; refreshed by readData().
        self.temperature = 0.0
        self.luminosity = 0.0
        self.noise = 0.0
        self.socketInstance = socket.socket()

    def readTemperature(self):
        # Fake a temperature between 15.0 C and 45.0 C, one decimal place.
        self.temperature = round(15.0 + random.random() * 30, 1)

    def readLuminosity(self):
        # Fake a luminosity between 0.0 and 20.0, one decimal place.
        self.luminosity = round(0.0 + random.random() * 20, 1)

    def readNoise(self):
        # Fake a noise level between 20.0 dB and 110.0 dB, one decimal place.
        self.noise = round(20.0 + random.random() * 90, 1)

    def readData(self):
        # Refresh every simulated measurement.
        self.readTemperature()
        self.readLuminosity()
        self.readNoise()

    def sendData(self):
        # One short-lived TCP connection per report. The payload is a plain
        # comma-separated key/value string — the ad-hoc wire "protocol".
        self.socketInstance = socket.socket()
        self.socketInstance.connect((self.centralNodeIp, 9999))
        message = ",".join([
            "SensorName", str(self.sensorName),
            "SensorTemperature", str(self.temperature),
            "SensorLuminosity", str(self.luminosity),
            "SensorNoise", str(self.noise),
            "SensorType", "",
        ])
        self.socketInstance.send(message)
        self.socketInstance.close()

    def sleepSensor(self):
        time.sleep(self.sleepTime)

    def run(self):
        # Report forever: read, send, then sleep for the configured interval.
        while True:
            self.readData()
            self.sendData()
            self.sleepSensor()
if(len(sys.argv) == 5):
mySensor = sensor(sys.argv[1],str(sys.argv[2]),int(sys.argv[3]),str(sys.argv[4])) #sensorName, centralNodeIp, sleepTime, placement
mySensor.run()
else:
print "Invalid number of arguments"
| [
"alberto.ibanezflamarique@gmail.com"
] | alberto.ibanezflamarique@gmail.com |
42e8aa3fbab03891fdbdecdec7e5c1f68e211ce4 | 541209b715276738ff047f5b8de37f3e1a0b6a39 | /Doubler.py | 4d78d86d876071e33e4289efdd0d9434ec189bc7 | [] | no_license | binonguyen1090/PythonExercises | bef8914c93d88bcfe421edd4549749b34d7e00f2 | bb0056b20ba66fea700e7300e90625e87ef1d590 | refs/heads/master | 2020-08-29T18:16:01.873898 | 2019-11-26T04:16:49 | 2019-11-26T04:16:49 | 218,124,846 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 464 | py | # Write a method doubler(numbers) that takes an array of numbers and returns a new array
# where every element of the original array is multiplied by 2.
#
# print doubler([1, 2, 3, 4]) # => [2, 4, 6, 8]
# puts
# print doubler([7, 1, 8]) # => [14, 2, 16]
def doubler(arr):
    """Return a new list with every element of *arr* multiplied by 2.

    The input sequence is not modified. Replaces the non-idiomatic
    index-based loop with a list comprehension.
    """
    return [value * 2 for value in arr]
print (doubler([1, 2, 3, 4])) # => [2, 4, 6, 8]
print (doubler([7, 1, 8])) # => [14, 2, 16]
| [
"binonguyen1090@gmail.com"
] | binonguyen1090@gmail.com |
234660705b99c018421e1a94c25dbe952e2900e5 | dbd85f5f672133244c53e177debe81f3dd845d5b | /week01/task2/mytest/middlewares.py | 005532369dd4f7fded1cfb0f951f155de99a8cd0 | [] | no_license | amelaxie/Python001-class01 | 85ac8f5f954798eadb03ade84fe07a342e84527a | 7a934e3078bc8685101516c8e77f21980dc95d30 | refs/heads/master | 2022-12-09T04:19:59.345189 | 2020-09-06T14:29:44 | 2020-09-06T14:29:44 | 273,846,504 | 0 | 0 | null | 2020-06-21T06:16:08 | 2020-06-21T06:16:08 | null | UTF-8 | Python | false | false | 3,581 | py | # -*- coding: utf-8 -*-
# Define here the models for your spider middleware
#
# See documentation in:
# https://docs.scrapy.org/en/latest/topics/spider-middleware.html
from scrapy import signals
class MytestSpiderMiddleware:
    """Spider middleware — pass-through hooks from the Scrapy project template.

    Every hook forwards its input unchanged; subclass or edit to customise.
    """

    @classmethod
    def from_crawler(cls, crawler):
        # Scrapy entry point: build the middleware and subscribe to spider_opened.
        middleware = cls()
        crawler.signals.connect(middleware.spider_opened, signal=signals.spider_opened)
        return middleware

    def process_spider_input(self, response, spider):
        # Accept every response unchanged (None = continue processing).
        return None

    def process_spider_output(self, response, result, spider):
        # Forward the spider's items/requests untouched.
        yield from result

    def process_spider_exception(self, response, exception, spider):
        # No special handling; let Scrapy's defaults apply.
        pass

    def process_start_requests(self, start_requests, spider):
        # Hand the start requests through unchanged (requests only, no items).
        yield from start_requests

    def spider_opened(self, spider):
        spider.logger.info('Spider opened: %s' % spider.name)
class MytestDownloaderMiddleware:
    """Downloader middleware — pass-through hooks from the Scrapy project template.

    Every hook forwards its input unchanged; subclass or edit to customise.
    """

    @classmethod
    def from_crawler(cls, crawler):
        # Scrapy entry point: build the middleware and subscribe to spider_opened.
        middleware = cls()
        crawler.signals.connect(middleware.spider_opened, signal=signals.spider_opened)
        return middleware

    def process_request(self, request, spider):
        # None = continue processing this request through the remaining chain.
        return None

    def process_response(self, request, response, spider):
        # Pass the downloaded response through unchanged.
        return response

    def process_exception(self, request, exception, spider):
        # No recovery behaviour; defer to other middlewares / default handling.
        pass

    def spider_opened(self, spider):
        spider.logger.info('Spider opened: %s' % spider.name)
| [
"remotedesk@qq.com"
] | remotedesk@qq.com |
c0aab4d5da741f5199ba82dee2e82895cffe5344 | 3ff80a99a95b9c0e13d9452eaa37419335e846ed | /VgammaTuplizer_normalfixed/Ntuplizer/crabconfigs/crab_MC2017WJetsToQQ_HT600to800.py | eab1e046df130cae462f4e360f3d48f9652c9468 | [] | no_license | XL-Seb-Yan/WGammaProducer | 9a58a109c6a8429ce29c9d213864d32f8c33ff50 | 7d1edb4707b5eb60f9d5fe475b8ae347879372e6 | refs/heads/master | 2022-01-15T20:00:59.992103 | 2019-07-03T08:10:55 | 2019-07-03T08:10:55 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 961 | py | from CRABClient.UserUtilities import config, getUsernameFromSiteDB
config = config()
config.General.requestName = 'Wgamma2017_WJetsToQQ_HT600to800_%s'%"Jul18"
config.General.workArea = 'crab_jobs_WgammaMC_2017_%s'%"Jul18"
config.General.transferOutputs = True
config.General.transferLogs = True
config.JobType.pluginName = 'Analysis'
config.JobType.psetName = 'VgammaTuplizer/Ntuplizer/config_generic.py'
config.JobType.inputFiles=[]
config.JobType.sendExternalFolder = True
config.Data.inputDataset = '/WJetsToQQ_HT600to800_TuneCP5_13TeV-madgraphMLM-pythia8/RunIIFall17MiniAODv2-PU2017_12Apr2018_94X_mc2017_realistic_v14-v1/MINIAODSIM'
config.Data.inputDBS = 'global'
config.Data.splitting = 'LumiBased'
config.Data.unitsPerJob = 30
config.Data.outLFNDirBase = '/store/user/%s/' % (getUsernameFromSiteDB())
config.Data.publication = False
config.Data.outputDatasetTag = 'Wgamma2017_WJetsToQQ_HT600to800_%s'%"Jul18"
config.Site.storageSite = 'T3_US_FNALLPC'
| [
"xlyan0636@gmail.com"
] | xlyan0636@gmail.com |
be8933396f92ba4e0bbc0f721914a0ef71410726 | 20cf2cb73adfed63cf182fc12a09aa3aadc033c6 | /filter.py | ba1f301e7c35530bd36538e7e6db9a0ebf49052c | [] | no_license | arunkumar27-ank-tech/Python-Programs | 678ae558e8c141a6302e2705849c97258974c4eb | a56788057d1bf8848681e38eb569874d84db7337 | refs/heads/master | 2023-06-16T14:50:36.146381 | 2021-07-15T13:57:54 | 2021-07-15T13:57:54 | 386,305,015 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 238 | py | from functools import reduce
lst = [1, 2, 3, 4, 5, 6, 7, 8, 9]
evens = list(filter(lambda n: n % 2 == 0, lst))
doubles = list(map(lambda n: n+2, evens))
sum1 = reduce(lambda a, b: a+b, doubles)
print(evens)
print(doubles)
print(sum1)
| [
"arunkumar834428@gmail.com"
] | arunkumar834428@gmail.com |
f84e1c6f02ef74ae1af2ad57059953141012d950 | 957ffc377b411bab574739f0a0fcfa911bc7e877 | /mnist.py | 0536d2fb27d53b759ea536177c56249e580f7a5f | [] | no_license | wanpeng16/Wasserstein-PCA | bfe0369f70fd964bc21bfd002c772e65e89a41c8 | d627a8a1ea32c1f8208425d451db213435d49d0f | refs/heads/master | 2020-04-07T22:23:21.095728 | 2018-06-19T08:43:12 | 2018-06-19T08:43:12 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,075 | py | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Functions for downloading and reading MNIST data."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import gzip
import numpy as np
import scipy.io as sio
import scipy
import math
from six.moves import xrange # pylint: disable=redefined-builtin
import tensorflow as tf
import matplotlib.pyplot as plt
from tensorflow.contrib.learn.python.learn.datasets import base
from tensorflow.python.framework import dtypes
import os.path
SOURCE_URL = 'http://yann.lecun.com/exdb/mnist/'
def read_MNIST_dataset(size=1000, one_hot=False, dtype=dtypes.float64, reshape=True):
    """Load the first *size* MNIST training examples from the local files.

    Reads the raw idx-format image and label files that live in the
    ``datasets`` directory next to this module, decodes them with
    ``extract_images`` / ``extract_labels``, and returns the leading *size*
    examples of each.  The *dtype* and *reshape* arguments are accepted for
    interface compatibility but are not used in this function.

    Returns:
        (images, labels): images as a uint8 array of shape
        [size, rows, cols, 1]; labels as a 1-D array, or a one-hot matrix
        when ``one_hot`` is True.
    """
    base_dir = os.path.dirname(__file__)
    images_path = base_dir + '/datasets/train-images-idx3-ubyte'
    labels_path = base_dir + '/datasets/train-labels-idx1-ubyte'

    with open(images_path, 'rb') as stream:
        images = extract_images(stream)
    with open(labels_path, 'rb') as stream:
        labels = extract_labels(stream, one_hot=one_hot)

    # Keep only the first `size` examples of each array.
    return images[:size], labels[:size]
def read_USPS_dataset(sizes=[1000, 100, 100], one_hot=False, dtype=dtypes.float32, Lresize=None):
    """Load the USPS digit set from a local .mat file and split it.

    Splits the examples into train/validation/test chunks of lengths
    sizes[0], sizes[1], sizes[2] (taken consecutively) and wraps each chunk
    in a GrayscaleImageDataSet.  When Lresize is given, every 16x16 image is
    bilinearly resized to Lresize x Lresize first.

    NOTE(review): `sizes` is a mutable default argument; it is only read
    here so this is harmless, but `sizes=None` with a fallback would be the
    safer idiom.  `one_hot` is accepted but never used in this function.
    """
    IMAGES = 'datasets/USPS/usps.mat'
    usps_dataset = sio.loadmat(IMAGES)
    usps_images = usps_dataset['xtest']
    usps_labels = usps_dataset['ytest']
    # Labels in the .mat file are 1-based; shift them to 0-based class ids.
    usps_labels = usps_labels - 1
    if Lresize:
        print('resizing usps images (L=%d)' % Lresize)
        usps_images_resized = np.zeros((usps_images.shape[0], Lresize * Lresize))
        # Images are stored flat; view them as 16x16 before resizing.
        usps_images_rs = np.reshape(usps_images, (usps_images.shape[0], 16, 16))
        for i in xrange(usps_images.shape[0]):
            # NOTE(review): scipy.misc.imresize was removed in SciPy >= 1.3;
            # this code requires an older SciPy (or a port to PIL/skimage).
            image_resized = scipy.misc.imresize(usps_images_rs[i, :, :], size=(Lresize, Lresize), interp='bilinear',
                                                mode=None)
            usps_images_resized[i, :] = np.reshape(image_resized, (Lresize * Lresize))
        usps_images = usps_images_resized
    print('usps image shape:')
    print(usps_images.shape)
    # print(np.reshape(usps_images[100,:], (Lresize, Lresize)))
    # plt.imshow(np.reshape(usps_images[100,:], (Lresize, Lresize)))
    # plt.show()
    print('usps labels:')
    print(usps_labels)
    # Consecutive split boundaries: [0, N1) train, [N1, N2) validation,
    # [N2, N3) test.
    N1 = sizes[0]
    N2 = sizes[0] + sizes[1]
    N3 = sizes[0] + sizes[1] + sizes[2]
    train_images = usps_images[:N1]
    train_labels = usps_labels[:N1]
    validation_images = usps_images[N1:N2]
    validation_labels = usps_labels[N1:N2]
    test_images = usps_images[N2:N3]
    test_labels = usps_labels[N2:N3]
    # Images are already flat vectors here, so the datasets must not reshape.
    reshape = False
    train = GrayscaleImageDataSet(train_images, train_labels, dtype=dtype, reshape=reshape)
    validation = GrayscaleImageDataSet(validation_images, validation_labels, dtype=dtype, reshape=reshape)
    test = GrayscaleImageDataSet(test_images, test_labels, dtype=dtype, reshape=reshape)
    return train, validation, test
class GrayscaleImageDataSet(object):
    """In-memory dataset of square grayscale images with label helpers.

    Holds the images/labels arrays, a 2 x (L*L) grid of pixel coordinates in
    ``vectors``, and simple epoch-based batching state for classification
    training.
    """
    def __init__(self, images, labels, L=28, one_hot=False, dtype=dtypes.float64, reshape=True, whitening=False):
        """Construct a DataSet.

        `dtype` can be `uint8`, `float32` or `float64`; anything else raises
        TypeError.  When `reshape` is True, images of shape
        [num examples, rows, columns, 1] are flattened to
        [num examples, rows*columns].

        NOTE(review): `one_hot` and `whitening` are accepted but never used
        in this class, and the dtype rescaling to [0, 1] is commented out
        below, so pixel values are stored exactly as passed in.
        """
        self.L = L
        # d: dimensionality of the pixel-coordinate grid built below (2-D).
        self.d = 2
        x = np.arange(0, self.L, 1).astype(dtype=np.float64)
        y = np.arange(0, self.L, 1).astype(dtype=np.float64)
        xv, yv = np.meshgrid(x, y)
        # vectors: 2 x (L*L) array holding the (x, y) coordinate of every pixel.
        self.vectors = np.vstack((xv.ravel(), yv.ravel()))
        # Normalize the dtype argument and reject unsupported types early.
        dtype = dtypes.as_dtype(dtype).base_dtype
        if dtype not in (dtypes.uint8, dtypes.float32, dtypes.float64):
            raise TypeError('Invalid image dtype %r, expected uint8 or float32 or float64' % dtype)
        assert images.shape[0] == labels.shape[0], ('images.shape: %s labels.shape: %s' % (images.shape, labels.shape))
        self.num_examples = images.shape[0]
        print('self.num_examples = %d' % self.num_examples)
        # Convert shape from [num examples, rows, columns, depth]
        # to [num examples, rows*columns] (assuming depth == 1)
        if reshape:
            assert images.shape[3] == 1
            images = images.reshape(images.shape[0], images.shape[1] * images.shape[2])
        # if dtype == dtypes.float64:
        # Convert from [0, 255] -> [0.0, 1.0].
        # images = images.astype(np.float64)
        # images = np.multiply(images, 1.0 / 255.0)
        self.images = images
        self.labels = labels
        # Batching state: how many full passes completed and the cursor into
        # the current epoch.
        self.epochs_completed = 0
        self.index_in_epoch = 0
    def indices_for_label(self, label):
        # Positions of every example carrying `label`.
        return np.where(self.labels == label)[0]
    def features_for_label(self, label):
        # All image rows whose label equals `label`.
        I = self.indices_for_label(label)
        return self.images[I, :]
    def next_classif_batch(self, batch_size):
        """Return the next (images, labels) batch of size `batch_size`.

        When fewer than `batch_size` examples remain in the epoch, the
        cursor wraps to 0 and the whole dataset is reshuffled (images and
        labels with the same permutation) before the batch is taken.
        """
        batch_start = self.index_in_epoch
        if batch_start + batch_size > self.num_examples:
            batch_start = 0
        if batch_start == 0:
            # print('shuffle')
            perm0 = np.arange(self.num_examples)
            np.random.shuffle(perm0)
            self.images = self.images[perm0]
            self.labels = self.labels[perm0]
        images_batch = self.images[batch_start:(batch_start + batch_size), :]
        labels_batch = self.labels[batch_start:(batch_start + batch_size)]
        self.index_in_epoch = batch_start + batch_size
        return images_batch, labels_batch
def _read32(bytestream):
    """Read four bytes from *bytestream* and decode them as a big-endian uint32."""
    raw = bytestream.read(4)
    big_endian_u32 = np.dtype(np.uint32).newbyteorder('>')
    return np.frombuffer(raw, dtype=big_endian_u32)[0]
def extract_images(f):
    """Decode an MNIST idx3-ubyte image file into a 4-D uint8 array.

    Args:
        f: an open binary file object positioned at the start of the file.
    Returns:
        A uint8 numpy array of shape [num_images, rows, cols, 1].
    Raises:
        ValueError: if the file's magic number is not 2051.
    """
    print('Extracting', f.name)
    with f as bytestream:
        magic = _read32(bytestream)
        if magic != 2051:
            raise ValueError('Invalid magic number %d in MNIST image file: %s' %
                             (magic, f.name))
        count = _read32(bytestream)
        height = _read32(bytestream)
        width = _read32(bytestream)
        # All pixel bytes follow the header back-to-back.
        pixel_bytes = bytestream.read(height * width * count)
        pixels = np.frombuffer(pixel_bytes, dtype=np.uint8)
        return pixels.reshape(count, height, width, 1)
def dense_to_one_hot(labels_dense, num_classes):
    """Convert class labels from scalars to one-hot vectors.

    Args:
        labels_dense: 1-D array of integer class ids.
        num_classes: width of each one-hot row.
    Returns:
        A float array of shape [len(labels_dense), num_classes] with exactly
        one 1 per row.
    """
    count = labels_dense.shape[0]
    one_hot = np.zeros((count, num_classes))
    # Flat index of the hot entry for each row: row_start + class id.
    flat_positions = np.arange(count) * num_classes + labels_dense.ravel()
    one_hot.flat[flat_positions] = 1
    return one_hot
def extract_labels(f, one_hot=False, num_classes=10):
    """Decode an MNIST idx1-ubyte label file into a 1-D uint8 array.

    Args:
        f: an open binary file object positioned at the start of the file.
        one_hot: when True, return a [num_items, num_classes] one-hot matrix
            instead of the raw label vector.
        num_classes: width of the one-hot rows (used only when one_hot=True).
    Returns:
        A 1-D uint8 numpy array, or a 2-D one-hot matrix.
    Raises:
        ValueError: if the file's magic number is not 2049.
    """
    print('Extracting', f.name)
    with f as bytestream:
        magic = _read32(bytestream)
        if magic != 2049:
            raise ValueError('Invalid magic number %d in MNIST label file: %s' %
                             (magic, f.name))
        count = _read32(bytestream)
        # One byte per label follows the header.
        raw = bytestream.read(count)
        labels = np.frombuffer(raw, dtype=np.uint8)
        if one_hot:
            return dense_to_one_hot(labels, num_classes)
        return labels
| [
"vivienseguy@gmail.com"
] | vivienseguy@gmail.com |
c433d7fe29d312b80fbac7fc3888a4c7c7dd2223 | 39c861da8f362874baac3f7e4aab089b18125dab | /ghostwriter/modules/exceptions.py | be1d30a317dbe958adf73cba0a39823fd06cbd43 | [
"BSD-3-Clause"
] | permissive | chrismaddalena/Ghostwriter | 47cdc2111695e19335430326cdf4f880b728be22 | f197be35497ae97c6b90ba17a820ec04e4254c53 | refs/heads/master | 2022-07-09T02:14:12.382165 | 2022-06-07T23:19:15 | 2022-06-07T23:19:15 | 202,816,974 | 3 | 0 | BSD-3-Clause | 2022-03-09T21:07:37 | 2019-08-17T00:37:18 | Python | UTF-8 | Python | false | false | 761 | py | """This contains all of the custom exceptions for the Ghostwriter application."""
class MissingTemplate(Exception):
    """Raised when a report has no report template selected.

    **Attributes**

    ``message``
        Error message to be displayed
    """

    def __init__(self, message="No report template selected"):
        # Keep the text on the instance and also hand it to ``Exception`` so
        # the standard ``str(exc)`` rendering shows it.
        self.message = message
        super().__init__(message)
class InvalidFilterValue(Exception):
    """Raised when an invalid value is passed to a report template filter.

    **Attributes**

    ``message``
        Error message to be displayed
    """

    def __init__(self, message="Invalid value provided to filter"):
        # Store the text on the instance and feed it to ``Exception`` so the
        # default string form of the exception carries it.
        self.message = message
        super().__init__(message)
| [
"chris.maddalena@protonmail.com"
] | chris.maddalena@protonmail.com |
306f6c9833017af5befb5b3012a0ced97bc1dbe8 | daa963f18a7ea5bd963c48cebf0aaa4677ff6e72 | /api_project/settings.py | f6f8915b037a23479d19e379c1c8ad40925b44e4 | [] | no_license | sedefbostanci/garden_at_home | 72ad66032e762545e790bb3954f0eae59b91042c | 913cdcb9c778652981605d498b886de7f6c8365e | refs/heads/master | 2022-12-16T08:38:36.371858 | 2020-09-22T15:40:23 | 2020-09-22T15:40:23 | 288,391,495 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,743 | py | """
Django settings for api_project project.
Generated by 'django-admin startproject' using Django 3.0.3.
For more information on this file, see
https://docs.djangoproject.com/en/3.0/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.0/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.0/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): this key is committed to source control; rotate it and load
# it from the environment (e.g. os.environ) before any real deployment.
SECRET_KEY = '3hlyj=8#5cfk2-2z2g7p&gsz7yobxcca-v#74p=70ms75xk'
# SECURITY WARNING: don't run with debug turned on in production!
# NOTE(review): DEBUG=True exposes stack traces and settings to visitors.
DEBUG = True
ALLOWED_HOSTS = ["192.168.88.17", "127.0.0.1","sedefbostanci.pythonanywhere.com"]
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'users_new',
'device',
'userDevices',
'devicePlants',
'deviceSlots',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'api_project.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'api_project.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.0/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/3.0/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.0/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.0/howto/static-files/
STATIC_URL = '/static/'
REST_FRAMEWORK = {
'DEFAULT_AUTHENTICATION_CLASSES': [
'rest_framework.authentication.TokenAuthentication',
],
'DEFAULT_PERMISSION_CLASSES': [
'rest_framework.permissions.IsAuthenticated',
],
}
THIRD_PARTY_APPS = ['rest_framework','rest_framework.authtoken'] # this list shall contain many more of those useful apps and stuff.
INSTALLED_APPS += THIRD_PARTY_APPS # Boom.. the things goes skraa.. pop.. pop..
AUTH_USER_MODEL = 'users_new.CustomUser'
| [
"sedefbostanci.SB@gmail.com"
] | sedefbostanci.SB@gmail.com |
7810863a36f0e2655681cc40c069bec877371a85 | 2e042bc67b296c0e65441caaae08c1ada1a29d5e | /0x0A-python-inheritance/6-base_geometry.py | 39283225927fb79e9f0fbc96de52dcbbfd50b1b8 | [] | no_license | nhlshstr/holbertonschool-higher_level_programming | 2329a68bf86c070df0ac1048e7c090446de5e4fc | 68f1ea581207800d76fd9f6932209d787dd80fdb | refs/heads/master | 2020-12-22T05:28:30.756229 | 2020-05-15T02:28:08 | 2020-05-15T02:28:08 | 226,978,510 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 226 | py | #!/usr/bin/python3
""" Class with method area()"""
class BaseGeometry:
    """Base class for geometry objects.

    Subclasses are expected to override :meth:`area`.
    """

    def area(self):
        """Signal that area() must be implemented by a subclass.

        Raises:
            Exception: always, with the message "area() is not implemented".
        """
        # Bug fix: the original *returned* the Exception instance instead of
        # raising it, so callers never saw the intended error.
        raise Exception("area() is not implemented")
| [
"nhlshstr@gmail.com"
] | nhlshstr@gmail.com |
c1ecba608b38e7e151190d9428b136119b3a8902 | 3b9b4049a8e7d38b49e07bb752780b2f1d792851 | /src/third_party/skia/gyp/icu.gyp | 4a985032c26d61b2145ef092b2b838626d4a11de | [
"LGPL-2.0-or-later",
"GPL-1.0-or-later",
"MIT",
"Apache-2.0",
"BSD-3-Clause",
"LicenseRef-scancode-proprietary-license",
"LicenseRef-scancode-public-domain"
] | permissive | webosce/chromium53 | f8e745e91363586aee9620c609aacf15b3261540 | 9171447efcf0bb393d41d1dc877c7c13c46d8e38 | refs/heads/webosce | 2020-03-26T23:08:14.416858 | 2018-08-23T08:35:17 | 2018-09-20T14:25:18 | 145,513,343 | 0 | 2 | Apache-2.0 | 2019-08-21T22:44:55 | 2018-08-21T05:52:31 | null | UTF-8 | Python | false | false | 3,713 | gyp | # Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
{
'includes': [
'common_variables.gypi',
],
'variables': {
'component%': 'static_library',
'icu_directory': '../third_party/externals/icu'
},
'targets': [
{
'target_name': 'icuuc',
'type': '<(component)',
'sources': [
'<!@(python find.py ../third_party/externals/icu/source/common "*.c*")'
],
'defines': [
'U_COMMON_IMPLEMENTATION',
'U_HIDE_DATA_SYMBOL',
'U_USING_ICU_NAMESPACE=0',
'HAVE_DLOPEN=0',
'UCONFIG_NO_NON_HTML5_CONVERSION=1',
],
'include_dirs': [ '<(icu_directory)/source/common', ],
'direct_dependent_settings': {
'defines': [
'U_USING_ICU_NAMESPACE=0',
'U_ENABLE_DYLOAD=0',
],
'include_dirs': [ '<(icu_directory)/source/common', ],
'conditions': [
[
'component=="static_library"', {
'defines': [
'U_STATIC_IMPLEMENTATION',
],
}
],
],
},
'cflags': [ '-w' ],
'cflags_cc': [ '-frtti', ],
'conditions': [
[
'component=="static_library"', {
'defines': [ 'U_STATIC_IMPLEMENTATION', ],
}
],
[
'OS == "win"', {
'sources': [
'<(icu_directory)/source/stubdata/stubdata.c',
],
'copies': [
{
'destination': '<(PRODUCT_DIR)',
'files': [ '<(icu_directory)/windows/icudt.dll', ],
},
],
'msvs_disabled_warnings': [4005, 4068, 4244, 4355, 4996, 4267],
'msvs_settings': {
'VCCLCompilerTool': {
'AdditionalOptions': [ '/EHsc', ],
},
},
'configurations': {
'Debug': {
'msvs_settings': {
'VCCLCompilerTool': {
'RuntimeTypeInfo': 'true', # /GR
},
},
},
'Release': {
'msvs_settings': {
'VCCLCompilerTool': {
'RuntimeTypeInfo': 'true', # /GR
},
},
},
},
'all_dependent_settings': {
'msvs_settings': {
'VCLinkerTool': {
'AdditionalDependencies': [
'advapi32.lib',
],
},
},
},
}
],
[
'OS == "win" and skia_clang_build', {
'msvs_settings': {
'VCCLCompilerTool': {
'AdditionalOptions': [
# See http://bugs.icu-project.org/trac/ticket/11122
'-Wno-inline-new-delete',
'-Wno-implicit-exception-spec-mismatch',
],
},
},
}
],
[
'skia_os == "android"', {
'sources': [ '<(icu_directory)/android/icudtl_dat.S', ],
}
],
[
'skia_os == "linux"', {
'sources': [ '<(icu_directory)/linux/icudtl_dat.S', ],
}
],
[
'skia_os == "mac"', {
'sources': [ '<(icu_directory)/mac/icudtl_dat.S', ],
'xcode_settings': {
'GCC_ENABLE_CPP_RTTI': 'YES', # -frtti
'WARNING_CFLAGS': [ '-w' ],
},
}
],
], # conditions
},
], # targets
}
| [
"changhyeok.bae@lge.com"
] | changhyeok.bae@lge.com |
5bd17d058acc2ae9b0e419a939ef83d286a002a5 | c752eb3a6cbf6a3d60e602fade7cd573cedee7a6 | /getSheetsStatus.py | bd530dfbc05bcde6732b13d355c475b614185fe6 | [
"MIT"
] | permissive | mkortekaas/RainBypass | 32f672462a7b6a23851b9174452a823880e00e1c | b00e783a1004288b7ca9a51f0dcbeb4da156fade | refs/heads/master | 2021-01-18T22:59:52.382970 | 2019-08-12T15:31:51 | 2019-08-12T15:31:51 | 11,034,938 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 3,035 | py | #!/usr/bin/env python
#
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# [START sheets_quickstart]
from __future__ import print_function
import pickle
import os.path
from googleapiclient.discovery import build
from google_auth_oauthlib.flow import InstalledAppFlow
from google.auth.transport.requests import Request
# If modifying these scopes, delete the file token.pickle.
SCOPES = ['https://www.googleapis.com/auth/spreadsheets.readonly']
# The ID and range of a sample spreadsheet.
SAMPLE_SPREADSHEET_ID = '13u6jXHs5JQTGIUQP-SeFxQo7_cb5y7f-RGzRtKa8NXo'
SAMPLE_RANGE_NAME = 'Sheet1!A1:A1'
PICKLE_FILE = '/home/pi/token.pickle'
CRED_FILE = '/home/pi/credentials.json'
def main():
    """Read the sprinkler status cell from the control spreadsheet and act on it.

    Loads cached Google credentials from PICKLE_FILE (refreshing them, or
    re-running the OAuth flow from CRED_FILE, when they are missing or
    invalid), reads SAMPLE_RANGE_NAME from SAMPLE_SPREADSHEET_ID, and shells
    out to setSprinkler.pl to enable ("Enabled") or disable ("Disabled") the
    sprinkler.
    """
    creds = None
    # The pickle file stores the user's access and refresh tokens; it is
    # created automatically when the authorization flow first completes.
    if os.path.exists(PICKLE_FILE):
        with open(PICKLE_FILE, 'rb') as token:
            creds = pickle.load(token)
    # If there are no (valid) credentials available, let the user log in.
    if not creds or not creds.valid:
        if creds and creds.expired and creds.refresh_token:
            creds.refresh(Request())
        else:
            flow = InstalledAppFlow.from_client_secrets_file(
                CRED_FILE, SCOPES)
            creds = flow.run_local_server(port=0)
        # Save the credentials for the next run.
        # Bug fix: write to PICKLE_FILE (the same absolute path we load from)
        # instead of a relative 'token.pickle' in the current working
        # directory, so refreshed tokens persist when run from cron.
        with open(PICKLE_FILE, 'wb') as token:
            pickle.dump(creds, token)
    service = build('sheets', 'v4', credentials=creds)
    # Call the Sheets API and fetch the status cell(s).
    sheet = service.spreadsheets()
    result = sheet.values().get(spreadsheetId=SAMPLE_SPREADSHEET_ID,
                                range=SAMPLE_RANGE_NAME).execute()
    values = result.get('values', [])
    if not values:
        print('No data found.')
    else:
        for row in values:
            status = row[0]
            if status == "Enabled":
                print("turning on sprinkler")
                os.system('/home/pi/git/RainBypass/setSprinkler.pl -e')
            elif status == "Disabled":
                print("turn off")
                os.system('/home/pi/git/RainBypass/setSprinkler.pl -d')
            else:
                # Any other cell value is reported but otherwise ignored.
                print("something different...")
# print('%s' % (row[0]))
if __name__ == '__main__':
main()
# [END sheets_quickstart]
| [
"mark@kortekaas.com"
] | mark@kortekaas.com |
2b5ef836da37b1cb9ee9a6a289de15d8f0aed26a | 8a4cc536a1a9704f5cd095ea3a89dc6613592d67 | /python/57.py | 2da2c5fffc0171c8c83574aebb833a3dc4c1e6e4 | [] | no_license | Edwin-DEV-py/trabajo-30-abril | 07cb8067be0745672f7d8d78c5c092ec34578da8 | 4dfbb6cae93005ed5c7d2d5379378c409f3eca27 | refs/heads/master | 2023-05-25T22:59:52.623818 | 2021-03-29T01:36:39 | 2021-03-29T01:36:39 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 67 | py | #ejercicio 57
# Read a string from the user and print it reversed (slice with step -1).
cadena = str(input("escriba="))
print((cadena)[::-1])
"nicolas.tarazona.morales@gmail.com"
] | nicolas.tarazona.morales@gmail.com |
dcedde4b907c8e0439a37501b304f4fd5677bd95 | c131e2b20b44040d388b40835dee43ba47d5b367 | /Python/KpiToolBox/mysql/select.py | 32446ae2cad23f02029fbd2522e8ac71c6386d87 | [] | no_license | kwanghyun/Showcases | 9b37b94626e1e98390dc9798283bcfd66094ca43 | 778d65b1313919f0034e3612c3f53fc0b3f0b001 | refs/heads/master | 2020-05-21T23:34:03.876453 | 2017-01-26T18:19:51 | 2017-01-26T18:19:51 | 19,902,271 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 811 | py | #!/usr/bin/python
import MySQLdb

# Connect to the local MySQL server, schema "test".
# NOTE(review): credentials are hard-coded; move them to configuration
# before using this outside a tutorial setting.
db = MySQLdb.connect("localhost", "root", "root", "test")

# Cursor used to run the query and fetch rows.
cursor = db.cursor()

# Select every employee whose income exceeds 1000.
sql = "SELECT * FROM EMPLOYEE \
       WHERE INCOME > '%d'" % (1000)

try:
    # Run the query and pull all matching rows into memory.
    cursor.execute(sql)
    results = cursor.fetchall()
    for row in results:
        fname = row[0]
        lname = row[1]
        age = row[2]
        sex = row[3]
        income = row[4]
        # Echo each fetched employee record.
        print("fname=%s,lname=%s,age=%d,sex=%s,income=%d" %
              (fname, lname, age, sex, income))
except:
    # NOTE(review): a bare except hides the real error; kept to preserve the
    # script's original behavior.  The "fecth" typo is part of the original
    # user-visible message and is preserved deliberately.
    print("Error: unable to fecth data")

# Disconnect from the server.
db.close()
"kwjang@KWJANG-WS01.cisco.com"
] | kwjang@KWJANG-WS01.cisco.com |
85ff0e08501d35318743b11b853830f6643c962f | f0a715bab7c810d2728cd8ab0c4b673316d7f0ae | /venv/bin/rst2html4.py | 132cd58b76ffa9684bbd03db03f0234bf8a54a33 | [] | no_license | bopopescu/topic-modeling-with-LDA | 7eab17db4a4dae8b25b05bc2a1c9e2f0b8b7d323 | 89d272ab97bdc4024714dbf344ef8c1620a55d9e | refs/heads/master | 2022-11-22T05:18:55.955376 | 2018-09-29T11:10:07 | 2018-09-29T11:10:07 | 282,549,848 | 0 | 0 | null | 2020-07-26T00:48:46 | 2020-07-26T00:48:45 | null | UTF-8 | Python | false | false | 763 | py | #!/home/kasumi/PycharmProjects/TopicModelingWithLDA/venv/bin/python
# $Id: rst2html4.py 7994 2016-12-10 17:41:45Z milde $
# Author: David Goodger <goodger@python.org>
# Copyright: This module has been placed in the public domain.
"""
A minimal front end to the Docutils Publisher, producing (X)HTML.
The output conforms to XHTML 1.0 transitional
and almost to HTML 4.01 transitional (except for closing empty tags).
"""
try:
    import locale
    # Adopt the user's default locale for locale-aware text handling.
    locale.setlocale(locale.LC_ALL, '')
except:
    # Locale setup is best-effort; fall back to the default locale silently.
    pass
from docutils.core import publish_cmdline, default_description
# Command-line front end: parse reStructuredText from stdin/argv and emit
# XHTML 1.0 transitional via the 'html4' writer.
description = ('Generates (X)HTML documents from standalone reStructuredText '
               'sources. ' + default_description)
publish_cmdline(writer_name='html4', description=description)
| [
"kasumisanchika@gmail.com"
] | kasumisanchika@gmail.com |
1e1d5ccfdb2caa614c32a09ee07729393624758c | 4c672231bd8b7c23bd5773ef990404cc3146712a | /shipmaster/server/celery.py | 8e24f72855c7e156d14e3e37290140aeabcf16b0 | [
"BSD-3-Clause"
] | permissive | AzureCloudMonk/shipmaster | b0e82f93308ecc829e6f6b3cb3156f11dcfbadd4 | cf596be7ea689c26c4bf47acb67dfd15169d3c46 | refs/heads/master | 2020-11-30T01:51:32.010852 | 2018-03-03T21:47:17 | 2018-03-03T21:47:17 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 270 | py | import os
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'shipmaster.server.settings')
from celery import Celery
from django.conf import settings
app = Celery('shipmaster.server')
app.config_from_object(settings)
app.autodiscover_tasks(lambda: settings.INSTALLED_APPS)
| [
"lex@damoti.com"
] | lex@damoti.com |
af515b669ae560887ad5dc37d62f08810616b261 | ee0cd5c10058a0fc2e29b27139b89706755ca53e | /html/shell/firefox.sh | 18f27d1faa6545cd843bcc49ea8e1b4821958b20 | [] | no_license | meetann/finalcloudproject | 44ac1e36b27cedfc43f6f24035f8477f876709c9 | d8bce6f4fe18d4155900caf0f63eae737ae25309 | refs/heads/master | 2020-06-16T10:07:53.544074 | 2019-07-06T11:57:10 | 2019-07-06T11:57:10 | 195,534,054 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 114 | sh | #!/usr/bin/python
import os
# Launch Firefox on the remote host 192.168.1.5 over X-forwarded SSH.
# NOTE(review): the password is passed on the sshpass command line (visible
# in `ps` output and shell history); prefer SSH key authentication.
os.system('sshpass -p t ssh -X -o StrictHostKeyChecking=no root@192.168.1.5 firefox')
| [
"root@localhost.localdomain"
] | root@localhost.localdomain |
12cb92ef2e5e7234c3018be90e228a46961390dc | 3f3187918934bf08741ee398c82016142bdc8e34 | /test_es.py | 5984794f914a264571bb028a10e1cab0df6d045d | [] | no_license | satish107/reference | 381f9bce44eab622aefb28665a293505299b4ca7 | 7d9cb9c733be23ecb62cfd1afc25b9f4921b4e70 | refs/heads/master | 2023-04-13T07:07:21.557738 | 2023-01-06T07:16:40 | 2023-01-06T07:16:40 | 284,040,069 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10 | py | test_es.py | [
"satish.kumar@careers360.com"
] | satish.kumar@careers360.com |
c4bfbdba8ddbc1deef38d0aae1718ac01b3d20a6 | c7b622d1ee634ac02817546f0a250b4668f19f57 | /con2xyz.py | dba257c0f24cbdbd271046ba84cceebe88507591 | [] | no_license | erdeq-upenn/vasp_tools | 1f7003a8b7d4a962d9e14ca6b5328b215e18cda6 | f62c64bde01305dabbfdbf23cefb3f5f03fe29e3 | refs/heads/master | 2020-04-24T01:25:14.704311 | 2019-02-20T04:15:13 | 2019-02-20T04:15:13 | 171,597,584 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,830 | py | # only works for cubic structure
import sys
class Atom:
    """One atom: a chemical symbol plus x, y, z coordinates."""

    def __init__(self, symbol, xyz):
        self.symbol = symbol
        # Take exactly the first three entries of the coordinate sequence.
        self.x, self.y, self.z = xyz[0], xyz[1], xyz[2]

    def __str__(self):
        # Tab-separated symbol followed by three fixed-width coordinates.
        return '%s\t%16.12f\t%16.12f\t%16.12f' % (self.symbol, self.x, self.y, self.z)
# Read a POSCAR-style file named on the command line, replicate its unit
# cell into an nx * ny * nz supercell, and print the atoms in xyz format.
# (Python 2 script; species list and counts are fixed to Li/Mn/O.)
ATOMS = ["Li", "Mn", "O"]
NATOMS = [0, 0, 0]
f = open(sys.argv[1])
lines = f.readlines()
f.close()
# lines[1] is the lattice scaling factor (read but not applied below).
latt_scale = float(lines[1])
latt_vec = [[0,0,0], [0,0,0],[0,0,0]]
n = 0
# lines[2:5] hold the three lattice vectors, one per line.
for nline in range(2,5):
    line = lines[nline]
    sline = line.split()
    for i in range(3):
        latt_vec[n][i] = float(sline[i])
    n = n + 1
# lines[5] lists how many atoms of each species follow.
line = lines[5]
sline = line.split()
for i in range(len(sline)):
    NATOMS[i] = int(sline[i])
# Atom coordinates start at lines[7]; lines[6] (presumably the
# Direct/Cartesian mode line) is skipped -- TODO confirm against input files.
nline = 7
ucell = []
for na in range(len(NATOMS)):
    for ia in range(NATOMS[na]):
        line = lines[nline]
        sline = line.split()
        xyz = [0,0,0]
        for i in range(3):
            xyz[i] = float(sline[i])
        ucell.append(Atom(ATOMS[na],xyz))
        nline = nline + 1
#make supercell
# Replicate the cell by shifting fractional coordinates by whole-cell counts.
scell = []
nx = int(sys.argv[2])
ny = int(sys.argv[3])
nz = int(sys.argv[4])
for ix in range(nx):
    for iy in range(ny):
        for iz in range(nz):
            for atom in ucell:
                x = ix+atom.x
                y = iy+atom.y
                z = iz+atom.z
                scell.append(Atom(atom.symbol,[x,y,z]))
#transform from fractional to cartesion
#for atom in scell:
#    x = atom.x * latt_vec[0][0]
#    y = atom.y * latt_vec[1][1]
#    z = atom.z * latt_vec[2][2]
#    atom.x = x
#    atom.y = y
#    atom.z = z
#output the atoms in xyz format
# Only the diagonal lattice entries are printed, so this output is valid for
# cubic cells only (as the header comment of this file states).
print len(scell)
print latt_vec[0][0]*nx, latt_vec[1][1]*ny, latt_vec[2][2]*nz, "Cubic,lattice vectors"
for atom in scell:
    print atom
| [
"noreply@github.com"
] | noreply@github.com |
bf77466fc9d42438623ab91fe345fb7f007eef5d | cca70e45645d5b96f98b1328833d5b4ebb1c882e | /P20/P06.py | cf4525656b07a2c7601c33937201708a72cf69c6 | [] | no_license | webturing/Python3Programming_19DS12 | 9613a9808407b6abef0bc89ad8f74fc3920e789f | 5bbc1e10cec0ebf7d5dfb415a9d4bb07ce0b32ca | refs/heads/master | 2020-08-01T10:23:09.474316 | 2019-12-27T11:52:34 | 2019-12-27T11:52:34 | 210,964,665 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 951 | py | '''
赵、钱、孙、李、周五人围着一张圆桌吃饭。饭后,周回忆说:“吃饭时,赵坐在钱旁边,钱的左边是孙或李”;李回忆说:“钱坐在孙左边,我挨着孙坐”。
结果他们一句也没有说对。请问,他们在怎样坐的?
'''
def left(a, b):
    """Return True when seat *a* is immediately to the left of seat *b*
    around a round table of five seats (seat 5 wraps back to seat 1)."""
    if a + 1 == b:
        return True
    return a == 5 and b == 1


def right(a, b):
    """Return True when seat *a* is immediately to the right of seat *b*."""
    return left(b, a)


def adj(a, b):
    """Return True when seats *a* and *b* neighbour each other in either direction."""
    return left(a, b) or left(b, a)
# Brute-force search for the seating.  Zhao is pinned to seat 1 (the table
# is round, so one seat can be fixed without losing solutions); Qian, Sun
# and Li try seats 2-5, and Zhou takes the leftover seat (1+2+3+4+5 = 15).
# Every recalled statement must be FALSE, so any arrangement in which one of
# the claimed relations holds is rejected.
zhao, qian, sun, li, zhou = 1, 1, 1, 1, 1
for qian in range(2, 6):
    for sun in range(2, 6):
        if sun == qian: continue
        for li in range(2, 6):
            if li == qian or li == sun:
                continue
            zhou = 15 - zhao - qian - sun - li
            # Zhou's (false) claims: Zhao sits next to Qian; Sun or Li is on
            # Qian's left (left() encodes the author's orientation choice).
            if adj(zhao, qian) or left(qian, sun) or left(qian, li):
                continue
            # Li's (false) claims: Qian sits to Sun's left; Li sits next to Sun.
            if left(sun, qian) or adj(sun, li):
                continue
            print("%d %d %d %d %d" % (zhao, qian, sun, li, zhou))
| [
"zj@webturing.com"
] | zj@webturing.com |
3621c26d643b968526ee66b6ca84b611670b510a | f75b15158c10291c79f07c312114ec7f974086e0 | /catkin_ws/build/rosserial/rosserial_client/catkin_generated/pkg.develspace.context.pc.py | d232cc65b6b7a9b7e336428470972d1230b15c04 | [] | no_license | nag92/Omnibot | fb1b79dfb0a093b5d00077593d31cdd53ca8c585 | 20ba63ac3f3a1cafd309de11214092d22a4a68ed | refs/heads/master | 2020-03-21T21:51:36.577330 | 2016-11-23T18:28:59 | 2016-11-23T18:28:59 | 74,600,165 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 404 | py | # generated from catkin/cmake/template/pkg.context.pc.in
CATKIN_PACKAGE_PREFIX = ""
PROJECT_PKG_CONFIG_INCLUDE_DIRS = "".split(';') if "" != "" else []
PROJECT_CATKIN_DEPENDS = "".replace(';', ' ')
PKG_CONFIG_LIBRARIES_WITH_PREFIX = "".split(';') if "" != "" else []
PROJECT_NAME = "rosserial_client"
PROJECT_SPACE_DIR = "/home/nathaniel/Documents/Omnibot/code/catkin_ws/devel"
PROJECT_VERSION = "0.7.1"
| [
"ngoldfar@stevens.edu"
] | ngoldfar@stevens.edu |
f63b2195ee52616587fc6164d332acc72661dd95 | 58cefea9ef21d4822b692ce89471a657bcaa87eb | /tests/system/item_test.py | 3d19bf3b76bc9c3891e0d7ccaebd971099b8c9e4 | [] | no_license | py-guy/test-repo | d1d98d08412e8c6e38d4180987c1d88d6183ff66 | 46e133fad6875e2913ffccdc665ece9db5863eb1 | refs/heads/master | 2020-03-27T11:12:24.303338 | 2018-08-28T17:56:57 | 2018-08-28T17:56:57 | 146,472,029 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,817 | py | from models.store import StoreModel
from models.user import UserModel
from models.item import ItemModel
from tests.base_test import BaseTest
import json
class ItemTest(BaseTest):
    """System tests for the /item, /items REST endpoints.

    Each test runs against a fresh app/database (provided by BaseTest's
    setUp); setUp below also registers a user and obtains a JWT so that
    authenticated requests can send an Authorization header.
    """
    def setUp(self):
        # Create a user and fetch a JWT access token for authorized requests.
        super(ItemTest, self).setUp()
        with self.app() as client:
            with self.app_context():
                UserModel('test', '1234').save_to_db()
                auth_request = client.post('/auth',
                                           data=json.dumps({'username': 'test', 'password': '1234'}),
                                           headers={'Content-Type': 'application/json'})
                auth_token = json.loads(auth_request.data)['access_token']
                self.access_token = 'JWT ' + auth_token
    def test_get_item_no_auth(self):
        # GET without an Authorization header must be rejected with 401.
        with self.app() as client:
            with self.app_context():
                resp = client.get("/item/test")
                self.assertEqual(resp.status_code, 401)
    def test_get_item_not_found(self):
        # Authorized GET of a nonexistent item returns 404.
        with self.app() as client:
            with self.app_context():
                header = {'Authorization': self.access_token}
                resp = client.get('/item/test', headers=header)
                self.assertEqual(resp.status_code, 404)
    def test_get_item(self):
        # Authorized GET of an existing item returns 200.
        with self.app() as client:
            with self.app_context():
                StoreModel('test').save_to_db()
                ItemModel('test', 19, 1).save_to_db()
                header = {'Authorization': self.access_token}
                resp = client.get('/item/test', headers=header)
                self.assertEqual(resp.status_code, 200)
    def test_delete_item(self):
        # Authorized DELETE returns 200 with a confirmation message.
        with self.app() as client:
            with self.app_context():
                StoreModel('test').save_to_db()
                ItemModel('test', 19, 1).save_to_db()
                header = {'Authorization': self.access_token}
                resp = client.delete('/item/test', headers=header)
                self.assertEqual(resp.status_code, 200)
                self.assertDictEqual({'message': 'Item deleted'},
                                     json.loads(resp.data))
    def test_create_item(self):
        # POST of a new item returns 201 with the item's JSON body.
        with self.app() as client:
            with self.app_context():
                StoreModel('test').save_to_db()
                resp = client.post('/item/test', data={'price': 19, 'store_id':1})
                self.assertEqual(resp.status_code, 201)
                self.assertDictEqual({'name': 'test', 'price': 19},
                                     json.loads(resp.data))
    def test_create_duplicate_item(self):
        # POST of an item that already exists returns 400 with an error.
        with self.app() as client:
            with self.app_context():
                StoreModel('test').save_to_db()
                ItemModel('test', 19, 1).save_to_db()
                resp = client.post('/item/test', data={'price': 19, 'store_id':1})
                self.assertEqual(resp.status_code, 400)
                self.assertDictEqual({'message': "An item with name 'test' already exists."},
                                     json.loads(resp.data))
    def test_put_item(self):
        # PUT of a nonexistent item creates it (upsert) and returns 200.
        with self.app() as client:
            with self.app_context():
                StoreModel('test').save_to_db()
                #ItemModel('test', 19, 1).save_to_db()
                resp = client.put('/item/test', data={'price': 19, 'store_id':1})
                self.assertEqual(resp.status_code, 200)
                self.assertDictEqual({'name': 'test', 'price': 19},
                                     json.loads(resp.data))
                # verify that the item exists in the db
                self.assertEqual(ItemModel.find_by_name('test').price, 19)
    def test_put_update_item(self):
        # PUT of an existing item updates its price in the database.
        with self.app() as client:
            with self.app_context():
                StoreModel('test').save_to_db()
                ItemModel('test', 19, 1).save_to_db()
                resp = client.put('/item/test', data={'price': 21, 'store_id':1})
                self.assertEqual(ItemModel.find_by_name('test').price, 21)
                self.assertEqual(resp.status_code, 200)
                self.assertDictEqual({'name': 'test', 'price': 21},
                                     json.loads(resp.data))
    def test_item_list(self):
        # GET /items lists every stored item (no auth required here).
        with self.app() as client:
            with self.app_context():
                StoreModel('test').save_to_db()
                ItemModel('test', 19, 1).save_to_db()
                ItemModel('test2', 21, 1).save_to_db()
                resp = client.get('/items')
                self.assertEqual(resp.status_code, 200)
                self.assertDictEqual({'items': [{'name': 'test', 'price': 19},
                                                {'name': 'test2', 'price': 21}]},
                                     json.loads(resp.data))
"youssef.bezrati@gmail.com"
] | youssef.bezrati@gmail.com |
0b048e3eb17caaa260995c929cb0ae618afc02f9 | 81dd984a9f287e1235e50ccc3461f19af48190a4 | /__ref/retina_coco/test.py | d0e866e6718cee653e76cdfd4ebf1c66c0bc3dd3 | [
"MIT"
] | permissive | CVML-Detection/Detection_Party | ca6939e70aa0959130b9d62459f8aad8b1a4e38d | b1e6a32748d9771584a40958e72c1d77560327c8 | refs/heads/master | 2023-03-12T00:02:14.559236 | 2021-03-03T04:08:03 | 2021-03-03T04:08:03 | 333,676,431 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,658 | py | import os
import time
import torch
from utils import detect
from evaluator import Evaluator
from config import device, device_ids
def test(epoch, vis, test_loader, model, criterion, coder, opts):
    """Evaluate one saved checkpoint on the held-out set and report mAP.

    Loads ``<opts.save_path>/<opts.save_file_name>.<epoch>.pth.tar`` into
    ``model``, runs a no-grad pass over ``test_loader`` accumulating the
    criterion loss, feeds every decoded prediction to an ``Evaluator``,
    and finally plots mean loss / mAP to visdom if ``vis`` is given.

    Args:
        epoch: checkpoint index to load and evaluate.
        vis: visdom handle for plotting, or None to skip plotting.
        test_loader: DataLoader yielding at least (images, boxes, labels);
            for VOC it additionally yields (img_name, img_info) per batch.
        model: detection network; its weights are overwritten from the checkpoint.
        criterion: loss callable returning (total_loss, (loc_loss, cls_loss)).
        coder: box coder passed through to ``detect`` for decoding predictions.
        opts: namespace with save_path, save_file_name, data_type, conf_thres.
    """
    # ---------- load checkpoint ----------
    print('Validation of epoch [{}]'.format(epoch))
    model.eval()
    check_point = torch.load(os.path.join(opts.save_path, opts.save_file_name) + '.{}.pth.tar'.format(epoch),
                             map_location=device)
    state_dict = check_point['model_state_dict']
    model.load_state_dict(state_dict)
    tic = time.time()
    sum_loss = 0
    # The COCO dataset wrapper exposes a `.coco` attribute; VOC's does not.
    is_coco = hasattr(test_loader.dataset, 'coco')  # if True the set is COCO else VOC
    if is_coco:
        print('COCO dataset evaluation...')
    else:
        print('VOC dataset evaluation...')
    evaluator = Evaluator(data_type=opts.data_type)
    with torch.no_grad():
        for idx, datas in enumerate(test_loader):
            images = datas[0]
            boxes = datas[1]
            labels = datas[2]
            # ---------- move batch to the configured device ----------
            images = images.to(device)
            boxes = [b.to(device) for b in boxes]
            labels = [l.to(device) for l in labels]
            # ---------- forward pass + loss ----------
            pred = model(images)
            loss, (loc, cls) = criterion(pred, boxes, labels)
            sum_loss += loss.item()
            # ---------- decode predictions for mAP evaluation ----------
            pred_boxes, pred_labels, pred_scores = detect(pred=pred,
                                                          coder=coder,
                                                          opts=opts)
            if opts.data_type == 'voc':
                # VOC batches carry the image name and metadata directly.
                img_name = datas[3][0]
                img_info = datas[4][0]
                info = (pred_boxes, pred_labels, pred_scores, img_name, img_info)
            elif opts.data_type == 'coco':
                # COCO metadata is looked up from the dataset by batch index
                # (valid because the loader is unshuffled with batch_size=1).
                img_id = test_loader.dataset.img_id[idx]
                img_info = test_loader.dataset.coco.loadImgs(ids=img_id)[0]
                coco_ids = test_loader.dataset.coco_ids
                info = (pred_boxes, pred_labels, pred_scores, img_id, img_info, coco_ids)
            evaluator.get_info(info)
            toc = time.time()
            # ---------- periodic progress print ----------
            if idx % 1000 == 0 or idx == len(test_loader) - 1:
                print('Epoch: [{0}]\t'
                      'Step: [{1}/{2}]\t'
                      'Loss: {loss:.4f}\t'
                      'Time : {time:.4f}\t'
                      .format(epoch,
                              idx, len(test_loader),
                              loss=loss,
                              time=toc - tic))
        mAP = evaluator.evaluate(test_loader.dataset)
        # Mean over batches (equals per-image mean only when batch_size == 1).
        mean_loss = sum_loss / len(test_loader)
        if vis is not None:
            # Append (mean_loss, mAP) for this epoch to the visdom line plot.
            vis.line(X=torch.ones((1, 2)).cpu() * epoch,  # step
                     Y=torch.Tensor([mean_loss, mAP]).unsqueeze(0).cpu(),
                     win='test_loss',
                     update='append',
                     opts=dict(xlabel='step',
                               ylabel='test',
                               title='test loss',
                               legend=['test Loss', 'mAP']))
if __name__ == "__main__":
    # Standalone entry point: build the dataset/model/criterion from CLI
    # options and evaluate a single saved checkpoint via test() above.
    from dataset.voc_dataset import VOC_Dataset
    from dataset.coco_dataset import COCO_Dataset
    from loss import Focal_Loss
    from model import Resnet_50, RetinaNet
    from coder import RETINA_Coder
    import argparse

    # 1. command-line options
    parser = argparse.ArgumentParser()
    parser.add_argument('--epoch', type=int, default=0)
    parser.add_argument('--save_path', type=str, default='./saves')
    parser.add_argument('--save_file_name', type=str, default='retina_res_50_coco')
    parser.add_argument('--conf_thres', type=float, default=0.05)
    # parser.add_argument('--data_root', type=str, default='D:\data\\voc')
    # parser.add_argument('--data_root', type=str, default='D:\data\coco')
    # parser.add_argument('--data_root', type=str, default='/home/cvmlserver3/Sungmin/data/voc')
    parser.add_argument('--data_root', type=str, default='/home/cvmlserver3/Sungmin/data/coco')
    parser.add_argument('--data_type', type=str, default='coco', help='choose voc or coco')
    parser.add_argument('--num_classes', type=int, default=80)
    test_opts = parser.parse_args()
    print(test_opts)

    # 2. device — taken from config (imported at module top), not recreated here
    # device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')

    # 3. visdom — disabled for standalone evaluation
    vis = None

    # 4. data set (num_classes is forced to match the chosen dataset)
    # NOTE(review): a data_type other than 'voc'/'coco' leaves test_set
    # undefined and fails later with NameError — consider elif + explicit error.
    if test_opts.data_type == 'voc':
        test_set = VOC_Dataset(root=test_opts.data_root, split='test', resize=600)
        test_opts.num_classes = 20
    if test_opts.data_type == 'coco':
        test_set = COCO_Dataset(root=test_opts.data_root, set_name='val2017', split='test', resize=600)
        test_opts.num_classes = 80

    # 5. data loader — batch_size must stay 1 (test() indexes metadata per batch)
    test_loader = torch.utils.data.DataLoader(test_set,
                                              batch_size=1,
                                              collate_fn=test_set.collate_fn,
                                              shuffle=False,
                                              num_workers=0)

    # 6. network (weights are replaced by the checkpoint inside test())
    model = RetinaNet(base=Resnet_50(pretrained=True), num_classes=test_opts.num_classes).to(device)
    model = torch.nn.DataParallel(module=model, device_ids=device_ids)
    coder = RETINA_Coder(test_opts.data_type)

    # 7. loss
    criterion = Focal_Loss(coder)

    test(epoch=test_opts.epoch,
         vis=vis,
         test_loader=test_loader,
         model=model,
         criterion=criterion,
         coder=coder,
         opts=test_opts)
| [
"jinugpaeng@gmail.com"
] | jinugpaeng@gmail.com |
2c6f44399105c6eaf015fa79e82a8722f392705f | e13c98f36c362717fdf22468b300321802346ef5 | /home/migrations/0005_auto_20161130_1514.py | a8eda78bd3708cdb4cd0d223a5be51a7bbc35b45 | [] | no_license | alexmon1989/libraries_portal | 2415cc49de33459266a9f18ed8bb34ac99d3eb7c | 277081e09f6347c175775337bffba074a35f3b92 | refs/heads/master | 2021-01-23T07:25:53.884795 | 2018-12-25T14:29:29 | 2018-12-25T14:29:29 | 80,501,603 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 577 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.10.3 on 2016-11-30 13:14
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('home', '0004_auto_20161130_1402'),
]
operations = [
migrations.AlterField(
model_name='profile',
name='city',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='home.City', verbose_name='Город'),
),
]
| [
"alex.mon1989@gmail.com"
] | alex.mon1989@gmail.com |
aa1241b114819b571860ab9d789c7c7dc515c7be | 87bd0cd3352b272cd59b840956d429d31195cf0d | /trydjango/bin/python-config | feb7a242e00263bf2cc90b9e9d54fddc87922a8a | [] | no_license | foteini91/Django_sample | 56d11ad4f1c5232b5f756cce19dba1587e67efb9 | ab50c32543f3da58bfb362d721c455d85688cff9 | refs/heads/master | 2020-07-10T18:44:58.361392 | 2019-08-25T20:15:33 | 2019-08-25T20:15:33 | 204,338,115 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,353 | #!/home/foteinip/DjangoTest/trydjango/bin/python
import sys
import getopt
import sysconfig
valid_opts = ['prefix', 'exec-prefix', 'includes', 'libs', 'cflags',
'ldflags', 'help']
if sys.version_info >= (3, 2):
valid_opts.insert(-1, 'extension-suffix')
valid_opts.append('abiflags')
if sys.version_info >= (3, 3):
valid_opts.append('configdir')
def exit_with_usage(code=1):
sys.stderr.write("Usage: {0} [{1}]\n".format(
sys.argv[0], '|'.join('--'+opt for opt in valid_opts)))
sys.exit(code)
try:
opts, args = getopt.getopt(sys.argv[1:], '', valid_opts)
except getopt.error:
exit_with_usage()
if not opts:
exit_with_usage()
pyver = sysconfig.get_config_var('VERSION')
getvar = sysconfig.get_config_var
opt_flags = [flag for (flag, val) in opts]
if '--help' in opt_flags:
exit_with_usage(code=0)
for opt in opt_flags:
if opt == '--prefix':
print(sysconfig.get_config_var('prefix'))
elif opt == '--exec-prefix':
print(sysconfig.get_config_var('exec_prefix'))
elif opt in ('--includes', '--cflags'):
flags = ['-I' + sysconfig.get_path('include'),
'-I' + sysconfig.get_path('platinclude')]
if opt == '--cflags':
flags.extend(getvar('CFLAGS').split())
print(' '.join(flags))
elif opt in ('--libs', '--ldflags'):
abiflags = getattr(sys, 'abiflags', '')
libs = ['-lpython' + pyver + abiflags]
libs += getvar('LIBS').split()
libs += getvar('SYSLIBS').split()
# add the prefix/lib/pythonX.Y/config dir, but only if there is no
# shared library in prefix/lib/.
if opt == '--ldflags':
if not getvar('Py_ENABLE_SHARED'):
libs.insert(0, '-L' + getvar('LIBPL'))
if not getvar('PYTHONFRAMEWORK'):
libs.extend(getvar('LINKFORSHARED').split())
print(' '.join(libs))
elif opt == '--extension-suffix':
ext_suffix = sysconfig.get_config_var('EXT_SUFFIX')
if ext_suffix is None:
ext_suffix = sysconfig.get_config_var('SO')
print(ext_suffix)
elif opt == '--abiflags':
if not getattr(sys, 'abiflags', None):
exit_with_usage()
print(sys.abiflags)
elif opt == '--configdir':
print(sysconfig.get_config_var('LIBPL'))
| [
"foteinip@projectagora.com"
] | foteinip@projectagora.com |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.