blob_id
stringlengths 40
40
| directory_id
stringlengths 40
40
| path
stringlengths 3
281
| content_id
stringlengths 40
40
| detected_licenses
listlengths 0
57
| license_type
stringclasses 2
values | repo_name
stringlengths 6
116
| snapshot_id
stringlengths 40
40
| revision_id
stringlengths 40
40
| branch_name
stringclasses 313
values | visit_date
timestamp[us] | revision_date
timestamp[us] | committer_date
timestamp[us] | github_id
int64 18.2k
668M
⌀ | star_events_count
int64 0
102k
| fork_events_count
int64 0
38.2k
| gha_license_id
stringclasses 17
values | gha_event_created_at
timestamp[us] | gha_created_at
timestamp[us] | gha_language
stringclasses 107
values | src_encoding
stringclasses 20
values | language
stringclasses 1
value | is_vendor
bool 2
classes | is_generated
bool 2
classes | length_bytes
int64 4
6.02M
| extension
stringclasses 78
values | content
stringlengths 2
6.02M
| authors
listlengths 1
1
| author
stringlengths 0
175
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
25c58725bf4411b1273ac8755b39e4197126e5fb
|
61210d713cdedec15e100315af0105266bc619e6
|
/Tests/test_fblogin.py
|
bd14a4412cde7a6627e3c1166330fba4bb6de2e2
|
[] |
no_license
|
Raghavan80007/Facebookloginid
|
8f07a170c6a5825bad71b650134800ce46c14421
|
48546705f378e4629b39f15bb9207746f1713bdb
|
refs/heads/master
| 2022-11-30T18:39:45.190765
| 2020-08-07T07:58:14
| 2020-08-07T07:58:14
| 285,615,882
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 950
|
py
|
import pytest
from selenium import webdriver
from selenium.webdriver.common.action_chains import ActionChains
from Utilities.BaseClass import BaseClass
from PageObject.LoginPage import Login
from Data.LoginData import LoginCredential
import time
class TestLogin(BaseClass):
    """Data-driven smoke test that submits the Facebook login form."""

    def test_fblogin(self, getData):
        logger = self.getLogger()
        page = Login(self.driver)
        time.sleep(2)  # crude settle wait; an explicit WebDriverWait would be more robust
        page.userID().send_keys(getData["username"])
        logger.info("need to print userID")
        page.password().send_keys(getData["password"])
        logger.info("Need to print the password")
        page.loginBtn().click()
        logger.info("Click the button")

    @pytest.fixture(params=LoginCredential.testdata)
    def getData(self, request):
        """Yield one credential dict per parameterized run."""
        return request.param
# pytest --html = report.html for taking reports.
#An html file will be generated --> copy the file path and paste it in the browser URL bar; it will show the report.
|
[
"sraghavan8007@gmail.com"
] |
sraghavan8007@gmail.com
|
81ed055089f4093a3797d536989022acd1f81658
|
adc7dc785c43bb5f48069f551f156f9524011fe6
|
/backend/api/tests/unit/tournament_tests.py
|
d6cca7a05ba0839452ba353b5e7edaa5f4c77c76
|
[] |
no_license
|
juncheong/Sockem-Boppem
|
6a26184e3d3b684d5b18f95c0dda0fa1497bc48d
|
9bf531dcbdf4894d88ad79c6f91398e8923756c6
|
refs/heads/master
| 2023-01-12T00:29:17.706933
| 2020-01-05T14:52:11
| 2020-01-05T14:52:11
| 189,316,821
| 0
| 0
| null | 2023-01-04T01:09:49
| 2019-05-30T00:16:05
|
Python
|
UTF-8
|
Python
| false
| false
| 1,304
|
py
|
"""
Unit tests for the Tournament model in the API app
"""
from datetime import datetime
import pytest
import pytz
import re
from api.tests.factories import UserFactory, TournamentFactory
from api.tests.unit.user_tests import UserTests
from api.models import Tournament, TournamentUser
class TournamentTests:
    """
    Class testing Tournament model
    """

    @pytest.mark.django_db
    def test_tournament_insert(self):
        """Test that we can insert a Tournament with one associated user."""
        tournament = TournamentFactory(
            users=[UserFactory()]
        )
        assert Tournament.objects.count() == 1
        assert tournament.name is not None
        assert self.is_valid_generated_tournament_name(tournament.name)
        assert tournament.start_date is not None
        # Factory start dates should lie in the past relative to an aware UTC "now".
        assert tournament.start_date < datetime.now(tz=pytz.timezone('UTC'))
        assert tournament.creator is not None
        assert tournament.users is not None
        assert TournamentUser.objects.count() == 1
        # Every generated user must follow the factory's username scheme.
        for user in tournament.users.all():
            if not UserTests.is_valid_generated_username(user.username):
                assert False

    @staticmethod
    def is_valid_generated_tournament_name(username):
        """Return True iff *username* matches the generated 'tournament<N>' pattern.

        Bug fix: previously returned the raw ``re.Match``/``None`` object;
        callers only rely on truthiness, so wrapping in ``bool`` is
        backward-compatible and gives a cleaner API.
        """
        return bool(re.match(r'tournament\d+$', username))
|
[
"juncheong1@gmail.com"
] |
juncheong1@gmail.com
|
9504896097bcea60c14d934448da0886d9fa1f85
|
a0554d8e46e19645699058e5bd2e15bd3ad83216
|
/tests/Node.py
|
6709670a6019394b04bde1648fec56d6e182f00c
|
[
"MIT",
"BSD-3-Clause",
"Apache-2.0"
] |
permissive
|
pacificcode/eos
|
daaacfca8e02e9976780061df6c8cf3a31340094
|
61e1768692e5fa6ecfffc296952476d69bda9d6e
|
refs/heads/master
| 2018-10-07T16:57:40.813035
| 2018-06-18T18:23:41
| 2018-06-18T18:23:41
| 118,769,415
| 0
| 0
| null | 2018-01-24T13:31:43
| 2018-01-24T13:31:43
| null |
UTF-8
|
Python
| false
| false
| 40,713
|
py
|
import decimal
import subprocess
import time
import os
import re
import datetime
import json
from core_symbol import CORE_SYMBOL
from testUtils import Utils
from testUtils import Account
# pylint: disable=too-many-public-methods
class Node(object):
# pylint: disable=too-many-instance-attributes
# pylint: disable=too-many-arguments
def __init__(self, host, port, pid=None, cmd=None, enableMongo=False, mongoHost="localhost", mongoPort=27017, mongoDb="EOStest"):
    """Wrap one running nodeos test node.

    host, port  -- HTTP API endpoint of the node.
    pid, cmd    -- OS process id and launch command line, when known.
    enableMongo -- when True, read queries go through the mongo DB plugin
                   instead of the cleos client.
    mongoHost, mongoPort, mongoDb -- mongo connection settings (only used
                   when enableMongo is True).
    """
    self.host=host
    self.port=port
    self.pid=pid
    self.cmd=cmd
    self.killed=False # marks node as killed
    self.enableMongo=enableMongo
    # Utils.mongoSyncTime < 1 disables the post-command mongo sync sleeps.
    self.mongoSyncTime=None if Utils.mongoSyncTime < 1 else Utils.mongoSyncTime
    self.mongoHost=mongoHost
    self.mongoPort=mongoPort
    self.mongoDb=mongoDb
    # cleos-style "--url" argument reused by most command builders below.
    self.endpointArgs="--url http://%s:%d" % (self.host, self.port)
    self.mongoEndpointArgs=""
    if self.enableMongo:
        self.mongoEndpointArgs += "--host %s --port %d %s" % (mongoHost, mongoPort, mongoDb)
def __str__(self):
#return "Host: %s, Port:%d, Pid:%s, Cmd:\"%s\"" % (self.host, self.port, self.pid, self.cmd)
return "Host: %s, Port:%d" % (self.host, self.port)
@staticmethod
def validateTransaction(trans):
    """Assert that *trans* is a dict describing an executed transaction.

    Raises AssertionError when trans is falsy, not a dict, or its
    processed.receipt.status is not "executed".  Note the quirky idiom:
    print(...)/printTrans(...) used as the assert message only run when
    the assertion fails (the message expression is evaluated lazily) and
    evaluate to None.
    """
    assert trans
    assert isinstance(trans, dict), print("Input type is %s" % type(trans))

    def printTrans(trans):
        # Dump the offending transaction before the assertion fires.
        Utils.Print("ERROR: Failure in transaction validation.")
        Utils.Print("Transaction: %s" % (json.dumps(trans, indent=1)))

    assert trans["processed"]["receipt"]["status"] == "executed", printTrans(trans)
# Passes input to stdin, executes cmd. Returns tuple with return code(int),
# stdout(byte stream) and stderr(byte stream).
@staticmethod
def stdinAndCheckOutput(cmd, subcommand):
outs=None
errs=None
ret=0
try:
popen=subprocess.Popen(cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
outs,errs=popen.communicate(input=subcommand.encode("utf-8"))
ret=popen.wait()
except subprocess.CalledProcessError as ex:
msg=ex.output
return (ex.returncode, msg, None)
return (ret, outs, errs)
@staticmethod
def normalizeJsonObject(extJStr):
tmpStr=extJStr
tmpStr=re.sub(r'ObjectId\("(\w+)"\)', r'"ObjectId-\1"', tmpStr)
tmpStr=re.sub(r'ISODate\("([\w|\-|\:|\.]+)"\)', r'"ISODate-\1"', tmpStr)
return tmpStr
@staticmethod
def runMongoCmdReturnJson(cmdArr, subcommand, trace=False):
    """Run a mongo shell *subcommand* via *cmdArr* and parse the JSON reply.

    Returns the parsed object, or None when the command fails or produces
    no parsable JSON.
    """
    # Bug fix: stdinAndCheckOutput returns a 3-tuple (rc, stdout, stderr);
    # the old 2-name unpacking raised ValueError before any work happened.
    retId, outs, errs = Node.stdinAndCheckOutput(cmdArr, subcommand)
    # Bug fix: compare ints with !=, not "is not" (identity comparison on
    # ints is implementation-dependent and a SyntaxWarning on modern Python).
    if retId != 0:
        return None
    outStr = Node.byteArrToStr(outs)
    if not outStr:
        return None
    extJStr = Utils.filterJsonObject(outStr)
    if not extJStr:
        return None
    jStr = Node.normalizeJsonObject(extJStr)
    if not jStr:
        return None
    if trace: Utils.Print("RAW > %s"% (outStr))
    jsonData = json.loads(jStr)
    return jsonData
@staticmethod
def getTransId(trans):
"""Retrieve transaction id from dictionary object."""
assert trans
assert isinstance(trans, dict), print("Input type is %s" % type(trans))
#Utils.Print("%s" % trans)
transId=trans["transaction_id"]
return transId
@staticmethod
def byteArrToStr(arr):
return arr.decode("utf-8")
def setWalletEndpointArgs(self, args):
self.endpointArgs="--url http://%s:%d %s" % (self.host, self.port, args)
def validateAccounts(self, accounts):
    """Assert that every Account in *accounts* exists on this node.

    Logs which account failed and re-raises the underlying
    AssertionError/TypeError/KeyError on any mismatch.
    """
    assert(accounts)
    assert(isinstance(accounts, list))
    for account in accounts:
        assert(account)
        assert(isinstance(account, Account))
        if Utils.Debug: Utils.Print("Validating account %s" % (account.name))
        accountInfo=self.getEosAccount(account.name)
        try:
            assert(accountInfo)
            assert(accountInfo["account_name"] == account.name)
        except (AssertionError, TypeError, KeyError) as _:
            Utils.Print("account validation failed. account: %s" % (account.name))
            raise
# pylint: disable=too-many-branches
def getBlock(self, blockNum, retry=True, silentErrors=False):
    """Return block details for *blockNum*, or None on failure.

    cleos path: a single "get block" call.
    mongo path: up to two db.Blocks lookups, sleeping mongoSyncTime between
    attempts when retry is enabled (gives the mongo plugin time to sync).
    """
    assert(isinstance(blockNum, int))
    if not self.enableMongo:
        cmd="%s %s get block %d" % (Utils.EosClientPath, self.endpointArgs, blockNum)
        if Utils.Debug: Utils.Print("cmd: %s" % (cmd))
        try:
            trans=Utils.runCmdReturnJson(cmd)
            return trans
        except subprocess.CalledProcessError as ex:
            if not silentErrors:
                msg=ex.output.decode("utf-8")
                Utils.Print("ERROR: Exception during get block. %s" % (msg))
            return None
    else:
        for _ in range(2):
            cmd="%s %s" % (Utils.MongoPath, self.mongoEndpointArgs)
            subcommand='db.Blocks.findOne( { "block_num": %d } )' % (blockNum)
            if Utils.Debug: Utils.Print("cmd: echo '%s' | %s" % (subcommand, cmd))
            try:
                trans=Node.runMongoCmdReturnJson(cmd.split(), subcommand)
                if trans is not None:
                    return trans
            except subprocess.CalledProcessError as ex:
                if not silentErrors:
                    msg=ex.output.decode("utf-8")
                    Utils.Print("ERROR: Exception during get db node get block. %s" % (msg))
                return None
            if not retry:
                break
            if self.mongoSyncTime is not None:
                if Utils.Debug: Utils.Print("cmd: sleep %d seconds" % (self.mongoSyncTime))
                time.sleep(self.mongoSyncTime)
    return None
def getBlockById(self, blockId, retry=True, silentErrors=False):
for _ in range(2):
cmd="%s %s" % (Utils.MongoPath, self.mongoEndpointArgs)
subcommand='db.Blocks.findOne( { "block_id": "%s" } )' % (blockId)
if Utils.Debug: Utils.Print("cmd: echo '%s' | %s" % (subcommand, cmd))
try:
trans=Node.runMongoCmdReturnJson(cmd.split(), subcommand)
if trans is not None:
return trans
except subprocess.CalledProcessError as ex:
if not silentErrors:
msg=ex.output.decode("utf-8")
Utils.Print("ERROR: Exception during db get block by id. %s" % (msg))
return None
if not retry:
break
if self.mongoSyncTime is not None:
if Utils.Debug: Utils.Print("cmd: sleep %d seconds" % (self.mongoSyncTime))
time.sleep(self.mongoSyncTime)
return None
# def doesNodeHaveBlockNum(self, blockNum):
# """Does node have head_block_num >= blockNum"""
# assert isinstance(blockNum, int)
# assert (blockNum > 0)
# info=self.getInfo(silentErrors=True)
# assert(info)
# head_block_num=0
# try:
# head_block_num=int(info["head_block_num"])
# except (TypeError, KeyError) as _:
# Utils.Print("Failure in get info parsing. %s" % (info))
# raise
# return True if blockNum <= head_block_num else False
def isBlockPresent(self, blockNum):
"""Does node have head_block_num >= blockNum"""
assert isinstance(blockNum, int)
assert (blockNum > 0)
info=self.getInfo(silentErrors=True)
assert(info)
node_block_num=0
try:
node_block_num=int(info["head_block_num"])
except (TypeError, KeyError) as _:
Utils.Print("Failure in get info parsing. %s" % (info))
raise
return True if blockNum <= node_block_num else False
def isBlockFinalized(self, blockNum):
"""Is blockNum finalized"""
assert(blockNum)
assert isinstance(blockNum, int)
assert (blockNum > 0)
info=self.getInfo(silentErrors=True)
assert(info)
node_block_num=0
try:
node_block_num=int(info["last_irreversible_block_num"])
except (TypeError, KeyError) as _:
Utils.Print("Failure in get info parsing. %s" % (info))
raise
return True if blockNum <= node_block_num else False
# pylint: disable=too-many-branches
def getTransaction(self, transId, retry=True, silentErrors=False):
if not self.enableMongo:
cmd="%s %s get transaction %s" % (Utils.EosClientPath, self.endpointArgs, transId)
if Utils.Debug: Utils.Print("cmd: %s" % (cmd))
try:
trans=Utils.runCmdReturnJson(cmd)
return trans
except subprocess.CalledProcessError as ex:
msg=ex.output.decode("utf-8")
if "Failed to connect" in msg:
Utils.Print("ERROR: Node is unreachable. %s" % (msg))
raise
if not silentErrors:
Utils.Print("ERROR: Exception during transaction retrieval. %s" % (msg))
return None
else:
for _ in range(2):
cmd="%s %s" % (Utils.MongoPath, self.mongoEndpointArgs)
subcommand='db.Transactions.findOne( { $and : [ { "transaction_id": "%s" }, {"pending":false} ] } )' % (transId)
if Utils.Debug: Utils.Print("cmd: echo '%s' | %s" % (subcommand, cmd))
try:
trans=Node.runMongoCmdReturnJson(cmd.split(), subcommand)
return trans
except subprocess.CalledProcessError as ex:
if not silentErrors:
msg=ex.output.decode("utf-8")
Utils.Print("ERROR: Exception during get db node get trans. %s" % (msg))
return None
if not retry:
break
if self.mongoSyncTime is not None:
if Utils.Debug: Utils.Print("cmd: sleep %d seconds" % (self.mongoSyncTime))
time.sleep(self.mongoSyncTime)
return None
def isTransInBlock(self, transId, blockId):
"""Check if transId is within block identified by blockId"""
assert(transId)
assert(isinstance(transId, str))
assert(blockId)
assert(isinstance(blockId, int))
block=self.getBlock(blockId)
transactions=None
try:
transactions=block["transactions"]
except (AssertionError, TypeError, KeyError) as _:
Utils.Print("Failed to parse block. %s" % (block))
raise
if transactions is not None:
for trans in transactions:
assert(trans)
try:
myTransId=trans["trx"]["id"]
if transId == myTransId:
return True
except (TypeError, KeyError) as _:
Utils.Print("Failed to parse block transactions. %s" % (trans))
return False
def getBlockIdByTransId(self, transId):
"""Given a transaction Id (string), will return block id (int) containing the transaction"""
assert(transId)
assert(isinstance(transId, str))
trans=self.getTransaction(transId)
assert(trans)
refBlockNum=None
try:
refBlockNum=trans["trx"]["trx"]["ref_block_num"]
refBlockNum=int(refBlockNum)+1
except (TypeError, ValueError, KeyError) as _:
Utils.Print("transaction parsing failed. Transaction: %s" % (trans))
return None
headBlockNum=self.getHeadBlockNum()
assert(headBlockNum)
try:
headBlockNum=int(headBlockNum)
except(ValueError) as _:
Utils.Print("Info parsing failed. %s" % (headBlockNum))
for blockNum in range(refBlockNum, headBlockNum+1):
if self.isTransInBlock(str(transId), blockNum):
return blockNum
return None
def isTransInAnyBlock(self, transId):
"""Check if transaction (transId) is in a block."""
assert(transId)
assert(isinstance(transId, str))
blockId=self.getBlockIdByTransId(transId)
return True if blockId else False
def isTransFinalized(self, transId):
"""Check if transaction (transId) has been finalized."""
assert(transId)
assert(isinstance(transId, str))
blockId=self.getBlockIdByTransId(transId)
if not blockId:
return False
assert(isinstance(blockId, int))
return self.isBlockFinalized(blockId)
# Disabled MongoDB function
# def getTransByBlockId(self, blockId, retry=True, silentErrors=False):
# for _ in range(2):
# cmd="%s %s" % (Utils.MongoPath, self.mongoEndpointArgs)
# subcommand='db.Transactions.find( { "block_id": "%s" } )' % (blockId)
# if Utils.Debug: Utils.Print("cmd: echo '%s' | %s" % (subcommand, cmd))
# try:
# trans=Node.runMongoCmdReturnJson(cmd.split(), subcommand, True)
# if trans is not None:
# return trans
# except subprocess.CalledProcessError as ex:
# if not silentErrors:
# msg=ex.output.decode("utf-8")
# Utils.Print("ERROR: Exception during db get trans by blockId. %s" % (msg))
# return None
# if not retry:
# break
# if self.mongoSyncTime is not None:
# if Utils.Debug: Utils.Print("cmd: sleep %d seconds" % (self.mongoSyncTime))
# time.sleep(self.mongoSyncTime)
# return None
def getActionFromDb(self, transId, retry=True, silentErrors=False):
for _ in range(2):
cmd="%s %s" % (Utils.MongoPath, self.mongoEndpointArgs)
subcommand='db.Actions.findOne( { "transaction_id": "%s" } )' % (transId)
if Utils.Debug: Utils.Print("cmd: echo '%s' | %s" % (subcommand, cmd))
try:
trans=Node.runMongoCmdReturnJson(cmd.split(), subcommand)
if trans is not None:
return trans
except subprocess.CalledProcessError as ex:
if not silentErrors:
msg=ex.output.decode("utf-8")
Utils.Print("ERROR: Exception during get db node get message. %s" % (msg))
return None
if not retry:
break
if self.mongoSyncTime is not None:
if Utils.Debug: Utils.Print("cmd: sleep %d seconds" % (self.mongoSyncTime))
time.sleep(self.mongoSyncTime)
return None
def getMessageFromDb(self, transId, retry=True, silentErrors=False):
for _ in range(2):
cmd="%s %s" % (Utils.MongoPath, self.mongoEndpointArgs)
subcommand='db.Messages.findOne( { "transaction_id": "%s" } )' % (transId)
if Utils.Debug: Utils.Print("cmd: echo '%s' | %s" % (subcommand, cmd))
try:
trans=Node.runMongoCmdReturnJson(cmd.split(), subcommand)
if trans is not None:
return trans
except subprocess.CalledProcessError as ex:
if not silentErrors:
msg=ex.output.decode("utf-8")
Utils.Print("ERROR: Exception during get db node get message. %s" % (msg))
return None
if not retry:
break
if self.mongoSyncTime is not None:
if Utils.Debug: Utils.Print("cmd: sleep %d seconds" % (self.mongoSyncTime))
time.sleep(self.mongoSyncTime)
return None
# Create & initialize account and return creation transactions. Return transaction json object
def createInitializeAccount(self, account, creatorAccount, stakedDeposit=1000, waitForTransBlock=False):
cmd='%s %s system newaccount -j %s %s %s %s --stake-net "100 %s" --stake-cpu "100 %s" --buy-ram "100 %s"' % (
Utils.EosClientPath, self.endpointArgs, creatorAccount.name, account.name,
account.ownerPublicKey, account.activePublicKey,
CORE_SYMBOL, CORE_SYMBOL, CORE_SYMBOL)
if Utils.Debug: Utils.Print("cmd: %s" % (cmd))
trans=None
try:
trans=Utils.runCmdReturnJson(cmd)
transId=Node.getTransId(trans)
except subprocess.CalledProcessError as ex:
msg=ex.output.decode("utf-8")
Utils.Print("ERROR: Exception during account creation. %s" % (msg))
return None
if stakedDeposit > 0:
self.waitForTransInBlock(transId) # seems like account creation needs to be finalized before transfer can happen
trans = self.transferFunds(creatorAccount, account, Node.currencyIntToStr(stakedDeposit, CORE_SYMBOL), "init")
transId=Node.getTransId(trans)
if waitForTransBlock and not self.waitForTransInBlock(transId):
return None
return trans
# Create account and return creation transactions. Return transaction json object
# waitForTransBlock: wait on creation transaction id to appear in a block
def createAccount(self, account, creatorAccount, stakedDeposit=1000, waitForTransBlock=False):
cmd="%s %s create account -j %s %s %s %s" % (
Utils.EosClientPath, self.endpointArgs, creatorAccount.name, account.name,
account.ownerPublicKey, account.activePublicKey)
if Utils.Debug: Utils.Print("cmd: %s" % (cmd))
trans=None
try:
trans=Utils.runCmdReturnJson(cmd)
transId=Node.getTransId(trans)
except subprocess.CalledProcessError as ex:
msg=ex.output.decode("utf-8")
Utils.Print("ERROR: Exception during account creation. %s" % (msg))
return None
if stakedDeposit > 0:
self.waitForTransInBlock(transId) # seems like account creation needs to be finlized before transfer can happen
trans = self.transferFunds(creatorAccount, account, "%0.04f %s" % (stakedDeposit/10000, CORE_SYMBOL), "init")
transId=Node.getTransId(trans)
if waitForTransBlock and not self.waitForTransInBlock(transId):
return None
return trans
def getEosAccount(self, name):
assert(isinstance(name, str))
cmd="%s %s get account -j %s" % (Utils.EosClientPath, self.endpointArgs, name)
if Utils.Debug: Utils.Print("cmd: %s" % (cmd))
try:
trans=Utils.runCmdReturnJson(cmd)
return trans
except subprocess.CalledProcessError as ex:
msg=ex.output.decode("utf-8")
Utils.Print("ERROR: Exception during get account. %s" % (msg))
return None
def getEosAccountFromDb(self, name):
cmd="%s %s" % (Utils.MongoPath, self.mongoEndpointArgs)
subcommand='db.Accounts.findOne({"name" : "%s"})' % (name)
if Utils.Debug: Utils.Print("cmd: echo '%s' | %s" % (subcommand, cmd))
try:
trans=Node.runMongoCmdReturnJson(cmd.split(), subcommand)
return trans
except subprocess.CalledProcessError as ex:
msg=ex.output.decode("utf-8")
Utils.Print("ERROR: Exception during get account from db. %s" % (msg))
return None
def getTable(self, contract, scope, table):
cmd="%s %s get table %s %s %s" % (Utils.EosClientPath, self.endpointArgs, contract, scope, table)
if Utils.Debug: Utils.Print("cmd: %s" % (cmd))
try:
trans=Utils.runCmdReturnJson(cmd)
return trans
except subprocess.CalledProcessError as ex:
msg=ex.output.decode("utf-8")
Utils.Print("ERROR: Exception during table retrieval. %s" % (msg))
return None
def getTableAccountBalance(self, contract, scope):
assert(isinstance(contract, str))
assert(isinstance(scope, str))
table="accounts"
trans = self.getTable(contract, scope, table)
assert(trans)
try:
return trans["rows"][0]["balance"]
except (TypeError, KeyError) as _:
print("Transaction parsing failed. Transaction: %s" % (trans))
raise
def getCurrencyBalance(self, contract, account, symbol=CORE_SYMBOL):
    """Return raw output from "get currency balance", e.g. '99999.9950 CUR',
    or None on error."""
    assert(contract)
    assert(isinstance(contract, str))
    assert(account)
    assert(isinstance(account, str))
    assert(symbol)
    assert(isinstance(symbol, str))
    cmd="%s %s get currency balance %s %s %s" % (Utils.EosClientPath, self.endpointArgs, contract, account, symbol)
    if Utils.Debug: Utils.Print("cmd: %s" % (cmd))
    try:
        trans=Utils.runCmdReturnStr(cmd)
        return trans
    except subprocess.CalledProcessError as ex:
        msg=ex.output.decode("utf-8")
        # Bug fix: the message said "get currency stats" — copy-paste from
        # the sibling getCurrencyStats method.
        Utils.Print("ERROR: Exception during get currency balance. %s" % (msg))
        return None
def getCurrencyStats(self, contract, symbol=CORE_SYMBOL):
"""returns Json output from get currency stats."""
assert(contract)
assert(isinstance(contract, str))
assert(symbol)
assert(isinstance(symbol, str))
cmd="%s %s get currency stats %s %s" % (Utils.EosClientPath, self.endpointArgs, contract, symbol)
if Utils.Debug: Utils.Print("cmd: %s" % (cmd))
try:
trans=Utils.runCmdReturnJson(cmd)
return trans
except subprocess.CalledProcessError as ex:
msg=ex.output.decode("utf-8")
Utils.Print("ERROR: Exception during get currency stats. %s" % (msg))
return None
# Verifies account. Returns "get account" json return object
def verifyAccount(self, account):
    """Confirm that *account* exists on-chain; return its "get account"
    json (or mongo db.Accounts document) or None when not found.

    mongo path makes up to two attempts, sleeping mongoSyncTime between
    them so the plugin can catch up.
    """
    if not self.enableMongo:
        ret=self.getEosAccount(account.name)
        if ret is not None:
            account_name=ret["account_name"]
            if account_name is None:
                Utils.Print("ERROR: Failed to verify account creation.", account.name)
                return None
            return ret
    else:
        for _ in range(2):
            ret=self.getEosAccountFromDb(account.name)
            if ret is not None:
                # mongo documents use "name" rather than "account_name".
                account_name=ret["name"]
                if account_name is None:
                    Utils.Print("ERROR: Failed to verify account creation.", account.name)
                    return None
                return ret
            if self.mongoSyncTime is not None:
                if Utils.Debug: Utils.Print("cmd: sleep %d seconds" % (self.mongoSyncTime))
                time.sleep(self.mongoSyncTime)
    return None
def waitForTransInBlock(self, transId, timeout=None):
    """Wait for transaction *transId* to appear in some block (not
    necessarily irreversible — see waitForTransFinalization for that).
    Returns the boolean result of the wait."""
    lam = lambda: self.isTransInAnyBlock(transId)
    ret=Utils.waitForBool(lam, timeout)
    return ret
def waitForTransFinalization(self, transId, timeout=None):
"""Wait for trans id to be finalized."""
assert(isinstance(transId, str))
lam = lambda: self.isTransFinalized(transId)
ret=Utils.waitForBool(lam, timeout)
return ret
def waitForNextBlock(self, timeout=None):
num=self.getHeadBlockNum()
lam = lambda: self.getHeadBlockNum() > num
ret=Utils.waitForBool(lam, timeout)
return ret
# Transfer funds. Returns "transfer" json return object
def transferFunds(self, source, destination, amountStr, memo="memo", force=False, waitForTransBlock=False):
    """Transfer *amountStr* (e.g. "1.0000 SYS") from *source* to *destination*.

    force             -- appends cleos "-f" flag (presumably to force the
                         transfer past checks — TODO confirm against cleos docs).
    waitForTransBlock -- when True, also wait for the transfer to reach a
                         block; returns None when it never does.
    Returns the cleos json response, or None on failure.
    """
    assert isinstance(amountStr, str)
    assert(source)
    assert(isinstance(source, Account))
    assert(destination)
    assert(isinstance(destination, Account))
    cmd="%s %s -v transfer -j %s %s" % (
        Utils.EosClientPath, self.endpointArgs, source.name, destination.name)
    cmdArr=cmd.split()
    # amountStr and memo may contain spaces, so they are appended as whole
    # argv entries instead of going through the split() above.
    cmdArr.append(amountStr)
    cmdArr.append(memo)
    if force:
        cmdArr.append("-f")
    s=" ".join(cmdArr)
    if Utils.Debug: Utils.Print("cmd: %s" % (s))
    trans=None
    try:
        trans=Utils.runCmdArrReturnJson(cmdArr)
    except subprocess.CalledProcessError as ex:
        msg=ex.output.decode("utf-8")
        Utils.Print("ERROR: Exception during funds transfer. %s" % (msg))
        return None
    assert(trans)
    transId=Node.getTransId(trans)
    if waitForTransBlock and not self.waitForTransInBlock(transId):
        return None
    return trans
@staticmethod
def currencyStrToInt(balanceStr):
"""Converts currency string of form "12.3456 SYS" to int 123456"""
assert(isinstance(balanceStr, str))
balanceStr=balanceStr.split()[0]
#balance=int(decimal.Decimal(balanceStr[1:])*10000)
balance=int(decimal.Decimal(balanceStr)*10000)
return balance
@staticmethod
def currencyIntToStr(balance, symbol):
"""Converts currency int of form 123456 to string "12.3456 SYS" where SYS is symbol string"""
assert(isinstance(balance, int))
assert(isinstance(symbol, str))
balanceStr="%.04f %s" % (balance/10000.0, symbol)
return balanceStr
def validateFunds(self, initialBalances, transferAmount, source, accounts):
    """Validate each account gained *transferAmount* and *source* paid it out.

    initialBalances -- dict of Account -> balance captured before the transfers.
    Returns True when all balances are consistent, False otherwise.
    """
    assert(source)
    assert(isinstance(source, Account))
    assert(accounts)
    assert(isinstance(accounts, list))
    assert(len(accounts) > 0)
    assert(initialBalances)
    assert(isinstance(initialBalances, dict))
    assert(isinstance(transferAmount, int))
    currentBalances=self.getEosBalances([source] + accounts)
    assert(currentBalances)
    assert(isinstance(currentBalances, dict))
    assert(len(initialBalances) == len(currentBalances))
    if len(currentBalances) != len(initialBalances):
        Utils.Print("ERROR: validateFunds> accounts length mismatch. Initial: %d, current: %d" % (len(initialBalances), len(currentBalances)))
        return False
    for key, value in currentBalances.items():
        initialBalance = initialBalances[key]
        assert(initialBalances)
        expectedInitialBalance = value - transferAmount
        # The source account paid out one transfer per destination account
        # (identity check is intentional: Account objects are the dict keys).
        if key is source:
            expectedInitialBalance = value + (transferAmount*len(accounts))
        if (initialBalance != expectedInitialBalance):
            Utils.Print("ERROR: validateFunds> Expected: %d, actual: %d for account %s" %
                        (expectedInitialBalance, initialBalance, key.name))
            return False
    # Bug fix: success previously fell through returning None (falsy), so a
    # caller testing the result saw every successful validation as a failure.
    return True
def getEosBalances(self, accounts):
"""Returns a dictionary with account balances keyed by accounts"""
assert(accounts)
assert(isinstance(accounts, list))
balances={}
for account in accounts:
balance = self.getAccountEosBalance(account.name)
balances[account]=balance
return balances
# Gets accounts mapped to key. Returns json object
def getAccountsByKey(self, key):
cmd="%s %s get accounts %s" % (Utils.EosClientPath, self.endpointArgs, key)
if Utils.Debug: Utils.Print("cmd: %s" % (cmd))
try:
trans=Utils.runCmdReturnJson(cmd)
return trans
except subprocess.CalledProcessError as ex:
msg=ex.output.decode("utf-8")
Utils.Print("ERROR: Exception during accounts by key retrieval. %s" % (msg))
return None
# Get actions mapped to an account (cleos get actions)
def getActions(self, account, pos=-1, offset=-1):
assert(isinstance(account, Account))
assert(isinstance(pos, int))
assert(isinstance(offset, int))
cmd="%s %s get actions -j %s %d %d" % (Utils.EosClientPath, self.endpointArgs, account.name, pos, offset)
if Utils.Debug: Utils.Print("cmd: %s" % (cmd))
try:
actions=Utils.runCmdReturnJson(cmd)
return actions
except subprocess.CalledProcessError as ex:
msg=ex.output.decode("utf-8")
Utils.Print("ERROR: Exception during actions by account retrieval. %s" % (msg))
return None
# Gets accounts mapped to key. Returns array
def getAccountsArrByKey(self, key):
trans=self.getAccountsByKey(key)
assert(trans)
assert("account_names" in trans)
accounts=trans["account_names"]
return accounts
def getServants(self, name):
cmd="%s %s get servants %s" % (Utils.EosClientPath, self.endpointArgs, name)
if Utils.Debug: Utils.Print("cmd: %s" % (cmd))
try:
trans=Utils.runCmdReturnJson(cmd)
return trans
except subprocess.CalledProcessError as ex:
msg=ex.output.decode("utf-8")
Utils.Print("ERROR: Exception during servants retrieval. %s" % (msg))
return None
def getServantsArr(self, name):
trans=self.getServants(name)
servants=trans["controlled_accounts"]
return servants
def getAccountEosBalanceStr(self, scope):
"""Returns SYS currency0000 account balance from cleos get table command. Returned balance is string following syntax "98.0311 SYS". """
assert isinstance(scope, str)
if not self.enableMongo:
amount=self.getTableAccountBalance("eosio.token", scope)
if Utils.Debug: Utils.Print("getNodeAccountEosBalance %s %s" % (scope, amount))
assert isinstance(amount, str)
return amount
else:
if self.mongoSyncTime is not None:
if Utils.Debug: Utils.Print("cmd: sleep %d seconds" % (self.mongoSyncTime))
time.sleep(self.mongoSyncTime)
account=self.getEosAccountFromDb(scope)
if account is not None:
balance=account["eos_balance"]
return balance
return None
def getAccountEosBalance(self, scope):
"""Returns SYS currency0000 account balance from cleos get table command. Returned balance is an integer e.g. 980311. """
balanceStr=self.getAccountEosBalanceStr(scope)
balance=Node.currencyStrToInt(balanceStr)
return balance
def getAccountCodeHash(self, account):
cmd="%s %s get code %s" % (Utils.EosClientPath, self.endpointArgs, account)
if Utils.Debug: Utils.Print("cmd: %s" % (cmd))
try:
retStr=Utils.checkOutput(cmd.split())
#Utils.Print ("get code> %s"% retStr)
p=re.compile(r'code\shash: (\w+)\n', re.MULTILINE)
m=p.search(retStr)
if m is None:
msg="Failed to parse code hash."
Utils.Print("ERROR: "+ msg)
return None
return m.group(1)
except subprocess.CalledProcessError as ex:
msg=ex.output.decode("utf-8")
Utils.Print("ERROR: Exception during code hash retrieval. %s" % (msg))
return None
# publish contract and return transaction as json object
def publishContract(self, account, contractDir, wastFile, abiFile, waitForTransBlock=False, shouldFail=False):
cmd="%s %s -v set contract -j %s %s" % (Utils.EosClientPath, self.endpointArgs, account, contractDir)
cmd += "" if wastFile is None else (" "+ wastFile)
cmd += "" if abiFile is None else (" " + abiFile)
if Utils.Debug: Utils.Print("cmd: %s" % (cmd))
trans=None
try:
trans=Utils.runCmdReturnJson(cmd, trace=False)
except subprocess.CalledProcessError as ex:
if not shouldFail:
msg=ex.output.decode("utf-8")
Utils.Print("ERROR: Exception during code hash retrieval. %s" % (msg))
return None
else:
retMap={}
retMap["returncode"]=ex.returncode
retMap["cmd"]=ex.cmd
retMap["output"]=ex.output
# commented below as they are available only in Python3.5 and above
# retMap["stdout"]=ex.stdout
# retMap["stderr"]=ex.stderr
return retMap
if shouldFail:
Utils.Print("ERROR: The publish contract did not fail as expected.")
return None
Node.validateTransaction(trans)
transId=Node.getTransId(trans)
if waitForTransBlock and not self.waitForTransInBlock(transId):
return None
return trans
def getTableRows(self, contract, scope, table):
jsonData=self.getTable(contract, scope, table)
if jsonData is None:
return None
rows=jsonData["rows"]
return rows
def getTableRow(self, contract, scope, table, idx):
if idx < 0:
Utils.Print("ERROR: Table index cannot be negative. idx: %d" % (idx))
return None
rows=self.getTableRows(contract, scope, table)
if rows is None or idx >= len(rows):
Utils.Print("ERROR: Retrieved table does not contain row %d" % idx)
return None
row=rows[idx]
return row
def getTableColumns(self, contract, scope, table):
row=self.getTableRow(contract, scope, table, 0)
keys=list(row.keys())
return keys
# returns tuple with transaction and
def pushMessage(self, account, action, data, opts, silentErrors=False):
cmd="%s %s push action -j %s %s" % (Utils.EosClientPath, self.endpointArgs, account, action)
cmdArr=cmd.split()
if data is not None:
cmdArr.append(data)
if opts is not None:
cmdArr += opts.split()
s=" ".join(cmdArr)
if Utils.Debug: Utils.Print("cmd: %s" % (s))
try:
trans=Utils.runCmdArrReturnJson(cmdArr)
return (True, trans)
except subprocess.CalledProcessError as ex:
msg=ex.output.decode("utf-8")
if not silentErrors:
Utils.Print("ERROR: Exception during push message. %s" % (msg))
return (False, msg)
def setPermission(self, account, code, pType, requirement, waitForTransBlock=False):
    """Set an action permission via cleos; optionally wait for the transaction to reach a block."""
    cmd="%s %s set action permission -j %s %s %s %s" % (
        Utils.EosClientPath, self.endpointArgs, account, code, pType, requirement)
    if Utils.Debug: Utils.Print("cmd: %s" % (cmd))
    try:
        trans = Utils.runCmdReturnJson(cmd)
    except subprocess.CalledProcessError as ex:
        Utils.Print("ERROR: Exception during set permission. %s" % (ex.output.decode("utf-8")))
        return None
    transId = Node.getTransId(trans)
    if waitForTransBlock and not self.waitForTransInBlock(transId):
        return None
    return trans
def getInfo(self, silentErrors=False):
    """Run "cleos get info" and return the parsed JSON, or None on failure."""
    cmd = "%s %s get info" % (Utils.EosClientPath, self.endpointArgs)
    if Utils.Debug: Utils.Print("cmd: %s" % (cmd))
    try:
        return Utils.runCmdReturnJson(cmd, silentErrors=silentErrors)
    except subprocess.CalledProcessError as ex:
        if not silentErrors:
            Utils.Print("ERROR: Exception during get info. %s" % (ex.output.decode("utf-8")))
        return None
def getBlockFromDb(self, idx):
    """Fetch one block document from the mongo "Blocks" collection.

    idx is passed as the sort direction on "_id" (mongo semantics: -1 sorts
    descending, so with limit(1) that returns the newest block).
    Returns the parsed document, or None when the mongo command fails.
    """
    cmd="%s %s" % (Utils.MongoPath, self.mongoEndpointArgs)
    subcommand="db.Blocks.find().sort({\"_id\":%d}).limit(1).pretty()" % (idx)
    if Utils.Debug: Utils.Print("cmd: echo \"%s\" | %s" % (subcommand, cmd))
    try:
        trans=Node.runMongoCmdReturnJson(cmd.split(), subcommand)
        return trans
    except subprocess.CalledProcessError as ex:
        msg=ex.output.decode("utf-8")
        Utils.Print("ERROR: Exception during get db block. %s" % (msg))
        return None
def checkPulse(self):
    """Return True when the node answers a (silent) get-info request."""
    return self.getInfo(True) is not None
def getHeadBlockNum(self):
    """returns head block number(string) as returned by cleos get info."""
    if self.enableMongo:
        # NOTE (carried over from original): either this branch or the one in
        # getIrreversibleBlockNum is likely wrong -- both read "block_num".
        block = self.getBlockFromDb(-1)
        return block["block_num"] if block is not None else None
    info = self.getInfo()
    return info["head_block_num"] if info is not None else None
def getIrreversibleBlockNum(self):
    """Return the last irreversible block number, or None if unavailable."""
    if self.enableMongo:
        # NOTE (carried over from original): either this branch or the one in
        # getHeadBlockNum is likely wrong -- both read "block_num".
        block = self.getBlockFromDb(-1)
        return block["block_num"] if block is not None else None
    info = self.getInfo()
    return info["last_irreversible_block_num"] if info is not None else None
def kill(self, killSignal):
    """Send *killSignal* to the node process and wait until it is gone.

    Returns True when the process is confirmed dead (pid cleared and node
    marked killed); False when signalling or shutdown validation failed.
    """
    if Utils.Debug: Utils.Print("Killing node: %s" % (self.cmd))
    assert(self.pid is not None)
    try:
        os.kill(self.pid, killSignal)
    except OSError as ex:
        # BUGFIX: self.cmd is the command string, so the original "%d" format
        # raised a TypeError while reporting this error; use %s instead.
        Utils.Print("ERROR: Failed to kill node (%s)." % (self.cmd), ex)
        return False
    # wait for kill validation
    def isProcessGone():
        try:
            os.kill(self.pid, 0)  # signal 0 only probes whether the pid is alive
        except OSError as _:
            return True
        return False
    if not Utils.waitForBool(isProcessGone):
        Utils.Print("ERROR: Failed to validate node shutdown.")
        return False
    # mark node as killed
    self.pid=None
    self.killed=True
    return True
# TBD: make nodeId an internal property
# pylint: disable=too-many-locals
def relaunch(self, nodeId, chainArg, newChain=False, timeout=Utils.systemWaitTimeout):
    """Restart a previously killed node process and wait for it to respond.

    nodeId selects the var/lib/node_XX data dir; chainArg (if not None) is
    appended to the original command line. When newChain is False the
    --genesis-json/--genesis-timestamp options (first-launch only) are
    stripped from the stored command. Returns True once the node answers.
    """
    assert(self.pid is None)
    assert(self.killed)
    if Utils.Debug: Utils.Print("Launching node process, Id: %d" % (nodeId))
    cmdArr=[]
    myCmd=self.cmd
    if not newChain:
        skip=False
        for i in self.cmd.split():
            Utils.Print("\"%s\"" % (i))
            if skip:
                # previous token was a --genesis-* flag; drop its value too
                skip=False
                continue
            if "--genesis-json" == i or "--genesis-timestamp" == i:
                skip=True
                continue
            cmdArr.append(i)
        myCmd=" ".join(cmdArr)
    dataDir="var/lib/node_%02d" % (nodeId)
    dt = datetime.datetime.now()
    # timestamped log names so reruns never clobber earlier output
    dateStr="%d_%02d_%02d_%02d_%02d_%02d" % (
        dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second)
    stdoutFile="%s/stdout.%s.txt" % (dataDir, dateStr)
    stderrFile="%s/stderr.%s.txt" % (dataDir, dateStr)
    with open(stdoutFile, 'w') as sout, open(stderrFile, 'w') as serr:
        #cmd=self.cmd + ("" if chainArg is None else (" " + chainArg))
        cmd=myCmd + ("" if chainArg is None else (" " + chainArg))
        Utils.Print("cmd: %s" % (cmd))
        popen=subprocess.Popen(cmd.split(), stdout=sout, stderr=serr)
        self.pid=popen.pid
    def isNodeAlive():
        """wait for node to be responsive."""
        try:
            return True if self.checkPulse() else False
        except (TypeError) as _:
            pass
        return False
    isAlive=Utils.waitForBool(isNodeAlive, timeout)
    if isAlive:
        Utils.Print("Node relaunch was successfull.")
    else:
        Utils.Print("ERROR: Node relaunch Failed.")
        self.pid=None
        return False
    self.killed=False
    return True
|
[
"johnc@objectcomputing.com"
] |
johnc@objectcomputing.com
|
9e4853bbbe116ed53f13c67fc1d7cd1b9984033b
|
e949e5195507d2029f920b8837f205bf32ef67fa
|
/question2.py
|
9b0edf491fc65fcb5d3971b4031121c22f156145
|
[] |
no_license
|
prithviraj-11/sample-question2
|
6a562c400199cbcecfa6f29f647f2a4f6649eb72
|
aa786fe1c7037f04331fa5a01108c3c3817be452
|
refs/heads/main
| 2023-04-03T05:49:18.921437
| 2021-04-08T13:13:36
| 2021-04-08T13:13:36
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,605
|
py
|
import flask
from flask import request,jsonify
import requests
import json
from datetime import datetime
from time import strftime,gmtime
app = flask.Flask(__name__)
app.config["DEBUG"] = True
# ISO-8601-like format expected for the start_time/end_time query params
time_format = "%Y-%m-%dT%H:%M:%SZ"
# Production-unit sample data is fetched once, at import time, from this snippet
url = 'https://gitlab.com/-/snippets/2094509/raw/master/sample_json_2.json'
r = requests.get(url)
data = r.content
system_data = json.loads(data)
@app.route('/', methods=['GET'])
def home():
    """Landing page with a static heading."""
    heading = "<h1>Home</h1>"
    return heading
@app.route('/api/produnit/all', methods=['GET'])
def api_all():
    """Return the full production-unit dataset as JSON."""
    payload = system_data
    return jsonify(payload)
@app.route('/api/produnit', methods=['GET'])
def api_time():
    """Summarise runtime/downtime/utilisation between start_time and end_time.

    Expects start_time and end_time query parameters in *time_format*.
    NOTE(review): when either parameter is missing the variables are never
    bound and the comparison below raises NameError -- confirm and add
    validation / a 400 response.
    """
    if 'start_time'in request.args:
        start_time = datetime.strptime(str(request.args['start_time']),time_format)
    if 'end_time'in request.args:
        end_time = datetime.strptime(str(request.args['end_time']),time_format)
    result = {}
    runtime = 0   # accumulated seconds counted as running
    downtime = 0  # accumulated seconds counted as down
    for data in system_data:
        # sample timestamps use a space-separated format, unlike the params
        current_time = datetime.strptime(str(data['time']),"%Y-%m-%d %H:%M:%S")
        if(start_time <= current_time <= end_time):
            # NOTE(review): the 1021-second cap deciding run vs down looks like
            # a dataset-specific heuristic -- confirm the intended threshold.
            if runtime<=1021:
                runtime = runtime + data['runtime']
            else:
                downtime = downtime + data['runtime']
    # NOTE(review): ZeroDivisionError when no samples fall inside the window.
    utilisation = round((runtime/(runtime + downtime)) * 100,2)
    runtime = strftime('%Hh:%Mm:%Ss',gmtime(runtime))
    downtime = strftime('%Hh:%Mm:%Ss',gmtime(downtime))
    result['runtime'] = runtime
    result['downtime'] = downtime
    result['utilisation'] = utilisation
    return jsonify(result)
app.run()
|
[
"noreply@github.com"
] |
noreply@github.com
|
ef98cfec035e621902aedc72b76f5850fc6e84ca
|
e380663d6a11d05828a040486c85e2dfae358597
|
/djangove/utils/templatetags/navigation_tags.py
|
bafbc92d8d8e0530401dc4ad20eed4c5aac944d7
|
[
"BSD-3-Clause"
] |
permissive
|
djangove/djangove
|
4a2ad9dcd5236ff746bb09d21f2fab1424dfc2f5
|
1fee878d170e52ee0c5cacd1d2813b045d4cbb77
|
refs/heads/master
| 2021-09-10T05:58:01.868815
| 2020-03-09T22:27:58
| 2020-03-09T22:27:58
| 43,475,979
| 1
| 1
|
BSD-3-Clause
| 2021-09-07T23:57:06
| 2015-10-01T03:13:04
|
Python
|
UTF-8
|
Python
| false
| false
| 4,882
|
py
|
from django import template
from wagtail.wagtailcore.models import Page
register = template.Library()
@register.assignment_tag(takes_context=True)
def get_site_root(context):
    """Return the root page of the site handling the current request."""
    request = context['request']
    return request.site.root_page
def has_menu_children(page):
    """Return True when *page* has at least one live child shown in menus.

    Uses QuerySet.exists() so Django issues a cheap EXISTS query instead of
    evaluating the whole queryset just for its truthiness; also replaces the
    `if ...: return True else: return False` anti-pattern.
    """
    return page.get_children().filter(live=True, show_in_menus=True).exists()
@register.inclusion_tag(
    'utils/tags/navigation/top_menu.html', takes_context=True)
def top_menu(context, parent, calling_page=None):
    """Render the top-level menu: live children of *parent* marked for menus."""
    items = parent.get_children().filter(live=True, show_in_menus=True)
    for item in items:
        # template uses this flag to decide whether to render a dropdown
        item.show_dropdown = has_menu_children(item)
    return {
        'calling_page': calling_page,
        'menuitems': items,
        'request': context['request'],
    }
# Retrieves the children of the top menu items for the drop downs
@register.inclusion_tag(
    'utils/tags/navigation/top_menu_children.html', takes_context=True)
def top_menu_children(context, parent):
    """Render the dropdown entries for one top-level menu item."""
    children = parent.get_children().filter(live=True, show_in_menus=True)
    for child in children:
        child.show_dropdown = has_menu_children(child)
    return {
        'parent': parent,
        'menuitems_children': children,
        # required by the pageurl tag that we want to use within this template
        'request': context['request'],
    }
@register.inclusion_tag(
    'utils/tags/navigation/site_menu.html', takes_context=True)
def site_menu(context, parent, calling_page=None):
    """Render the site menu (same data as top_menu, different template)."""
    menuitems = parent.get_children().filter(
        live=True,
        show_in_menus=True
    )
    for menuitem in menuitems:
        # template uses this flag to decide whether to render a dropdown
        menuitem.show_dropdown = has_menu_children(menuitem)
    return {
        'calling_page': calling_page,
        'menuitems': menuitems,
        'request': context['request'],
    }
@register.inclusion_tag(
    'utils/tags/navigation/site_menu_children.html', takes_context=True)
def site_menu_children(context, parent):
    """Render the dropdown entries for one site-menu item."""
    menuitems_children = parent.get_children()
    menuitems_children = menuitems_children.filter(
        live=True,
        show_in_menus=True
    )
    for menuitem in menuitems_children:
        # template uses this flag to decide whether to render a dropdown
        menuitem.show_dropdown = has_menu_children(menuitem)
    return {
        'parent': parent,
        'menuitems_children': menuitems_children,
        # required by the pageurl tag that we want to use within this template
        'request': context['request'],
    }
@register.inclusion_tag(
    'utils/tags/navigation/secondary_menu.html', takes_context=True)
def secondary_menu(context, calling_page=None):
    """Render the secondary menu: the calling page's menu children, falling
    back to its siblings when it has none, so the menu is never empty mid-tree.
    """
    pages = []
    if calling_page:
        pages = calling_page.get_children().filter(
            live=True,
            show_in_menus=True
        )
        # If no children, get siblings instead (idiomatic emptiness test
        # instead of len(...) == 0)
        if not pages:
            pages = calling_page.get_siblings().filter(
                live=True,
                show_in_menus=True
            )
    return {
        'pages': pages,
        # required by the pageurl tag that we want to use within this template
        'request': context['request'],
    }
@register.inclusion_tag(
    'utils/tags/navigation/breadcrumbs.html', takes_context=True)
def breadcrumbs(context):
    """Render breadcrumbs for the current page (pages deeper than the home)."""
    self = context.get('self')
    if self is None or self.depth <= 2:
        # When on the home page, displaying breadcrumbs is irrelevant.
        ancestors = ()
    else:
        # depth 1 is the tree root and depth 2 the homepage; skip both
        ancestors = Page.objects.ancestor_of(
            self, inclusive=True).filter(depth__gt=2)
    return {
        'ancestors': ancestors,
        'request': context['request'],
    }
@register.inclusion_tag(
    'utils/tags/navigation/offcanvas_top_menu.html', takes_context=True)
def offcanvas_top_menu(context, parent, calling_page=None):
    """Render the off-canvas variant of the top-level menu."""
    items = parent.get_children().filter(live=True, show_in_menus=True)
    for item in items:
        # template uses this flag to decide whether to render a dropdown
        item.show_dropdown = has_menu_children(item)
    return {
        'calling_page': calling_page,
        'menuitems': items,
        'request': context['request'],
    }
# Retrieves the children of the top menu items for the drop downs
@register.inclusion_tag(
    'utils/tags/navigation/offcanvas_top_menu_children.html', takes_context=True)
def offcanvas_top_menu_children(context, parent):
    """Render the dropdown entries for one off-canvas top-menu item."""
    children = parent.get_children().filter(live=True, show_in_menus=True)
    for child in children:
        child.show_dropdown = has_menu_children(child)
    return {
        'parent': parent,
        'menuitems_children': children,
        # required by the pageurl tag that we want to use within this template
        'request': context['request'],
    }
|
[
"edwar.baron@gmail.com"
] |
edwar.baron@gmail.com
|
e884838af6919358d5a44231a2d42928d2026a88
|
63b2128d496293f58a78fd3e476c06cfafbd7d51
|
/leetcode/15.py
|
7642f0a0692cf894165383fdf1cb542cf307ba47
|
[] |
no_license
|
wonjong-github/Python_algorithm
|
221bc66f0f144852b3c3982988a351686f903203
|
0bd3d0918f0a8d5edc3e268724ce4a2932814945
|
refs/heads/master
| 2023-07-13T20:44:37.618671
| 2021-08-23T06:46:05
| 2021-08-23T06:46:05
| 389,080,095
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,648
|
py
|
class Solution(object):
    def threeSum(self, nums: list[int]) -> list[list[int]]:
        """Return all unique triplets from *nums* that sum to zero.

        Sort + two-pointer scan: for each anchor index i, walk start/end
        pointers toward each other. O(n^2) time, O(1) extra space.
        (An earlier brute-force recursive version timed out and was removed;
        the local `sum` was also renamed to stop shadowing the builtin.)
        """
        answer = []
        nums.sort()
        for i in range(len(nums) - 2):
            # skip duplicate anchors so each triplet is reported once
            if i > 0 and nums[i] == nums[i - 1]:
                continue
            start, end = i + 1, len(nums) - 1
            while start < end:
                total = nums[i] + nums[start] + nums[end]
                if total < 0:
                    start += 1
                elif total > 0:
                    end -= 1
                else:
                    answer.append([nums[i], nums[start], nums[end]])
                    # skip duplicates of both inner values
                    while start < end and nums[start] == nums[start + 1]:
                        start += 1
                    while start < end and nums[end] == nums[end - 1]:
                        end -= 1
                    start += 1
                    end -= 1
        return answer


if __name__ == '__main__':
    # demo run (was an unconditional import-time call discarding its result)
    print(Solution().threeSum([-1, 0, 1, 2, -1, -4]))
|
[
"david8318@naver.com"
] |
david8318@naver.com
|
9993524839c4476d8027fa33d26ed3d1e85f8d24
|
df4b9113ce0976e422b090d209fdd8a98064a4c9
|
/tests/test_update.py
|
9e7e571f37985bdc2f3dc7ce5c6ced14951aba35
|
[
"MIT"
] |
permissive
|
gonzalo123/dbutils
|
6161e0c6643fd52dd05d2c32c39238154e2f9fcb
|
8a9c7ad7a864da52b0a1fe40e3510a9140210e89
|
refs/heads/master
| 2023-06-08T03:13:37.587338
| 2021-06-30T13:49:28
| 2021-06-30T13:49:28
| 309,414,147
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 417
|
py
|
from tests.sql import SQL_SINGLE
def test_update_one_row(db):
    """Insert a user, update its name by email, and verify the change."""
    email = 'user1@email.com'
    assert 1 == db.insert(
        table='users',
        values={'email': email, 'name': 'user1'})
    assert 1 == db.update(
        table='users',
        data={'name': 'xxxx'},
        identifier={'email': email},
    )
    assert 'xxxx' == db.fetch_one(
        sql=SQL_SINGLE,
        where={'email': email})
|
[
"gonzalo123@gmail.com"
] |
gonzalo123@gmail.com
|
3d13c7a64cd6189498981d30c49c6647587d9fec
|
a95f0dcb34dedcabc60c5e973a88ee128e621e50
|
/L4/Q3.py
|
94f70beb3194bf3ccaed331c549cb20711b6a87b
|
[] |
no_license
|
jacky0313/D002-2019
|
4c55af62ca3b981d15a18daa253a453d458938ba
|
7671d09cb987612249727815e498e05b94831620
|
refs/heads/master
| 2020-06-30T21:51:27.963283
| 2019-08-16T04:01:51
| 2019-08-16T04:01:51
| 200,960,608
| 0
| 0
| null | 2019-08-15T02:33:29
| 2019-08-07T02:54:07
|
Python
|
UTF-8
|
Python
| false
| false
| 217
|
py
|
def factor(x):
    """Return the list of positive factors of *x*, printing each one found.

    x is coerced with int(), so numeric strings like "40" also work.
    """
    x = int(x)
    result = []
    for i in range(1, x + 1):
        if x % i == 0:
            # BUGFIX: the original message misspelled "factor" as "foctor"
            print('%d is a factor of %d' % (i, x))
            result.append(i)
    return result


print(factor(40))
|
[
"noreply@github.com"
] |
noreply@github.com
|
243c82d2ac9b47f93c80d5616f675f92d84dc0ea
|
3a69696a2c5debfb24dfacffa6d3b0e311d0375e
|
/src/tests/test_launcher.py
|
80eca716e26f24ebe46b392f600f9e5be84878bf
|
[
"Apache-2.0"
] |
permissive
|
Build-The-Web/bootils
|
7aeab92debc20258d645a70f5595738653ef46a7
|
8ee88f4d0583352f58fbb89c018e7caef8f07ce3
|
refs/heads/master
| 2021-01-17T09:14:18.317535
| 2016-06-03T13:38:01
| 2016-06-03T13:38:01
| 32,890,374
| 3
| 2
|
Apache-2.0
| 2018-03-04T20:46:22
| 2015-03-25T20:57:07
|
Python
|
UTF-8
|
Python
| false
| false
| 1,951
|
py
|
# *- coding: utf-8 -*-
# pylint: disable=wildcard-import, missing-docstring, no-self-use, bad-continuation
# pylint: disable=invalid-name, redefined-outer-name, too-few-public-methods
""" Test «some_module».
"""
# Copyright © 2015 1&1 Group <btw-users@googlegroups.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, unicode_literals, print_function
import os
import getpass
import pytest
from bootils import launcher
def test_signal_name_to_int():
    # signal2int accepts ints, numeric strings, and signal names that are
    # case-insensitive and may carry a "sig" prefix; SIGPIPE is 13 on POSIX
    assert launcher.signal2int(1) == 1
    assert launcher.signal2int('1') == 1
    assert launcher.signal2int('pipe') == 13
    assert launcher.signal2int('PIPE') == 13
    assert launcher.signal2int('sigPIPE') == 13
def test_signal2int_with_bad_name():
    # unknown signal names are rejected with ValueError
    with pytest.raises(ValueError):
        launcher.signal2int('foobar')
def test_signal2int_with_bad_type():
    # non-str/int inputs are rejected with ValueError as well
    with pytest.raises(ValueError):
        launcher.signal2int(None)
def test_uid_of_root():
    # uid 0 resolves from an int, a numeric string, and the account name
    assert launcher.check_uid(0) == 0
    assert launcher.check_uid('0') == 0
    assert launcher.check_uid('root') == 0
def test_uid_of_current_user():
    # the invoking user's uid should match the owner of their home directory
    uid_home = os.stat(os.path.expanduser('~')).st_uid
    assert launcher.check_uid(getpass.getuser()) == uid_home
def test_gid_of_root():
    # gid 0 resolves from an int, a numeric string, and the group name
    assert launcher.check_gid(0) == 0
    assert launcher.check_gid('0') == 0
    assert launcher.check_gid('root') == 0
def test_gid_of_users():
    # assumes a non-root "users" group exists on the test host
    assert launcher.check_gid('users') > 0
|
[
"jh@web.de"
] |
jh@web.de
|
23f1310448fdd93741a876ef5a68b423f2781715
|
c05c39ff844e37843083bc7e6cc6fe83b377c723
|
/movieApp/models.py
|
0950a53823c304e26177d970d146482533ba6c2a
|
[] |
no_license
|
hn-h/movie-heroku
|
a4f262519f7434cb9af5cbd32f6043097abcf606
|
eeadecd73c81e7db0be09eff0375997f14a35790
|
refs/heads/master
| 2023-06-04T23:27:46.763966
| 2023-05-27T00:27:40
| 2023-05-27T00:27:40
| 380,352,406
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 977
|
py
|
from django.db import models
class Movie(models.Model):
    """Movie record holding the data fields of interest for each movie."""
    # NOTE(review): movieID looks like an external catalogue id stored as text
    # (not the DB primary key) -- confirm against the code that populates it.
    movieID=models.CharField(max_length=30)
    name=models.CharField(max_length=30)
    year=models.IntegerField()
    genres=models.CharField(max_length=30)
    image=models.URLField(max_length=200)  # image URL
    link=models.URLField(max_length=200)   # link to the movie's page
    plot=models.TextField()
    rate=models.FloatField()     # rating value
    votes=models.IntegerField()  # vote count
    def __str__(self):
        return self.name
class MovieIDs(models.Model):
    """Stores the ids of movies selected by a filter, for later lookup."""
    movId=models.CharField(max_length=30)
    def __str__(self):
        return self.movId
# class MovieDB(models.Model):
# movieID=models.CharField(max_length=30)
# year=models.IntegerField()
# genres=models.CharField(max_length=30)
# rate=models.FloatField()
# votes=models.IntegerField()
#
# def __str__(self):
# return self.movieID
|
[
"mr.si7s@hotmail.com"
] |
mr.si7s@hotmail.com
|
0ce02c8bde91f17e1b3ba3634015ce0ff8705b24
|
09d76dda74e7f8ae10a0fbfb4dce7ce1ab6b89b2
|
/sql-alchemy/resources/item.py
|
dc1fc3d56cbff537556d35c8516038d433258f0a
|
[] |
no_license
|
kotternaj/django-fullstack
|
5b02fdd5ae90b5fb7c2bdd0afd1b515533694faa
|
e76b671b4aed623d85681d55f5fe239e75cb499f
|
refs/heads/master
| 2020-03-19T08:45:41.544648
| 2018-06-25T17:47:17
| 2018-06-25T17:47:17
| 136,232,689
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,062
|
py
|
from flask_restful import Resource, reqparse
from flask_jwt import jwt_required
from models.item import ItemModel
class Item(Resource):
parser = reqparse.RequestParser()
parser.add_argument('price',
type=float,
required=True,
help="This field cannot be left blank")
parser.add_argument('store_id',
type=int,
required=True,
help="Every item needs a store id")
@jwt_required()
def get(self,name):
item = ItemModel.find_by_name(name)
if item:
return item.json()
return {'message': 'Item not found'},404
def post(self,name):
if ItemModel.find_by_name(name): # or self.find_by_name
return {'message': 'An item with this name {} already exists'.format(name)},400
data = Item.parser.parse_args()
item = ItemModel(name, **data) # for the sake of brevity we
# changed from item = ItemModel(name, data['price', data['store_id']])
try:
ItemModel.save_to_db(item)
except:
return {'message': 'An error occured inserting the item.'}, 500
return item.json(), 201
def delete(self,name):
item = ItemModel.find_by_name(name)
if item:
item.delete_from_db()
return {'message': 'Item has been deleted'}
def put(self,name):
data = Item.parser.parse_args()
item = ItemModel.find_by_name(name)
if item is None:
item = ItemModel(name, **data) # see line 31
else:
item.price = data['price'] #store_id can be added here too or sub both with (**data)
item.save_to_db()
return item.json()
class ItemList(Resource):
def get(self):
return {'items': [item.json() for item in ItemModel.query.all()]}
# return {'items': list(map(lambda x: x.json(), ItemModel.query.all()))}
# alternative way using lambda
|
[
"john.d.reddock@gmail.com"
] |
john.d.reddock@gmail.com
|
c8a41a9fd2264a175999475b482f5c7509481456
|
353def93fa77384ee3a5e3de98cfed318c480634
|
/.history/week01/homework02/maoyanspiders/maoyanspiders/spiders/movies_20200627214427.py
|
4ddb627414c18159133e722575fe0e29e8aa2e28
|
[] |
no_license
|
ydbB/Python001-class01
|
d680abc3ea1ccaeb610751e3488421417d381156
|
ad80037ccfc68d39125fa94d2747ab7394ac1be8
|
refs/heads/master
| 2022-11-25T11:27:45.077139
| 2020-07-19T12:35:12
| 2020-07-19T12:35:12
| 272,783,233
| 0
| 0
| null | 2020-06-16T18:28:15
| 2020-06-16T18:28:15
| null |
UTF-8
|
Python
| false
| false
| 932
|
py
|
# -*- coding: utf-8 -*-
import scrapy
from maoyanspiders.items import MaoyanspidersItem
# import xlml.etree
from bs4 import BeautifulSoup as bs
class MoviesSpider(scrapy.Spider):
    """Spider for the Maoyan top-100 board (work in progress -- see NOTEs)."""
    name = 'movies'
    allowed_domains = ['maoyan.com']
    start_urls = ['http://maoyan.com/board/4']
    # def parse(self, response):
    #     pass
    def start_requests(self):
        # single fixed board page; the f-string has no placeholders (harmless)
        url = f'https://maoyan.com/board/4'
        print(url)
        yield scrapy.Request(url=url,callback=self.parse)
    def parse(self, response):
        soup = bs(response.text,'html.parser')
        print(soup.text)
        # NOTE(review): this early return makes everything below unreachable --
        # the item-building loop never runs. Remove it once the loop works.
        # NOTE(review): in the dead code below, 'href'.text would raise
        # AttributeError, `url` is not defined in this method, and the item
        # fields are hard-coded placeholders ('name' / "tiome").
        return soup
        for i in soup.find_all('div',attrs={'class' : 'movie-item-info'}):\
            item = MaoyanspidersItem()
            link = i.get('href'.text)
            item['films_name'] = 'name'
            item['release_time'] = "tiome"
            yield scrapy.Request(url=url,callback=self.parse1)
        return item
|
[
"31039587+ydbB@users.noreply.github.com"
] |
31039587+ydbB@users.noreply.github.com
|
2ba56d300f998d10473103a90bf6e35b36c49fec
|
a37b756e34fc39c1237fc68997dbef77df9fa6fc
|
/keras/keras56-61/keras59_3_save_npy.py
|
610895f0e6684a04202781a5817cb6ccdfb0c08d
|
[] |
no_license
|
jvd2n/ai-study
|
e20e38493ad295940a3201fc0cc8061ca9052607
|
a82f7c6d89db532f881c76b553b5ab3eea0bdd59
|
refs/heads/main
| 2023-08-06T03:24:39.182686
| 2021-10-06T14:41:01
| 2021-10-06T14:41:01
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,917
|
py
|
import numpy as np
from tensorflow.keras.preprocessing.image import ImageDataGenerator
# Augmenting generator for the training images; pixels rescaled to [0, 1]
train_datagen = ImageDataGenerator(
    rescale=1./255,
    horizontal_flip=True,
    vertical_flip=True,
    width_shift_range=0.1,
    height_shift_range=0.1,
    rotation_range=5,
    zoom_range=1.2,
    shear_range=0.7,
    fill_mode='nearest'
)
# Test images are only rescaled, never augmented
test_datagen = ImageDataGenerator(rescale=1./255)
xy_train = train_datagen.flow_from_directory(
    '../data/brain/train',
    target_size=(150, 150),
    batch_size=200, # batches of batch_size; large enough to hold the whole set
    class_mode='binary'
)
# Found 160 images belonging to 2 classes.
xy_test = test_datagen.flow_from_directory(
    '../data/brain/test',
    target_size=(150, 150),
    batch_size=200, # batches of batch_size; large enough to hold the whole set
    class_mode='binary',
    shuffle=True
)
# Found 120 images belonging to 2 classes.
print(xy_train)
# <tensorflow.python.keras.preprocessing.image.DirectoryIterator object at 0x000002C3A9DB9780>
print(xy_train[0][0]) # x value
print(xy_train[0][1]) # y value
# print(xy_train[0][2]) # None
print(xy_train[0][0].shape, xy_train[0][1].shape) # (160, 150, 150, 3) (160,)
print(xy_test[0][0].shape, xy_test[0][1].shape) # (120, 150, 150, 3) (120,)
# print(xy_train[31][1]) # y of the last batch; 32 batches * batch size = 160 images total
# print(xy_train[32][1]) # None
# print(type(xy_train)) # <class 'tensorflow.python.keras.preprocessing.image.DirectoryIterator'>
# print(type(xy_train[0])) # <class 'tuple'>
# print(type(xy_train[0][0])) # <class 'numpy.ndarray'>
# Persist the single mega-batch as .npy files for faster reloading later
np.save('./_save/_npy/k59_3_train_x.npy', arr=xy_train[0][0])
np.save('./_save/_npy/k59_3_train_y.npy', arr=xy_train[0][1])
np.save('./_save/_npy/k59_3_test_x.npy', arr=xy_test[0][0])
np.save('./_save/_npy/k59_3_test_y.npy', arr=xy_test[0][1])
|
[
"juhnmayer@gmail.com"
] |
juhnmayer@gmail.com
|
0189d5ce409813dd2d8a5ad0d90d568a0a164afe
|
2c2a9f362c7949cbfa989cf79527e63b1616596b
|
/[WEB][누뗄라][talk2slides]/record/naver_api_utils.py
|
3b6787a75fd9512e9701adc02beb933769c4d826
|
[] |
no_license
|
blesk011/AI-Burning-Day_Result
|
5fa1e3c0029293d8d1d8661ffd4cb1f210aee7e1
|
98d19f4fe043e28158efd94da12d452be77f63d6
|
refs/heads/master
| 2022-10-09T02:50:22.849979
| 2020-02-15T04:11:28
| 2020-02-15T04:11:28
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,209
|
py
|
import sys
import requests
import io
import getpass
def parse_audio(client_id, client_secret, audio_bytes, lang="Kor"):
    """Transcribe raw audio bytes with the Naver Clova STT endpoint.

    Returns the recognised text, or None on a non-200 response.
    """
    # language codes: Kor, Jpn, Eng, Chn
    url = "https://naveropenapi.apigw.ntruss.com/recog/v1/stt?lang=" + lang
    data = io.BytesIO(audio_bytes) # made it to read in bytes object
    headers = {
        "X-NCP-APIGW-API-KEY-ID": client_id,
        "X-NCP-APIGW-API-KEY": client_secret,
        "Content-Type": "application/octet-stream"
    }
    response = requests.post(url, data=data, headers=headers)
    rescode = response.status_code
    if(rescode == 200):
        print (response.json())
        return response.json()['text']
    else:
        print("Error : " + response.text)
        return None
def generate_image(search_text, client_id, client_secret):
    """Search the Naver image API and return a markdown image tag for the top hit.

    Returns None on a non-200 response.
    """
    response = requests.get(
        "https://openapi.naver.com/v1/search/image",
        headers={
            "X-Naver-Client-Id": client_id,
            "X-Naver-Client-Secret": client_secret
        },
        params={'query': search_text.encode('utf8')},
    )
    if response.status_code == 200:
        top_item = response.json()['items'][0]
        return "\n![" + top_item['title'] + ']' + '({})\n'.format(top_item['link'])
    print("Error : " + response.text)
    return None
def generate_background(search_text, client_id, client_secret):
    """Search the Naver image API and return a reveal.js background directive.

    Returns None on a non-200 response.
    """
    response = requests.get(
        "https://openapi.naver.com/v1/search/image",
        headers={
            "X-Naver-Client-Id": client_id,
            "X-Naver-Client-Secret": client_secret
        },
        params={'query': search_text.encode('utf8')},
    )
    if response.status_code == 200:
        top_link = response.json()['items'][0]['link']
        return '\n<!-- .slide: data-background="{}" -->\n'.format(top_link)
    print("Error : " + response.text)
    return None
|
[
"noreply@github.com"
] |
noreply@github.com
|
dcb5697c77e97b629e00c40b32d677c450dc69c1
|
4ba3f44a648b0c08565c281e9ae49ec4ec5c0fd0
|
/tests/test_score_scraper.py
|
1d2cf7d84b28b75b49d323b724c286c76e09464a
|
[] |
no_license
|
UncleMarko922/ScoreScraper
|
ebd93cce83515dfe2a61ef4f3f13d4f8cb451471
|
b93ad80eb5ccea4e913777b5db848b6ac7483bb6
|
refs/heads/master
| 2020-04-16T06:07:05.088571
| 2019-01-12T01:38:01
| 2019-01-12T01:38:01
| 165,333,476
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 597
|
py
|
from score_alerts.scrapers.score_scraper import parse_results
def test_announce_results():
    # two teams with two scores -> results should be announced
    expected = True
    mock_team_data = (['team 1', 'team 2'], ['300', '0'])
    result = parse_results(mock_team_data)
    assert result == expected
def test_announce_results_no_scores():
    # teams but no scores -> nothing to announce
    expected = False
    mock_team_data = (['team 1', 'team 2'], [])
    result = parse_results(mock_team_data)
    assert result == expected
def test_announce_results_no_teams():
    # scores but no teams -> nothing to announce
    expected = False
    mock_team_data = ([], ['300', '0'])
    result = parse_results(mock_team_data)
    assert result == expected
|
[
"markopavlovic922@outlook.com"
] |
markopavlovic922@outlook.com
|
222cb6f389f53e330640ae89c5008911a6264ddc
|
52cdf6669748d6f7b0572ee8f6308f6291ad6425
|
/macros/DIFFICULTY_LEVEL.py
|
1b5ba0a1635b25af65b8189c7ac54658eac53b00
|
[] |
no_license
|
Bedpaw/roguelike-game
|
d92151d95c0ebec82cc340a46484fefeded2b408
|
3ecd1f6826d34f87cf9a2eb9e8d470adc2f7a4c2
|
refs/heads/master
| 2022-11-26T00:28:30.743710
| 2020-08-05T16:49:15
| 2020-08-05T16:49:15
| 285,349,930
| 0
| 0
| null | 2020-08-05T16:47:52
| 2020-08-05T16:44:52
|
Python
|
UTF-8
|
Python
| false
| false
| 48
|
py
|
# Difficulty multipliers (NORMAL = 1 is the baseline).
# NOTE(review): presumably consumers scale game parameters by these values --
# confirm whether >1 makes the game harder everywhere they are applied.
IMPOSSIBLE = 2
HARD = 1.2
NORMAL = 1
EASY = 0.8
|
[
"bedpaw97@gmail.com"
] |
bedpaw97@gmail.com
|
bd15bafc40367b2af7347e1b7292f75b5842ba92
|
859297026361fd0cbd519f4d9bd862a487f69289
|
/check_n7k_route.py
|
0e1b5bb61a39f3dbbe4a47edd4807cbd218271a3
|
[
"Apache-2.0"
] |
permissive
|
xmanta/pythontesting
|
d14182923608aa4d6156ba173e6ef6f3940c9d28
|
d2ed5be63de344cb689fe00c04cf75f89d91f28a
|
refs/heads/master
| 2022-11-17T13:27:00.941022
| 2020-07-02T00:36:37
| 2020-07-02T00:36:37
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,535
|
py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time : 2019/5/27 15:44
# @Author : Marion
import paramiko
import re
class CheckN7kRoute:
    """Fetch a Nexus 7k's default-route next-hop over SSH.

    conn() opens the SSH session, get_resutl() runs a command and returns its
    stdout, parse_result() extracts the "*via x.x.x.x" next-hop from
    "show ip route 0.0.0.0".
    """
    def __init__(self, ip, username, passwd, port=22):
        self.ip = ip
        self.user = username
        self.passwd = passwd
        self.port = port
    def conn(self):
        """Create an SSH connection; returns the client, or the exception on failure."""
        try:
            ssh = paramiko.SSHClient()
            ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
            ssh.connect(self.ip, port=self.port, username=self.user, password=self.passwd, look_for_keys=False,
                        timeout=None, )
            return ssh
        except Exception as e:
            return e
    def get_resutl(self, command):
        """Run *command* over SSH and return its raw stdout bytes.

        (Method name kept as-is -- sic -- to preserve the public interface.)
        """
        ssh = self.conn()
        if isinstance(ssh, paramiko.SSHClient):
            stdin, stdout, stderr = ssh.exec_command(command)
            result = stdout.read()
            ssh.close()
            return result
        else:
            print(type(ssh), ssh)
    def parse_result(self):
        """Parse the next-hop IP after "*via " out of the default-route output."""
        # BUGFIX: the dot in the quad pattern was unescaped ("\d+." matched any
        # separator character); escape it so only dotted-quad addresses match.
        # Raw string avoids the invalid-escape deprecation as well.
        pattern = r'(?<=via\s)(?:\d+\.){3}\d+'
        regx = re.compile(pattern)
        cmd = 'show ip route 0.0.0.0'
        tmp_result = self.get_resutl(cmd).strip().decode()
        result = regx.search(tmp_result)
        if result:
            return result.group()
if __name__ == '__main__':
    # Ad-hoc smoke test; replace the placeholders with real device credentials.
    a = CheckN7kRoute('x.x.x.x', username='xxx', passwd='xxx')
    print(a.parse_result())
|
[
"yangc-k@glodon.com"
] |
yangc-k@glodon.com
|
297233c28a62383162933f0b8921d932ebce1b16
|
a48524f7fec6168096cff25023f302cd1f59d287
|
/deploy/players_by_date_with_stats.py
|
8ad01ef47be3ade45c03d66663be3099e34bdcf4
|
[] |
no_license
|
Mastakey/python-hockeyscraper
|
b3cb0987b505afd2fd499c744719d29cc90e1f61
|
8774d19e713ad7c0fabf4aef0286c67bb75190ad
|
refs/heads/master
| 2021-01-10T16:16:58.858300
| 2015-11-09T18:53:43
| 2015-11-09T18:53:43
| 45,067,460
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,225
|
py
|
from __future__ import division
from operator import itemgetter
from lib.SQ3Reader import SQ3Reader
from jinja2 import Environment, PackageLoader
env = Environment(loader=PackageLoader('tmpl', ''))
template = env.get_template('body/players_by_date_with_stats.html')
import pdb
import time
import os
import math
def getDkdata(csv):
    """Parse a DraftKings salary CSV into a list of dicts.

    Each data row becomes {"position", "player", "salary"} from the first
    three columns; the header row is skipped. (Parameter name `csv` kept for
    interface compatibility, hence the aliased module import.)

    BUGFIXES vs the original comma-split parser: quoted fields containing
    commas (e.g. "Smith, Jr.") now parse correctly, the last field no longer
    keeps a trailing newline, and the file handle is closed via `with`.
    """
    import csv as _csv  # local alias: the parameter shadows the module name
    dkdata = []
    with open(csv, 'r', newline='') as f:
        reader = _csv.reader(f)
        next(reader, None)  # skip the header row
        for row in reader:
            if len(row) < 3:
                continue  # tolerate blank/short lines instead of IndexError
            dkdata.append({
                'position': row[0],
                'player': row[1],
                'salary': row[2],
            })
    return dkdata
def getPlayerSalary(player, dkdata):
    """Return the salary string for *player*, or None if not listed."""
    return next(
        (entry['salary'] for entry in dkdata if entry['player'] == player),
        None,
    )
def getPlayerPosition(player, dkdata):
    """Return the roster position for *player*, or None if not listed."""
    for entry in dkdata:
        if entry['player'] == player:
            return entry['position']
    return None
def getSeconds(myStr):
    """Convert a "MM:SS" string to a total second count."""
    parts = myStr.split(':')
    return int(parts[0]) * 60 + int(parts[1])
def getTimeStr(seconds):
    """Format a second count as "M:SS".

    BUGFIX: the seconds part is now zero-padded -- the original rendered
    125 as "2:5" instead of "2:05". (getSeconds() still round-trips both.)
    """
    myMin = int(seconds / 60)
    mySec = seconds % 60
    return '%d:%02d' % (myMin, mySec)
def getPlayerSummaryStats(player, team, player_data):
    """Aggregate per-game rows for one player into a season-summary dict.

    NOTE(review): raises ZeroDivisionError when player_data is empty --
    confirm callers always pass at least one game.
    """
    player_stats = {}
    GP = 0
    goals = 0
    assists = 0
    points = 0
    shots = 0
    time_on_ice_s = 0
    # blocks/shp/so_goals are never accumulated below -- always reported as 0
    blocks = 0
    shp = 0
    so_goals = 0
    dk_points = 0
    dk_points_per_min = 0  # NOTE(review): computed nowhere; dead variable
    dk_sd_sum = 0
    dk_sd = 0
    for data in player_data:
        GP += 1
        goals += data['goals']
        assists += data['assists']
        points += data['points']
        shots += data['shots']
        time_on_ice_s += getSeconds(data['time_on_ice'])
        # per-game DK points with blocks/shp forced to 0 (not in the data)
        temp_dk_points = getDkpoints(data['goals'], data['assists'], data['shots'], 0, 0)
        dk_points += temp_dk_points
        dk_sd_sum += temp_dk_points**2
    #player_stats['player'] = player_data['player']
    #player_stats['team'] = player_data['team']
    # NOTE(review): sqrt(mean of squares) is the RMS, not a standard
    # deviation (the mean is never subtracted) -- confirm intended formula.
    dk_sd = math.sqrt(dk_sd_sum/GP)
    time_on_ice_s = int(time_on_ice_s/GP)
    fppg = dk_points/GP
    player_stats['player'] = player
    player_stats['team'] = team
    player_stats['GP'] = GP
    player_stats['goals'] = goals
    player_stats['assists'] = assists
    player_stats['points'] = points
    player_stats['shots'] = shots
    player_stats['time_on_ice'] = getTimeStr(time_on_ice_s)
    player_stats['time_on_ice_s'] = time_on_ice_s
    player_stats['blocks'] = blocks
    player_stats['shp'] = shp
    player_stats['so_goals'] = so_goals
    player_stats['dk_points'] = dk_points
    player_stats['fppg'] = fppg
    player_stats['variance'] = dk_sd
    return player_stats
def getPlayersFromTeam(player_list, teamstr):
    """Return [{'player': ..., 'team': ...}] entries for every player on
    `teamstr`, preserving the order of `player_list`."""
    return [{'player': p['player'], 'team': p['team']}
            for p in player_list if p['team'] == teamstr]
def getDkpoints(goals, assists, shots, blocks, shp):
    """Compute DraftKings NHL fantasy points from counting stats.

    Goals 3.0, assists 2, shots 0.5, blocks 0.5, short-handed points 0.2,
    plus a 1.2 bonus for a hat trick (3+ goals).
    """
    total = goals * 3.0
    total += assists * 2
    total += shots * 0.5
    total += blocks * 0.5
    total += shp * 0.2
    if goals > 2:
        total += 1.2  # hat-trick bonus
    return total
def outputToTemplate(mylist, myFile):
    """Render the module-level `template` with player_stats=mylist and write
    the result to `myFile`.

    Fix: the original opened the file without ever closing it; a context
    manager guarantees the handle is closed even if rendering raises.
    """
    with open(myFile, "w") as f:
        f.write(template.render(player_stats=mylist))
# --- Script entry: build per-position DraftKings summary pages for one slate.
# NOTE(review): Python 2 module (print statement on the last line).
myYear = '2016' #2015-2016 season
myDate = '2015-10-27' #current date
#GET All players for the year
myPlayers = []  # NOTE(review): assigned but never used below
sq3reader = SQ3Reader('db/boxscores.db', {'logging':'on'})
# NOTE(review): all SQL below is built by string concatenation. The values
# here are script constants, but parameterized queries would be safer,
# especially for playerName further down.
player_list = sq3reader.executeQueryDict("""SELECT distinct d.player, d.team
from boxscore_data d, boxscore b WHERE d.boxscore = b.id
AND b.season='"""+myYear+"""'
""")
#print len(player_list)
boxscores = sq3reader.executeQueryDict("""SELECT id, vteamstr, hteamstr
from boxscore where gamedate='"""+myDate+"""'
""")
#print len(boxscores)
# Collect every player on either roster of today's games.
todays_players = []
for boxscore in boxscores:
    #print boxscore['vteamstr']+" VS "+boxscore['hteamstr']
    vteam_players = getPlayersFromTeam(player_list, boxscore['vteamstr'])
    hteam_players = getPlayersFromTeam(player_list, boxscore['hteamstr'])
    #print len(vteam_players)
    todays_players.extend(vteam_players)
    todays_players.extend(hteam_players)
#print len(todays_players)
player_stats = []
center_stats = []
wing_stats = []
defence_stats = []
#DK data
dkdata = getDkdata('input/10272015_DKSalaries.csv')
for p in todays_players:
    player = {}  # NOTE(review): assigned but never used
    # Escape single quotes for the SQL string literal below.
    playerName = p['player'].replace('\'', '\'\'')
    salary = getPlayerSalary(p['player'], dkdata)
    position = getPlayerPosition(p['player'], dkdata)
    player_data = sq3reader.executeQueryDict("""SELECT d.id, d.boxscore,
    d.team, d.player, d.shots, d.goals, d.assists, d.points,
    d.time_on_ice FROM boxscore_data d, boxscore b
    WHERE d.player='"""+playerName+"""' AND
    b.id=d.boxscore AND b.season='2016';
    """)
    data = getPlayerSummaryStats(p['player'], p['team'], player_data)
    data['position'] = position
    data['salary'] = salary
    #print type(salary)
    # salary is a CSV string when the player is in the DK file, else None.
    if type(salary) == str:
        data['fppgsal'] = (data['fppg']*1000)/float(salary)
    else:
        data['fppgsal'] = data['fppg']
    # Only players with a DK salary make it into the output tables.
    if salary != None:
        player_stats.append(data)
        if position == "LW" or position == "RW":
            wing_stats.append(data)
        elif position == "C":
            center_stats.append(data)
        elif position == "D":
            defence_stats.append(data)
# Sort each table by average time on ice, descending.
player_stats_sorted = sorted(player_stats, key=itemgetter('time_on_ice_s'), reverse=True)
center_stats_sorted = sorted(center_stats, key=itemgetter('time_on_ice_s'), reverse=True)
wing_stats_sorted = sorted(wing_stats, key=itemgetter('time_on_ice_s'), reverse=True)
defence_stats_sorted = sorted(defence_stats, key=itemgetter('time_on_ice_s'), reverse=True)
outputToTemplate(center_stats_sorted, 'out/10272015_center.html')
outputToTemplate(wing_stats_sorted, 'out/10272015_wing.html')
outputToTemplate(defence_stats_sorted, 'out/10272015_defence.html')
print template.render(player_stats=player_stats_sorted)
|
[
"HanK@corp.espn.pvt"
] |
HanK@corp.espn.pvt
|
45b2bf92770339b2ab99a1f99e248e3ef1a3feaf
|
b01d704733d042704bb5f2f14cad6eda58f2fcd9
|
/mysite/polls/admin.py
|
937cd67da159040345c6e8f6951cf55ea5858445
|
[] |
no_license
|
TarasTD/django_test
|
093126d22a80c3c684c166403a2dfa24e4a23c44
|
c0cbfa5364d493b94c67cff0a2f8385e5b86f160
|
refs/heads/master
| 2020-05-18T10:00:01.854105
| 2013-11-08T16:43:24
| 2013-11-08T16:43:24
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 546
|
py
|
from django.contrib import admin
from polls.models import Poll, Choice
class ChoiceInLine(admin.TabularInline):
    """Inline editor for Choice rows on the Poll admin page; shows 3 extra
    blank rows for adding new choices."""
    model = Choice
    extra = 3
class PollAdmin(admin.ModelAdmin):
    """Admin configuration for Poll: question + collapsible date section,
    with Choice rows edited inline."""
    fieldsets = [
        (None, {'fields': ['question']}),
        ('Date information', {'fields': ['pub_date'], 'classes': ['collapse']}),
    ]
    inlines = [ChoiceInLine]
    # Changelist: visible columns, right-hand filter, search box, date drill-down.
    list_display = ('question','pub_date', 'was_published_recently')
    list_filter = ['pub_date']
    search_fields = ['question']
    date_hierarchy = 'pub_date'
# Expose Poll (with its inline choices) in the Django admin.
admin.site.register(Poll, PollAdmin)
|
[
"tarasdmytrus@gmail.com"
] |
tarasdmytrus@gmail.com
|
1ed91a8e5264f4a9bf8af550d1865238475211f4
|
a8a5dca8dd935ef62030eba899da6b811ef97454
|
/SENG590/lib/pos_extractor.py
|
0a919acd5ce3e886d5c59b61365dd98fc2b7bda2
|
[] |
no_license
|
jordanell/School
|
5d2f1430c374c4ca54d02eb3de9d3b9e56279d79
|
2bdda6f4d366862ad541a3d38ac751ff41e12fbb
|
refs/heads/master
| 2016-09-11T06:55:19.599619
| 2014-04-15T00:50:08
| 2014-04-15T00:50:08
| 2,425,861
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 769
|
py
|
from topia.termextract import tag
from topia.termextract import extract
class pos_extractor:
    """Part-of-speech extraction wrapper around topia.termextract."""
    # NOTE(review): these class-level instances are created at import time and
    # are shared until __init__ rebinds per-instance attributes.
    tagger = tag.Tagger()
    extractor = extract.TermExtractor()
    text = None

    def __init__(self, text):
        # Fresh tagger/extractor per instance; tokenize immediately if text
        # was provided.
        self.tagger = tag.Tagger()
        self.tagger.initialize()
        self.extractor = extract.TermExtractor(self.tagger)
        self.text = text
        if self.text is not None:
            self.tagger.tokenize(self.text)

    def set_text(self, text):
        # Replace the working text and re-tokenize it.
        self.text = text
        self.tagger.tokenize(self.text)

    def extract_pos(self):
        # Returns None when no text has been set.
        # NOTE(review): calls the tagger object directly -- presumably relies
        # on Tagger.__call__; confirm against topia's API.
        if self.text is not None:
            return self.tagger(self.text)
        return None
def main():
    # Smoke test (Python 2 print statements).
    pos = pos_extractor("I think that setting mind control to 9 mana would be good for preists.")
    print pos.extract_pos()
    # NOTE(review): pos_extractor defines no extract_keywords() method --
    # this line raises AttributeError at runtime.
    print pos.extract_keywords()

if __name__ == '__main__':
    main()
|
[
"jordan.ell7@gmail.com"
] |
jordan.ell7@gmail.com
|
2fa1259867d2b816faef8df2d0a6e2011167016f
|
308573217aefbd4b467847dc161e16c246808647
|
/tools/member-payment-report.py
|
7299dc16824a8dd6937ed46577d7e91fc4631967
|
[] |
no_license
|
Denhac/ApplicationAPI
|
bc8427e924b726a7bba55ecff90e50d09d3c7bed
|
9c4435f4f0ea288cf53c098108e8c4b49b145de8
|
refs/heads/master
| 2020-12-25T16:54:25.398814
| 2017-07-19T12:23:00
| 2017-07-19T12:23:00
| 26,699,832
| 5
| 1
| null | 2015-10-25T21:29:46
| 2014-11-16T01:18:02
|
Python
|
UTF-8
|
Python
| false
| false
| 626
|
py
|
#!/usr/bin/python
# Python includes
import sys
# Our own includes go here
# insert() makes our path the first searched entry, as opposed to append()
sys.path.insert(0, '/var/www/denhacpkg')
from DenhacDbLibrary import DenhacMemberDb
def xstr(s):
    """Return str(s), or the empty string when s is None."""
    return '' if s is None else str(s)
def istr(s):
    """Return str(s), or '0' when s is None (numeric-style default)."""
    return '0' if s is None else str(s)
# Script entry: print a CSV-style balance report for all active members.
# NOTE(review): Python 2 print statements.
memberDb = DenhacMemberDb()
print "ID, Name, Balance"
for member in memberDb.getActiveMembers():
    bal = memberDb.getBalance(member['id'])
    print xstr(member['id']) + ', ' + xstr(member['firstName']) + ' ' + xstr(member['lastName']) + ', ' + istr(bal)
|
[
"anthony.stonaker@gmail.com"
] |
anthony.stonaker@gmail.com
|
9de6ca0a08f48294cb58547043edba688eeb8506
|
9d7cc6f6a8190a7ac6e8924db8a11ac64ba2c1ad
|
/trydjango/urls.py
|
e6d4e3bd981e52c8a52256e0e58e27fc7972b071
|
[] |
no_license
|
ManasVardhan/SampleDjangoProject
|
fa8bee546d235a256ff18486cb5e4ffcc5aacbbd
|
d683a260c4ed2a7feff7f168b645116c2c977943
|
refs/heads/main
| 2023-03-26T19:17:56.587921
| 2021-03-27T10:56:44
| 2021-03-27T10:56:44
| 352,048,789
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,093
|
py
|
"""trydjango URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.0/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path
from pages.views import *
from dataentry.views import gpaView, createView
# URL table: admin site, three page views, and the grade-entry views.
urlpatterns = [
    path('admin/', admin.site.urls),
    path('', home_view, name='home'),
    path('about', about_view, name='about'),
    path('child', child_view, name='child'),
    path('getgrades', gpaView, name='gpa'),
    # NOTE(review): unusual 'getgrades__' route prefix -- confirm intended.
    path('getgrades__/<int:id>/', createView, name='gpa_as_form')
]
|
[
"noreply@github.com"
] |
noreply@github.com
|
8276ea44c6c7b1aee03afa234cd46dcbdb7bda79
|
4abaf70e97cfe05eac6a9947121f553a23c3b8a3
|
/Day 2/solution2.py
|
d6b203dfa0c21d7e2d3c0826d459d7956924d0a9
|
[] |
no_license
|
anthonynoakes/AoC-2017
|
07f446164f695181094fa9f6885ded4876f2a06e
|
2141033ade126b14eda0e2f38b9d3ff0f20b0578
|
refs/heads/master
| 2021-08-30T22:49:29.019306
| 2017-12-19T17:56:44
| 2017-12-19T17:56:44
| 113,075,686
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 656
|
py
|
#For each row
# find the only two numbers in each row where one evenly divides the other
# find those numbers on each line, divide them, and add up each line's result
import re
import sys
# NOTE(review): Python 2 script (print statement, integer division below);
# `input` shadows the builtin.
input = open('Day 2\input.txt', 'r').readlines()
total = 0
for line in input:
    x = 0
    y = 0
    segs = re.split(r'\t', line)
    # Find the pair (n, m) where n evenly divides m.
    for i, nseg in enumerate(segs):
        n = int(nseg)
        for p, mseg in enumerate(segs):
            if i != p:
                m = int(mseg)
                # NOTE(review): `m / n > 0` is always true for positive
                # entries; the effective test is m % n == 0.
                if(m / n > 0 and m % n == 0):
                    x = n
                    y = m
    # Divide the larger of the pair by the smaller and accumulate.
    a = x if x > y else y
    b = x if x < y else y
    total += a / b
print total
|
[
"asn.noakes@gmail.com"
] |
asn.noakes@gmail.com
|
a1fc96319df2096c7e0f64f5737540c26e0037f8
|
7eed749ce5278f857dca84e24b8013c57bb09329
|
/blog/migrations/0001_initial.py
|
6e413e99188e0560a9c5c908bf5c951fe25df3a1
|
[] |
no_license
|
zdomcia/djangogirls
|
5ab79125255b575ef0ba90e5efb64d993337950c
|
4f3a82e190f8c50ba9bc2f5c69f020d8ab82587f
|
refs/heads/master
| 2021-04-24T17:58:42.150781
| 2014-11-11T16:10:57
| 2014-11-11T16:10:57
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 976
|
py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import django.utils.timezone
from django.conf import settings
class Migration(migrations.Migration):
    """Initial migration: create blog.Post (title, text, dates, author FK)."""

    dependencies = [
        # Post.author references the project's (swappable) user model.
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='Post',
            fields=[
                ('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True, serialize=False)),
                ('title', models.CharField(max_length=200)),
                ('text', models.TextField()),
                ('created_date', models.DateTimeField(default=django.utils.timezone.now)),
                ('published_date', models.DateTimeField(null=True, blank=True)),
                # NOTE(review): pre-Django-2.0 style ForeignKey (no on_delete).
                ('author', models.ForeignKey(to=settings.AUTH_USER_MODEL)),
            ],
            options={
            },
            bases=(models.Model,),
        ),
    ]
|
[
"dominikazajac00@gmail.com"
] |
dominikazajac00@gmail.com
|
8ba2cbaceeb6ecd1f79a0aaa8ad6322d5c9d3954
|
c489a910d1533f0e03a86f3cc483fdba352dc481
|
/tests/platform_tests/cli/test_show_chassis_module.py
|
a16ca7d6baf021ba84013997b905255f0fcb61c9
|
[
"Apache-2.0",
"LicenseRef-scancode-generic-cla"
] |
permissive
|
chenkelly/sonic-mgmt
|
1b6dab6e34dac2ac8cb475c4ded1329e53ad31d4
|
7bf848d84af017b0275f75c3a383b6fc63f0ab43
|
refs/heads/master
| 2023-03-08T11:14:22.071818
| 2023-03-02T02:26:05
| 2023-03-02T02:26:05
| 212,235,644
| 0
| 1
|
NOASSERTION
| 2019-10-02T01:53:59
| 2019-10-02T01:53:58
| null |
UTF-8
|
Python
| false
| false
| 3,664
|
py
|
import logging
import pytest
from tests.common.helpers.assertions import pytest_assert
from util import get_field_range, get_fields, get_skip_mod_list
logger = logging.getLogger('__name__')
pytestmark = [
pytest.mark.topology('t2')
]
CMD_SHOW_CHASSIS_MODULE = "show chassis modules"
def parse_chassis_module(output, expected_headers):
    """Parse tabular `show chassis modules ...` CLI output.

    output: list of lines -- a header row, a separator row, then one row per
    module. Asserts every name in expected_headers is present, then returns
    {module_name: {header: value}} covering all non-name columns.
    """
    assert len(output) > 2
    ranges = get_field_range(output[1])
    headers = get_fields(output[0], ranges)
    for wanted in expected_headers:
        pytest_assert(wanted in headers, "Missing header {}".format(wanted))
    parsed = {}
    for row in output[2:]:
        values = get_fields(row, ranges)
        entry = {}
        for col, header in enumerate(headers[1:], start=1):
            entry[header] = values[col]
        parsed[values[0]] = entry
    return parsed
def test_show_chassis_module_status(duthosts, enum_rand_one_per_hwsku_hostname):
    """Verify `show chassis modules status`: modules on the skip list must
    report Empty, all others must report Online."""
    cmd = " ".join([CMD_SHOW_CHASSIS_MODULE, "status"])
    logger.info("verifying output of cli command {}".format(cmd))
    duthost = duthosts[enum_rand_one_per_hwsku_hostname]
    headers = ["Name", "Description", "Physical-Slot", "Oper-Status", "Admin-Status"]
    skipped = get_skip_mod_list(duthost)
    parsed = parse_chassis_module(duthost.command(cmd)['stdout_lines'], headers)
    for name in parsed.keys():
        status = parsed[name]['Oper-Status']
        expected = 'Empty' if name in skipped else 'Online'
        pytest_assert(status == expected,
                      "Oper-status for slot {} should be {} but it is {}".format(
                          name, expected, status))
def test_show_chassis_module_midplane_status(duthosts, enum_rand_one_per_hwsku_hostname):
    """
    @summary: Verify output of `show chassis-module midplane-status`
    """
    cmd = " ".join([CMD_SHOW_CHASSIS_MODULE, "midplane-status"])
    logger.info("verifying output of cli command {}".format(cmd))
    duthost = duthosts[enum_rand_one_per_hwsku_hostname]
    headers = ["Name", "IP-Address", "Reachability"]
    reach_by_module = parse_chassis_module(duthost.command(cmd)['stdout_lines'], headers)
    skip_list = get_skip_mod_list(duthost, ['line-cards', 'supervisor'])
    for name in reach_by_module:
        reachable = reach_by_module[name]['Reachability']
        if name not in skip_list:
            pytest_assert(reachable == "True",
                          "midplane reachability of line card {} expected true but is {}".format(name,
                                                                                                 reachable))
        elif "LINE-CARD" in name:
            # A chassis may be logically partitioned: skipped line cards can
            # belong to another chassis, so no assumption is made about them.
            logger.info("skip checking midplane status for {} since it is on skip_mod_list".format(name))
        else:
            pytest_assert(reachable == "False",
                          "reachability of {} expected false but is {}".format(name, reachable))
|
[
"noreply@github.com"
] |
noreply@github.com
|
f9d1afa2bf4f71a2e8882d5141f1231acf40c7a2
|
afd79a82801dbe91071a3b72aa0f27daf3e61378
|
/settings/__init__.py
|
8cf8dd9e3b3dca5a239012a7f1d318173b0ca066
|
[] |
no_license
|
crazyministr/testForm
|
668ff9e051f797d6d53a20baf87e7c782923edce
|
fccf7a8f7659cfc799884142084787b7226e8188
|
refs/heads/master
| 2016-09-09T21:17:32.729080
| 2015-03-18T17:35:04
| 2015-03-18T17:35:04
| 32,471,626
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,550
|
py
|
"""
Django settings for testForm project.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.7/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
# Helper: absolute path relative to this settings package.
# NOTE(review): PEP 8 prefers a def over a named lambda.
here = lambda *x: os.path.join(os.path.abspath(os.path.dirname(__file__)), *x)

# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.7/howto/deployment/checklist/

# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): this key is committed to version control; rotate it and load
# from the environment or local_settings for any real deployment.
SECRET_KEY = 'cye+26ram(du*q50b&(8v=w6x-lg$%6@%6p&q24d6gw(^fu462'

# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
ALLOWED_HOSTS = []

# Application definition
INSTALLED_APPS = (
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'main',
)
MIDDLEWARE_CLASSES = (
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'urls'
WSGI_APPLICATION = 'wsgi.application'

# Database
# https://docs.djangoproject.com/en/1.7/ref/settings/#databases
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
    }
}

# Internationalization
# https://docs.djangoproject.com/en/1.7/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True

# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.7/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT = here('..', 'nginx_static')
STATICFILES_DIRS = (
    here('..', 'static'),
)
STATICFILES_FINDERS = (
    'django.contrib.staticfiles.finders.FileSystemFinder',
    'django.contrib.staticfiles.finders.AppDirectoriesFinder',
#    'django.contrib.staticfiles.finders.DefaultStorageFinder',
)
TEMPLATE_DIRS = (
    here('..', 'templates'),
)

# Optional machine-local overrides; absence is not an error.
# NOTE(review): wildcard import can silently override anything above.
try:
    from local_settings import *
except ImportError:
    pass
|
[
"crazyministr@gmail.com"
] |
crazyministr@gmail.com
|
ce54fc26fa83f4b27c13a5b0b4941e208f6a4ecc
|
115566febb1c43c0cf6054c375ee1fa0d64c43f3
|
/stochastic_block_model.py
|
f226321a5c4bcff15224b6748bd5e7e670c3305a
|
[] |
no_license
|
gottacatchenall/stirring_it_up
|
01c049e90ef43cf14a5181158f136c1dca0df3f5
|
c76b1de7b4fe72231e973715768d03c0f72ca994
|
refs/heads/master
| 2022-11-14T05:21:57.966412
| 2020-06-29T22:39:31
| 2020-06-29T22:39:31
| 275,940,828
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,614
|
py
|
import numpy as np
def get_community_ids(num_indivs, community_size):
    """Return one community id per individual: [0]*community_size followed by
    [1]*community_size, and so on.

    NOTE(review): when num_indivs is not a multiple of community_size the
    list is shorter than num_indivs (leftover individuals get no id) --
    presumably callers use divisible sizes; confirm.
    """
    num_communities = int(np.floor(num_indivs/community_size))
    ids = []
    for cid in range(num_communities):
        ids.extend([cid] * community_size)
    return ids
def initialize_sbm_probability_matrix(parameters):
    """Build the n x n edge-probability matrix of a stochastic block model.

    Diagonal entries stay 0 (no self-loops); same-community pairs get the
    within-community probability (clamped to 1.0), cross-community pairs get
    the between-community probability.

    NOTE(review): p_between is not clamped to 1.0 the way p_within is --
    confirm the parameter ranges keep it a valid probability.
    """
    num_indivs = parameters['num_indivs']
    community_size = parameters['community_size']
    edge_density = parameters['edge_density']
    modularity = parameters["community_modularity"]
    num_communities = int(np.floor(num_indivs/community_size))
    community_ids = get_community_ids(num_indivs, community_size)
    p_between = (edge_density * num_indivs * num_communities) / (modularity * community_size**2)
    p_within = (edge_density * num_indivs * num_communities) / (community_size**2)
    matrix = np.zeros((num_indivs, num_indivs))
    for row in range(num_indivs):
        for col in range(num_indivs):
            if row == col:
                matrix[row, col] = 0.0
            elif community_ids[row] == community_ids[col]:
                matrix[row, col] = min(1.0, p_within)
            else:
                matrix[row, col] = p_between
    return matrix
def draw_from_sbm(probability_matrix):
    """Sample an adjacency matrix: every entry is an independent Bernoulli
    draw with its entry's probability (entries are drawn in row-major order,
    including the diagonal)."""
    size = len(probability_matrix)
    rows = []
    for r in range(size):
        rows.append([np.random.binomial(1, probability_matrix[r][c])
                     for c in range(size)])
    return np.array(rows, dtype=float)
|
[
"mdcatchen@gmail.com"
] |
mdcatchen@gmail.com
|
85cd6a7fc3fd30414c7549565e1cf56245d15e74
|
29841982e9d3a70d24faa6bed2397d07419fb409
|
/aula_5/dashboard/routes/auth.py
|
572c567ffdc274cd6a81933ea061133c5086aa66
|
[] |
no_license
|
HiroEu/python-521
|
40d6950b19b6c5a9850177739b3e72be0c0e0ae7
|
d279cb5dac771e11681cdfa91bfe363a2fbaa356
|
refs/heads/master
| 2022-02-24T15:43:49.085686
| 2022-02-08T19:08:58
| 2022-02-08T19:08:58
| 202,603,500
| 0
| 0
| null | 2021-06-02T00:20:44
| 2019-08-15T19:58:07
|
Python
|
UTF-8
|
Python
| false
| false
| 1,593
|
py
|
import flask
import ldap3
import logging
blueprint = flask.Blueprint('auth', __name__)

@blueprint.route('/sign-in', methods=[ 'GET', 'POST' ])
def sign_in():
    """Render the sign-in page; on POST, check credentials and log the result.

    NOTE(review): no session is set and no redirect happens on success --
    the commented-out block below suggests an LDAP flow was the eventual goal.
    """
    context = {
        'title': 'Python | Sysadmin',
    }
    # SECURITY NOTE(review): hard-coded credentials compared in plain text.
    EMAIL = 'admin@admin'
    PASSWORD = 'admin'
    if flask.request.method == 'POST':
        email = flask.request.form.get('email')
        password = flask.request.form.get('password')
        if email == EMAIL and password == PASSWORD:
            logging.info('Usuário logado')
        else:
            # NOTE(review): no separator between message and email; lazy
            # %-style logging args would be preferable to concatenation.
            logging.warning('Falha na autenticação' + email)
    # if flask.request.method == 'POST':
    #     form = flask.request.form
    #     email = form.get('email')
    #     password = form.get('password')
    #     conn = ldap3.Connection(
    #         ldap3.Server('ldap://127.0.0.1'),
    #         'cn=admin,dc=dexter,dc=com,dc=br',
    #         '4linux'
    #     )
    #     conn.bind()
    #     conn.search(
    #         'uid={},dc=dexter,dc=com,dc=br'.format(email),
    #         '(objectClass=person)',
    #         attributes=[
    #             'sn',
    #             'userPassword'
    #         ]
    #     )
    #     user = None
    #     try:
    #         user = conn.entries[0]
    #     except IndexError:
    #         return flask.redirect('/sign-in')
    #     saved_password = user.userPassword[0].decode()
    #     if saved_password == password:
    #         flask.session['is-logged'] = True
    #         return flask.redirect('/docker')
    return flask.render_template('sign-in.html', context=context)
|
[
"leonardo.mendes@4linux.com.br"
] |
leonardo.mendes@4linux.com.br
|
e5bd94b2f301e8eb6477bc04c154577734d13f27
|
6e50ad3f27670265ffde0f13b062ba339eb047cf
|
/python_41a/final_exam-2 (1).py
|
f60e3a531c1fd0fc11dcf4ee161d2c3716d63b6e
|
[] |
no_license
|
syuta-sugawara/41a_python
|
7a8f7b97ada70fe03079a73d5272c2dc8f6b76d9
|
9cc2586fae7eda9d4ae7fa1d794584f62d06509d
|
refs/heads/master
| 2020-03-22T02:32:03.885684
| 2018-07-02T01:37:53
| 2018-07-02T01:37:53
| 139,376,030
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,478
|
py
|
#class Read():
# def __init
import csv
import re
class Book():
    """Inventory record for a book; fields mirror the CSV columns."""

    def __init__(self, idnum, isbn, title, name, year, ty, pages, price):
        self.idnum = idnum
        self.isbn = isbn
        self.title = title
        self.name = name
        self.year = year
        self.type = ty
        self.pages = pages
        self.price = price

    def __str__(self):
        """CSV-style rendering of all eight fields, in column order."""
        fields = (self.idnum, self.isbn, self.title, self.name,
                  self.year, self.type, self.pages, self.price)
        return ",".join(str(f) for f in fields)
class CD():
    """Inventory record for a CD; fields mirror the CSV columns."""

    def __init__(self, idnum, isbn, title, year, first, last, price):
        self.idnum = idnum
        self.isbn = isbn
        self.title = title
        self.year = year
        self.first = first
        self.last = last
        self.price = price

    def __str__(self):
        """CSV-style rendering of all seven fields, in column order."""
        fields = (self.idnum, self.isbn, self.title, self.year,
                  self.first, self.last, self.price)
        return ",".join(str(f) for f in fields)
class BookLibrary():
    """Collection of Book records loaded from Inventory.csv (rows whose id
    column matches r"4.+")."""

    def __init__(self):
        self.library = []

    def Read_file(self):
        """Load every matching CSV row from Inventory.csv as a Book."""
        import csv
        import re
        pattern = r"4.+"
        selected = []
        with open('Inventory.csv') as f:
            for row in csv.reader(f):
                if re.match(pattern, row[0]):
                    selected.append(row)
        for row in selected:
            self.library.append(Book(row[0], row[1], row[2], row[3],
                                     row[4], row[5], row[6], row[7]))

    def __len__(self):
        return len(self.library)

    def Output(self):
        """Print one stored record per line."""
        for record in self.library:
            print(record)
class CDLibrary():
    """Collection of CD records loaded from Inventory.csv (rows whose id
    column matches r"7.+")."""

    def __init__(self):
        self.library = []

    def Read_file(self):
        """Load every matching CSV row from Inventory.csv as a CD."""
        import csv
        import re
        pattern = r"7.+"
        selected = []
        with open('Inventory.csv') as f:
            for row in csv.reader(f):
                if re.match(pattern, row[0]):
                    selected.append(row)
        for row in selected:
            self.library.append(CD(row[0], row[1], row[2], row[3],
                                   row[4], row[5], row[6]))

    def __len__(self):
        return len(self.library)

    def Output(self):
        """Print one stored record per line."""
        for record in self.library:
            print(record)
# Script entry: load both inventories from Inventory.csv at import time.
x=CDLibrary()
x.Read_file()
y=BookLibrary()
y.Read_file()
|
[
"sugawarasyuta@MBA.local"
] |
sugawarasyuta@MBA.local
|
2ceab1a4cf0105b3fdcd2e3c2083b29bdbdc45b8
|
84a833897d1ccfd393ce782f6370dfee17611831
|
/python/booleans.py
|
944477ca5273e024dd8a889e4250746927e4c679
|
[] |
no_license
|
rhiafaery/wcc
|
e7bf5c0a26ddca5a632e64996bc2a098e58fe772
|
87a7f2d20dbae1ec892a7d7353b095769889342e
|
refs/heads/master
| 2020-02-26T15:50:58.535926
| 2016-10-19T05:07:30
| 2016-10-19T05:07:30
| 70,862,899
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,697
|
py
|
# print(1 == 1) # Expected output: True
# print(1 == 2) # Expected output: False
# print(1 > 2) # Expected output: False
# print(2 > 1) # Expected output: True
# print(1 >= 1) # Expected output: True
# print(2 == 2) # Expected output: True
# print(2 != 2) # Expected output: False
# age = 30
# print(age > 10) # Expected outcome: True
# print(10 < age) # Expected outcome: True
# print(age > 10 + 20) # Expected outcome: False
# print(age + 20 > 10) # Expected outcome: True
# print('a' > 'z') # Expected outcome: False
# print('z' > 'a') # Expected outcome: True
# print('apples' > 'oranges') # Expected outcome: False
# print('oranges' > 'apples') # Expected outcome: True
# print('cat' > 'car') # Expected outcome: True
# print('car' > 'cat') # Expected outcome: False
# print 'a' > 2 # Outcome: True
# age = 1
# print(age > 12 and age < 19) # Expected outcome: False
# age = 14
# print(age > 12 and age < 19) # Expected outcome: True
# age = 19
# print(age > 12 and age < 19) # Expected outcome: False
# age = 18
# print(age > 12 and age < 19 and age != 5) # Expected outcome: True
# age = 5
# print(age > 12 and age < 19 and age != 5) # Expected outcome: False
# age = -1
# print(age > 12 and age < 19) # Expected outcome: False
# age = 10
# print(age > 25 and age < 15) # Expected outcome: False
# # Could the above expression ever be True? Or?
# gesture = 'rock'
# print(gesture == 'rock' or gesture == 'paper' or gesture == 'scissors') # Expected outcome: True
# gesture = 'paper'
# print(gesture == 'rock' or gesture == 'paper' or gesture == 'scissors') # Expected outcome: True
# NOTE(review): raw_input is Python 2 only (input() under Python 3).
age = int(raw_input('How old are you?'))
print(age >= 5 and age <= 10)  # True only for ages 5..10 inclusive
|
[
"chocolategrl@gmail.com"
] |
chocolategrl@gmail.com
|
2f26a61140e3abba65eb05fab1a0b88f668afbe5
|
ec927712164a99136b35e6ac6a88a2e3dd005c45
|
/FollwDevice/GoogleFindMyDevice.py
|
f4b028b75247eec1750e503da3862faeada42985
|
[
"Apache-2.0"
] |
permissive
|
rrooggiieerr/FollwDevice.py
|
4ad0be4c7caa6d84f0f18248a548b423b1d76e3c
|
d8ce2165c06ab836ea04f232b34f909f439a791b
|
refs/heads/master
| 2023-05-08T02:23:18.509718
| 2021-05-26T17:33:04
| 2021-05-26T17:33:04
| 371,029,791
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,271
|
py
|
import logging, time, json, urllib, socket
from http.cookiejar import MozillaCookieJar
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
class GoogleFindMyDevice():
    """Polls Google services for a device location (work in progress).

    NOTE(review): most of the lookup logic is commented out: login() posts an
    empty body to an empty URL, and getLocation() queries a hard-coded
    GeocodeService URL but never assigns `location`, so no location is ever
    returned.
    """
    terminate = False
    # NOTE(review): this attribute is shadowed by the online() method defined
    # below -- at class level the later `def online` wins, so `self.online`
    # resolves to the bound method (always truthy) until online()/offline()
    # assigns an instance attribute. `if not self.online:` therefore never
    # triggers on a fresh instance. Confirm intended behavior.
    online = True
    username = None
    Password = None  # NOTE(review): capitalisation inconsistent with username
    deviceId = None
    deviceName = None
    cookiejar = MozillaCookieJar()  # NOTE(review): class-level, shared by all instances
    loggedin = False
    interval = 5  # minimum seconds between lookups
    lastLookup = 0
    location = None
    # timestamp = time.time()
    timestamp = 0

    def stop(self):
        # Request termination of the (external) polling loop.
        self.terminate = True

    def online(self, online = True):
        # Mark the device online/offline; assigns an instance attribute that
        # shadows this method from then on (see class-level note).
        self.online = online

    def offline(self, offline = True):
        self.online = not offline

    def login(self):
        """Attempt a login request; returns True on success, False otherwise.

        NOTE(review): url/headers/body are empty placeholders -- this cannot
        succeed against a real endpoint yet.
        """
        if not self.online:
            return False
        url = ''.format()
        headers = {
        }
        requestData = ''
        requestData = requestData.encode('utf-8')
        try:
            opener = urllib.request.build_opener(urllib.request.HTTPCookieProcessor(self.cookiejar))
            urllib.request.install_opener(opener)
            request = urllib.request.Request(url, requestData, headers)
            with urllib.request.urlopen(request, timeout=5) as response:
                responseData = response.read().decode(response.headers.get_content_charset(failobj = 'utf-8'))
                logger.debug(responseData)
#                 responseData = json.loads(responseData)
        except urllib.error.HTTPError as e:
            logger.error(e.code)
            return False
        except urllib.error.URLError as e:
            logger.error(e)
            return False
        except socket.timeout as e:
            logger.error(e)
            return False
        self.loggedin = True
        return True

    def getLocation(self):
        """Rate-limited location lookup; returns [lat, lon, accuracy, altitude]
        when a new location is found, else None.

        NOTE(review): the parsing that would populate `location`/`timestamp`
        is commented out, so this currently always returns None after the
        HTTP round-trip.
        """
        if not self.online:
            return None
#         if not self.loggedin and not self.login():
#             return None
        # Enforce the minimum polling interval.
        elapsedTime = time.time() - self.lastLookup
        if elapsedTime < self.interval:
            return None
        location = None
#         curl 'https://maps.googleapis.com/maps/api/js/GeocodeService.Search?5m2&1d52.3660285&2d4.8502509&7sUS&9snl&callback=_xdc_._144ct4&client=google-nova&token=26111' \
#           -H 'authority: maps.googleapis.com' \
#           -H 'sec-ch-ua: " Not A;Brand";v="99", "Chromium";v="90", "Google Chrome";v="90"' \
#           -H 'dnt: 1' \
#           -H 'sec-ch-ua-mobile: ?0' \
#           -H 'user-agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/90.0.4430.212 Safari/537.36' \
#           -H 'accept: */*' \
#           -H 'x-client-data: CKG1yQEIlbbJAQijtskBCMG2yQEIqZ3KAQj4x8oBCMvZygEIqJ3LAQigoMsBCNzyywEIqPPLARiOnssB' \
#           -H 'sec-fetch-site: cross-site' \
#           -H 'sec-fetch-mode: no-cors' \
#           -H 'sec-fetch-dest: script' \
#           -H 'referer: https://www.google.com/' \
#           -H 'accept-language: en-NL,en;q=0.9,nl-NL;q=0.8,nl;q=0.7,en-US;q=0.6' \
#           --compressed
        url = 'https://maps.googleapis.com/maps/api/js/GeocodeService.Search?5m2&1d52.3660285&2d4.8502509&7sUS&9snl&callback=_xdc_._144ct4&client=google-nova&token=26111'
        headers = {
            'authority': 'maps.googleapis.com',
            'sec-ch-ua': '" Not A;Brand";v="99", "Chromium";v="90", "Google Chrome";v="90"',
            'dnt': '1',
            'sec-ch-ua-mobile': '?0',
            'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/90.0.4430.212 Safari/537.36',
            'accept': '*/*',
            'x-client-data': 'CKG1yQEIlbbJAQijtskBCMG2yQEIqZ3KAQj4x8oBCMvZygEIqJ3LAQigoMsBCNzyywEIqPPLARiOnssB',
            'sec-fetch-site': 'cross-site',
            'sec-fetch-mode': 'no-cors',
            'sec-fetch-dest': 'script',
            'referer': 'https://www.google.com/',
            'accept-language': 'en-NL,en;q=0.9,nl-NL;q=0.8,nl;q=0.7,en-US;q=0.6'
        }
        requestData = '{}'
        requestData = bytes(requestData, 'utf-8')
#         logger.debug(requestData)
        try:
            opener = urllib.request.build_opener(urllib.request.HTTPCookieProcessor(self.cookiejar))
            urllib.request.install_opener(opener)
            request = urllib.request.Request(url, requestData, headers)
            with urllib.request.urlopen(request, timeout=1) as response:
                responseData = response.read().decode(response.headers.get_content_charset(failobj = 'utf-8'))
                # Strip the JSONP callback wrapper before parsing.
                responseData = responseData[responseData.index('{'):responseData.index(')')]
                logger.debug(responseData)
                responseData = json.loads(responseData)
                #logger.debug(responseData)
#             if device and 'location' in device and device['location']:
#                 logger.debug(device)
#                 latitude = None
#                 longitude = None
#                 accuracy = None
#                 altitude = None
#                 timestamp = None
#                 location = [latitude, longitude, accuracy, altitude]
#                 logger.debug(location)
#             else:
#                 logger.error("No location found")
#                 logger.debug(device)
        except urllib.error.HTTPError as e:
            if e.code == 404:
                logger.warning("No location found for {}".format(url))
            else:
                logger.error(e.code)
            return None
        except urllib.error.URLError as e:
            logger.error(e)
            return None
        except socket.timeout as e:
            logger.error(e)
            return None
        self.lastLookup = time.time()
        # NOTE(review): `timestamp` is only assigned in the commented-out
        # block above -- if `location` were ever truthy this would raise
        # NameError. Unreachable today because location stays None.
        if location and timestamp != self.timestamp:
            logger.debug(timestamp - self.timestamp)
            self.location = location
            self.timestamp = timestamp
            return location
        return None
|
[
"rogier@batoid.com"
] |
rogier@batoid.com
|
010bf9a10b43cd62696ec9bd482e98b98858d9dd
|
c3cfbcc1bf2ede441218f0d67f18fe387f314b0c
|
/crop/train-crop200,resize224.py
|
c7e9fdc7ec364a67f9f39c4604966fd7266102a9
|
[] |
no_license
|
kaliachka/MPMI-lab4
|
c1d92f1ddbbe85f120793161b3f01298782c2ac2
|
6755b66e7b10104754af48619a206c19fd2547a9
|
refs/heads/main
| 2023-03-27T13:22:11.705071
| 2021-03-30T06:43:13
| 2021-03-30T06:43:13
| 352,760,440
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,578
|
py
|
"""This module implements data feeding and training loop to create model
to classify X-Ray chest images as a lab example for BSU students.
"""
__author__ = 'Alexander Soroka, soroka.a.m@gmail.com'
# Fix: original read `i__copyright__` -- a stray leading "i" broke the
# conventional dunder name.
__copyright__ = """Copyright 2020 Alexander Soroka"""
import argparse
import glob
import numpy as np
import tensorflow as tf
import time
from tensorflow.python import keras as keras
from tensorflow.python.keras.callbacks import LearningRateScheduler
from tensorflow.keras.applications import EfficientNetB0
import math
# Avoid greedy memory allocation to allow shared GPU usage
gpus = tf.config.experimental.list_physical_devices('GPU')
for gpu in gpus:
    tf.config.experimental.set_memory_growth(gpu, True)

# Training constants.
LOG_DIR = 'logs'
BATCH_SIZE = 16
NUM_CLASSES = 20
RESIZE_TO = 224  # images are resized to RESIZE_TO x RESIZE_TO at decode time
TRAIN_SIZE = 12786  # total example count; used for the 70/30 split in main()
def parse_proto_example(proto):
    """Decode one serialized TFRecord example into (image, one-hot label).

    The image is JPEG-decoded to 3 channels, converted to uint8, then resized
    to RESIZE_TO x RESIZE_TO; the int64 label becomes a NUM_CLASSES one-hot.
    """
    keys_to_features = {
        'image/encoded': tf.io.FixedLenFeature((), tf.string, default_value=''),
        'image/label': tf.io.FixedLenFeature([], tf.int64, default_value=tf.zeros([], dtype=tf.int64))
    }
    example = tf.io.parse_single_example(proto, keys_to_features)
    example['image'] = tf.image.decode_jpeg(example['image/encoded'], channels=3)
    example['image'] = tf.image.convert_image_dtype(example['image'], dtype=tf.uint8)
    example['image'] = tf.image.resize(example['image'], tf.constant([RESIZE_TO, RESIZE_TO]))
    return example['image'], tf.one_hot(example['image/label'], depth=NUM_CLASSES)
def contrast(image, label):
    """Halve the image contrast (factor 0.5); the label passes through."""
    adjusted = tf.image.adjust_contrast(image, 0.5)
    return adjusted, label
def brightness(image, label):
    """Brighten the image by a fixed delta of 0.1; the label passes through."""
    adjusted = tf.image.adjust_brightness(image, delta=0.1)
    return adjusted, label
def create_dataset(filenames, batch_size):
    """Build a cached, batched, prefetching tf.data pipeline from tfrecords.

    :param filenames: tfrecord file paths of the dataset
    :param batch_size: number of examples per batch
    :returns: tf.data.Dataset yielding (image, one-hot label) batches
    """
    dataset = tf.data.TFRecordDataset(filenames)
    dataset = dataset.map(parse_proto_example, num_parallel_calls=tf.data.AUTOTUNE)
    dataset = dataset.cache()
    dataset = dataset.batch(batch_size)
    return dataset.prefetch(tf.data.AUTOTUNE)
def step_decay(epoch, lr):
    """Learning-rate schedule: lr(epoch) = 0.01 * exp(-0.3 * epoch).

    Despite the name this is a smooth exponential decay, not a stepwise
    drop.  The incoming `lr` is ignored — the rate depends only on the
    epoch index.  (Removed the unused `epochs_drop` leftover from an
    earlier stepwise variant.)
    """
    initial_lrate = 0.01
    k = 0.3
    return initial_lrate * math.exp(-k * epoch)
def build_model():
    """EfficientNetB0 backbone (frozen, ImageNet weights) + softmax head.

    Inputs are randomly cropped to 200x200 and resized back to 224x224
    inside the graph as a light augmentation step.
    """
    inputs = tf.keras.Input(shape=(RESIZE_TO, RESIZE_TO, 3))
    augmented = tf.keras.layers.experimental.preprocessing.RandomCrop(200, 200)(inputs)
    augmented = tf.keras.layers.experimental.preprocessing.Resizing(224, 224)(augmented)
    backbone = EfficientNetB0(input_tensor=augmented, include_top=False, pooling='avg', weights='imagenet')
    backbone.trainable = False
    features = tf.keras.layers.Flatten()(backbone.output)
    outputs = tf.keras.layers.Dense(NUM_CLASSES, activation=tf.keras.activations.softmax)(features)
    return tf.keras.Model(inputs=inputs, outputs=outputs)
def main():
    """Parse CLI args, build the input pipeline and model, then train."""
    args = argparse.ArgumentParser()
    args.add_argument('--train', type=str, help='Glob pattern to collect train tfrecord files, use single quote to escape *')
    args = args.parse_args()
    dataset = create_dataset(glob.glob(args.train), BATCH_SIZE)
    # 70/30 train/validation split, measured in whole batches.
    train_size = int(TRAIN_SIZE * 0.7 / BATCH_SIZE)
    train_dataset = dataset.take(train_size)
    validation_dataset = dataset.skip(train_size)
    model = build_model()
    print(model.summary())
    # NOTE(review): `lr=` is a deprecated alias of `learning_rate=` in recent
    # TF releases; the 0.1 value is overridden from epoch 0 onward by the
    # LearningRateScheduler callback below.
    model.compile(
        optimizer=tf.optimizers.Adam(lr=0.1),
        loss=tf.keras.losses.categorical_crossentropy,
        metrics=[tf.keras.metrics.categorical_accuracy],
    )
    # Unique TensorBoard run directory per invocation.
    log_dir='{}/owl-{}'.format(LOG_DIR, time.time())
    model.fit(
        train_dataset,
        epochs=50,
        validation_data=validation_dataset,
        callbacks=[
            tf.keras.callbacks.TensorBoard(log_dir),
            LearningRateScheduler(step_decay)
        ]
    )
if __name__ == '__main__':
    main()
|
[
"kolyachkoksenia@gmail.com"
] |
kolyachkoksenia@gmail.com
|
6cd09b0b3010608ea53fb4615265f6367614bd67
|
60867b15b83c38da647a8acf7958808feb32cb5b
|
/力扣/0x10 从前序与中序遍历序列构造二叉树/从前序与中序遍历序列构造二叉树.py
|
195fd8d8358cf8b8da53c06f146527af291957d6
|
[] |
no_license
|
hackerAlice/coding-interviews
|
6cce3b927ac935bbd74b3f12bc3d4b7098b35267
|
36868032a7dabae109265ae3b0138762e114e73b
|
refs/heads/main
| 2023-06-10T20:48:13.090015
| 2021-07-06T05:15:01
| 2021-07-06T05:15:01
| 372,359,565
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 791
|
py
|
from typing import List
# Definition for a binary tree node.
class TreeNode:
    def __init__(self, val=0, left=None, right=None):
        self.val = val
        self.left = left
        self.right = right


class Solution:
    def buildTree(self, preorder: List[int], inorder: List[int]) -> TreeNode:
        """Reconstruct a binary tree from its preorder and inorder traversals.

        preorder[0] is the root; its position in `inorder` splits the
        inorder sequence into left/right subtrees.  Assumes node values
        are unique.  Returns None for empty input.
        """
        if not preorder or not inorder:
            return None
        root = TreeNode(preorder[0])
        idx = inorder.index(preorder[0])
        inorder_left = inorder[:idx]
        inorder_right = inorder[idx + 1:]
        # Slice preorder by explicit offsets.  The original used
        # preorder[-len(inorder_right):], which silently yields the whole
        # list when the right subtree is empty (len == 0) and only worked
        # because the empty-inorder guard above masked it.
        preorder_left = preorder[1:1 + len(inorder_left)]
        preorder_right = preorder[1 + len(inorder_left):]
        root.left = self.buildTree(preorder_left, inorder_left)
        root.right = self.buildTree(preorder_right, inorder_right)
        return root
|
[
"sudo_rm@163.com"
] |
sudo_rm@163.com
|
34bc0ccefc528617718b9f3e88e6a4efead4dfd0
|
806bce69deb9e4b2f726104eede8b843ff7ceb77
|
/easyredis/foo/db_command.py
|
5bb55fd7f5612e655e65236dbebea42d994c1beb
|
[] |
no_license
|
zhj1121/easyredis
|
8309fa93cdd95530fbcabbea478f5070aa075799
|
99ee8f31f85f11ae5aa682079e155ce460c1f462
|
refs/heads/master
| 2022-11-05T08:04:50.940451
| 2020-06-26T12:53:15
| 2020-06-26T12:53:15
| 275,155,346
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,131
|
py
|
#-*-coding:utf-8-*-
"""
@author zhj1121
@date 2020.6.24
@desc 一个用于处理关于easyredis的命令文件的类
"""
import json
import os
import shutil
import random
from conf.db_conf import command_path
from conf.db_conf import command_file
class db_command(object):
    """Helper for managing easyredis command files.

    Creates uniquely named copies of a template command file inside a
    command directory and removes them when done.
    """

    def __init__(self, command_file=command_file, command_path=command_path):
        # Defaults are taken from conf.db_conf at import time.
        self.command_file = command_file
        self.command_path = command_path

    def randomTouchFile(self):
        """Copy the command file to a random, unused name; return the new path."""
        existing = os.listdir(self.command_path)
        # Renamed from 'str' to avoid shadowing the builtin.
        alphabet = 'abcdefghijklmnopqrstuvwxyz123456789'
        touchFileName = ""
        # Keep drawing 8-char random names until one is not already taken.
        while touchFileName in existing or touchFileName == "":
            touchFileName = "".join(random.choice(alphabet) for _ in range(8))
            touchFileName += ".txt"
        shutil.copyfile(self.command_file, self.command_path + touchFileName)
        return self.command_path + touchFileName

    def delete(self, filePath):
        """Delete the file at the given path; returns True on success."""
        os.remove(filePath)
        return True
|
[
"972865726@qq.com"
] |
972865726@qq.com
|
3b8dab3caf5ea5bc26f73ce0a695743fc54d1ebd
|
697af415566ba649502bd18751a6521ac526892c
|
/2022_VERSIONS/rename_er2_hiwrap_ratio_files_with_dates.py
|
103b3076f5e91476a83cc6f59c7e187709d01c5d
|
[] |
no_license
|
srbrodzik/impacts-scripts
|
df44c8f34746499b8397b5b1a4ad09859b4cc8d4
|
263c7545bbb912bbcea563a21d0619e5112b1788
|
refs/heads/master
| 2023-05-31T05:01:09.558641
| 2023-05-22T23:24:52
| 2023-05-22T23:24:52
| 215,638,568
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,026
|
py
|
#!/usr/bin/python3
# Rename ER2 HIWRAP dual-wavelength-ratio files for one flight date to the
# catalog convention: aircraft.NASA_ER2.<YYYYMMDDHHMM>.HIWRAP_DWR<ext>,
# optionally converting timestamps from US Eastern to UTC.

import os
import sys
import shutil
from datetime import datetime
from datetime import timedelta
import datetime as dt
import pytz

if len(sys.argv) != 2:
    print('Usage: {} [YYYYMMDD]'.format(sys.argv[0]))
    sys.exit()
else:
    date = sys.argv[1]

indir = '/home/disk/bob/impacts/radar/er2/postFlight/realtime/hiwrap_ratio'+'/'+date
prefix = 'aircraft.NASA_ER2'
suffix = 'HIWRAP_DWR'
convertEasternToUTC = True

os.chdir(indir)
for file in os.listdir(indir):
    print(file)
    (base,ext) = os.path.splitext(file)
    # Input names look like <radar>_<ratio>_<dateTime>; drop the trailing
    # two digits (seconds) to get YYYYMMDDHHMM.
    (radar,ratio,dateTime) = base.split('_')
    dateTimeStr = dateTime[:-2]
    if convertEasternToUTC:
        # NOTE(review): fixed +5h offset assumes EST; does not handle DST.
        dateTimeObj = datetime.strptime(dateTimeStr,"%Y%m%d%H%M")
        dateTimeObjUTC = dateTimeObj+timedelta(hours=5)
        dateTimeStrUTC = dateTimeObjUTC.strftime("%Y%m%d%H%M")
    else:
        # Bug fix: was 'datetimeStr' (undefined name -> NameError on this path).
        dateTimeStrUTC = dateTimeStr
    catName = prefix+'.'+dateTimeStrUTC+'.'+suffix+ext
    shutil.move(file,catName)
|
[
"brodzik@uw.edu"
] |
brodzik@uw.edu
|
d1d647caea7b42903b4101674eacc3bb34134e2c
|
611713c67bb5513a2bab3b50603c451b8076f7fc
|
/blender/operators/exporttopng.py
|
e7fd4e66057b7639a74d0448bf2b661e572fdd77
|
[] |
no_license
|
CatmanIta/x-PlantForm-IGA
|
44471191657ff1470f0a87c619be63afa46c6387
|
cc91513adbec7f3ec37f43b5db708684bfba417f
|
refs/heads/master
| 2021-05-15T02:21:00.947868
| 2017-01-23T09:45:55
| 2017-01-23T09:45:55
| 18,171,281
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,035
|
py
|
import blender.operators.abstractoperatorlsystem
import blender.utilities
import imp
imp.reload(blender.operators.abstractoperatorlsystem)
imp.reload(blender.utilities)
from blender.operators.abstractoperatorlsystem import *
from blender.utilities import showOnlyThePlantForm,showPlantAndBackground,selectAllPlantforms,exportToPng
class OBJECT_OT_ExportToPng(OBJECT_OT_AbstractOperatorLSystem):
    # Blender operator that exports the current PlantForm to a PNG under the
    # package's ../output/png/ directory.
    bl_idname = "object.export_to_png"
    bl_label = "Export PlantForm To Png"
    bl_description = "Export PlantForm To Png"
    #OUTPUT_PATH = "C:/Users/user/Desktop/RewardSystems/PCG/InteractiveEvolutionServer/xplantform/output/png/"
    def execute(self, context):
        # Let the base operator run its common setup first.
        OBJECT_OT_AbstractOperatorLSystem.execute(self,context)
        # filePath is this module's directory; fileName is that directory's
        # own name, which is then stripped to obtain the parent folder.
        # NOTE(review): the replace uses a backslash separator, so this path
        # handling looks Windows-specific — confirm on other platforms.
        filePath = os.path.dirname(os.path.realpath(__file__))
        fileName = os.path.basename(filePath)
        folderPath = filePath.replace("\\"+fileName, '')
        outputPath = folderPath + "/../output/png/"
        exportToPng(context,context.scene,outputPath)
        return {'FINISHED'}
|
[
"cat.piro@gmail.com"
] |
cat.piro@gmail.com
|
fb4f4b4a32a4ffd3982c6c91a03226f06f4d7fab
|
80ae8cb1f425532f72efe39ef4b0834ff409a92c
|
/garbage.py
|
b7750b74f49fa9a6339c24b2e74a47cfc50800e8
|
[] |
no_license
|
erhuanCH/garbage-classification
|
89cb2a98aab10a3aa2e254dac78d51300b70076f
|
97829a7da96d860c30a0963fc7146e2f51dc4c6f
|
refs/heads/master
| 2022-09-09T01:24:37.932097
| 2020-05-30T08:04:04
| 2020-05-30T08:04:04
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,704
|
py
|
import numpy as np
import matplotlib.pyplot as plt
from keras.preprocessing.image import ImageDataGenerator, load_img, img_to_array, array_to_img
from keras.layers import Conv2D, Flatten, MaxPooling2D, Dense
from keras.models import Sequential
import glob, os, random

# Train a small CNN to classify garbage images into six categories:
# 0 cardboard, 1 glass, 2 metal, 3 paper, 4 plastic, 5 trash.

base_path = 'D:/AI/cnn-garbage/dataset-resized'

# Collect every jpg in the dataset (one sub-folder per class; 2527 images).
img_list = glob.glob(os.path.join(base_path, '*/*.jpg'))
print(len(img_list))

# Load six random images (the preview plotting is left commented out).
for i, img_path in enumerate(random.sample(img_list, 6)):
    img = load_img(img_path)
    img = img_to_array(img, dtype=np.uint8)
    # plt.subplot(2, 3, i + 1)
    # plt.imshow(img.squeeze())

# Generators: augmented training stream and plain-rescaled validation stream;
# validation_split holds out 10% of the images.
train_datagen = ImageDataGenerator(
    rescale=1. / 255, shear_range=0.1, zoom_range=0.1,  # fixed: was 1./225 (typo), now matches test_datagen
    width_shift_range=0.1, height_shift_range=0.1, horizontal_flip=True,
    vertical_flip=True, validation_split=0.1)

test_datagen = ImageDataGenerator(
    rescale=1. / 255, validation_split=0.1)

train_generator = train_datagen.flow_from_directory(
    base_path, target_size=(300, 300), batch_size=16,
    class_mode='categorical', subset='training', seed=0)

validation_generator = test_datagen.flow_from_directory(
    base_path, target_size=(300, 300), batch_size=16,
    class_mode='categorical', subset='validation', seed=0)

# Invert class_indices to map predicted index -> class name.
labels = (train_generator.class_indices)
labels = dict((v, k) for k, v in labels.items())
print(labels)
# 0: 'cardboard', 1: 'glass', 2: 'metal', 3: 'paper', 4: 'plastic', 5: 'trash'

# Four Conv/MaxPool stages followed by a dense classifier head.
model = Sequential([
    Conv2D(filters=32, kernel_size=3, padding='same', activation='relu', input_shape=(300, 300, 3)),
    MaxPooling2D(pool_size=2),
    # Fixed: the filter count was missing here (`filters=` with no value was a
    # syntax error); 64 restores the usual 32-64-32-32 layout of this
    # tutorial network — TODO confirm the intended value.
    Conv2D(filters=64, kernel_size=3, padding='same', activation='relu'),
    MaxPooling2D(pool_size=2),
    Conv2D(filters=32, kernel_size=3, padding='same', activation='relu'),
    MaxPooling2D(pool_size=2),
    Conv2D(filters=32, kernel_size=3, padding='same', activation='relu'),
    MaxPooling2D(pool_size=2),
    # Flatten the pooled feature maps for the dense layers.
    Flatten(),
    Dense(64, activation='relu'),
    # One softmax output per garbage class.
    Dense(6, activation='softmax')
])

model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['acc'])

# NOTE(review): step counts are computed with //32 although batch_size is 16
# above — this under-consumes both generators each epoch; confirm intent.
history_fit = model.fit_generator(train_generator,
                                  epochs=100,
                                  steps_per_epoch=2276//32,
                                  validation_data=validation_generator,
                                  validation_steps=251//32)

with open(base_path + "/history_fit.json", "w") as json_file:
    json_file.write(str(history_fit))

# Plot training curves.
acc = history_fit.history['acc']
val_acc = history_fit.history['val_acc']
loss = history_fit.history['loss']
epochs = range(1, len(acc) + 1)

plt.figure("acc")
plt.plot(epochs, acc, 'r-', label='Training acc')
plt.plot(epochs, val_acc, 'b', label='validation acc')
plt.title('The comparision of train_acc and val_acc')
plt.legend()
plt.show()

plt.figure("loss")
plt.plot(epochs, loss, 'r-', label='loss')
plt.title('The comparision of loss')
plt.legend()
plt.show()

# Show 16 validation images with predicted vs. true labels.
test_x, test_y = validation_generator.__getitem__(1)
preds = model.predict(test_x)

plt.figure(figsize=(16, 16))
for i in range(16):
    plt.subplot(4, 4, i+1)
    plt.title('pred:%s / truth:%s' % (labels[np.argmax(preds[i])], labels[np.argmax(test_y[i])]))
    plt.imshow(test_x[i])
plt.show()
print(labels[np.argmax(preds[i])])  # NOTE: prints only the last sample's prediction
|
[
"noreply@github.com"
] |
noreply@github.com
|
4e1e739d6f1ffe00270897126aba906c133cd6d9
|
96adbb43649c0213ba670d8da6e9e0f67bb7acd1
|
/rdkit/Chem/EState/EState.py
|
075bc0c43f543c65922d8ff23d64f0ae9ce47d3f
|
[
"LicenseRef-scancode-unknown-license-reference",
"BSD-3-Clause"
] |
permissive
|
ashwin/rdkit
|
13fb2b4ceee9b1d3ecbbad497719fb3fbe2a5079
|
c2de7f4c941ceab59e20c1e2966781c27ba3ce88
|
refs/heads/master
| 2020-12-25T12:41:48.704677
| 2013-05-09T06:56:28
| 2013-05-09T06:56:28
| 9,954,003
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,801
|
py
|
# $Id$
#
# Copyright (C) 2002-2006 greg Landrum and Rational Discovery LLC
#
# @@ All Rights Reserved @@
# This file is part of the RDKit.
# The contents are covered by the terms of the BSD license
# which is included in the file license.txt, found at the root
# of the RDKit source tree.
#
""" Basic EState definitions
"""
import numpy
from rdkit import Chem
def GetPrincipleQuantumNumber(atNum):
  """Return the principal quantum number of an atom's valence shell.

  Period boundaries follow the noble-gas atomic numbers (2, 10, 18, 36,
  54, 86); anything heavier falls into period 7.
  """
  for limit, period in ((2, 1), (10, 2), (18, 3), (36, 4), (54, 5), (86, 6)):
    if atNum <= limit:
      return period
  return 7
def EStateIndices(mol,force=1):
  """Return an array of electrotopological-state (EState) indices, one per atom.

  Reference: Hall, Mohney and Kier. JCICS _31_ 76-81 (1991)

  The result is cached on the molecule as `_eStateIndices`; pass force=0 to
  reuse a previously computed value.
  """
  if not force and hasattr(mol,'_eStateIndices'):
    return mol._eStateIndices
  tbl = Chem.GetPeriodicTable()
  nAtoms = mol.GetNumAtoms()
  # Intrinsic state I for each atom:  I = (4/N^2 * dv + 1) / d
  # where N is the principal quantum number, dv the valence electron count
  # minus attached hydrogens, and d the heavy-atom degree.
  # NOTE(review): numpy.float was removed in numpy >= 1.24; this module
  # targets the numpy of its era (it also uses Python-2 prints below).
  Is = numpy.zeros(nAtoms,numpy.float)
  for i in range(nAtoms):
    at = mol.GetAtomWithIdx(i)
    atNum = at.GetAtomicNum()
    d = at.GetDegree()
    if d>0:
      h = at.GetTotalNumHs()
      dv = tbl.GetNOuterElecs(atNum)-h
      N = GetPrincipleQuantumNumber(atNum)
      Is[i] = (4./(N*N) * dv + 1)/d
  # Topological distances, shifted by one so bonded neighbors give p == 2.
  dists = Chem.GetDistanceMatrix(mol,useBO=0,useAtomWts=0)
  dists += 1
  # Perturbation term: pairwise intrinsic-state differences damped by the
  # squared shifted distance, accumulated antisymmetrically.
  accum = numpy.zeros(nAtoms,numpy.float)
  for i in range(nAtoms):
    for j in range(i+1,nAtoms):
      p = dists[i,j]
      if p < 1e6:
        tmp = (Is[i]-Is[j])/(p*p)
        accum[i] += tmp
        accum[j] -= tmp
  res = accum+Is
  # Cache for subsequent force=0 calls.
  mol._eStateIndices=res
  return res
EStateIndices.version='1.0.0'
if __name__ =='__main__':
  # Smoke test: print the EState indices of a few sample molecules.
  # NOTE: Python-2 print statements — this module predates a py3 port.
  smis = ['CCCC','CCCCC','CCCCCC','CC(N)C(=O)O','CC(N)C(=O)[O-].[Na+]']
  for smi in smis:
    m = Chem.MolFromSmiles(smi)
    print smi
    inds = EStateIndices(m)
    print '\t',inds
|
[
"glandrum@a5045d30-4826-4484-9c7f-6e55a06ddc77"
] |
glandrum@a5045d30-4826-4484-9c7f-6e55a06ddc77
|
5078f94c2f41d96ba025aa001d430690b98d6220
|
840415d8cfd668d408d3191056a01db62ee87e59
|
/platformio/commands/debug/helpers.py
|
657e8c48ea1caa6ebfdc73611019d196a00bdfeb
|
[
"Apache-2.0"
] |
permissive
|
MyTeam888/platformio-core
|
a1a397e38ecca5a0b61f39dcfb4273c74a4e1b35
|
65297c24d4ffbc5713a7303b6a38a4cbc7f290e7
|
refs/heads/master
| 2022-12-13T12:30:40.667596
| 2020-09-10T14:46:56
| 2020-09-10T14:46:56
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 10,416
|
py
|
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import re
import sys
import time
from fnmatch import fnmatch
from hashlib import sha1
from io import BytesIO
from os.path import isfile
from platformio import fs, util
from platformio.commands import PlatformioCLI
from platformio.commands.debug.exception import DebugInvalidOptionsError
from platformio.commands.platform import platform_install as cmd_platform_install
from platformio.commands.run.command import cli as cmd_run
from platformio.compat import is_bytes
from platformio.platform.exception import UnknownPlatform
from platformio.platform.factory import PlatformFactory
from platformio.project.config import ProjectConfig
from platformio.project.options import ProjectOptions
class GDBMIConsoleStream(BytesIO):  # pylint: disable=too-few-public-methods
    # File-like sink that forwards everything written to it to the real
    # stdout, wrapped as a GDB/MI console-output record ('~"..."').
    STDOUT = sys.stdout
    def write(self, text):
        self.STDOUT.write(escape_gdbmi_stream("~", text))
        self.STDOUT.flush()
def is_gdbmi_mode():
    # GDB launches its backend with an "--interpreter=mi..." flag; detect it
    # among the CLI arguments PlatformIO did not consume itself.
    return "--interpreter" in " ".join(PlatformioCLI.leftover_args)
def escape_gdbmi_stream(prefix, stream):
    """Wrap `stream` as a GDB/MI output record: `<prefix>"<escaped text>"`.

    Accepts str or bytes and returns the same type.  Backslash runs,
    double quotes and newlines are escaped; a trailing newline is kept
    outside the closing quote.
    """
    was_bytes = is_bytes(stream)
    if was_bytes:
        stream = stream.decode()
    if not stream:
        return b"" if was_bytes else ""
    had_trailing_nl = stream.endswith("\n")
    escaped = re.sub(r"\\+", "\\\\\\\\", stream)
    escaped = escaped.replace('"', '\\"').replace("\n", "\\n")
    record = '%s"%s"' % (prefix, escaped)
    if had_trailing_nl:
        record += "\n"
    return record.encode() if was_bytes else record
def get_default_debug_env(config):
    """Select the environment to debug.

    Preference order: the first default env whose build_type is "debug",
    then the first env overall with build_type "debug", then the first
    default env, falling back to the first declared env.
    """
    defaults = config.default_envs()
    everything = config.envs()
    for env in tuple(defaults) + tuple(everything):
        if config.get("env:" + env, "build_type") == "debug":
            return env
    return defaults[0] if defaults else everything[0]
def predebug_project(ctx, project_dir, env_name, preload, verbose):
    # Build the project in its debug configuration (and optionally upload
    # the firmware) before a debug session starts.
    ctx.invoke(
        cmd_run,
        project_dir=project_dir,
        environment=[env_name],
        target=["debug"] + (["upload"] if preload else []),
        verbose=verbose,
    )
    if preload:
        # Give the freshly uploaded firmware time to boot before attaching.
        time.sleep(5)
def validate_debug_options(cmd_ctx, env_options):
    """Assemble and validate the full set of debug options for an environment.

    Resolves the platform (installing it on demand), the debug tool and its
    board-specific settings, the debug-server configuration and all debug
    commands, and returns them as a single dict.
    """
    def _cleanup_cmds(items):
        # Normalize multi-line option values and migrate the legacy
        # "$LOAD_CMD" placeholder to "$LOAD_CMDS".
        items = ProjectConfig.parse_multi_values(items)
        return ["$LOAD_CMDS" if item == "$LOAD_CMD" else item for item in items]

    try:
        platform = PlatformFactory.new(env_options["platform"])
    except UnknownPlatform:
        # Platform not installed yet: install it (without default packages)
        # and retry.
        cmd_ctx.invoke(
            cmd_platform_install,
            platforms=[env_options["platform"]],
            skip_default_package=True,
        )
        platform = PlatformFactory.new(env_options["platform"])

    board_config = platform.board_config(env_options["board"])
    tool_name = board_config.get_debug_tool_name(env_options.get("debug_tool"))
    tool_settings = board_config.get("debug", {}).get("tools", {}).get(tool_name, {})

    server_options = None

    # specific server per a system
    if isinstance(tool_settings.get("server", {}), list):
        for item in tool_settings["server"][:]:
            tool_settings["server"] = item
            if util.get_systype() in item.get("system", []):
                break

    # user overwrites debug server
    if env_options.get("debug_server"):
        server_options = {
            "cwd": None,
            "executable": None,
            "arguments": env_options.get("debug_server"),
        }
        server_options["executable"] = server_options["arguments"][0]
        server_options["arguments"] = server_options["arguments"][1:]
    elif "server" in tool_settings:
        server_options = tool_settings["server"]
        # Resolve the server's package directory (installing the package on
        # demand) and expand "$PACKAGE_DIR" inside the argument list.
        server_package = server_options.get("package")
        server_package_dir = (
            platform.get_package_dir(server_package) if server_package else None
        )
        if server_package and not server_package_dir:
            platform.install_packages(
                with_packages=[server_package], skip_default_package=True, silent=True
            )
            server_package_dir = platform.get_package_dir(server_package)
        server_options.update(
            dict(
                cwd=server_package_dir if server_package else None,
                executable=server_options.get("executable"),
                arguments=[
                    a.replace("$PACKAGE_DIR", server_package_dir)
                    if server_package_dir
                    else a
                    for a in server_options.get("arguments", [])
                ],
            )
        )

    # Environment-level extra commands first, then the tool's defaults.
    extra_cmds = _cleanup_cmds(env_options.get("debug_extra_cmds"))
    extra_cmds.extend(_cleanup_cmds(tool_settings.get("extra_cmds")))
    # Each option falls back: env setting -> tool/board default -> project
    # option default.
    result = dict(
        tool=tool_name,
        upload_protocol=env_options.get(
            "upload_protocol", board_config.get("upload", {}).get("protocol")
        ),
        load_cmds=_cleanup_cmds(
            env_options.get(
                "debug_load_cmds",
                tool_settings.get(
                    "load_cmds",
                    tool_settings.get(
                        "load_cmd", ProjectOptions["env.debug_load_cmds"].default
                    ),
                ),
            )
        ),
        load_mode=env_options.get(
            "debug_load_mode",
            tool_settings.get(
                "load_mode", ProjectOptions["env.debug_load_mode"].default
            ),
        ),
        init_break=env_options.get(
            "debug_init_break",
            tool_settings.get(
                "init_break", ProjectOptions["env.debug_init_break"].default
            ),
        ),
        init_cmds=_cleanup_cmds(
            env_options.get("debug_init_cmds", tool_settings.get("init_cmds"))
        ),
        extra_cmds=extra_cmds,
        require_debug_port=tool_settings.get("require_debug_port", False),
        port=reveal_debug_port(
            env_options.get("debug_port", tool_settings.get("port")),
            tool_name,
            tool_settings,
        ),
        server=server_options,
    )
    return result
def configure_esp32_load_cmds(debug_options, configuration):
    """Return ESP32-specific 'monitor program_esp32' load commands, or the
    original load_cmds when they do not apply.

    The monitor commands are generated only when the configured load command
    is exactly ["load"], the toolchain is xtensa-esp32, and every extra flash
    image exists on disk.
    """
    flash_images = configuration.get("flash_extra_images")
    skip_conds = [
        debug_options["load_cmds"] != ["load"],
        "xtensa-esp32" not in configuration.get("cc_path", ""),
        not flash_images,
        # Guarded with `or []`: the original eagerly iterated None when no
        # extra images were configured, raising TypeError instead of
        # falling through to the skip path.
        not all(isfile(item["path"]) for item in flash_images or []),
    ]
    if any(skip_conds):
        return debug_options["load_cmds"]
    mon_cmds = [
        'monitor program_esp32 "{{{path}}}" {offset} verify'.format(
            path=fs.to_unix_path(item["path"]), offset=item["offset"]
        )
        for item in flash_images
    ]
    # Main application image is always flashed at 0x10000.
    mon_cmds.append(
        'monitor program_esp32 "{%s.bin}" 0x10000 verify'
        % fs.to_unix_path(configuration["prog_path"][:-4])
    )
    return mon_cmds
def has_debug_symbols(prog_path):
    """Heuristically check whether `prog_path` was built with debug info.

    Scans the binary in 1 KiB chunks (keeping one chunk of overlap so a
    marker spanning a chunk boundary is still found) for debug-section
    names and debug compiler flags; every marker must be present.
    """
    if not isfile(prog_path):
        return False
    pending = {
        b".debug_info",
        b".debug_abbrev",
        b" -Og",
        b" -g",
        b"__PLATFORMIO_BUILD_DEBUG__",
    }
    with open(prog_path, "rb") as stream:
        previous = b""
        chunk = stream.read(1024)
        while chunk:
            window = previous + chunk
            pending = {marker for marker in pending if marker not in window}
            if not pending:
                break
            previous = chunk
            chunk = stream.read(1024)
    return not pending
def is_prog_obsolete(prog_path):
    """Return True when `prog_path` changed since the last recorded SHA1.

    The digest lives next to the program in `<prog_path>.sha1` and is
    rewritten whenever the program's current hash differs.
    """
    digest_path = prog_path + ".sha1"
    if not isfile(prog_path):
        return True
    hasher = sha1()
    with open(prog_path, "rb") as stream:
        for chunk in iter(lambda: stream.read(1024), b""):
            hasher.update(chunk)
    current = hasher.hexdigest()
    recorded = None
    if isfile(digest_path):
        with open(digest_path) as stream:
            recorded = stream.read()
    if current == recorded:
        return False
    with open(digest_path, "w") as stream:
        stream.write(current)
    return True
def reveal_debug_port(env_debug_port, tool_name, tool_settings):
    """Resolve the debug port for a tool.

    A literal `debug_port` setting wins outright; a value containing glob
    characters ("*", "?", "[", "]") is treated as a filter over detected
    serial ports.  Returns None when the tool does not require a debug
    port; raises DebugInvalidOptionsError when one is required but cannot
    be found.
    """

    def _port_pattern():
        # The configured port acts as a glob pattern only when it contains
        # wildcard characters; otherwise it is taken verbatim.
        if env_debug_port and set("*?[]") & set(env_debug_port):
            return env_debug_port
        return None

    def _matches(candidate):
        pattern = _port_pattern()
        return fnmatch(candidate, pattern) if pattern else True

    def _scan_serial_ports(hwids):
        for info in util.get_serialports(filter_hwid=True):
            if not _matches(info["port"]):
                continue
            port = info["port"]
            if tool_name.startswith("blackmagic"):
                # Windows needs the \\.\COMxx device form for COM10 and up.
                if (
                    "windows" in util.get_systype()
                    and port.startswith("COM")
                    and len(port) > 4
                ):
                    port = "\\\\.\\%s" % port
                if "GDB" in info["description"]:
                    return port
            for vid_pid in hwids:
                needle = ("%s:%s" % (vid_pid[0], vid_pid[1])).replace("0x", "")
                if needle in info["hwid"]:
                    return port
        return None

    if env_debug_port and not _port_pattern():
        return env_debug_port
    if not tool_settings.get("require_debug_port"):
        return None
    found = _scan_serial_ports(tool_settings.get("hwids", []))
    if not found:
        raise DebugInvalidOptionsError("Please specify `debug_port` for environment")
    return found
|
[
"me@ikravets.com"
] |
me@ikravets.com
|
d70dad5130db6713f3aaf55c51855f70d3cc560e
|
39137ad4fee4dacc6886f05ba88eb6503a2283eb
|
/apse/gencode/gena/GenaParser.py
|
6f54da811740aee470621767d18316430c395e60
|
[] |
no_license
|
Opexy/priv
|
ff8195c70b937a5cdca26d2cb13fcc76e2d473ca
|
a80812e9c7b39878162fb8f0e641cebfb300c1c4
|
refs/heads/master
| 2022-03-31T19:49:18.522838
| 2022-03-05T03:02:06
| 2022-03-05T03:02:06
| 124,963,896
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 48,966
|
py
|
# Generated from Gena.g4 by ANTLR 4.9
# encoding: utf-8
from antlr4 import *
from io import StringIO
import sys
if sys.version_info[1] > 5:
from typing import TextIO
else:
from typing.io import TextIO
def serializedATN():
    # Machine-generated by ANTLR 4.9 from Gena.g4 — do not edit by hand.
    # Returns the serialized ATN (augmented transition network) string that
    # GenaParser deserializes at class-definition time.
    with StringIO() as buf:
        buf.write("\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\3\"")
        buf.write("\u00b2\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7")
        buf.write("\4\b\t\b\4\t\t\t\4\n\t\n\4\13\t\13\4\f\t\f\4\r\t\r\4\16")
        buf.write("\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22\t\22\4\23\t\23")
        buf.write("\4\24\t\24\4\25\t\25\4\26\t\26\3\2\6\2.\n\2\r\2\16\2/")
        buf.write("\3\3\3\3\3\4\3\4\3\4\3\5\3\5\3\5\3\5\7\5;\n\5\f\5\16\5")
        buf.write(">\13\5\3\6\3\6\3\6\5\6C\n\6\3\6\3\6\5\6G\n\6\5\6I\n\6")
        buf.write("\3\7\3\7\3\7\3\7\7\7O\n\7\f\7\16\7R\13\7\5\7T\n\7\3\b")
        buf.write("\3\b\3\b\3\b\3\b\5\b[\n\b\3\t\3\t\3\t\3\t\3\n\3\n\3\13")
        buf.write("\3\13\3\13\3\13\3\13\3\13\3\13\3\13\3\13\6\13l\n\13\r")
        buf.write("\13\16\13m\3\13\3\13\3\f\3\f\3\f\3\f\3\r\3\r\3\r\3\r\7")
        buf.write("\rz\n\r\f\r\16\r}\13\r\3\r\3\r\3\16\3\16\3\16\3\16\7\16")
        buf.write("\u0085\n\16\f\16\16\16\u0088\13\16\3\16\3\16\3\17\3\17")
        buf.write("\3\17\3\17\3\20\3\20\3\20\3\20\3\21\3\21\3\21\7\21\u0097")
        buf.write("\n\21\f\21\16\21\u009a\13\21\3\22\3\22\5\22\u009e\n\22")
        buf.write("\3\22\5\22\u00a1\n\22\3\23\3\23\3\24\3\24\3\24\3\24\3")
        buf.write("\25\3\25\3\26\3\26\3\26\3\26\3\26\5\26\u00b0\n\26\3\26")
        buf.write("\2\2\27\2\4\6\b\n\f\16\20\22\24\26\30\32\34\36 \"$&(*")
        buf.write("\2\3\3\2\6\7\2\u00b5\2-\3\2\2\2\4\61\3\2\2\2\6\63\3\2")
        buf.write("\2\2\b\66\3\2\2\2\nH\3\2\2\2\fS\3\2\2\2\16Z\3\2\2\2\20")
        buf.write("\\\3\2\2\2\22`\3\2\2\2\24b\3\2\2\2\26q\3\2\2\2\30u\3\2")
        buf.write("\2\2\32\u0080\3\2\2\2\34\u008b\3\2\2\2\36\u008f\3\2\2")
        buf.write("\2 \u0093\3\2\2\2\"\u009b\3\2\2\2$\u00a2\3\2\2\2&\u00a4")
        buf.write("\3\2\2\2(\u00a8\3\2\2\2*\u00af\3\2\2\2,.\5\4\3\2-,\3\2")
        buf.write("\2\2./\3\2\2\2/-\3\2\2\2/\60\3\2\2\2\60\3\3\2\2\2\61\62")
        buf.write("\5\6\4\2\62\5\3\2\2\2\63\64\5\b\5\2\64\65\7\3\2\2\65\7")
        buf.write("\3\2\2\2\66<\5\n\6\2\67;\5\20\t\28;\5\24\13\29;\5\22\n")
        buf.write("\2:\67\3\2\2\2:8\3\2\2\2:9\3\2\2\2;>\3\2\2\2<:\3\2\2\2")
        buf.write("<=\3\2\2\2=\t\3\2\2\2><\3\2\2\2?I\7\22\2\2@C\7\23\2\2")
        buf.write("AC\5\f\7\2B@\3\2\2\2BA\3\2\2\2CF\3\2\2\2DE\7\4\2\2EG\5")
        buf.write("\16\b\2FD\3\2\2\2FG\3\2\2\2GI\3\2\2\2H?\3\2\2\2HB\3\2")
        buf.write("\2\2I\13\3\2\2\2JT\7!\2\2KP\7\"\2\2LM\7\5\2\2MO\7\"\2")
        buf.write("\2NL\3\2\2\2OR\3\2\2\2PN\3\2\2\2PQ\3\2\2\2QT\3\2\2\2R")
        buf.write("P\3\2\2\2SJ\3\2\2\2SK\3\2\2\2T\r\3\2\2\2U[\7\22\2\2V[")
        buf.write("\7\25\2\2W[\7\26\2\2X[\7\31\2\2Y[\5\f\7\2ZU\3\2\2\2ZV")
        buf.write("\3\2\2\2ZW\3\2\2\2ZX\3\2\2\2ZY\3\2\2\2[\17\3\2\2\2\\]")
        buf.write("\7\30\2\2]^\7\4\2\2^_\t\2\2\2_\21\3\2\2\2`a\7\32\2\2a")
        buf.write("\23\3\2\2\2bk\7\b\2\2cl\5\26\f\2dl\5\30\r\2el\5\32\16")
        buf.write("\2fl\5\34\17\2gl\5\36\20\2hi\5\b\5\2ij\7\3\2\2jl\3\2\2")
        buf.write("\2kc\3\2\2\2kd\3\2\2\2ke\3\2\2\2kf\3\2\2\2kg\3\2\2\2k")
        buf.write("h\3\2\2\2lm\3\2\2\2mk\3\2\2\2mn\3\2\2\2no\3\2\2\2op\7")
        buf.write("\t\2\2p\25\3\2\2\2qr\7\24\2\2rs\5\f\7\2st\7\3\2\2t\27")
        buf.write("\3\2\2\2uv\7\27\2\2v{\5\f\7\2wx\7\n\2\2xz\5\f\7\2yw\3")
        buf.write("\2\2\2z}\3\2\2\2{y\3\2\2\2{|\3\2\2\2|~\3\2\2\2}{\3\2\2")
        buf.write("\2~\177\7\3\2\2\177\31\3\2\2\2\u0080\u0081\7\13\2\2\u0081")
        buf.write("\u0086\5\f\7\2\u0082\u0083\7\n\2\2\u0083\u0085\5\f\7\2")
        buf.write("\u0084\u0082\3\2\2\2\u0085\u0088\3\2\2\2\u0086\u0084\3")
        buf.write("\2\2\2\u0086\u0087\3\2\2\2\u0087\u0089\3\2\2\2\u0088\u0086")
        buf.write("\3\2\2\2\u0089\u008a\7\3\2\2\u008a\33\3\2\2\2\u008b\u008c")
        buf.write("\7\34\2\2\u008c\u008d\5 \21\2\u008d\u008e\7\3\2\2\u008e")
        buf.write("\35\3\2\2\2\u008f\u0090\7\33\2\2\u0090\u0091\5 \21\2\u0091")
        buf.write("\u0092\7\3\2\2\u0092\37\3\2\2\2\u0093\u0098\5\"\22\2\u0094")
        buf.write("\u0095\7\20\2\2\u0095\u0097\5\"\22\2\u0096\u0094\3\2\2")
        buf.write("\2\u0097\u009a\3\2\2\2\u0098\u0096\3\2\2\2\u0098\u0099")
        buf.write("\3\2\2\2\u0099!\3\2\2\2\u009a\u0098\3\2\2\2\u009b\u009d")
        buf.write("\5$\23\2\u009c\u009e\5&\24\2\u009d\u009c\3\2\2\2\u009d")
        buf.write("\u009e\3\2\2\2\u009e\u00a0\3\2\2\2\u009f\u00a1\5*\26\2")
        buf.write("\u00a0\u009f\3\2\2\2\u00a0\u00a1\3\2\2\2\u00a1#\3\2\2")
        buf.write("\2\u00a2\u00a3\7\"\2\2\u00a3%\3\2\2\2\u00a4\u00a5\7\f")
        buf.write("\2\2\u00a5\u00a6\5(\25\2\u00a6\u00a7\7\r\2\2\u00a7\'\3")
        buf.write("\2\2\2\u00a8\u00a9\5\f\7\2\u00a9)\3\2\2\2\u00aa\u00ab")
        buf.write("\7\b\2\2\u00ab\u00b0\5\34\17\2\u00ac\u00ad\5\36\20\2\u00ad")
        buf.write("\u00ae\7\t\2\2\u00ae\u00b0\3\2\2\2\u00af\u00aa\3\2\2\2")
        buf.write("\u00af\u00ac\3\2\2\2\u00b0+\3\2\2\2\23/:<BFHPSZkm{\u0086")
        buf.write("\u0098\u009d\u00a0\u00af")
        return buf.getvalue()
class GenaParser ( Parser ):
grammarFileName = "Gena.g4"
atn = ATNDeserializer().deserialize(serializedATN())
decisionsToDFA = [ DFA(ds, i) for i, ds in enumerate(atn.decisionToState) ]
sharedContextCache = PredictionContextCache()
literalNames = [ "<INVALID>", "';'", "':'", "'.'", "'List'", "'One'",
"'{'", "'}'", "','", "'initializes'", "'('", "')'",
"<INVALID>", "<INVALID>", "'->'", "<INVALID>", "'noid'",
"'let'", "'copath'", "'class'", "'singleton'", "'extends'",
"'nary'", "'event'", "'param'", "'callinto'", "'caller'",
"'belongsto'", "'piggyafter'", "'piggyinto'", "'piggybefore'" ]
symbolicNames = [ "<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
"<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
"<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
"WS", "Comment", "EvtPathConnector", "OPCHAR", "Noid",
"Let", "Copath", "Class", "Singleton", "Extends",
"Nary", "Event", "Param", "Callinto", "Caller", "Belongsto",
"PiggyAfter", "PiggyInto", "PiggyBefore", "DataType",
"Iden" ]
RULE_doc = 0
RULE_docStmt = 1
RULE_docStmtNse = 2
RULE_stmtNse = 3
RULE_nseMain = 4
RULE_nseIden = 5
RULE_nseType = 6
RULE_nseNary = 7
RULE_nseParam = 8
RULE_nseCurly = 9
RULE_stmtCopath = 10
RULE_stmtExtends = 11
RULE_stmtInitializes = 12
RULE_stmtCaller = 13
RULE_stmtCallinto = 14
RULE_eventPath = 15
RULE_eventSpec = 16
RULE_eventName = 17
RULE_eventTargetArgs = 18
RULE_eventTarget = 19
RULE_eventCurly = 20
ruleNames = [ "doc", "docStmt", "docStmtNse", "stmtNse", "nseMain",
"nseIden", "nseType", "nseNary", "nseParam", "nseCurly",
"stmtCopath", "stmtExtends", "stmtInitializes", "stmtCaller",
"stmtCallinto", "eventPath", "eventSpec", "eventName",
"eventTargetArgs", "eventTarget", "eventCurly" ]
EOF = Token.EOF
T__0=1
T__1=2
T__2=3
T__3=4
T__4=5
T__5=6
T__6=7
T__7=8
T__8=9
T__9=10
T__10=11
WS=12
Comment=13
EvtPathConnector=14
OPCHAR=15
Noid=16
Let=17
Copath=18
Class=19
Singleton=20
Extends=21
Nary=22
Event=23
Param=24
Callinto=25
Caller=26
Belongsto=27
PiggyAfter=28
PiggyInto=29
PiggyBefore=30
DataType=31
Iden=32
def __init__(self, input:TokenStream, output:TextIO = sys.stdout):
super().__init__(input, output)
self.checkVersion("4.9")
self._interp = ParserATNSimulator(self, self.atn, self.decisionsToDFA, self.sharedContextCache)
self._predicates = None
class DocContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def docStmt(self, i:int=None):
if i is None:
return self.getTypedRuleContexts(GenaParser.DocStmtContext)
else:
return self.getTypedRuleContext(GenaParser.DocStmtContext,i)
def getRuleIndex(self):
return GenaParser.RULE_doc
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterDoc" ):
listener.enterDoc(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitDoc" ):
listener.exitDoc(self)
def doc(self):
localctx = GenaParser.DocContext(self, self._ctx, self.state)
self.enterRule(localctx, 0, self.RULE_doc)
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
self.state = 43
self._errHandler.sync(self)
_la = self._input.LA(1)
while True:
self.state = 42
self.docStmt()
self.state = 45
self._errHandler.sync(self)
_la = self._input.LA(1)
if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << GenaParser.Noid) | (1 << GenaParser.Let) | (1 << GenaParser.DataType) | (1 << GenaParser.Iden))) != 0)):
break
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class DocStmtContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def docStmtNse(self):
return self.getTypedRuleContext(GenaParser.DocStmtNseContext,0)
def getRuleIndex(self):
return GenaParser.RULE_docStmt
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterDocStmt" ):
listener.enterDocStmt(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitDocStmt" ):
listener.exitDocStmt(self)
def docStmt(self):
localctx = GenaParser.DocStmtContext(self, self._ctx, self.state)
self.enterRule(localctx, 2, self.RULE_docStmt)
try:
self.enterOuterAlt(localctx, 1)
self.state = 47
self.docStmtNse()
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class DocStmtNseContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def stmtNse(self):
return self.getTypedRuleContext(GenaParser.StmtNseContext,0)
def getRuleIndex(self):
return GenaParser.RULE_docStmtNse
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterDocStmtNse" ):
listener.enterDocStmtNse(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitDocStmtNse" ):
listener.exitDocStmtNse(self)
def docStmtNse(self):
localctx = GenaParser.DocStmtNseContext(self, self._ctx, self.state)
self.enterRule(localctx, 4, self.RULE_docStmtNse)
try:
self.enterOuterAlt(localctx, 1)
self.state = 49
self.stmtNse()
self.state = 50
self.match(GenaParser.T__0)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class StmtNseContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def nseMain(self):
return self.getTypedRuleContext(GenaParser.NseMainContext,0)
def nseNary(self, i:int=None):
if i is None:
return self.getTypedRuleContexts(GenaParser.NseNaryContext)
else:
return self.getTypedRuleContext(GenaParser.NseNaryContext,i)
def nseCurly(self, i:int=None):
if i is None:
return self.getTypedRuleContexts(GenaParser.NseCurlyContext)
else:
return self.getTypedRuleContext(GenaParser.NseCurlyContext,i)
def nseParam(self, i:int=None):
if i is None:
return self.getTypedRuleContexts(GenaParser.NseParamContext)
else:
return self.getTypedRuleContext(GenaParser.NseParamContext,i)
def getRuleIndex(self):
return GenaParser.RULE_stmtNse
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterStmtNse" ):
listener.enterStmtNse(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitStmtNse" ):
listener.exitStmtNse(self)
def stmtNse(self):
localctx = GenaParser.StmtNseContext(self, self._ctx, self.state)
self.enterRule(localctx, 6, self.RULE_stmtNse)
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
self.state = 52
self.nseMain()
self.state = 58
self._errHandler.sync(self)
_la = self._input.LA(1)
while (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << GenaParser.T__5) | (1 << GenaParser.Nary) | (1 << GenaParser.Param))) != 0):
self.state = 56
self._errHandler.sync(self)
token = self._input.LA(1)
if token in [GenaParser.Nary]:
self.state = 53
self.nseNary()
pass
elif token in [GenaParser.T__5]:
self.state = 54
self.nseCurly()
pass
elif token in [GenaParser.Param]:
self.state = 55
self.nseParam()
pass
else:
raise NoViableAltException(self)
self.state = 60
self._errHandler.sync(self)
_la = self._input.LA(1)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class NseMainContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def Noid(self):
return self.getToken(GenaParser.Noid, 0)
def Let(self):
return self.getToken(GenaParser.Let, 0)
def nseIden(self):
return self.getTypedRuleContext(GenaParser.NseIdenContext,0)
def nseType(self):
return self.getTypedRuleContext(GenaParser.NseTypeContext,0)
def getRuleIndex(self):
return GenaParser.RULE_nseMain
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterNseMain" ):
listener.enterNseMain(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitNseMain" ):
listener.exitNseMain(self)
def nseMain(self):
localctx = GenaParser.NseMainContext(self, self._ctx, self.state)
self.enterRule(localctx, 8, self.RULE_nseMain)
self._la = 0 # Token type
try:
self.state = 70
self._errHandler.sync(self)
token = self._input.LA(1)
if token in [GenaParser.Noid]:
self.enterOuterAlt(localctx, 1)
self.state = 61
self.match(GenaParser.Noid)
pass
elif token in [GenaParser.Let, GenaParser.DataType, GenaParser.Iden]:
self.enterOuterAlt(localctx, 2)
self.state = 64
self._errHandler.sync(self)
token = self._input.LA(1)
if token in [GenaParser.Let]:
self.state = 62
self.match(GenaParser.Let)
pass
elif token in [GenaParser.DataType, GenaParser.Iden]:
self.state = 63
self.nseIden()
pass
else:
raise NoViableAltException(self)
self.state = 68
self._errHandler.sync(self)
_la = self._input.LA(1)
if _la==GenaParser.T__1:
self.state = 66
self.match(GenaParser.T__1)
self.state = 67
self.nseType()
pass
else:
raise NoViableAltException(self)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class NseIdenContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def DataType(self):
return self.getToken(GenaParser.DataType, 0)
def Iden(self, i:int=None):
if i is None:
return self.getTokens(GenaParser.Iden)
else:
return self.getToken(GenaParser.Iden, i)
def getRuleIndex(self):
return GenaParser.RULE_nseIden
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterNseIden" ):
listener.enterNseIden(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitNseIden" ):
listener.exitNseIden(self)
def nseIden(self):
localctx = GenaParser.NseIdenContext(self, self._ctx, self.state)
self.enterRule(localctx, 10, self.RULE_nseIden)
self._la = 0 # Token type
try:
self.state = 81
self._errHandler.sync(self)
token = self._input.LA(1)
if token in [GenaParser.DataType]:
self.enterOuterAlt(localctx, 1)
self.state = 72
self.match(GenaParser.DataType)
pass
elif token in [GenaParser.Iden]:
self.enterOuterAlt(localctx, 2)
self.state = 73
self.match(GenaParser.Iden)
self.state = 78
self._errHandler.sync(self)
_la = self._input.LA(1)
while _la==GenaParser.T__2:
self.state = 74
self.match(GenaParser.T__2)
self.state = 75
self.match(GenaParser.Iden)
self.state = 80
self._errHandler.sync(self)
_la = self._input.LA(1)
pass
else:
raise NoViableAltException(self)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class NseTypeContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def Noid(self):
return self.getToken(GenaParser.Noid, 0)
def Class(self):
return self.getToken(GenaParser.Class, 0)
def Singleton(self):
return self.getToken(GenaParser.Singleton, 0)
def Event(self):
return self.getToken(GenaParser.Event, 0)
def nseIden(self):
return self.getTypedRuleContext(GenaParser.NseIdenContext,0)
def getRuleIndex(self):
return GenaParser.RULE_nseType
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterNseType" ):
listener.enterNseType(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitNseType" ):
listener.exitNseType(self)
def nseType(self):
localctx = GenaParser.NseTypeContext(self, self._ctx, self.state)
self.enterRule(localctx, 12, self.RULE_nseType)
try:
self.state = 88
self._errHandler.sync(self)
token = self._input.LA(1)
if token in [GenaParser.Noid]:
self.enterOuterAlt(localctx, 1)
self.state = 83
self.match(GenaParser.Noid)
pass
elif token in [GenaParser.Class]:
self.enterOuterAlt(localctx, 2)
self.state = 84
self.match(GenaParser.Class)
pass
elif token in [GenaParser.Singleton]:
self.enterOuterAlt(localctx, 3)
self.state = 85
self.match(GenaParser.Singleton)
pass
elif token in [GenaParser.Event]:
self.enterOuterAlt(localctx, 4)
self.state = 86
self.match(GenaParser.Event)
pass
elif token in [GenaParser.DataType, GenaParser.Iden]:
self.enterOuterAlt(localctx, 5)
self.state = 87
self.nseIden()
pass
else:
raise NoViableAltException(self)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class NseNaryContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def Nary(self):
return self.getToken(GenaParser.Nary, 0)
def getRuleIndex(self):
return GenaParser.RULE_nseNary
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterNseNary" ):
listener.enterNseNary(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitNseNary" ):
listener.exitNseNary(self)
def nseNary(self):
localctx = GenaParser.NseNaryContext(self, self._ctx, self.state)
self.enterRule(localctx, 14, self.RULE_nseNary)
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
self.state = 90
self.match(GenaParser.Nary)
self.state = 91
self.match(GenaParser.T__1)
self.state = 92
_la = self._input.LA(1)
if not(_la==GenaParser.T__3 or _la==GenaParser.T__4):
self._errHandler.recoverInline(self)
else:
self._errHandler.reportMatch(self)
self.consume()
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class NseParamContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def Param(self):
return self.getToken(GenaParser.Param, 0)
def getRuleIndex(self):
return GenaParser.RULE_nseParam
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterNseParam" ):
listener.enterNseParam(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitNseParam" ):
listener.exitNseParam(self)
def nseParam(self):
localctx = GenaParser.NseParamContext(self, self._ctx, self.state)
self.enterRule(localctx, 16, self.RULE_nseParam)
try:
self.enterOuterAlt(localctx, 1)
self.state = 94
self.match(GenaParser.Param)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class NseCurlyContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def stmtCopath(self, i:int=None):
if i is None:
return self.getTypedRuleContexts(GenaParser.StmtCopathContext)
else:
return self.getTypedRuleContext(GenaParser.StmtCopathContext,i)
def stmtExtends(self, i:int=None):
if i is None:
return self.getTypedRuleContexts(GenaParser.StmtExtendsContext)
else:
return self.getTypedRuleContext(GenaParser.StmtExtendsContext,i)
def stmtInitializes(self, i:int=None):
if i is None:
return self.getTypedRuleContexts(GenaParser.StmtInitializesContext)
else:
return self.getTypedRuleContext(GenaParser.StmtInitializesContext,i)
def stmtCaller(self, i:int=None):
if i is None:
return self.getTypedRuleContexts(GenaParser.StmtCallerContext)
else:
return self.getTypedRuleContext(GenaParser.StmtCallerContext,i)
def stmtCallinto(self, i:int=None):
if i is None:
return self.getTypedRuleContexts(GenaParser.StmtCallintoContext)
else:
return self.getTypedRuleContext(GenaParser.StmtCallintoContext,i)
def stmtNse(self, i:int=None):
if i is None:
return self.getTypedRuleContexts(GenaParser.StmtNseContext)
else:
return self.getTypedRuleContext(GenaParser.StmtNseContext,i)
def getRuleIndex(self):
return GenaParser.RULE_nseCurly
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterNseCurly" ):
listener.enterNseCurly(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitNseCurly" ):
listener.exitNseCurly(self)
def nseCurly(self):
localctx = GenaParser.NseCurlyContext(self, self._ctx, self.state)
self.enterRule(localctx, 18, self.RULE_nseCurly)
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
self.state = 96
self.match(GenaParser.T__5)
self.state = 105
self._errHandler.sync(self)
_la = self._input.LA(1)
while True:
self.state = 105
self._errHandler.sync(self)
token = self._input.LA(1)
if token in [GenaParser.Copath]:
self.state = 97
self.stmtCopath()
pass
elif token in [GenaParser.Extends]:
self.state = 98
self.stmtExtends()
pass
elif token in [GenaParser.T__8]:
self.state = 99
self.stmtInitializes()
pass
elif token in [GenaParser.Caller]:
self.state = 100
self.stmtCaller()
pass
elif token in [GenaParser.Callinto]:
self.state = 101
self.stmtCallinto()
pass
elif token in [GenaParser.Noid, GenaParser.Let, GenaParser.DataType, GenaParser.Iden]:
self.state = 102
self.stmtNse()
self.state = 103
self.match(GenaParser.T__0)
pass
else:
raise NoViableAltException(self)
self.state = 107
self._errHandler.sync(self)
_la = self._input.LA(1)
if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << GenaParser.T__8) | (1 << GenaParser.Noid) | (1 << GenaParser.Let) | (1 << GenaParser.Copath) | (1 << GenaParser.Extends) | (1 << GenaParser.Callinto) | (1 << GenaParser.Caller) | (1 << GenaParser.DataType) | (1 << GenaParser.Iden))) != 0)):
break
self.state = 109
self.match(GenaParser.T__6)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class StmtCopathContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def Copath(self):
return self.getToken(GenaParser.Copath, 0)
def nseIden(self):
return self.getTypedRuleContext(GenaParser.NseIdenContext,0)
def getRuleIndex(self):
return GenaParser.RULE_stmtCopath
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterStmtCopath" ):
listener.enterStmtCopath(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitStmtCopath" ):
listener.exitStmtCopath(self)
def stmtCopath(self):
localctx = GenaParser.StmtCopathContext(self, self._ctx, self.state)
self.enterRule(localctx, 20, self.RULE_stmtCopath)
try:
self.enterOuterAlt(localctx, 1)
self.state = 111
self.match(GenaParser.Copath)
self.state = 112
self.nseIden()
self.state = 113
self.match(GenaParser.T__0)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class StmtExtendsContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def Extends(self):
return self.getToken(GenaParser.Extends, 0)
def nseIden(self, i:int=None):
if i is None:
return self.getTypedRuleContexts(GenaParser.NseIdenContext)
else:
return self.getTypedRuleContext(GenaParser.NseIdenContext,i)
def getRuleIndex(self):
return GenaParser.RULE_stmtExtends
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterStmtExtends" ):
listener.enterStmtExtends(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitStmtExtends" ):
listener.exitStmtExtends(self)
def stmtExtends(self):
localctx = GenaParser.StmtExtendsContext(self, self._ctx, self.state)
self.enterRule(localctx, 22, self.RULE_stmtExtends)
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
self.state = 115
self.match(GenaParser.Extends)
self.state = 116
self.nseIden()
self.state = 121
self._errHandler.sync(self)
_la = self._input.LA(1)
while _la==GenaParser.T__7:
self.state = 117
self.match(GenaParser.T__7)
self.state = 118
self.nseIden()
self.state = 123
self._errHandler.sync(self)
_la = self._input.LA(1)
self.state = 124
self.match(GenaParser.T__0)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class StmtInitializesContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def nseIden(self, i:int=None):
if i is None:
return self.getTypedRuleContexts(GenaParser.NseIdenContext)
else:
return self.getTypedRuleContext(GenaParser.NseIdenContext,i)
def getRuleIndex(self):
return GenaParser.RULE_stmtInitializes
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterStmtInitializes" ):
listener.enterStmtInitializes(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitStmtInitializes" ):
listener.exitStmtInitializes(self)
def stmtInitializes(self):
localctx = GenaParser.StmtInitializesContext(self, self._ctx, self.state)
self.enterRule(localctx, 24, self.RULE_stmtInitializes)
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
self.state = 126
self.match(GenaParser.T__8)
self.state = 127
self.nseIden()
self.state = 132
self._errHandler.sync(self)
_la = self._input.LA(1)
while _la==GenaParser.T__7:
self.state = 128
self.match(GenaParser.T__7)
self.state = 129
self.nseIden()
self.state = 134
self._errHandler.sync(self)
_la = self._input.LA(1)
self.state = 135
self.match(GenaParser.T__0)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class StmtCallerContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def Caller(self):
return self.getToken(GenaParser.Caller, 0)
def eventPath(self):
return self.getTypedRuleContext(GenaParser.EventPathContext,0)
def getRuleIndex(self):
return GenaParser.RULE_stmtCaller
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterStmtCaller" ):
listener.enterStmtCaller(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitStmtCaller" ):
listener.exitStmtCaller(self)
def stmtCaller(self):
localctx = GenaParser.StmtCallerContext(self, self._ctx, self.state)
self.enterRule(localctx, 26, self.RULE_stmtCaller)
try:
self.enterOuterAlt(localctx, 1)
self.state = 137
self.match(GenaParser.Caller)
self.state = 138
self.eventPath()
self.state = 139
self.match(GenaParser.T__0)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class StmtCallintoContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def Callinto(self):
return self.getToken(GenaParser.Callinto, 0)
def eventPath(self):
return self.getTypedRuleContext(GenaParser.EventPathContext,0)
def getRuleIndex(self):
return GenaParser.RULE_stmtCallinto
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterStmtCallinto" ):
listener.enterStmtCallinto(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitStmtCallinto" ):
listener.exitStmtCallinto(self)
def stmtCallinto(self):
localctx = GenaParser.StmtCallintoContext(self, self._ctx, self.state)
self.enterRule(localctx, 28, self.RULE_stmtCallinto)
try:
self.enterOuterAlt(localctx, 1)
self.state = 141
self.match(GenaParser.Callinto)
self.state = 142
self.eventPath()
self.state = 143
self.match(GenaParser.T__0)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class EventPathContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def eventSpec(self, i:int=None):
if i is None:
return self.getTypedRuleContexts(GenaParser.EventSpecContext)
else:
return self.getTypedRuleContext(GenaParser.EventSpecContext,i)
def EvtPathConnector(self, i:int=None):
if i is None:
return self.getTokens(GenaParser.EvtPathConnector)
else:
return self.getToken(GenaParser.EvtPathConnector, i)
def getRuleIndex(self):
return GenaParser.RULE_eventPath
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterEventPath" ):
listener.enterEventPath(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitEventPath" ):
listener.exitEventPath(self)
def eventPath(self):
localctx = GenaParser.EventPathContext(self, self._ctx, self.state)
self.enterRule(localctx, 30, self.RULE_eventPath)
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
self.state = 145
self.eventSpec()
self.state = 150
self._errHandler.sync(self)
_la = self._input.LA(1)
while _la==GenaParser.EvtPathConnector:
self.state = 146
self.match(GenaParser.EvtPathConnector)
self.state = 147
self.eventSpec()
self.state = 152
self._errHandler.sync(self)
_la = self._input.LA(1)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class EventSpecContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def eventName(self):
return self.getTypedRuleContext(GenaParser.EventNameContext,0)
def eventTargetArgs(self):
return self.getTypedRuleContext(GenaParser.EventTargetArgsContext,0)
def eventCurly(self):
return self.getTypedRuleContext(GenaParser.EventCurlyContext,0)
def getRuleIndex(self):
return GenaParser.RULE_eventSpec
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterEventSpec" ):
listener.enterEventSpec(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitEventSpec" ):
listener.exitEventSpec(self)
def eventSpec(self):
localctx = GenaParser.EventSpecContext(self, self._ctx, self.state)
self.enterRule(localctx, 32, self.RULE_eventSpec)
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
self.state = 153
self.eventName()
self.state = 155
self._errHandler.sync(self)
_la = self._input.LA(1)
if _la==GenaParser.T__9:
self.state = 154
self.eventTargetArgs()
self.state = 158
self._errHandler.sync(self)
_la = self._input.LA(1)
if _la==GenaParser.T__5 or _la==GenaParser.Callinto:
self.state = 157
self.eventCurly()
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class EventNameContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def Iden(self):
return self.getToken(GenaParser.Iden, 0)
def getRuleIndex(self):
return GenaParser.RULE_eventName
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterEventName" ):
listener.enterEventName(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitEventName" ):
listener.exitEventName(self)
def eventName(self):
localctx = GenaParser.EventNameContext(self, self._ctx, self.state)
self.enterRule(localctx, 34, self.RULE_eventName)
try:
self.enterOuterAlt(localctx, 1)
self.state = 160
self.match(GenaParser.Iden)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class EventTargetArgsContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def eventTarget(self):
return self.getTypedRuleContext(GenaParser.EventTargetContext,0)
def getRuleIndex(self):
return GenaParser.RULE_eventTargetArgs
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterEventTargetArgs" ):
listener.enterEventTargetArgs(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitEventTargetArgs" ):
listener.exitEventTargetArgs(self)
def eventTargetArgs(self):
localctx = GenaParser.EventTargetArgsContext(self, self._ctx, self.state)
self.enterRule(localctx, 36, self.RULE_eventTargetArgs)
try:
self.enterOuterAlt(localctx, 1)
self.state = 162
self.match(GenaParser.T__9)
self.state = 163
self.eventTarget()
self.state = 164
self.match(GenaParser.T__10)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class EventTargetContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def nseIden(self):
return self.getTypedRuleContext(GenaParser.NseIdenContext,0)
def getRuleIndex(self):
return GenaParser.RULE_eventTarget
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterEventTarget" ):
listener.enterEventTarget(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitEventTarget" ):
listener.exitEventTarget(self)
def eventTarget(self):
localctx = GenaParser.EventTargetContext(self, self._ctx, self.state)
self.enterRule(localctx, 38, self.RULE_eventTarget)
try:
self.enterOuterAlt(localctx, 1)
self.state = 166
self.nseIden()
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class EventCurlyContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def stmtCaller(self):
return self.getTypedRuleContext(GenaParser.StmtCallerContext,0)
def stmtCallinto(self):
return self.getTypedRuleContext(GenaParser.StmtCallintoContext,0)
def getRuleIndex(self):
return GenaParser.RULE_eventCurly
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterEventCurly" ):
listener.enterEventCurly(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitEventCurly" ):
listener.exitEventCurly(self)
def eventCurly(self):
localctx = GenaParser.EventCurlyContext(self, self._ctx, self.state)
self.enterRule(localctx, 40, self.RULE_eventCurly)
try:
self.state = 173
self._errHandler.sync(self)
token = self._input.LA(1)
if token in [GenaParser.T__5]:
self.enterOuterAlt(localctx, 1)
self.state = 168
self.match(GenaParser.T__5)
self.state = 169
self.stmtCaller()
pass
elif token in [GenaParser.Callinto]:
self.enterOuterAlt(localctx, 2)
self.state = 170
self.stmtCallinto()
self.state = 171
self.match(GenaParser.T__6)
pass
else:
raise NoViableAltException(self)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
|
[
""
] | |
95e0cb207d679e915f591a3aa4c3d6303e9dddc2
|
03a81ebc980e564ae844403ee1526c963e837447
|
/BeamNG_scenario/van_scenario.py
|
05777e16a335c125f90ec37280d462e202ad2e8c
|
[
"MIT"
] |
permissive
|
Hamza-619/Testing-Robustness-of-Self-Driving-cars
|
96944a701845eb24dba17eff041b21f98f34ade3
|
2d4de6dc5fdf0630be8bf8cf00e32233e63ba191
|
refs/heads/main
| 2023-04-19T03:19:58.777996
| 2021-05-05T06:33:37
| 2021-05-05T06:33:37
| 363,964,875
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 44,373
|
py
|
# Define all the imports
import time
import matplotlib
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
import seaborn as sns
from shapely import geometry
import sys
from matplotlib.pyplot import imshow
from time import sleep
import os
import math
import csv
import itertools as it
import random
from beamngpy import BeamNGpy, Vehicle, Scenario, Road, setup_logging
from beamngpy.sensors import Electrics
from beamngpy.sensors import Camera
# Trajectory bookkeeping shared across simulation runs; NOTE(review): these
# three lists appear to be re-initialised inside the run loop further down,
# so the module-level values serve only as initial placeholders.
positions = list()
directions = list()
distance = list()
# Target AI speeds in m/s (mapped to 30-80 km/h by the loop below).  A tuple
# instead of the original set literal guarantees a deterministic, ascending
# iteration order for the speed sweep — set iteration order is
# implementation-defined, which made run ordering non-reproducible.
speed = (9, 10, 11, 13, 14, 16, 17, 19, 20, 22, 23)
for car_speed in speed:
my_dict = {'van_transmission': ['van_transmission_5M', 'van_transmission_4A', 'van_transmission_6A'],
'tire_R_16x8_alt': ['tire_R_225_75_16_heavy', 'tire_R_225_75_16_standard', 'tire_R_31_12_16_offroad'],
'brakepad_R': ['brakepad_R', 'brakepad_R_race', 'brakepad_R_sport']}
allNames = sorted(my_dict)
combinations = it.product(*(my_dict[Name] for Name in allNames))
# print(list(combinations))
for x in list(combinations):
# Calling BeamngPY
bng = BeamNGpy('localhost', 62240, home='C:/Beamng/trunk')
# Create a scenario in asfault map with scenario name 'indicatorsT1'.
scenario = Scenario('asfault', 'euroT1')
# Properties for list of vehicles.
original_vehicle1 = (803.1582641601562, -
283.4632263183594, 0.20022384822368622)
vehicle1 = Vehicle('ego_vehicle', model='van',
licence='Main01', color='White')
original_vehicle2 = (
530.0204467773438, 8.33481502532959, 0.1997339278459549)
vehicle2 = Vehicle('green_vehicle', model='van',
licence='Main02', colour='Yellow')
# Add vehicles to our scenario at this position and rotation.
scenario.add_vehicle(vehicle1, pos=original_vehicle1, rot=(0, 0, 90))
scenario.add_vehicle(vehicle2, pos=original_vehicle2, rot=(0, 0, 100))
road = Road('road_rubber_sticky', rid='main_road', texture_length=16)
road.nodes = [
(869.5509237188014, -295.7357435150861, 0.01, 7.9999999999999805),
(854.895151969961, -295.8359261634828, 0.01, 8.000000000000004),
(840.286418364192, -294.658392914346, 0.01, 8.000000000000004),
(825.8359041852287, -292.2121055067158, 0.01, 8.000000000000012),
(811.6535865714322, -288.51568166487283, 0.01, 7.999999999999975),
(797.8474015233609, -283.5972534062153, 0.01, 8.000000000000039),
(784.5224224456331, -277.4942529396198, 0.01, 8.000000000000027),
(771.7800604748727, -270.25312778372955, 0.01, 8.000000000000021),
(759.7172926797257, -261.9289872732918, 0.01, 7.9999999999999725),
(748.4259240068113, -252.5851831438444, 0.01, 8.000000000000004),
(737.9918885896456, -242.29282738675565, 0.01, 8.00000000000003),
(728.4945957379957, -231.13025104403135, 0.01, 7.999999999999948),
(720.006325585085, -219.1824080617875, 0.01, 8.000000000000036),
(712.5916789921349, -206.54022873942375, 0.01, 7.999999999999959),
(706.3070858968036, -193.29992769513996, 0.01, 8.0),
(701.2003758472878, -179.56227161459543, 0.01, 7.999999999999917),
(697.31041399058, -165.43181235558666, 0.01, 8.000000000000064),
(694.666805285233, -151.01609124527928, 0.01, 8.000000000000004),
(693.289669189749, -136.42482062577312, 0.01, 8.000000000000032),
(693.1894865413524, -121.76904887693271, 0.01, 7.999999999999888),
(693.1131569044787, -110.60274659210194, 0.01, 8.000000000000032),
(692.0639103555386, -99.48558802485915, 0.01, 8.000000000000005),
(690.0497322943219, -88.50218146462495, 0.01, 8.000000000000068),
(687.0859518320683, -77.73611726728494, 0.01, 8.000000000000012),
(683.1951251276753, -67.26933168210815, 0.01, 7.999999999999983),
(678.4068637217085, -57.18148326741573, 0.01, 8.000000000000059),
(672.7576091746989, -47.54934664085289, 0.01, 7.9999999999999325),
(666.2903557248623, -38.44622817819089, 0.01, 8.000000000000037),
(659.0543230759862, -29.941408107543808, 0.01, 8.00000000000003),
(651.1045818057646, -22.099613245000015, 0.01, 8.000000000000004),
(642.5016342454489, -14.980524384468652, 0.01, 7.9999999999999964),
(633.310954020575, -8.638322090801779, 0.01, 7.999999999999986),
(623.6024877571385, -3.121274352980649, 0.01, 8.000000000000028),
(613.4501227455364, 1.5286307644254435, 0.01, 7.999999999999988),
(602.9311246136725, 5.276004675783568, 0.01, 8.000000000000021),
(592.1255492888752, 8.092327602902003, 0.01, 8.000000000000027),
(581.1156337239507, 9.956165627763141, 0.01, 8.000000000000018),
(569.9851700243172, 10.853333817581643, 0.01, 8.000000000000032),
(558.8188677394864, 10.777004180707962, 0.01, 8.000000000000032),
(547.7017091722437, 9.727757631767815, 0.01, 8.000000000000037),
(536.7183026120094, 7.713579570551076, 0.01, 8.000000000000043),
(525.9522384146694, 4.749799108297523, 0.01, 8.000000000000043),
(515.4854528294927, 0.8589724039044881, 0.01, 7.999999999999984),
(505.39760441480024, -3.9292890020621627, 0.01, 7.999999999999995),
(493.73352968531213, -9.465716252711154, 0.01, 7.999999999999996),
(481.6313088524515, -13.96448462966561, 0.01, 8.000000000000016),
(469.1830471242771, -17.391355789146296, 0.01, 8.000000000000004),
]
road.improved_spline = 0
road.over_object = 1
road.break_angle = 180
# We create Divider for the scenario.
road2 = Road('BlankWhite', rid='divider_1_1', texture_length=16)
road2.nodes = [
(854.895151969961, -295.8359261634828, 0.01, 0.3),
(840.286418364192, -294.658392914346, 0.01, 0.3),
(825.8359041852287, -292.2121055067158, 0.01, 0.3),
(811.6535865714322, -288.51568166487283, 0.01, 0.3),
(797.8474015233609, -283.5972534062153, 0.01, 0.3),
(784.5224224456331, -277.4942529396198, 0.01, 0.3),
(771.7800604748727, -270.25312778372955, 0.01, 0.3),
(759.7172926797257, -261.9289872732918, 0.01, 0.3),
(748.4259240068113, -252.5851831438444, 0.01, 0.3),
(737.9918885896456, -242.29282738675565, 0.01, 0.3),
(728.4945957379957, -231.13025104403135, 0.01, 0.3),
(720.006325585085, -219.1824080617875, 0.01, 0.3),
(712.5916789921349, -206.54022873942375, 0.01, 0.3),
(706.3070858968036, -193.29992769513996, 0.01, 0.3),
(701.2003758472878, -179.56227161459543, 0.01, 0.3),
(697.31041399058, -165.43181235558666, 0.01, 0.3),
(694.666805285233, -151.01609124527928, 0.01, 0.3),
(693.289669189749, -136.42482062577312, 0.01, 0.3),
(693.1894865413524, -121.76904887693271, 0.01, 0.3),
(693.1131569044787, -110.60274659210194, 0.01, 0.3),
(692.0639103555386, -99.48558802485915, 0.01, 0.3),
(690.0497322943219, -88.50218146462495, 0.01, 0.3),
(687.0859518320683, -77.73611726728494, 0.01, 0.3),
(683.1951251276753, -67.26933168210815, 0.01, 0.3),
(678.4068637217085, -57.18148326741573, 0.01, 0.3),
(672.7576091746989, -47.54934664085289, 0.01, 0.3),
(666.2903557248623, -38.44622817819089, 0.01, 0.3),
(659.0543230759862, -29.941408107543808, 0.01, 0.3),
(651.1045818057646, -22.099613245000015, 0.01, 0.3),
(642.5016342454489, -14.980524384468652, 0.01, 0.3),
(633.310954020575, -8.638322090801779, 0.01, 0.3),
(623.6024877571385, -3.121274352980649, 0.01, 0.3),
(613.4501227455364, 1.5286307644254435, 0.01, 0.3),
(602.9311246136725, 5.276004675783568, 0.01, 0.3),
(592.1255492888752, 8.092327602902003, 0.01, 0.3),
(581.1156337239507, 9.956165627763141, 0.01, 0.3),
(569.9851700243172, 10.853333817581643, 0.01, 0.3),
(558.8188677394864, 10.777004180707962, 0.01, 0.3),
(547.7017091722437, 9.727757631767815, 0.01, 0.3),
(536.7183026120094, 7.713579570551076, 0.01, 0.3),
(525.9522384146694, 4.749799108297523, 0.01, 0.3),
(515.4854528294927, 0.8589724039044881, 0.01, 0.3),
(505.39760441480024, -3.9292890020621627, 0.01, 0.3),
(493.73352968531213, -9.465716252711154, 0.01, 0.3),
(481.6313088524515, -13.96448462966561, 0.01, 0.3),
(469.1830471242771, -17.391355789146296, 0.01, 0.3),
(456.4834832890063, -19.72024917242819, 0.01, 0.3),
(443.6292686956319, -20.933440494640223, 0.01, 0.3),
(430.7182316787963, -21.021696637275436, 0.01, 0.3),
(417.848633026095, -19.98434591779785, 0.01, 0.3),
(405.1184181541512, -17.82928320155218, 0.01, 0.3),
(392.62447168485426, -14.57290981707149, 0.01, 0.3),
(380.4618800948866, -10.240008732063714, 0.01, 0.3),
(368.72320805022173, -4.8635559400629305, 0.01, 0.3),
(357.4977939331232, 1.51553050679275, 0.01, 0.3),
(355.3740669379965, 2.7223846994411254, 0.01, 0.3),
(353.1532370917085, 3.7395514438737076, 0.01, 0.3),
(350.8522062503633, 4.5592894869832925, 0.01, 0.3),
(348.4884866480639, 5.175360127290453, 0.01, 0.3),
(346.0800676182367, 5.583074695228829, 0.01, 0.3),
(343.6452786839418, 5.779330236751619, 0.01, 0.3),
(341.20265005913507, 5.762633128685504, 0.01, 0.3),
(338.77077162255074, 5.53311044610485, 0.01, 0.3),
(336.3681514374995, 5.092508995213684, 0.01, 0.3),
(334.01307489433134, 4.44418201909572, 0.01, 0.3),
(331.72346554757394, 3.593063677509747, 0.01, 0.3),
(329.51674870685997, 2.5456314949545362, 0.01, 0.3),
(327.40971881979937, 1.3098570627961763, 0.01, 0.3),
(325.418411656092, -0.10485462935560186, 0.01, 0.3),
(323.557982265638, -1.6877367712972475, 0.01, 0.3),
(316.3820403310295, -7.793139318786457, 0.01, 0.3),
(308.7012841281584, -13.249884417086172, 0.01, 0.3),
(300.57416884949606, -18.01644294112556, 0.01, 0.3),
(292.06254674959933, -22.056538502409957, 0.01, 0.3),
(283.23119641210644, -25.339423534241583, 0.01, 0.3),
(274.1473297456008, -27.840113299268026, 0.01, 0.3),
(264.88008046040324, -29.539576038419668, 0.01, 0.3),
(255.49997791929212, -30.42487781408791, 0.01, 0.3),
(246.07841036646613, -30.489280945200086, 0.01, 0.3),
(236.68708161990034, -29.732295285040763, 0.01, 0.3),
(227.3974653619954, -28.159681951564185, 0.01, 0.3),
(218.28026118169763, -25.783409481808008, 0.01, 0.3),
(209.4048565079375, -22.62156274409962, 0.01, 0.3),
(200.83879852939822, -18.698205301288237, 0.01, 0.3),
(192.64728011962362, -14.043196272501657, 0.01, 0.3),
(184.45576170984907, -9.388187243715066, 0.01, 0.3),
(175.88970373130977, -5.464829800903676, 0.01, 0.3),
(167.01429905754964, -2.302983063195276, 0.01, 0.3),
(157.89709487725187, 0.07328940656091376, 0.01, 0.3),
(148.60747861934695, 1.645902740037501, 0.01, 0.3),
(139.21614987278113, 2.40288840019684, 0.01, 0.3),
(135.04222598541855, 2.739326471378762, 0.01, 0.3),
(130.9135076485719, 3.438265730701684, 0.01, 0.3),
(126.8614169017729, 4.494386828371097, 0.01, 0.3),
(122.91679260232395, 5.899652045130379, 0.01, 0.3),
(119.10965572297314, 7.643366464157658, 0.01, 0.3),
(115.46898087418445, 9.712259365840584, 0.01, 0.3),
(112.02247578985673, 12.090585225965654, 0.01, 0.3),
(108.79637045473835, 14.760243548664906, 0.01, 0.3),
(105.81521747840526, 17.700916622118825, 0.01, 0.3),
(103.10170523507672, 20.890224148611487, 0.01, 0.3),
(100.67648519138797, 24.30389357210973, 0.01, 0.3),
(98.55801473625934, 27.915944807070794, 0.01, 0.3),
(96.76241670902184, 31.69888796258045, 0.01, 0.3),
(95.30335669487445, 35.62393255702173, 0.01, 0.3),
(94.19193902152935, 39.66120663102424, 0.01, 0.3),
(92.15433995373003, 47.0628757666955, 0.01, 0.3),
(89.47939659445981, 54.25879085650453, 0.01, 0.3),
(86.18746687785772, 61.194186641605576, 0.01, 0.3),
(82.30360437678858, 67.81628057236753, 0.01, 0.3),
(77.85736763002585, 74.07467451544764, 0.01, 0.3),
(72.88259518392353, 79.9217383140175, 0.01, 0.3),
(67.4171480606462, 85.31297228201636, 0.01, 0.3),
(61.50262161292919, 90.20734587363165, 0.01, 0.3),
(55.18402895832835, 94.56760995052763, 0.01, 0.3),
(48.50945840221574, 98.36058027027966, 0.01, 0.3),
(41.52970745673928, 101.55739003849634, 0.01, 0.3),
(34.29789624108288, 104.13370960255503, 0.01, 0.3),
(26.869063205284704, 106.06993161494896, 0.01, 0.3),
(19.299746254399196, 107.35132025704098, 0.01, 0.3),
(11.647552460901125, 107.96812338754117, 0.01, 0.3),
(3.970719640079963, 107.9156467621905, 0.01, 0.3),
(-3.672326874899454, 107.19428975979417, 0.01, 0.3),
(-11.223418885060454, 105.80954234270764, 0.01, 0.3),
(-18.62508802073171, 103.77194327490832, 0.01, 0.3),
(-25.821003110540744, 101.09699991563812, 0.01, 0.3),
(-32.75639889564178, 97.80507019903602, 0.01, 0.3),
(-39.37849282640374, 93.92120769796688, 0.01, 0.3),
(-45.63688676948385, 89.47497095120414, 0.01, 0.3),
(-51.4839505680537, 84.50019850510184, 0.01, 0.3),
(-54.673258094546355, 81.7866862617733, 0.01, 0.3),
(-58.086927518044604, 79.36146621808453, 0.01, 0.3),
(-61.69897875300566, 77.24299576295591, 0.01, 0.3),
(-65.48192190851532, 75.4473977357184, 0.01, 0.3),
(-69.4069665029566, 73.98833772157101, 0.01, 0.3),
(-73.44424057695912, 72.87692004822593, 0.01, 0.3),
(-77.56301803704693, 72.12160327526963, 0.01, 0.3),
(-81.73195249976298, 71.72813581941709, 0.01, 0.3),
(-85.91931585657451, 71.69951220558946, 0.01, 0.3),
(-90.0932397439371, 72.03595027677137, 0.01, 0.3),
(-94.22195808078374, 72.7348895360943, 0.01, 0.3),
(-98.27404882758276, 73.7910106337637, 0.01, 0.3),
]
road2.drivability = -1
road2.improved_spline = 0
road2.over_object = 1
road2.break_angle = 180
road2.render_priority = 9
# Creation of Left side Boundary for the scenario.
road3 = Road('strap', rid='boundary_1_l1', texture_length=16)
road3.nodes = [
(854.7590419968316, -299.53313581345355, 0.01, 0.3),
(839.8285932774662, -298.3296707995771, 0.01, 0.3),
(825.0598483102592, -295.82951098572244, 0.01, 0.3),
(810.5652061620216, -292.0516840977104, 0.01, 0.3),
(796.4549798116083, -287.0249416792737, 0.01, 0.3),
(782.8365566015312, -280.78754027544596, 0.01, 0.3),
(769.8135809554484, -273.38695027715625, 0.01, 0.3),
(757.4851655815157, -264.879494642891, 0.01, 0.3),
(745.9451371648282, -255.32992024696836, 0.01, 0.3),
(735.2813222896962, -244.81090511673167, 0.01, 0.3),
(725.5748790263071, -233.40250530887687, 0.01, 0.3),
(716.8996792688073, -221.19154563452378, 0.01, 0.3),
(709.3217465255783, -208.27095886997017, 0.01, 0.3),
(702.8987534404785, -194.7390784821608, 0.01, 0.3),
(697.6795828691943, -180.69889025161928, 0.01, 0.3),
(693.7039558511937, -166.2572484884741, 0.01, 0.3),
(691.002129308634, -151.52406280664957, 0.01, 0.3),
(689.5946657729111, -136.61146164532133, 0.01, 0.3),
(689.4921873573719, -121.61984092694169, 0.01, 0.3),
(689.4182430216505, -110.80248558851187, 0.01, 0.3),
(688.4004651361473, -100.00659991793142, 0.01, 0.3),
(686.4456367685357, -89.3405010392577, 0.01, 0.3),
(683.568635343051, -78.885364405324, 0.01, 0.3),
(679.791356577722, -68.72075991889866, 0.01, 0.3),
(675.1425478447361, -58.92404635767939, 0.01, 0.3),
(669.657589385579, -49.56978262728441, 0.01, 0.3),
(663.3782250460329, -40.729160322960944, 0.01, 0.3),
(656.3522445803059, -32.469461918558544, 0.01, 0.3),
(648.6331199421453, -24.85354870627701, 0.01, 0.3),
(640.2795983309821, -17.9393823842759, 0.01, 0.3),
(631.3552550902717, -11.77958393315343, 0.01, 0.3),
(621.9280098607418, -6.421033138510644, 0.01, 0.3),
(612.0696096709152, -1.904511807476182, 0.01, 0.3),
(601.8550828989012, 1.7356066054706056, 0.01, 0.3),
(591.3621682611349, 4.471618601243636, 0.01, 0.3),
(580.6707231738047, 6.282701476505859, 0.01, 0.3),
(569.8621159896963, 7.155071797014778, 0.01, 0.3),
(559.0186067358964, 7.0820902978797005, 0.01, 0.3),
(548.2227210653159, 6.064312412376524, 0.01, 0.3),
(537.5566221866422, 4.1094840447649785, 0.01, 0.3),
(527.1014855527084, 1.2324826192802605, 0.01, 0.3),
(516.936881066283, -2.544796146048768, 0.01, 0.3),
(507.1401675050639, -7.1936048790345986, 0.01, 0.3),
(495.17268047233347, -12.874048709036112, 0.01, 0.3),
(482.7679274894745, -17.485277607759365, 0.01, 0.3),
(470.00848325716527, -20.997813928532484, 0.01, 0.3),
(456.99145485037633, -23.384925149027147, 0.01, 0.3),
(443.8159097151801, -24.628443911478197, 0.01, 0.3),
(430.58212170566696, -24.71890628724621, 0.01, 0.3),
(417.39080793936915, -23.655623803028924, 0.01, 0.3),
(404.34236227918115, -21.446688680558744, 0.01, 0.3),
(391.53609127544377, -18.10891224990914, 0.01, 0.3),
(379.0694583831336, -13.667697005121948, 0.01, 0.3),
(367.0373422061198, -8.156843275889036, 0.01, 0.3),
(355.5199264659941, -1.611820536930344, 0.01, 0.3),
(353.6995890415998, -0.5773740860888795, 0.01, 0.3),
(351.7727240170872, 0.3064088719720715, 0.01, 0.3),
(349.77616453559193, 1.018891416670355, 0.01, 0.3),
(347.72510562032363, 1.5546511256321, 0.01, 0.3),
(345.6351570680907, 1.9096105439715487, 0.01, 0.3),
(343.52222464932106, 2.0810682161847813, 0.01, 0.3),
(341.40238905554503, 2.067719245857284, 0.01, 0.3),
(339.29178351562297, 1.8696652267135723, 0.01, 0.3),
(337.2064710121322, 1.4884134694276192, 0.01, 0.3),
(335.16232203237047, 0.9268655300784798, 0.01, 0.3),
(333.17489378436443, 0.18929512755651268, 0.01, 0.3),
(331.25931179712364, -0.7186843820179094, 0.01, 0.3),
(329.43015480623086, -1.7901627263238964, 0.01, 0.3),
(327.7013438008621, -3.0169853081849722, 0.01, 0.3),
(326.08603607665276, -4.389815266977436, 0.01, 0.3),
(318.65429459587557, -10.71285603047459, 0.01, 0.3),
(310.71042170089436, -16.356530733364085, 0.01, 0.3),
(302.30489898004294, -21.28637540768193, 0.01, 0.3),
(293.50169753662067, -25.464870958734913, 0.01, 0.3),
(284.3678150491294, -28.86021651233533, 0.01, 0.3),
(274.98564932023356, -31.444208825054087, 0.01, 0.3),
(265.38805202177326, -33.204252015018625, 0.01, 0.3),
(255.68661893884033, -34.11988123092588, 0.01, 0.3),
(245.94230039333684, -34.18649059517086, 0.01, 0.3),
(236.22925653317452, -33.403573170271834, 0.01, 0.3),
(226.62140948702535, -31.777087430570752, 0.01, 0.3),
(217.19188077228725, -29.319411914645684, 0.01, 0.3),
(208.01243479618446, -26.049251017157864, 0.01, 0.3),
(199.1529326852961, -21.991492637114288, 0.01, 0.3),
(190.66941265249451, -17.17054731622475, 0.01, 0.3),
(182.78128381345235, -12.687946029245069, 0.01, 0.3),
(174.50919065668853, -8.897972372805313, 0.01, 0.3),
(165.93825734277834, -5.8433811335082115, 0.01, 0.3),
(157.13371384951165, -3.547419595097436, 0.01, 0.3),
(148.16256806920097, -2.0275614112197715, 0.01, 0.3),
(139.09309583816034, -1.295373620369995, 0.01, 0.3),
(134.58440089869276, -0.9319514138523141, 0.01, 0.3),
(130.1374517736019, -0.1791397483048844, 0.01, 0.3),
(125.77303649236252, 0.9583843955334217, 0.01, 0.3),
(121.52437089057099, 2.4719637720721073, 0.01, 0.3),
(117.42378987887113, 4.350079128331583, 0.01, 0.3),
(113.5025013547607, 6.578436872413563, 0.01, 0.3),
(109.79034869164681, 9.140077856366448, 0.01, 0.3),
(106.31558361275482, 12.015506445541254, 0.01, 0.3),
(103.1046511784558, 15.18283889214295, 0.01, 0.3),
(100.18198852338847, 18.61796988376552, 0.01, 0.3),
(97.56983887511011, 22.294755999373727, 0.01, 0.3),
(95.28808226970301, 26.185214676523852, 0.01, 0.3),
(93.35408425269682, 30.259737175559252, 0.01, 0.3),
(91.78256371678073, 34.48731391999865, 0.01, 0.3),
(90.58200439306668, 38.84839899915205, 0.01, 0.3),
(88.63702346471278, 45.91362862865643, 0.01, 0.3),
(86.07562804450659, 52.80736261971403, 0.01, 0.3),
(82.92315100088527, 59.451623551341925, 0.01, 0.3),
(79.20358458766852, 65.795844585936, 0.01, 0.3),
(74.94523695119648, 71.7917423706776, 0.01, 0.3),
(70.18051668824333, 77.39368450300276, 0.01, 0.3),
(64.9456861970269, 82.55903682073935, 0.01, 0.3),
(59.280585698462424, 87.2484878738244, 0.01, 0.3),
(53.228330028025155, 91.42634810817597, 0.01, 0.3),
(46.83498050581903, 95.06082148474967, 0.01, 0.3),
(40.14919438211803, 98.12424746659471, 0.01, 0.3),
(33.22185452631156, 100.5933115322421, 0.01, 0.3),
(26.105682177544466, 102.4492226132906, 0.01, 0.3),
(18.85483570425321, 103.6778561057837, 0.01, 0.3),
(11.524498426280331, 104.26986136697434, 0.01, 0.3),
(4.170458636489882, 104.22073287936227, 0.01, 0.3),
(-3.151314981827199, 103.53084454040288, 0.01, 0.3),
(-10.38509931042773, 102.2054468169216, 0.01, 0.3),
(-17.475840882692676, 100.25462678589108, 0.01, 0.3),
(-24.369574873750263, 97.69323136568488, 0.01, 0.3),
(-31.01383580537816, 94.54075432206356, 0.01, 0.3),
(-37.35805683997225, 90.82118790884681, 0.01, 0.3),
(-43.35395462471384, 86.56284027237477, 0.01, 0.3),
(-48.95589675703894, 81.79812000942164, 0.01, 0.3),
(-52.401003829700414, 78.86696955008503, 0.01, 0.3),
(-56.07778994530861, 76.25481990180668, 0.01, 0.3),
(-59.9682486224587, 73.97306329639959, 0.01, 0.3),
(-64.04277112149416, 72.03906527939337, 0.01, 0.3),
(-68.2703478659335, 70.4675447434773, 0.01, 0.3),
(-72.61880444407093, 69.27046190883975, 0.01, 0.3),
(-77.05504647567699, 68.45692729867068, 0.01, 0.3),
(-81.54531148021474, 68.03313240257913, 0.01, 0.3),
(-86.05542582970375, 68.00230255561868, 0.01, 0.3),
(-90.55106483066292, 68.36467239154031, 0.01, 0.3),
(-94.99801395575385, 69.11748405708775, 0.01, 0.3),
(-99.35009054235405, 70.25061256345077, 0.01, 0.3),
]
road3.drivability = -1
road3.improved_spline = 0
road3.over_object = 1
road3.break_angle = 180
road3.render_priority = 9
# Creation of Right side Boundary for the scenario.
road4 = Road('strap', rid='boundary_1_r1', texture_length=16)
road4.nodes = [
(869.3773230309554, -292.0395093410404, 0.01, 0.3),
(855.0443599199521, -292.13862697950225, 0.01, 0.3),
(840.7572993894263, -290.98816739971096, 0.01, 0.3),
(826.6248745967787, -288.5968862937354, 0.01, 0.3),
(812.7546418280723, -284.98298275468693, 0.01, 0.3),
(799.252161929753, -280.1739607706323, 0.01, 0.3),
(786.2201969268668, -274.2064199026371, 0.01, 0.3),
(773.7579279420017, -267.1257767400065, 0.01, 0.3),
(761.9602003670561, -258.98591925261724, 0.01, 0.3),
(750.916802032538, -249.84879667092878, 0.01, 0.3),
(740.7117798679592, -239.78394801493312, 0.01, 0.3),
(731.4228002539531, -228.8679728602243, 0.01, 0.3),
(723.1205579342218, -217.18394836897562, 0.01, 0.3),
(715.868237985849, -204.82079702257226, 0.01, 0.3),
(709.7210349427125, -191.87260986783542, 0.01, 0.3),
(704.7257327317533, -178.43793042734166, 0.01, 0.3),
(700.9203486190427, -164.61900472371445, 0.01, 0.3),
(698.3338438754321, -150.52100312565386, 0.01, 0.3),
(696.9859033637947, -136.2512199379272, 0.01, 0.3),
(696.8867857253329, -121.91825682692374, 0.01, 0.3),
(696.8081603213167, -110.4161055725537, 0.01, 0.3),
(695.7285863321375, -98.97761646348887, 0.01, 0.3),
(693.6561904337082, -87.67674533173755, 0.01, 0.3),
(690.6067448101618, -76.59949863026111, 0.01, 0.3),
(686.6034575840005, -65.83018089508728, 0.01, 0.3),
(681.6767961882653, -55.45075313686933, 0.01, 0.3),
(675.8642554909765, -45.54020906811656, 0.01, 0.3),
(669.2100724365503, -36.17397391334455, 0.01, 0.3),
(661.7648893759363, -27.423330377568536, 0.01, 0.3),
(653.5853686477477, -19.354876141875973, 0.01, 0.3),
(644.7337613436588, -12.030017014869339, 0.01, 0.3),
(635.2774335399993, -5.504499597375101, 0.01, 0.3),
(625.2883536012405, 0.1720129828454515, 0.01, 0.3),
(614.8425444572891, 4.956319037483773, 0.01, 0.3),
(604.019505023083, 8.812007108621215, 0.01, 0.3),
(592.9016051638451, 11.709733081908597, 0.01, 0.3),
(581.5734588106767, 13.62744351299422, 0.01, 0.3),
(570.1212799974465, 14.550543467552414, 0.01, 0.3),
(558.6322267199381, 14.472007597545916, 0.01, 0.3),
(547.1937376108735, 13.392433608366808, 0.01, 0.3),
(535.8928664791215, 11.320037709937322, 0.01, 0.3),
(524.8156197776462, 8.270592086391233, 0.01, 0.3),
(514.0463020424713, 4.267304860229461, 0.01, 0.3),
(503.65504132453657, -0.6649731250897255, 0.01, 0.3),
(492.3062118580076, -6.051767206802257, 0.01, 0.3),
(480.50696766519775, -10.439127745200098, 0.01, 0.3),
(468.3702394924049, -13.781421160683637, 0.01, 0.3),
(455.9883951693809, -16.053210582229102, 0.01, 0.3),
(443.45566800778596, -17.237206320594577, 0.01, 0.3),
(430.8674396287873, -17.32439745329492, 0.01, 0.3),
(418.3195140513293, -16.31412040316285, 0.01, 0.3),
(405.907388565701, -14.21406398857182, 0.01, 0.3),
(393.72552694149437, -11.040210906885607, 0.01, 0.3),
(381.8666405012787, -6.816716096480716, 0.01, 0.3),
(370.42098253145554, -1.5757229030802349, 0.01, 0.3),
(359.4756614002523, 4.642881550515844, 0.01, 0.3),
(357.05993278209843, 6.015672035267234, 0.01, 0.3),
(354.54565880346155, 7.167239716931958, 0.01, 0.3),
(351.9405866597737, 8.095291919820955, 0.01, 0.3),
(349.26454252303387, 8.792765606297019, 0.01, 0.3),
(346.5378927049625, 9.254352580459903, 0.01, 0.3),
(343.7813886570711, 9.47653988672239, 0.01, 0.3),
(341.0160090395869, 9.45763654552347, 0.01, 0.3),
(338.2628000611807, 9.19778642270381, 0.01, 0.3),
(335.54271530461125, 8.698967134599862, 0.01, 0.3),
(332.87645625730835, 7.964974997189462, 0.01, 0.3),
(330.2843147605526, 7.001396133834704, 0.01, 0.3),
(327.7860185763131, 5.815563961510899, 0.01, 0.3),
(325.4005812470634, 4.416503379074083, 0.01, 0.3),
(323.14615739124594, 2.814862082332517, 0.01, 0.3),
(321.0299284546232, 1.0143417243829431, 0.01, 0.3),
(314.11976214722245, -4.864934802828888, 0.01, 0.3),
(306.7028244353466, -10.135652067949414, 0.01, 0.3),
(298.85473713264463, -14.739883947411444, 0.01, 0.3),
(290.6352289222948, -18.642589456501064, 0.01, 0.3),
(282.10685522485267, -21.814066649776066, 0.01, 0.3),
(273.30901017096807, -24.236017773481965, 0.01, 0.3),
(264.3849923407778, -25.872537448220584, 0.01, 0.3),
(255.3263772314462, -26.728643640042257, 0.01, 0.3),
(246.22761831645718, -26.791981761219567, 0.01, 0.3),
(237.1579626451347, -26.062069770405753, 0.01, 0.3),
(228.18643577354524, -24.54446273858382, 0.01, 0.3),
(219.38131643833776, -22.250710571622104, 0.01, 0.3),
(210.80961691432955, -19.198270108516617, 0.01, 0.3),
(202.536573010632, -15.41037226430554, 0.01, 0.3),
(194.62514758675272, -10.915845228778563, 0.01, 0.3),
(186.14162755395117, -6.094899907889024, 0.01, 0.3),
(177.2821254430628, -2.0371415278454306, 0.01, 0.3),
(168.10267946695998, 1.2330193696424128, 0.01, 0.3),
(158.6731507522219, 3.6906948855674844, 0.01, 0.3),
(149.0653037060727, 5.317180625268583, 0.01, 0.3),
(139.33920390740192, 6.101150420763675, 0.01, 0.3),
(135.5131070106529, 6.40955198601377, 0.01, 0.3),
(131.70247806012176, 7.053484943682052, 0.01, 0.3),
(127.96247215841305, 8.027085738556996, 0.01, 0.3),
(124.32155300871601, 9.322944680713377, 0.01, 0.3),
(120.80743020420694, 10.931199501140357, 0.01, 0.3),
(117.44684834131354, 12.839610409563678, 0.01, 0.3),
(114.26538347718721, 15.033653246640226, 0.01, 0.3),
(111.28724848046498, 17.496630021580557, 0.01, 0.3),
(108.53510875671898, 20.20979599394135, 0.01, 0.3),
(106.0299097510343, 23.152502332418518, 0.01, 0.3),
(103.79071754052472, 26.302353264921592, 0.01, 0.3),
(101.83457372997346, 29.63537652392223, 0.01, 0.3),
(100.17636575493074, 33.12620578988496, 0.01, 0.3),
(98.82871357933998, 36.74827374427548, 0.01, 0.3),
(97.80187364999202, 40.47401426289643, 0.01, 0.3),
(95.67513293182374, 48.19949440371859, 0.01, 0.3),
(92.88772905078484, 55.69794164352571, 0.01, 0.3),
(89.45739934441404, 62.924916772152535, 0.01, 0.3),
(85.4102506930664, 69.82541814510357, 0.01, 0.3),
(80.77708434171412, 76.3469287802936, 0.01, 0.3),
(75.59316148387303, 82.43981604399337, 0.01, 0.3),
(69.89793490262969, 88.05770938514006, 0.01, 0.3),
(63.734748711139126, 93.15785324323086, 0.01, 0.3),
(57.15050847775217, 97.70143244395462, 0.01, 0.3),
(50.19532424631765, 101.6538676061058, 0.01, 0.3),
(42.922129168492326, 104.98507831155459, 0.01, 0.3),
(35.38627665049318, 107.66971203539273, 0.01, 0.3),
(27.645119080254787, 109.68733709395552, 0.01, 0.3),
(19.757571341125047, 111.02259814227205, 0.01, 0.3),
(11.783662434030362, 111.66533303751196, 0.01, 0.3),
(3.784078620531738, 111.61065017902848, 0.01, 0.3),
(-4.180298436269403, 110.85896573639313, 0.01, 0.3),
(-12.048855017948636, 109.4160004820938, 0.01, 0.3),
(-19.761706657754804, 107.29273625300202, 0.01, 0.3),
(-27.260153897561942, 104.5053323719631, 0.01, 0.3),
(-34.48712902618873, 101.07500266559232, 0.01, 0.3),
(-41.38763039913971, 97.0278540142447, 0.01, 0.3),
(-47.90914103432978, 92.39468766289238, 0.01, 0.3),
(-54.01200437906845, 87.20227700078203, 0.01, 0.3),
(-56.93553627835339, 84.71489077773086, 0.01, 0.3),
(-60.08538721085646, 82.4756985672213, 0.01, 0.3),
(-63.4184104698571, 80.51955475667002, 0.01, 0.3),
(-66.90923973581984, 78.8613467816273, 0.01, 0.3),
(-70.53130769021034, 77.51369460603652, 0.01, 0.3),
(-74.2570482088313, 76.48685467668858, 0.01, 0.3),
(-78.05810615667237, 75.78864186546872, 0.01, 0.3),
(-81.90555318760892, 75.42436999346275, 0.01, 0.3),
(-85.77010790658349, 75.39681138956998, 0.01, 0.3),
(-89.62235871870277, 75.70617579140638, 0.01, 0.3),
(-93.43298766923388, 76.35010874907466, 0.01, 0.3),
(-97.19800711281145, 77.33140870407664, 0.01, 0.3),
(-102.45750617874337, 79.20509565975568, 0.01, 0.3),
]
road4.drivability = -1
road4.improved_spline = 0
road4.over_object = 1
road4.break_angle = 180
road4.render_priority = 9
# Adding Main Road in the sceanrio
scenario.add_road(road4)
# Adding Divider in the sceanrio
scenario.add_road(road3)
# Adding Left Boundary in the sceanrio
scenario.add_road(road2)
# Adding Rigt Boundary in the sceanrio
scenario.add_road(road)
scenario.make(bng)
bng.open()
bng.load_scenario(scenario)
bng.start_scenario()
config_trans = vehicle1.get_part_config()
config_trans['parts']['van_transmission'] = x[2]
set_config_trans = vehicle1.set_part_config(config_trans)
if (x[2] == 'van_transmission_5M'):
trans_mission = 'transmission2'
elif (x[2] == 'van_transmission_4A'):
trans_mission = 'transmission1'
else:
trans_mission = 'transmission3'
config_tire = vehicle1.get_part_config()
config_tire['parts']['tire_R_16x8_alt'] = x[1]
set_config_tire = vehicle1.set_part_config(config_tire)
if (x[1] == 'tire_R_225_75_16_heavy'):
tire_main = 'tire3'
elif (x[1] == 'tire_R_225_75_16_standard'):
tire_main = 'tire2'
else:
tire_main = 'tire1'
config_brake = vehicle1.get_part_config()
config_brake['parts']['brakepad_R'] = x[0]
set_config_brake = vehicle1.set_part_config(config_brake)
if (x[0] == 'brakepad_R'):
brake_main = 'brake1'
elif (x[0] == 'brakepad_R_race'):
brake_main = 'brake3'
else:
brake_main = 'brake2'
config = vehicle1.get_part_config()
print(config)
# Run one braking trial: drive vehicle1 toward vehicle2 at a fixed speed,
# brake once the gap closes, and log the pass/fail outcome to the dataset CSV.
vehicle1.ai_set_speed(car_speed, 'set')
# Map the AI speed setting to the test speed in km/h; any setting not in the
# table is treated as the 80 km/h case (same fallback as the original chain).
_speed_km_by_setting = {9: 30, 10: 35, 11: 40, 13: 45, 14: 50,
                        16: 55, 17: 60, 19: 65, 20: 70, 22: 75}
car_speed_km = _speed_km_by_setting.get(car_speed, 80)
vehicle1.ai_set_mode('span')
vehicle1.ai_drive_in_lane(True)


def _record_result(ispass):
    """Append one trial row (vehicle config, speed, pass/fail) to the CSV.

    Reads tire_main / brake_main / trans_mission / car_speed_km from the
    enclosing scope.  The header row is written only when the file is empty.
    Deduplicates four identical copies of this code in the original (the
    unused `v = random.randint(...)` locals are dropped).
    """
    # os.path.join replaces the original "path + '\\dataset...'" concatenation,
    # which only worked because '\d' happens not to be an escape sequence.
    file_name_csv = os.path.join(os.getcwd(), 'dataset_van_new.csv')
    with open(file_name_csv, 'a') as f:
        headers = ['Brake', 'Transmission', 'Tire', 'Speed', 'Car_model', 'Result']
        writer = csv.DictWriter(f, delimiter=',', lineterminator='\n',
                                fieldnames=headers)
        if os.stat(file_name_csv).st_size == 0:
            writer.writeheader()
        writer.writerow({'Tire': tire_main, 'Brake': brake_main,
                         'Transmission': trans_mission, 'Car_model': 'etk800',
                         'Speed': car_speed_km, 'Result': ispass})


positions = list()
directions = list()
distance = list()  # NOTE(review): never populated below -- kept for compatibility
for index in range(100000):
    time.sleep(0.1)
    vehicle1.update_vehicle()  # sync the vehicle's "state" with the simulator
    sensors = bng.poll_sensors(vehicle1)
    vehicle2.update_vehicle()
    sensors = bng.poll_sensors(vehicle2)
    ispass = -1
    # Collect position / direction samples for both vehicles.
    directions.append(vehicle1.state['dir'])
    direction_car = geometry.Point(vehicle1.state['dir'])
    newdirection_road = geometry.Point(vehicle1.state['dir'])
    positions.append(vehicle1.state['pos'])
    positions.append(vehicle2.state['pos'])
    vehiclepos = vehicle1.state['pos']
    vehiclepos2 = vehicle2.state['pos']
    newposition_Vehicle1 = geometry.Point(vehicle1.state['pos'])
    newposition_Vehicle2 = geometry.Point(vehicle2.state['pos'])
    vehicle_distance1 = newposition_Vehicle2.distance(newposition_Vehicle1)
    print('distance', vehicle_distance1)
    # Braking triggers once the gap falls below a speed-dependent threshold.
    # NOTE(review): the pass/fail check reuses the distance measured *before*
    # the 5 s stopping wait; the gap is not re-sampled after braking -- confirm
    # this is intended.
    if (vehicle_distance1 <= 20 and car_speed_km in (30, 35)) or \
            (vehicle_distance1 <= 25 and car_speed_km == 40):
        vehicle1.ai_set_mode('stopping')
        time.sleep(5)
        if vehicle_distance1 > 18:
            ispass = 1
            print('pass')
        else:
            ispass = 0
            print('fail')
        _record_result(ispass)
        break
    elif (vehicle_distance1 < 20 and car_speed_km == 45) or \
            (vehicle_distance1 <= 25 and car_speed_km in (50, 55)):
        vehicle1.ai_set_mode('stopping')
        print(vehicle_distance1)
        time.sleep(5)
        if vehicle_distance1 > 19:
            print(vehiclepos)
            print(vehiclepos2)
            ispass = 1
            print('pass')
        else:
            ispass = 0
            print('fail')
        _record_result(ispass)
        break
    elif vehicle_distance1 <= 20 and car_speed_km in (60, 65, 70, 75, 80):
        # Speeds 60-80 km/h share identical trigger and pass thresholds
        # (merges two byte-identical branches of the original).
        vehicle1.ai_set_mode('stopping')
        time.sleep(5)
        if vehicle_distance1 > 19:
            ispass = 1
            print('pass')
        else:
            ispass = 0
            print('fail')
        _record_result(ispass)
        break
bng.stop_scenario()
bng.close()
|
[
"83582835+Hamza-619@users.noreply.github.com"
] |
83582835+Hamza-619@users.noreply.github.com
|
58ac2cc66c7c428322c541ac9109cc3a674994d3
|
7c04d6750b29c565c32c3264c62640d97743cc8b
|
/sobre_mim/apps.py
|
3b6bc00ad677a46eed3932e10ac509d0c1438928
|
[] |
no_license
|
HugoOliveiraSoares/Projeto-Final
|
cd5b5f2ba4978550935925ebd55768b40a1b76ad
|
cfa389b7c25f8db4937ad0b862d7191012301f33
|
refs/heads/main
| 2023-06-02T22:56:45.554279
| 2021-06-26T12:21:35
| 2021-06-26T12:21:35
| 380,493,707
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 149
|
py
|
from django.apps import AppConfig
class SobreMimConfig(AppConfig):
    """Django application configuration for the sobre_mim app."""
    # Use 64-bit auto-generated primary keys by default (Django 3.2+).
    default_auto_field = 'django.db.models.BigAutoField'
    name = 'sobre_mim'
|
[
"hugoliveira.soares@gmail.com"
] |
hugoliveira.soares@gmail.com
|
316d273ebde5f167bd2abf97e30a40989cd0fb8b
|
aa5731d54eb63bc10bafcd2c6d25c7641afacbdb
|
/Vacation_API/api_keys.py
|
4fd6a6b267a39911eea1db7aee3dcbce1aa23217
|
[] |
no_license
|
Gerovrik/python-api-challenge
|
efff6774655a554feb51532819d35d09b0efe7e7
|
08a4a1c94defab5c4a87af5ce517e7cd42b313f2
|
refs/heads/main
| 2023-01-13T20:52:26.980820
| 2020-11-14T21:00:45
| 2020-11-14T21:00:45
| 312,696,535
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 81
|
py
|
# OpenWeatherMap API Key
# NOTE(review): "KEY" is a placeholder -- supply real keys locally and keep
# them out of version control.
weather_api_key = "KEY"
# Google API Key
g_key = "KEY"
|
[
"loganlippert@yahoo.com"
] |
loganlippert@yahoo.com
|
3e5f56f3436373330a569dad1bb1f6b35fe1cfe8
|
3d613577d8e5a0b8f128666047043ac672f975af
|
/market/admin.py
|
89dcecee2190326fd074e1a813638735e613a34f
|
[] |
no_license
|
danimaribeiro/bitcoin_market
|
8431773795114706bf482d3b7961ef7e527ead5e
|
6d9256059ed7f35a8a412cb78d3a71a7498d90f9
|
refs/heads/master
| 2016-09-05T12:13:17.741558
| 2014-02-18T13:03:29
| 2014-02-18T13:03:29
| 16,620,172
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,992
|
py
|
from django.contrib import admin
from django.contrib.admin import DateFieldListFilter
from django.contrib.auth.models import User
# Register your models here.
from market.models import Order, Trade, Market, MarketConfiguration, Settings
class TradeAdmin(admin.ModelAdmin):
    """Admin for Trade records: filterable list view, but read-only --
    trades can be neither created nor deleted through the admin."""
    fields = ['tid', 'date', 'amount', 'price', 'type', 'coin']
    list_display = ['tid', 'date', 'amount', 'price', 'type', 'coin']
    list_filter = ['type', ('date', DateFieldListFilter), 'coin']
    search_fields = ['date']
    def has_add_permission(self, request):
        # Disable the "Add" button for everyone.
        return False
    def has_delete_permission(self, request, obj=None):
        # Disable deletion for everyone.
        return False
class MarketConfigurationAdmin(admin.ModelAdmin):
    """Admin for market API credentials; each saved row is stamped with the
    user who saved it (belongs_to)."""
    fields = ['market', 'access_key', 'access_sign']
    list_display = ['market', 'access_key', 'access_sign', 'belongs_to']
    def save_model(self, request, obj, form, change):
        """Attach the logged-in user to the configuration before saving."""
        instance = form.save(commit=False)
        instance.belongs_to = request.user
        instance.save()
        form.save_m2m()
        return instance
class OrderAdmin(admin.ModelAdmin):
    """Admin for Orders: status/sincronized are always read-only, and the
    core fields become read-only once the order exists."""
    fields = [ 'price', 'amount', 'type','market', 'status', 'sincronized']
    readonly_fields = ['status', 'sincronized']
    list_display = ['tid','price', 'amount', 'type','market', 'status', 'sincronized', 'belongs_to']
    def get_readonly_fields(self, request, obj=None):
        """Lock market/price/amount/type after the order has been created."""
        if obj: # editing an existing object
            return self.readonly_fields + ['market','price','amount', 'type']
        return self.readonly_fields
    def save_model(self, request, obj, form, change):
        """Attach the logged-in user to the order before saving."""
        instance = form.save(commit=False)
        instance.belongs_to = request.user
        instance.save()
        form.save_m2m()
        return instance
# Register the market models with the Django admin site; models without a
# custom ModelAdmin get the default one.
admin.site.register(Order, OrderAdmin)
admin.site.register(Market)
admin.site.register(MarketConfiguration, MarketConfigurationAdmin)
admin.site.register(Settings)
admin.site.register(Trade, TradeAdmin)
|
[
"danimaribeiro@gmail.com"
] |
danimaribeiro@gmail.com
|
5f2fa6119c1919877396fc026db69a34216f1723
|
2fceba311199d692f2283357b80be1a487337afc
|
/settings.py
|
cf6253bc97ed508cf6e7038afaf42c0566353128
|
[] |
no_license
|
jamesdoc/VAM-InstaWall
|
8aea152a07fd5a5fd64d851a2186443be8492071
|
409418fb9261255402358764b3d5f096b824fce1
|
refs/heads/master
| 2021-01-16T19:04:30.383200
| 2014-06-10T11:43:25
| 2014-06-10T11:43:25
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 324
|
py
|
import jinja2
import os
import webapp2
from api import GetImages
from datetime import datetime, timedelta
from importer import ImportHandler, TruncateData
# Shared Jinja2 environment: loads templates from this file's directory and
# auto-escapes rendered variables.
JINJA_ENVIRONMENT = jinja2.Environment(
    loader=jinja2.FileSystemLoader(os.path.dirname(__file__)),
    extensions=['jinja2.ext.autoescape'],
    autoescape=True)
|
[
"jharley@vam.ac.uk"
] |
jharley@vam.ac.uk
|
99944c8259914de26a928eab92ca29e7fefb7a70
|
5a38d56fbbbb9c10f4182501c617e01de7d565c0
|
/PY/0068_FCTRL.py
|
f6261e654da4c5f4a82a118fd565ee0ca62c2c50
|
[] |
no_license
|
tanmaysahay94/Algorithm
|
3551a84723bbcc76eb3cc776a9ba3ba256941123
|
142b1e471a467cd2124300f638fb8817d20b7959
|
refs/heads/master
| 2021-07-05T22:11:36.027616
| 2021-06-12T20:08:37
| 2021-06-12T20:08:37
| 20,177,690
| 0
| 3
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 227
|
py
|
def fact(number):
    """Print number! (factorial); prints 1 for inputs below 2.

    Fix: the original used the Python-2-only statement `print product`;
    the parenthesised form below behaves identically under both
    Python 2 and Python 3.
    """
    product = 1
    while number >= 2:
        product = product * number
        number = number - 1
    print(product)
def testcases(n):
    """Read `n` integers from stdin and print the factorial of each.

    Fix: the original read numbers with bare input(), which under
    Python 2 *evaluates* the line (arbitrary code execution) and under
    Python 3 returns a string that breaks the arithmetic in fact().
    int(input()) behaves the same on both.
    """
    for _ in range(int(n)):
        num = int(input())
        fact(num)


# Entry point: the first stdin line is the number of test cases.
NumberOfTestCases = int(input())
testcases(NumberOfTestCases)
|
[
"tanmaysahay94@gmail.com"
] |
tanmaysahay94@gmail.com
|
5edce39bcacec358488692bc2bf1f0052c33f265
|
803edc03e99a24b08beed6363c0a16b89e16c6b8
|
/classes.py
|
e1b6f9f512758f3f22192f8c4d6193569c141fc9
|
[] |
no_license
|
Arayondo/Konter-a-Matt
|
5f881fa860120c33fc045249f2ce8ec37e139502
|
852cf61fb1bed78920c2e9def85945f33b8a18bf
|
refs/heads/master
| 2021-08-31T18:40:05.858133
| 2017-12-22T11:35:47
| 2017-12-22T11:35:47
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 922
|
py
|
from random import randint, shuffle, choice
import json, pprint
import sys, time
from localisation import *
class Player:
    """One card player: a hand, collected tricks ("streech") and a score."""

    def __init__(self, name, team, place):
        self.name = name
        self.hand = {}                # card id -> card data
        self.streech = []             # tricks collected so far
        self.score = 0
        self.last_played_card = ""
        self.team = team
        self.place = place            # seat position at the table

    def __repr__(self):
        return self.name

    def debug_all(self):
        """Return every field as one tuple (debugging aid)."""
        return (self.name, self.hand, self.streech, self.score,
                self.last_played_card, self.team, self.place)

    def choose_card(self, stack, trump):
        """Pick the card to play: currently a uniformly random card from hand.

        `stack['first_played']` and `trump` are accepted for future
        rule-aware strategies but do not influence the choice yet.
        Fix: removed the four dead Deck_* lists the original built from
        star-imported card constants -- they were never used.
        """
        first_played = stack['first_played']  # kept: validates the stack shape
        return choice(list(self.hand.keys()))

    def make_trump(self):
        """Pick a trump suit at random from 'k', 'r', 's', 'h'."""
        return ["k", "r", "s", "h"][randint(0, 3)]
class NPC(Player):
    """A computer-controlled player; currently behaves exactly like Player."""

    def __init__(self, name, team, place):
        # Delegate all state initialisation to the Player base class.
        super(NPC, self).__init__(name, team, place)

    def __repr__(self):
        return self.name
|
[
"jeff.uni@hotmail.com"
] |
jeff.uni@hotmail.com
|
5a62770637bd4d85b60e99ec4625656f1713c7f4
|
7c233249e8cd5243582ba0833254af38636b7cdf
|
/conversion_descriptors.py
|
14997d8965cddf8b3845be3260494eea46e45692
|
[
"MIT"
] |
permissive
|
project-schumann/vmf-batch
|
28444e77c22ef0f63cdf839fd8afbc38895512e5
|
e32983af33ccc8b5b01d249fafdc3bc356c8ba20
|
refs/heads/master
| 2021-01-17T15:25:06.040033
| 2016-05-05T02:47:12
| 2016-05-05T02:47:12
| 35,854,086
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,636
|
py
|
import os
from invalid_format_error import InvalidFormatError
class ConversionDescriptor:
    """Describes the parameters necessary for a conversion job."""

    def __init__(self, src, target, fmt):
        """Initialize the descriptor.

        :param src: The absolute path of the source directory.
        :param target: The absolute path of the target directory.
        :param fmt: The format to convert to.
        :raises InvalidFormatError: when ``fmt`` is not a supported format.
        """
        # Reject unsupported formats up front so jobs fail fast.
        if not self.__validate_format(fmt):
            raise InvalidFormatError()
        self.source = src
        self.target = target
        self.format = fmt

    @staticmethod
    def __validate_format(format):
        """Return True when the (case-insensitive) format is supported.

        :param format: The user selected format.
        """
        return format.lower() in ('vmf', 'midi', 'xml')
class ConversionTask:
    """Describes a single conversion task (one file)."""

    def __init__(self, conversion_descriptor, file_name):
        """Initialize the task.

        :param conversion_descriptor: Provides source/target directories and
            the output format.
        :param file_name: The name of the file converted in this task.
        """
        self.source = os.path.join(conversion_descriptor.source, file_name)
        self.format = conversion_descriptor.format
        # Swap the file extension for the target format.  Fix: the original
        # called str.replace() on the whole target path, which corrupted any
        # path containing the extension string elsewhere (e.g. 'midtown.mid')
        # and, for extension-less files, replaced the empty string at *every*
        # position.  Rebuilding from splitext is exact.
        root, _ext = os.path.splitext(
            os.path.join(conversion_descriptor.target, file_name))
        self.target = root + '.' + conversion_descriptor.format
|
[
"patrick.ayoup@gmail.com"
] |
patrick.ayoup@gmail.com
|
829a1f1936da717ca8075164a789a7ca1a904583
|
08871111acfec5049c3d4b48b400f84146a29b06
|
/littlelambocoin/util/service_groups.py
|
487e26f049a38819e7d42d5f3980cb97c6f0ddfe
|
[
"Apache-2.0"
] |
permissive
|
AndreAndris/littlelambocoin
|
8d7705b64c018b503bea2c64cec4e15fc4a438ef
|
ffbf98d5d43ae248586aadbb68316c6ed43da7cb
|
refs/heads/main
| 2023-06-29T15:33:33.167616
| 2021-08-05T06:46:16
| 2021-08-05T06:46:16
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,476
|
py
|
from typing import KeysView, Generator
# Maps each launchable group name to the concrete service processes it starts.
SERVICES_FOR_GROUP = {
    "all": "littlelambocoin_harvester littlelambocoin_timelord_launcher littlelambocoin_timelord littlelambocoin_farmer littlelambocoin_full_node littlelambocoin_wallet".split(),
    "node": "littlelambocoin_full_node".split(),
    "harvester": "littlelambocoin_harvester".split(),
    "farmer": "littlelambocoin_harvester littlelambocoin_farmer littlelambocoin_full_node littlelambocoin_wallet".split(),
    "farmer-no-wallet": "littlelambocoin_harvester littlelambocoin_farmer littlelambocoin_full_node".split(),
    "farmer-only": "littlelambocoin_farmer".split(),
    "timelord": "littlelambocoin_timelord_launcher littlelambocoin_timelord littlelambocoin_full_node".split(),
    "timelord-only": "littlelambocoin_timelord".split(),
    "timelord-launcher-only": "littlelambocoin_timelord_launcher".split(),
    "wallet": "littlelambocoin_wallet littlelambocoin_full_node".split(),
    "wallet-only": "littlelambocoin_wallet".split(),
    "introducer": "littlelambocoin_introducer".split(),
    "simulator": "littlelambocoin_full_node_simulator".split(),
}


def all_groups() -> KeysView[str]:
    """All known service-group names."""
    return SERVICES_FOR_GROUP.keys()


def services_for_groups(groups) -> Generator[str, None, None]:
    """Yield every service belonging to each of the given groups, in order."""
    for group_name in groups:
        yield from SERVICES_FOR_GROUP[group_name]


def validate_service(service: str) -> bool:
    """Return True when `service` appears in at least one group."""
    for members in SERVICES_FOR_GROUP.values():
        if service in members:
            return True
    return False
|
[
"kevin.vercauteren@gmail.com"
] |
kevin.vercauteren@gmail.com
|
fb95b5674453874d8218e5070d12976ce7cde15a
|
c369443df5ff98eccc0eee7f63bb8947f2943605
|
/shop/admin.py
|
950e8733d62ba6d0df356ee67145a865be1b988e
|
[] |
no_license
|
erllan/shop-test
|
d2934f484b25d141a60caa5aca31a61eec48f055
|
1f77de177192ce6a1f8c5ccf1d7ca93ec026acf5
|
refs/heads/master
| 2023-03-06T01:04:38.785383
| 2021-02-27T18:02:07
| 2021-02-27T18:02:07
| 341,929,117
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 141
|
py
|
from django.contrib import admin
from .models import *
# Expose the shop models in the Django admin with the default ModelAdmin.
admin.site.register(User)
admin.site.register(Product)
admin.site.register(Category)
|
[
"erlan.kubanychbekov.000@gmail.com"
] |
erlan.kubanychbekov.000@gmail.com
|
aac39d593b31aa6a2b5030823f4dd1f6ef94f236
|
6bb9be84ee8113c5155e701d4428c0bb5bdfd439
|
/accounts_app/apps.py
|
5d66d627768a69fc4a6c1554c55c56cace5ba9d1
|
[
"MIT"
] |
permissive
|
sami-mai/Avegang
|
9779a5994f76f1d44a6e7dfcc5a97b1a48758903
|
056605db9b8d22bfca31b819663921af3c94772b
|
refs/heads/master
| 2022-12-22T03:13:50.098422
| 2018-06-02T05:50:05
| 2018-06-02T05:50:05
| 135,448,352
| 0
| 1
|
MIT
| 2022-12-08T02:12:02
| 2018-05-30T13:40:23
|
HTML
|
UTF-8
|
Python
| false
| false
| 98
|
py
|
from django.apps import AppConfig
class AccountsAppConfig(AppConfig):
    """Django application configuration for the accounts_app app."""
    name = 'accounts_app'
|
[
"samirah.maison@gmail"
] |
samirah.maison@gmail
|
6af8c36a375080c4de74fe3025d78330044af8ab
|
0d58b37b854e4a96d0ef2bdc154ac5dcbe94644d
|
/app_integrated.py
|
1755b41e364e7ac8359fcdc90fd45edbafa79d67
|
[] |
no_license
|
joelr1894/Meal_Magic
|
93f2e2e2bf324ac47084fa0bf89abe3b63166bfd
|
88cfd15c2bf2bc947aa89ae5cac95baf38cce18d
|
refs/heads/master
| 2021-01-10T09:56:34.037513
| 2016-04-07T02:54:01
| 2016-04-07T02:54:01
| 54,327,732
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 11,278
|
py
|
from flask import Flask, render_template, request, json
from subprocess import call
import csv
from datetime import date, datetime, timedelta
import operator
import sys
import mysql.connector
app = Flask(__name__)
# Reads in the csv file and returns a list of lists of the data where the
# inner list is the row
def import_function():
    """Build a CSV-shaped ratings table from the MySQL database.

    Returns a list of rows: row 0 is ["", rest_name_1, ...]; each following
    row is [user_name, rating_for_rest_1, ...] where 0 means "not rated".
    Assumes user and restaurant ids are contiguous and 0-based -- TODO
    confirm against the schema.

    NOTE(review): database credentials are hard-coded below -- move them to
    configuration/environment before sharing this code.
    """
    print "Starting connection"
    cnx = mysql.connector.connect(user='joelr', password='seniordesign', host='ec2-54-201-28-29.us-west-2.compute.amazonaws.com', database='Meal_Magic')
    cursor = cnx.cursor()
    print "Connected"
    user_query = ("SELECT * from Users")
    cursor.execute(user_query)
    user_dict = dict()
    # user id -> user name
    for (user_id, user_name) in cursor:
        user_dict[user_id] = user_name
    rest_query = ("SELECT * from Restaurants")
    cursor.execute(rest_query)
    rest_dict = dict()
    # restaurant id -> restaurant name
    for (rest_id, rest_name) in cursor:
        rest_dict[rest_id] = rest_name
    # Header row: empty corner cell, then one column per restaurant.
    rest_list = [0]*(len(rest_dict.keys())+1)
    rest_list[0] = ""
    for i in rest_dict.keys():
        rest_list[i+1] = str(rest_dict[i])
    rating_csv = [rest_list]
    rate_query = ("SELECT * from ratings")
    cursor.execute(rate_query)
    rate_dict = dict()
    for i in range(len(user_dict)):
        rate_dict[i] = []
    # user id -> list of (restaurant id, rating)
    for (rate_u_id, rate_rest_id, rating) in cursor:
        rate_dict[rate_u_id].append((rate_rest_id, rating))
    # One row per user; unrated restaurants stay 0.
    for user in user_dict.keys():
        user_rating_list = [0]*(len(rest_dict.keys())+1)
        user_rating_list[0] = str(user_dict[user])
        for rating_pair in rate_dict[user]:
            user_rating_list[rating_pair[0]+1] = rating_pair[1]
        rating_csv.append(user_rating_list)
    return rating_csv
def readCSVFile(filename):
    """Return the ratings table.

    Historically this parsed `filename` as a CSV file; the data now comes
    from the database via import_function(), so `filename` is accepted for
    backward compatibility but ignored.  (Removed the dead commented-out
    CSV-reading code from the original.)
    """
    return import_function()
def input_function():
    """Interactively ask for a user name and recommendation count.

    Returns [user, num_recommendations, num_nearest_neighbors,
    min_visits_from_neighbors, min_restaurants_visited_for_nn] with the
    last three fixed at 20 / 4 / 10 (same tuning as main()).
    """
    print ""
    user = raw_input("What is your name? ")
    recs = int(raw_input('How many recommendations do you want? '))
    inputs = [user, recs, 20, 4, 10]
    return inputs
# Counts the number and names of restaurants that the given user has not
# visited (where a 0 in the rating siginifies not visiting)
def count_not_visited(l, restaurants):
    """Find the restaurants the user has not visited (rating 0 in `l`).

    Returns (count, matching restaurant names, matching indices).
    """
    unvisited_idx = [i for i, rating in enumerate(l) if int(rating) == 0]
    unvisited_names = [restaurants[i] for i in unvisited_idx]
    return len(unvisited_idx), unvisited_names, unvisited_idx
# Given the ratings of two users, distance finds the average distance in score
# between the two users based on the restaurants that they've both visited
# and rated
def distance(user1, user2):
    """Mean absolute rating difference over restaurants both users rated.

    A rating of 0 means "not visited"; when the users share no rated
    restaurant, the sentinel 100.0 is returned (maximally distant).
    """
    total = 0
    shared = 0
    for r1, r2 in zip(user1, user2):
        if r1 != 0 and r2 != 0:
            shared += 1
            total += abs(r1 - r2)
    if shared == 0:
        return float(100)
    return float(total) / shared
def nearest_neighbors(user1,critics,inputs):
    """Pick the inputs[2] closest users to `user1` by average rating distance.

    critics[u][3] maps every other user to their distance from u;
    critics[u][1] is the count of restaurants u has NOT visited.  A candidate
    qualifies only when it has visited more than inputs[4] restaurants.

    NOTE(review): if fewer than inputs[2] users qualify, this while loop
    never terminates -- confirm the data always provides enough neighbors.
    """
    neighbors = []
    temp_avg_distance = dict(critics[user1][3])
    while len(neighbors) < inputs[2]:
        temp_min = min(temp_avg_distance, key=temp_avg_distance.get)
        # (total restaurants) - (not visited) = number actually visited
        if (len(critics[user1][0]) - critics[temp_min][1]) > inputs[4]:
            neighbors.append(temp_min)
        # Mark this candidate as maximally distant so it is not picked again.
        temp_avg_distance[temp_min] = float(100)
    return neighbors
def neighbor_array(critics, neighbors):
    """Map each neighbor name to its ratings list (critics[name][0])."""
    return {neighbor: critics[neighbor][0] for neighbor in neighbors}
def nn_restaurant_ratings(nn_array):
    """Per-restaurant average rating and visit count across the neighbors.

    `nn_array` maps neighbor name -> ratings list (0 = not visited).
    Returns (avg_ratings, num_visited), both lists indexed by restaurant;
    restaurants nobody visited keep an average of 0.

    Fixes the original, whose redundant outer loop over the neighbors
    re-accumulated sums on top of already-averaged values (corrupting the
    result whenever there was more than one neighbor) and appended spurious
    trailing zeros to both returned lists.
    """
    if not nn_array:
        return [], []
    num_restaurants = len(next(iter(nn_array.values())))
    neighbor_ratings = [0] * num_restaurants
    num_visited = [0] * num_restaurants
    for r in range(num_restaurants):
        for ratings in nn_array.values():
            if ratings[r] != 0:
                num_visited[r] += 1
                neighbor_ratings[r] += ratings[r]
        if num_visited[r] != 0:
            neighbor_ratings[r] = float(neighbor_ratings[r]) / num_visited[r]
    return neighbor_ratings, num_visited
# Gives recommendation after removing restaurants with too few visits and
# restaurants the user has already visited
def recommend(neighbors, inputs, not_visited):
    """Pick the inputs[1] best-rated restaurants the user has not visited.

    neighbors[2] holds the neighbor-average rating per restaurant and
    neighbors[3] the neighbor visit count; restaurants visited by fewer
    than inputs[3] neighbors are disqualified (rating forced to 0).
    Returns (recommended restaurant indices, the working ratings dict with
    chosen entries zeroed) -- the same shape as the original.

    Fixes the original, whose disqualification loop inserted *every*
    under-visited restaurant into the candidate dict -- including ones the
    user had already visited -- so they could surface as 0-rated fillers.
    """
    # Candidates are exactly the unvisited restaurants.
    temp_ratings = {r: neighbors[2][r] for r in not_visited}
    # Disqualify candidates with too few neighbor visits.
    for r in temp_ratings:
        if neighbors[3][r] < inputs[3]:
            temp_ratings[r] = 0
    recommendations = []
    for _ in range(inputs[1]):
        best = max(temp_ratings, key=temp_ratings.get)
        recommendations.append(best)
        temp_ratings[best] = 0  # do not pick the same restaurant twice
    return recommendations, temp_ratings
def print_out(recommendation, neighbors, output, inputs):
    """Pretty-print one user's recommendations and nearest neighbors.

    inputs[0] is the user name and inputs[1] the number of recommendations;
    output[user][i] is a restaurant name and the rating is looked up via the
    recommended restaurant index.
    """
    print ''
    print inputs[0] + ', your restaurant recommendations are:'
    for i in range(inputs[1]):
        print output[inputs[0]][i] + ' with a rating of ' + str(round(neighbors[inputs[0]][2][recommendation[inputs[0]][0][i]],1))
        # NOTE(review): the increment below has no effect -- `i` is
        # reassigned by the for loop on each iteration.
        i = i + 1
    print ''
    print inputs[0] + ', your nearest neighbors are:'
    print neighbors[inputs[0]][0]
    return
def output_json(recommendation, neighbors, output, inputs):
    """Print the user's recommendations as (name, rating) pairs, best first.

    Despite the name this does not emit JSON: it prints a rating-sorted list
    of tuples and a notice when fewer than inputs[1] recommendations exist.
    """
    d_out = {}
    for i in range(inputs[1]):
        try:
            d_out[output[inputs[0]][i]] = round(neighbors[inputs[0]][2][recommendation[inputs[0]][0][i]],1)
        except:
            # Unknown user or missing recommendation index.
            # NOTE(review): bare except also hides unrelated errors.
            print "Name not in database."
            return
    # sort dictionary by values
    sorted_x = sorted(d_out.items(), key=operator.itemgetter(1))
    sorted_x.reverse()
    # put in descending order
    print sorted_x
    if len(sorted_x) < inputs[1]:
        print "No more recommendations."
def main(_name, _inpNum):
    """Compute and print restaurant recommendations for one user.

    :param _name: user name as it appears in the ratings data.
    :param _inpNum: number of recommendations to produce.

    Fix: the original did `inp_vars = []` followed by `inp_vars[0] = _name`,
    which raises IndexError on every call (you cannot index-assign into an
    empty list).  The list indirection -- and the usage check that could no
    longer fire -- are removed.
    """
    # Tuning constants (see input_function): 20 nearest neighbors, at least
    # 4 neighbor visits per recommended restaurant, at least 10 restaurants
    # visited for a user to qualify as a neighbor.
    user = _name
    recs = int(_inpNum)
    inputs = [user, recs, 20, 4, 10]
    # Import the data
    meal_ratings = readCSVFile("MealMagic_data3.csv")
    # map of users to their information
    critics = dict()
    # map of users to the restaurant numbers not visited
    not_visited = dict()
    # maps of users to their neighbor information
    neighbors = dict()
    nn = dict()
    nn_array = dict()
    rr_temp = dict()
    # map of users to their recommendations
    recommendation = dict()
    # output of restaurant names
    output = dict()
    # All of the restaurant names (header row minus the corner cell)
    restaurants = meal_ratings[0][1:]
    # For each user: find unvisited restaurants and cast ratings to ints.
    for i in range(1, len(meal_ratings)):
        counts = count_not_visited(meal_ratings[i][1:], restaurants)
        cast_ints = []
        for j in range(1, len(meal_ratings[i])):
            cast_ints.append(int(meal_ratings[i][j]))
        # (ratings, # rests not visited, names of restaurants not visited)
        critics[meal_ratings[i][0]] = (cast_ints, counts[0], counts[1])
        not_visited[meal_ratings[i][0]] = (counts[2])
    # For each user, find their distance to all other users, their nearest
    # neighbors, and finally their recommendations.
    for k in critics.keys():
        distances = dict()
        counter = dict()
        for k2 in critics.keys():
            if k2 != k:
                distances[k2] = distance(critics[k][0], critics[k2][0])
        # Extend the critics entry with the distance map (and the -- still
        # unused -- common-restaurant counter, kept for compatibility).
        critics[k] = critics[k][0], critics[k][1], critics[k][2], distances, counter
        nn[k] = nearest_neighbors(k, critics, inputs)
        nn_array[k] = neighbor_array(critics, nn[k])
        rr_temp[k] = nn_restaurant_ratings(nn_array[k])
        # (neighbor names, their ratings, avg rating per restaurant,
        #  visit count per restaurant)
        neighbors[k] = (nn[k], nn_array[k], rr_temp[k][0], rr_temp[k][1])
        recommendation[k] = recommend(neighbors[k], inputs, not_visited[k])
        output[k] = []
        for i in recommendation[k][0]:
            output[k].append(restaurants[i])
    output_json(recommendation, neighbors, output, inputs)
if __name__ == '__main__':
main(sys.argv[1:])
#flask app code starts here w/ website
#flask app code starts here w/ website
@app.route('/')
def Hello():
    """Serve the landing page."""
    return render_template('homepage.html')
@app.route('/recommend_pg')
def recommend_pg():
    """Serve the recommendations page."""
    return render_template('Recommendations_page.html')
# request code
# request code
@app.route('/recommendation_gen',methods=['POST'])
def recommendation_gen():
    """Handle the recommendation-form POST and return a JSON-wrapped
    HTML snippet echoing the submitted values.

    Fixes two crashes in the original: it printed the undefined name
    `final_rec` (NameError on every request) and concatenated the int
    `_inpNum` directly into a str (TypeError).
    """
    # read the posted values from the UI
    _name = request.form['inputName']
    _inpNum = int(request.form['inputNum'])
    # validate the received values
    if _name and _inpNum:
        # NOTE(review): `_name` is untrusted input echoed into HTML --
        # escape it (e.g. markupsafe.escape) before production use.
        # TODO: wire in the real recommendations via main(_name, _inpNum).
        return json.dumps({'html': '<span>' + _name + str(_inpNum) + '</span>'})
@app.route('/initialSurvey')
def initialSurvey():
    """Serve the first-visit survey page."""
    return render_template('InitialSurvey.html')
@app.route('/home')
def home():
    """Serve the home page (same template as '/')."""
    return render_template('homepage.html')
@app.route('/aboutUs')
def aboutUs():
    """Serve the About Us page."""
    return render_template('AboutUs_Success.html')
# Start the Flask development server when run directly.
if __name__ == "__main__":
    app.run()
|
[
"joelre@hntvlan569.1345.wlan.wireless-resnet.upenn.edu"
] |
joelre@hntvlan569.1345.wlan.wireless-resnet.upenn.edu
|
609cb68a2b49ae41f10b6294d9026fb5e053c66e
|
8574b4354c866f28db53eaa1d0ff2cc72dde5363
|
/utilities/bot_utilities.py
|
6f6f1801de07f5c2f9065c8a566236b0aa8df254
|
[
"Apache-2.0"
] |
permissive
|
DudeBro249/TaskTrackerBot
|
a0b1042dfa62cee7d7df62feb20b976761bb90f9
|
b4ac677b2ace681a4d1037eba8dfb12281c4ff1b
|
refs/heads/master
| 2023-04-14T03:12:41.221695
| 2021-04-15T18:44:03
| 2021-04-15T18:44:03
| 314,738,868
| 0
| 1
|
Apache-2.0
| 2021-04-15T17:41:56
| 2020-11-21T05:16:24
|
Python
|
UTF-8
|
Python
| false
| false
| 2,333
|
py
|
from typing import List
from .general_utilities import remove_all
import discord
def get_role_from_mention(role_mention: str, roles: List[discord.Role]) -> discord.Role:
role_id = remove_all(role_mention, ['<', '>', '!', '@', '&'])
return get_role_by_attribute('id', role_id, roles)
def get_role_by_attribute(attribute_name, role_attribute, roles: List[discord.Role]) -> discord.Role:
for role in roles:
if str(getattr(role, attribute_name)) == role_attribute:
return role
return None
async def manage_voice_channel_roles(member: discord.Member, before: discord.VoiceState, after: discord.VoiceState) -> None:
if before.channel == None and after.channel != None:
guild: discord.Guild = after.channel.guild
role = get_role_by_attribute('name', after.channel.name, roles=guild.roles)
if role == None:
role = await guild.create_role(name=after.channel.name)
await member.add_roles(role)
elif before.channel != None and after.channel == None:
guild: discord.Guild = before.channel.guild
role = get_role_by_attribute('name', before.channel.name, roles=guild.roles)
await member.remove_roles(role)
elif before.channel != None and after.channel != None and before.channel != after.channel:
guild: discord.Guild = before.channel.guild
before_role = get_role_by_attribute('name', before.channel.name, roles=guild.roles)
after_role = get_role_by_attribute('name', after.channel.name, roles=guild.roles)
if before_role == None:
before_role = await guild.create_role(name=before.channel.name)
if after_role == None:
after_role = await guild.create_role(name=after.channel.name)
await member.remove_roles(before_role)
await member.add_roles(after_role)
async def input_role_from_user(ctx, bot, error_message: str) -> discord.Role:
def check(message: discord.Message) -> bool:
if message.author != ctx.author:
return False
return True
await ctx.send('What is the name of the role?')
role_name = (await bot.wait_for('message', check=check)).content
role = get_role_by_attribute('name', role_name, ctx.guild.roles)
if role == None:
await ctx.send(error_message)
return role
|
[
"yoayush@gmail.com"
] |
yoayush@gmail.com
|
000d286acb1a77838969e04d51a9688d23dd459b
|
4bc19eab7c85fe84bf4045b02f88bf363d6df053
|
/engine.py
|
212b433bfbe73cf9746a1db6cb3ce59a30b4dc1e
|
[] |
no_license
|
bronm98/GroupProject
|
8f3de5d56e47bbb2f4d31ce96f6c43fa74d36b2f
|
454a0f27b2b21102a9c55ae93ea94b0d497a99e4
|
refs/heads/master
| 2021-01-11T01:10:33.311692
| 2016-10-17T11:21:58
| 2016-10-17T11:21:58
| 71,072,643
| 0
| 0
| null | 2016-10-16T19:35:48
| 2016-10-16T19:35:47
|
Python
|
UTF-8
|
Python
| false
| false
| 5,391
|
py
|
#!/usr/bin/python3
from map import rooms
from player import *
from items import *
from removeing import *
import random
import time
def list_of_items(items):
    """Join the names of the given item dicts into 'a, b, c' (empty -> '')."""
    return ", ".join(item["name"] for item in items)
def print_room_items(room):
    """Print a 'There is ... here.' line when the room holds any items."""
    names = list_of_items(room["items"])
    if names != "":
        print("There is " + names + " here.")
        print()
def print_inventory_items(items):
    """Print the player's inventory as 'You have a, b.'.

    Fix: the original ignored its `items` parameter and printed the module
    global `inventory` instead; every call site passes the inventory anyway,
    so behaviour is unchanged for them.
    """
    print("You have", list_of_items(items) + ".")
    print()
def print_room(room):
    """Print the room header: upper-cased name, description, and any items."""
    print()
    print(room["name"].upper())
    print()
    # Display room description
    print(room["description"])
    print()
    print_room_items(room)
def exit_leads_to(exits, direction):
    """Name of the room reached by taking `direction` from `exits`."""
    destination_id = exits[direction]
    return rooms[destination_id]["name"]
def print_exit(direction, leads_to):
    """Print one menu line of the form 'GO NORTH to Kitchen.'."""
    print("GO " + direction.upper() + " to " + leads_to + ".")
def print_menu(exits, room_items, inv_items):
    """Print every action available to the player.

    Lists the exits, the room items that can be TAKEn, the inventory items
    that can be DROPped and -- when the current room's "up" flag equals the
    string 'yes' -- the NEXT FLOOR option.
    """
    global current_room
    print("You can:")
    for direction in exits:
        print_exit(direction, exit_leads_to(exits, direction))
    for i in room_items:
        print("TAKE " + (i["id"]).upper() +" to take a "+(i["name"])+".")
    for i in inv_items:
        print("DROP " + (i["id"]).upper() +" to drop your "+(i["name"])+".")
    if current_room["up"] == 'yes':
        print("NEXT FLOOR to go to the next floor.")
    print("What do you want to do?")
def is_valid_exit(exits, chosen_exit):
    """Return True when `chosen_exit` is one of the keys of `exits`."""
    if chosen_exit in exits:
        return True
    return False
def execute_go(direction):
    """Move the player through `direction` if the current room has that exit.

    Replaces the module-level current_room wholesale; invalid directions are
    silently ignored.
    """
    global current_room
    if is_valid_exit(current_room["exits"] , direction):
        current_room = move(current_room["exits"] , direction)
def execute_take(item_id):
    """Transfer the item with `item_id` from the current room to inventory."""
    for candidate in current_room["items"]:
        if candidate["id"] == item_id:
            inventory.append(candidate)
            current_room["items"].remove(candidate)
            return
    print("you cannot take that")
def execute_drop(item_id):
    """Transfer the item with `item_id` from inventory to the current room."""
    for held in inventory:
        if held["id"] == item_id:
            inventory.remove(held)
            current_room["items"].append(held)
            return
    print("you cannot drop that")
def execute_kill(mob):
    """Placeholder kill command.

    NOTE(review): the argument is immediately overwritten with 0 and printed,
    so this currently just prints 0 -- presumably unfinished; confirm intent.
    """
    mob = 0
    print(mob)
def execute_nextfloor(floor=None):
    """Regenerate the map for the next floor.

    Draws a fresh random name (without replacement) for every room, then a
    random "up" flag per room; the value 'yes' marks the room with the
    stairs (see print_menu).

    :param floor: accepted and ignored.  Fix: execute_command() passes the
        word following NEXT, but the original signature took no argument,
        so the command always crashed with TypeError.  The default keeps
        zero-argument callers working.
    """
    names = ['Big_room',"Small_room","Not_a_room","Nice_room","Better_room","Room_of_rooms","The_room","Final_room","Tahano_room"]
    floorup = ["f","a","s","d","yes","g","h","i","j"]
    print("------------------------------------------------------------------------------")
    print("          You have sucesfully entered to the next floor!")
    print("------------------------------------------------------------------------------")
    for key in rooms:
        # Draw a name without replacement so each room gets a unique one.
        name = random.choice(names)
        rooms[key]["name"] = name
        names.remove(name)
    for key in rooms:
        up = random.choice(floorup)
        rooms[key]["up"] = up
        floorup.remove(up)
def execute_command(command):
    """Dispatch a normalised command list such as ['go', 'north'].

    Supported verbs: go, take, drop, kill, next (floor).  Verbs missing
    their argument produce a prompt; unknown verbs a generic message.
    """
    if 0 == len(command):
        return
    if command[0] == "go":
        if len(command) > 1:
            execute_go(command[1])
        else:
            print("Go where?")
    elif command[0] == "take":
        if len(command) > 1:
            execute_take(command[1])
        else:
            print("Take what?")
    elif command[0] == "drop":
        if len(command) > 1:
            execute_drop(command[1])
        else:
            print("Drop what?")
    elif command[0] == "kill":
        if len(command) > 1:
            execute_kill(command[1])
        else:
            print("Kill what?")
    elif command[0] == "next":
        if len(command) > 1:
            # NOTE(review): execute_nextfloor() is defined without parameters
            # in the original, so passing command[1] raises TypeError --
            # confirm the signature.
            execute_nextfloor(command[1])
        else:
            print("Which floor?")
    else:
        print("This makes no sense.")
def menu(exits, room_items, inv_items):
    """Show the action menu, read one command and return it normalised."""
    # Display menu
    print_menu(exits, room_items, inv_items)
    # Read player's input
    user_input = input("> ")
    # Normalise the input (normalise_input comes from the player module)
    normalised_user_input = normalise_input(user_input)
    return normalised_user_input
def move(exits, direction):
    """Return the room reached by taking the given exit direction."""
    destination = exits[direction]
    return rooms[destination]
def main():
    """Main game loop: give the player a fixed time budget in seconds
    and keep processing commands until it runs out.

    The original implementation parsed ``time.strftime("%H:%M:%S")``
    into seconds-since-midnight and subtracted the two readings; a
    session crossing midnight made the elapsed time negative and broke
    the countdown. ``time.monotonic()`` cannot go backwards.
    """
    start = time.monotonic()
    time_limit = 120  # SETTING THE TIME (in seconds)
    while True:
        remaining = time_limit - (time.monotonic() - start)
        if remaining <= 0:
            break
        print()
        print()
        print('You have', int(remaining), "seconds left.")
        # Display game status (room description, inventory etc.)
        print_room(current_room)
        print_inventory_items(inventory)
        # Show the menu with possible actions and ask the player
        command = menu(current_room["exits"], current_room["items"], inventory)
        # Execute the player's command
        execute_command(command)
    print("Sorry your time is over.")
if __name__ == "__main__":
    main()
|
[
"danibencze"
] |
danibencze
|
8b6563fc75a6729e1edb07c24f9c0b4158e55cf8
|
d0d8a9a84b2b3c41282006f976d510c335f21383
|
/Lista 1/Q1/Q1.3.py
|
0c72df0243ec09bf5fe56e7bd3b1d3a701015e8d
|
[] |
no_license
|
gagbp/ANN
|
cb5cb30ea842fcef2647bd3f6212367d12dc238d
|
2c21caf78398b71192a9c217800a43c7dda9c7e0
|
refs/heads/master
| 2022-12-23T05:52:30.417800
| 2020-09-29T18:17:29
| 2020-09-29T18:17:29
| 280,224,497
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,301
|
py
|
# 3. f(x) = ln(x) + x**2
import numpy as np
def f(x):
    """Target function whose root is being approximated:
    f(x) = ln(x) + x**2."""
    return x ** 2 + np.log(x)
print('\n\nmétodo da bisseção\n')
# Bisection method on [0, 2]: halve the bracketing interval each step,
# keeping the half where the sign change (Bolzano's theorem) persists.
a, b = [0, 2]
n = 50 # number of iterations
for i in range(n):
    m = (a + b) / 2
    if f(m) == 0:
        print('A raiz é:', m)
    elif f(a) * f(m) < 0: # sign change in [a, m] -> root lies there
        b = m
    else:
        a = m
# `m` now holds the bisection approximation of the root; the later
# methods print their error relative to it.
print(m, f(m))
print('\n\nmétodo de Newton\n')
# derivative of f
def df(x):
    """Derivative of f: d/dx (ln(x) + x**2) = 1/x + 2*x."""
    return 2 * x + 1 / x
# Newton's method: x_{k+1} = x_k - f(x_k) / f'(x_k), starting from x0 = 2.
x0 = 2
n = 10
itr = {}
itr[0] = x0
for i in range(1, n):
    itr[i] = x0 - f(x0) / df(x0)
    x0 = itr[i]
# Print iteration number, iterate, and its distance to the bisection
# reference value `m` computed above.
for k, v in itr.items():
    print(k, v, abs(v - m))
print('\n\nmétodo das secantes\n')
# Secant method: Newton's update with the derivative replaced by the
# finite difference through the last two iterates a and b.
n = 11
x0, x1 = [1, 2]
itr = {}
itr[0] = x0
itr[1] = x1
a, b = x0, x1
for i in range(n):
    try:
        xn = (a * f(b) - b * f(a)) / (f(b) - f(a)) # a - f(a) / ((f(b) - f(a))/ (b - a))
    except (ZeroDivisionError, FloatingPointError) as err:
        # Only a vanishing denominator should abort the iteration; the
        # original bare `except:` swallowed every exception, including
        # KeyboardInterrupt. FloatingPointError covers numpy scalars
        # when np.errstate is set to raise.
        raise ValueError(f"Divisão por zero para {a}, {b} na iteração {i}") from err
    itr[i + 2] = xn
    a, b = b, xn
for k, v in itr.items():
    print(k, v, abs(v - m))
print('\n\nmétodo da posição falsa\n')
# Regula falsi (false position): like bisection, but the new point is
# the x-intercept of the secant line through (a, f(a)) and (b, f(b)).
n = 30
a, b = [1, 2]
for i in range(n):
    xn = (a * f(b) - b * f(a)) / (f(b) - f(a))
    if f(xn) == 0:
        print('A raiz é:', xn)
        break
    elif f(a) * f(xn) < 0:
        # Sign change in [a, xn]: shrink the bracket from the right.
        b = xn
    else:
        a = xn
    print(i, xn, abs(xn - m))
|
[
"30929090+gagbp@users.noreply.github.com"
] |
30929090+gagbp@users.noreply.github.com
|
44baddc4298db9a76065f76381120299048faae9
|
ccbfc7818c0b75929a1dfae41dc061d5e0b78519
|
/aliyun-openapi-python-sdk-master/aliyun-python-sdk-baas/aliyunsdkbaas/request/v20180731/DescribeBlockchainInfoRequest.py
|
916c4becfd7dd1214ee5a70eab17421fd385f744
|
[
"Apache-2.0"
] |
permissive
|
P79N6A/dysms_python
|
44b634ffb2856b81d5f79f65889bfd5232a9b546
|
f44877b35817e103eed469a637813efffa1be3e4
|
refs/heads/master
| 2020-04-28T15:25:00.368913
| 2019-03-13T07:52:34
| 2019-03-13T07:52:34
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,119
|
py
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
class DescribeBlockchainInfoRequest(RpcRequest):
    """RPC request for the Aliyun Baas (Blockchain-as-a-Service) API
    action 'DescribeBlockchainInfo', API version 2018-07-31.

    Generated-SDK style: one getter/setter pair per request body field.
    """
    def __init__(self):
        RpcRequest.__init__(self, 'Baas', '2018-07-31', 'DescribeBlockchainInfo')
    def get_Bizid(self):
        # Blockchain/business id carried in the request body.
        return self.get_body_params().get('Bizid')
    def set_Bizid(self,Bizid):
        self.add_body_params('Bizid', Bizid)
|
[
"1478458905@qq.com"
] |
1478458905@qq.com
|
96e56f0038eb3fbd6670dab99b8f1f794987a788
|
27a6e0c133507fd72af01dccec24038709eed6c8
|
/Personal_loan_campaign_Supervised_Learning.py
|
6d8eab924b0a98aaa8af96c569d11f433b8e23ee
|
[] |
no_license
|
KlariVZ/Supervised-Learning-Basic
|
7e903eec4d6c1b96ea545e2314349385fd34d88f
|
30e1b74087e9abd84ca583be00a4ff76a090908a
|
refs/heads/master
| 2022-07-25T04:20:13.537262
| 2020-05-14T14:39:07
| 2020-05-14T14:39:07
| 263,940,295
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,751
|
py
|
#!/usr/bin/env python
# coding: utf-8
# Jupyter-notebook export: supervised-learning comparison (logistic
# regression, Gaussian naive Bayes, KNN) on the bank personal-loan
# campaign data set. Cell markers (# In[n]:) are kept from the export.
# In[1]:
### ENABLES GRAPH PLOTTING IN JUPYTER
get_ipython().run_line_magic('matplotlib', 'inline')
# In[2]:
### LIBRARIES
import pandas as pd
import numpy as np
from sklearn import model_selection
from sklearn import metrics
import seaborn as sns
from sklearn.model_selection import cross_val_score
from sklearn.linear_model import LogisticRegression
from sklearn.naive_bayes import GaussianNB
from sklearn.ensemble import RandomForestClassifier
from sklearn.ensemble import VotingClassifier
from sklearn.ensemble import BaggingClassifier
from sklearn.tree import DecisionTreeClassifier
# In[3]:
### READING CSV FILE
df = pd.read_csv("Bank_Personal_Loan_Modelling.csv")
df
# In[4]:
### DATA TYPES
df.dtypes
# In[5]:
# 1 FLOAT ATTRIBUTE: CCAVG
# 13 INTEGERS
# In[6]:
df.describe()
# In[7]:
# EXPERIENCE CONTAINS NEGATIVE NUMBERS
# In[8]:
### CONVERT EXPERIENCE ATTRIBUTE TO CONTAIN NON-NEGATIVE NUMBERS
### USE .abs FUNCTION
df['Experience'] = df['Experience'].abs()
df.describe().transpose()
# In[9]:
### SEABORN LIBRARY IMPORT: STATISTICAL PLOTS
### COMPARING ALL ATTRIBUTES
import seaborn as sns
df_attr = df.iloc[:,0:12]
sns.pairplot(df_attr)
# In[10]:
# STRONG POSITIVE RELATIONSHIP: AGE & EXPERIENCE
# NO LINEAR RELATIONSHIP BETWEEN AGE & INCOME
# NO LINEAR RELATIONSHIP BETWEEN AGE & EXPERIENCE
# In[11]:
df.dtypes
# In[12]:
df.groupby(["Personal Loan"]).count()
# In[13]:
# DATA IS SKEWED IN TERMS OF TARGET COLUMN
# VERY FEW RECORDS OF PEOPLE WHO PREVIOUSLY TOOK OUT PERSONAL LOANS
# In[15]:
### SEPARATION OF INDEPENDENT ATTRIBUTES: STORE THEM IN X-ARRAY
### STORE TARGET COLUMN IN Y-ARRAY
X_df = df.loc[:, df.columns != 'Personal Loan']
y_df = df.loc[:, df.columns == 'Personal Loan']
# In[16]:
#### MODEL: LOGISTIC
# In[17]:
### TRAINING & TEST DATA: 60:40
### DATA PREPARATION FOR LOGISTIC REGRESSION
features=X_df.iloc[:, 0:10]
features_array = features.values
target_labels = y_df.values
test_size = 0.40
### RANDOM NUMBER SEEDING: REPEATABILITY OF CODE WHEN USING RANDOM FUNCTIONS
seed = 7
X_train, X_test, y_train, y_test = model_selection.train_test_split(features_array, target_labels, test_size=test_size, random_state=seed)
### CONVERT 1 D VECTOR INTO 1 D ARRAY
y_train = np.ravel(y_train)
# In[18]:
### LOGISTIC REGRESSION TO PREDICT PERSONAL LOAN AFFINITY
### REMOVED NUMERIC BINNED COLUMNS
model = LogisticRegression()
model.fit(X_train, y_train)
model_score = model.score(X_test, y_test)
y_predict = model.predict(X_test)
print(model_score)
print(metrics.confusion_matrix(y_test, y_predict))
# In[19]:
# ACCURACY SCORE OF 0.912
# BUT ACCURACY SCORE IS AT MODEL LEVEL, WHICH MAKES IT UNRELIABLE
# In[20]:
### MODEL FIT SUMMARY
print(metrics.classification_report(y_test, y_predict))
# In[21]:
# LOW PRECISION RECALL FOR CLASS 1
# In[22]:
#### MODEL: NAIVE BAYES
# In[23]:
### TRAIN AND TEST DATA SET
### DATA PREP
features=X_df.iloc[:, 0:10]
target_labels = df.loc[:, df.columns == 'Personal Loan']
X_array = features.values
y_array = target_labels.values
test_size = 0.40
seed = 7
X_train, X_test, y_train, y_test = model_selection.train_test_split(X_array, y_array, test_size=test_size, random_state=seed)
y_train = np.ravel(y_train)
# In[24]:
### INVOKING NB GAUSSIAN FUNCTION
### FITTING MODEL IN TRAINING DATA SET
model = GaussianNB()
model.fit(X_train, y_train)
predictions=model.predict(X_test)
### ACCURACY TEST OF MODEL
print(metrics.confusion_matrix(y_test,predictions))
# In[25]:
### PREDICTIONS
expected = y_test
predicted = model.predict(X_test)
# MODEL FIT SUMMARY
print(metrics.classification_report(expected, predicted))
# In[26]:
# CLASS 1 METRICS: NOT IN ACCEPTABLE RANGE (80% & ABOVE)
# In[27]:
#### MODEL: KNN
# In[29]:
from sklearn.neighbors import KNeighborsClassifier
NNH = KNeighborsClassifier(n_neighbors= 3 , weights = 'distance')
NNH.fit(X_train, y_train)
# In[30]:
predicted_labels = NNH.predict(X_test)
# In[31]:
print(metrics.confusion_matrix(y_test, predicted_labels))
# In[32]:
### MODEL FIT SUMMARY
print(metrics.classification_report(y_test, predicted_labels))
# In[33]:
# RECALL FOR CLASS ONE IS THE LEAST
# In[34]:
### SCALING: Z-SCORE
# NOTE(review): preprocessing.scale standardises train and test
# independently; for a deployable pipeline a StandardScaler should be
# fit on the training split only and reused on the test split — TODO
# confirm whether this simplification is intentional for the exercise.
from sklearn import preprocessing
X_train_scaled = preprocessing.scale(X_train)
X_test_scaled = preprocessing.scale(X_test)
NNH.fit(X_train_scaled, y_train)
# In[35]:
predicted_labels = NNH.predict(X_test_scaled)
# In[36]:
print(metrics.confusion_matrix(y_test, predicted_labels))
# In[37]:
### MODEL FIT SUMMARY
print(metrics.classification_report(y_test, predicted_labels))
# In[ ]:
#SCALED KNN HAS PROVIDED THE BEST RESULT
|
[
"noreply@github.com"
] |
noreply@github.com
|
1e082c416f419960cc8822abb7b30e306623c4e7
|
0e7aed5eef2e1d132a7e75dd8f439ae76c87639c
|
/python/523_Continuous_Subarray_Sum.py
|
1c395d7edb3f718bfdf9111d3fd63749240be388
|
[
"MIT"
] |
permissive
|
liaison/LeetCode
|
2a93df3b3ca46b34f922acdbc612a3bba2d34307
|
bf03743a3676ca9a8c107f92cf3858b6887d0308
|
refs/heads/master
| 2022-09-05T15:04:19.661298
| 2022-08-19T19:29:19
| 2022-08-19T19:29:19
| 52,914,957
| 17
| 4
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 811
|
py
|
class Solution:
    def checkSubarraySum(self, nums: List[int], k: int) -> bool:
        """Return True if `nums` contains a contiguous subarray of
        length >= 2 whose sum is a multiple of `k` (an exact zero sum
        when k == 0).

        Two prefix sums that are congruent modulo k bracket a subarray
        whose sum is a multiple of k. We remember the *earliest* index
        at which each residue occurred (a virtual prefix at index -1
        handles subarrays starting at 0, e.g. [0, 0] with k == 0) and
        require the bracketed span to be longer than one element.
        """
        earliest = {0: -1}  # residue -> earliest prefix-sum index
        running = 0
        for i, value in enumerate(nums):
            running += value
            if k != 0:
                running %= k  # group prefix sums by residue mod k
            first = earliest.get(running)
            if first is None:
                earliest[running] = i
            elif i - first > 1:
                return True
        return False
|
[
"lisong.guo@me.com"
] |
lisong.guo@me.com
|
044041bd2ebf8566281a37ea0a3ab72870830156
|
73fe0ad188e7bddc6c89c0df50f3e34e995e5436
|
/django/someproject/accounts/migrations/0001_initial.py
|
facd79baa256aab11c3c8cbd8eed576942ac985f
|
[] |
no_license
|
Resolt/DjangoLearning
|
f99c4113631db94ee386b2f165975e5b4ca0330b
|
b5201f3aadfa6f58e8c6563f544c25c1ff8ecbe8
|
refs/heads/master
| 2020-09-14T16:45:38.419457
| 2020-01-16T22:13:01
| 2020-01-16T22:13:01
| 223,188,978
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 972
|
py
|
# Generated by Django 3.0 on 2020-01-16 15:03
from django.conf import settings
import django.contrib.auth.models
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated initial migration for the accounts app.

    Creates a custom ``User`` model that multi-table-inherits from
    ``django.contrib.auth``'s ``User`` (via the ``user_ptr`` parent
    link) and reuses the stock ``UserManager``. Do not hand-edit the
    operations below — generate a follow-up migration instead.
    """
    initial = True
    dependencies = [
        ('auth', '0011_update_proxy_permissions'),
    ]
    operations = [
        migrations.CreateModel(
            name='User',
            fields=[
                ('user_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'verbose_name': 'user',
                'verbose_name_plural': 'users',
                'abstract': False,
            },
            bases=('auth.user', models.Model),
            managers=[
                ('objects', django.contrib.auth.models.UserManager()),
            ],
        ),
    ]
|
[
"pdmmichaelsen@gmail.com"
] |
pdmmichaelsen@gmail.com
|
bdd1f2637515df7c1669d846707c8c11c05ac5ae
|
12ce75fc994395c9eb54c6fe30c0fffc6ee19ee1
|
/Testing/assertions.py
|
c5764a194629fdca87bd7d14b65ad6a021fe0ab9
|
[] |
no_license
|
RobinDeHerdt/HackerRank
|
aeb8c1f080b9d8a116f66a0fffb6fbdfd4f79076
|
b7ce29783845d0edd83e7e196ffe599143005a5d
|
refs/heads/master
| 2021-07-10T13:55:34.099852
| 2020-06-13T13:51:38
| 2020-06-13T13:51:38
| 132,801,390
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 191
|
py
|
def assert_equals(expected, actual):
    """Assert that *actual* equals *expected* and print a coloured
    success message when it does.

    :raises AssertionError: with a descriptive message on mismatch.
    """
    assert expected == actual, "Expected {0} but got {1}".format(expected, actual)
    # \033[94m switches to blue; \033[0m resets it afterwards — the
    # reset was missing, so the colour bled into all subsequent output.
    print("\033[94m [SUCCESS] {0} equals {1}\033[0m".format(expected, actual))
|
[
"robindh95@gmail.com"
] |
robindh95@gmail.com
|
7bd4c978ab4d3fea367ef7e57e7109b7f73253c8
|
5cb9dccbcccb8a2137368dd0615fe3e3c7761707
|
/simulations/kinova/build/moveit_ros_control_interface/catkin_generated/pkg.installspace.context.pc.py
|
40c68e29ded735aa8a303a0fd6910cc281b2e6ca
|
[] |
no_license
|
Simon-Steinmann/sim2real-modular-RL-project
|
b2467a393014e106043f6128a026f5eac934a83d
|
4027590ac94de2d5c914731c09efcf2f318b9ca3
|
refs/heads/master
| 2020-07-29T01:30:56.450919
| 2019-10-12T09:33:00
| 2019-10-12T09:33:00
| 209,605,548
| 4
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,301
|
py
|
# generated from catkin/cmake/template/pkg.context.pc.in
# NOTE: machine-generated pkg-config context for the catkin build; the
# values below are absolute install-space paths baked in at configure
# time — regenerate via the build rather than editing by hand.
CATKIN_PACKAGE_PREFIX = ""
# Include directories exported by this package (split on ';').
PROJECT_PKG_CONFIG_INCLUDE_DIRS = "/home/acis/sim2real/simulations/kinova/install/include;/usr/include".split(';') if "/home/acis/sim2real/simulations/kinova/install/include;/usr/include" != "" else []
# Space-separated list of catkin run dependencies.
PROJECT_CATKIN_DEPENDS = "moveit_core;controller_manager_msgs;trajectory_msgs".replace(';', ' ')
# Linker flags / libraries exported by this package.
PKG_CONFIG_LIBRARIES_WITH_PREFIX = "-lmoveit_ros_control_interface_plugin;-lmoveit_ros_control_interface_trajectory_plugin;/usr/lib/x86_64-linux-gnu/libboost_system.so;/usr/lib/x86_64-linux-gnu/libboost_thread.so;/usr/lib/x86_64-linux-gnu/libboost_chrono.so;/usr/lib/x86_64-linux-gnu/libboost_date_time.so;/usr/lib/x86_64-linux-gnu/libboost_atomic.so;/usr/lib/x86_64-linux-gnu/libpthread.so".split(';') if "-lmoveit_ros_control_interface_plugin;-lmoveit_ros_control_interface_trajectory_plugin;/usr/lib/x86_64-linux-gnu/libboost_system.so;/usr/lib/x86_64-linux-gnu/libboost_thread.so;/usr/lib/x86_64-linux-gnu/libboost_chrono.so;/usr/lib/x86_64-linux-gnu/libboost_date_time.so;/usr/lib/x86_64-linux-gnu/libboost_atomic.so;/usr/lib/x86_64-linux-gnu/libpthread.so" != "" else []
PROJECT_NAME = "moveit_ros_control_interface"
PROJECT_SPACE_DIR = "/home/acis/sim2real/simulations/kinova/install"
PROJECT_VERSION = "1.0.1"
|
[
"simon.steinmann91@gmail.com"
] |
simon.steinmann91@gmail.com
|
b20f54d18b1a07fc9cafe03f66940967d1ad7385
|
da5511b8f82d1ac099a5f0bf9e18c8d7fd144ce5
|
/apis/urls.py
|
d8c301e0c3a7460983c0e1176680c76dde740aa2
|
[] |
no_license
|
yarafanrui/crop_calendar_lite
|
07d2c5136a72d97f11791443f4687e9f9449193b
|
db43e859ee8e2b92b3db868d8effd1e3e67fe126
|
refs/heads/master
| 2020-04-12T09:16:59.703239
| 2019-01-21T09:06:24
| 2019-01-21T09:06:24
| 162,397,460
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,311
|
py
|
from . import views
from django.urls import path, include
from django.views.decorators.csrf import csrf_exempt
urlpatterns = [
    # The following URL can be deleted.
    # path('test/', views.TestView.as_view(), name='test'),
    # The following URL serves as a general entrance for data handling
    # and should not be open to the public.
    # GET, POST, PUT and DELETE methods are defined.
    # Uncomment when necessary.
    path('test-2/', csrf_exempt(views.TestView_2.as_view()), name='test_2'),
    # Uncomment the following line when necessary.
    # path('load/', views.load_data, name='load'),
    # The following URL is an intermediate product.
    # path('latlng2name/', views.latlng2name, name='latlng2name'),
    # The following URL can be deleted.
    # path('latlng2fourdates/', views.LatLng2FourDates.as_view(), name='latlng2fourdates'),
    # The following URL is the 'end product'.
    path('latlng2fourdates-2/', views.LatLng2FourDates_2.as_view(), name='latlng2fourdates_2'),
    # The following URL serves the v0 API documentation.
    path('v0/', views.v0_doc, name='v0_doc'),
    # path('docs/', views.yaml2html, name='docs'),
    # maybe TODO ?
    # perhaps a nicer interface for admins to review the pending Create, Update
    # and Delete queue.
    # path('task_list/', views.task_list, name='task_list'),
]
|
[
"fan.rui@yara.com"
] |
fan.rui@yara.com
|
40d5a9ec148e9f1f27701264107fbae72bb213c7
|
025b1a25eedf5b03d091f683b0bd07c20eac953d
|
/telethon_generator/tl_generator.py
|
f0a14fbc1571f1f511c55a284b6179c304b792c3
|
[
"MIT"
] |
permissive
|
phuonglm/Telethon
|
64a97259afbf3a9cb09683094f6d8f0b3f1145c1
|
1f1e040af972e6948538d47fab5563303c2243eb
|
refs/heads/master
| 2021-05-08T01:43:42.736010
| 2017-10-22T11:57:02
| 2017-10-22T11:57:02
| 107,896,646
| 0
| 0
| null | 2017-10-22T19:36:14
| 2017-10-22T19:36:14
| null |
UTF-8
|
Python
| false
| false
| 27,624
|
py
|
import os
import re
import shutil
import struct
from zlib import crc32
from collections import defaultdict
from .parser import SourceBuilder, TLParser, TLObject
# Module docstring stamped at the top of every generated .py file.
AUTO_GEN_NOTICE = \
    '"""File generated by TLObjects\' generator. All changes will be ERASED"""'
class TLGenerator:
    def __init__(self, output_dir):
        """Create a generator that writes TLObject modules under
        *output_dir*."""
        self.output_dir = output_dir
def _get_file(self, *paths):
return os.path.join(self.output_dir, *paths)
def _rm_if_exists(self, filename):
file = self._get_file(filename)
if os.path.exists(file):
if os.path.isdir(file):
shutil.rmtree(file)
else:
os.remove(file)
def tlobjects_exist(self):
"""Determines whether the TLObjects were previously
generated (hence exist) or not
"""
return os.path.isfile(self._get_file('all_tlobjects.py'))
def clean_tlobjects(self):
"""Cleans the automatically generated TLObjects from disk"""
for name in ('functions', 'types', 'all_tlobjects.py'):
self._rm_if_exists(name)
    def generate_tlobjects(self, scheme_file, import_depth):
        """Generates all the TLObjects from scheme.tl to
        tl/functions and tl/types.

        :param scheme_file: path to the .tl scheme definition file.
        :param import_depth: how many packages deep the generated
            modules live; used to build the leading dots of the
            relative imports they emit.
        """
        # First ensure that the required parent directories exist
        os.makedirs(self._get_file('functions'), exist_ok=True)
        os.makedirs(self._get_file('types'), exist_ok=True)
        # Step 0: Cache the parsed file on a tuple
        tlobjects = tuple(TLParser.parse_file(scheme_file, ignore_core=True))
        # Step 1: Group everything by {namespace: [tlobjects]} so we can
        # easily generate __init__.py files with all the TLObjects on them.
        namespace_functions = defaultdict(list)
        namespace_types = defaultdict(list)
        # Make use of this iteration to also store 'Type: [Constructors]',
        # used when generating the documentation for the classes.
        type_constructors = defaultdict(list)
        for tlobject in tlobjects:
            if tlobject.is_function:
                namespace_functions[tlobject.namespace].append(tlobject)
            else:
                namespace_types[tlobject.namespace].append(tlobject)
                type_constructors[tlobject.result].append(tlobject)
        # Step 2: Generate the actual code
        self._write_init_py(
            self._get_file('functions'), import_depth,
            namespace_functions, type_constructors
        )
        self._write_init_py(
            self._get_file('types'), import_depth,
            namespace_types, type_constructors
        )
        # Step 4: Once all the objects have been generated,
        # we can now group them in a single file
        filename = os.path.join(self._get_file('all_tlobjects.py'))
        with open(filename, 'w', encoding='utf-8') as file:
            with SourceBuilder(file) as builder:
                builder.writeln(AUTO_GEN_NOTICE)
                builder.writeln()
                builder.writeln('from . import types, functions')
                builder.writeln()
                # Create a constant variable to indicate which layer this is
                builder.writeln('LAYER = {}'.format(
                    TLParser.find_layer(scheme_file))
                )
                builder.writeln()
                # Then create the dictionary containing constructor_id: class
                builder.writeln('tlobjects = {')
                builder.current_indent += 1
                # Fill the dictionary (0x1a2b3c4f: tl.full.type.path.Class)
                for tlobject in tlobjects:
                    constructor = hex(tlobject.id)
                    if len(constructor) != 10:
                        # Make it a nice length 10 so it fits well
                        constructor = '0x' + constructor[2:].zfill(8)
                    builder.write('{}: '.format(constructor))
                    builder.write(
                        'functions' if tlobject.is_function else 'types')
                    if tlobject.namespace:
                        builder.write('.' + tlobject.namespace)
                    builder.writeln('.{},'.format(tlobject.class_name()))
                builder.current_indent -= 1
                builder.writeln('}')
    @staticmethod
    def _write_init_py(out_dir, depth, namespace_tlobjects, type_constructors):
        """Write one generated module per namespace under *out_dir*
        (the empty namespace goes to __init__.py), each containing its
        imports followed by every TLObject class sorted by name."""
        # namespace_tlobjects: {'namespace', [TLObject]}
        os.makedirs(out_dir, exist_ok=True)
        for ns, tlobjects in namespace_tlobjects.items():
            file = os.path.join(out_dir, ns + '.py' if ns else '__init__.py')
            with open(file, 'w', encoding='utf-8') as f, \
                    SourceBuilder(f) as builder:
                builder.writeln(AUTO_GEN_NOTICE)
                # Both types and functions inherit from the TLObject class
                # so they all can be serialized and sent, however, only the
                # functions are "content_related".
                builder.writeln(
                    'from {}.tl.tlobject import TLObject'.format('.' * depth)
                )
                if ns:
                    # Only import the parent types if we're not in such file
                    builder.writeln(
                        'from {}.tl import types'.format('.' * depth)
                    )
                # Add the relative imports to the namespaces,
                # unless we already are in a namespace.
                if not ns:
                    builder.writeln('from . import {}'.format(', '.join(
                        x for x in namespace_tlobjects.keys() if x
                    )))
                # Import 'get_input_*' utils
                # TODO Support them on types too
                if 'functions' in out_dir:
                    builder.writeln(
                        'from {}.utils import get_input_peer, '
                        'get_input_channel, get_input_user, '
                        'get_input_media, get_input_photo'.format('.' * depth)
                    )
                # Import 'os' for those needing access to 'os.urandom()'
                # Currently only 'random_id' needs 'os' to be imported,
                # for all those TLObjects with arg.can_be_inferred.
                builder.writeln('import os')
                # Import struct for the .__bytes__(self) serialization
                builder.writeln('import struct')
                # Generate the class for every TLObject
                for t in sorted(tlobjects, key=lambda x: x.name):
                    TLGenerator._write_source_code(
                        t, builder, depth, type_constructors
                    )
                builder.current_indent = 0
    @staticmethod
    def _write_source_code(tlobject, builder, depth, type_constructors):
        """Writes the source code corresponding to the given TLObject
        by making use of the 'builder' SourceBuilder.

        Emits, in order: the class header with CONSTRUCTOR_ID and
        SUBCLASS_OF_ID, __init__ (with docstring), to_dict, __bytes__,
        from_reader, on_response (functions only), __str__ and
        stringify.

        Additional information such as file path depth and
        the Type: [Constructors] must be given for proper
        importing and documentation strings.
        """
        builder.writeln()
        builder.writeln()
        builder.writeln('class {}(TLObject):'.format(tlobject.class_name()))
        # Class-level variable to store its Telegram's constructor ID
        builder.writeln('CONSTRUCTOR_ID = {}'.format(hex(tlobject.id)))
        builder.writeln('SUBCLASS_OF_ID = {}'.format(
            hex(crc32(tlobject.result.encode('ascii'))))
        )
        builder.writeln()
        # Flag arguments must go last
        args = [
            a for a in tlobject.sorted_args()
            if not a.flag_indicator and not a.generic_definition
        ]
        # Convert the args to string parameters, flags having =None
        args = [
            (a.name if not a.is_flag and not a.can_be_inferred
             else '{}=None'.format(a.name))
            for a in args
        ]
        # Write the __init__ function
        if args:
            builder.writeln(
                'def __init__(self, {}):'.format(', '.join(args))
            )
        else:
            builder.writeln('def __init__(self):')
        # Now update args to have the TLObject arguments, _except_
        # those which are calculated on send or ignored, this is
        # flag indicator and generic definitions.
        #
        # We don't need the generic definitions in Python
        # because arguments can be any type
        args = [arg for arg in tlobject.args
                if not arg.flag_indicator and
                not arg.generic_definition]
        if args:
            # Write the docstring, to know the type of the args
            builder.writeln('"""')
            for arg in args:
                if not arg.flag_indicator:
                    builder.writeln(':param {} {}:'.format(
                        arg.type_hint(), arg.name
                    ))
                    builder.current_indent -= 1  # It will auto-indent (':')
            # We also want to know what type this request returns
            # or to which type this constructor belongs to
            builder.writeln()
            if tlobject.is_function:
                builder.write(':returns {}: '.format(tlobject.result))
            else:
                builder.write('Constructor for {}: '.format(tlobject.result))
            constructors = type_constructors[tlobject.result]
            if not constructors:
                builder.writeln('This type has no constructors.')
            elif len(constructors) == 1:
                builder.writeln('Instance of {}.'.format(
                    constructors[0].class_name()
                ))
            else:
                builder.writeln('Instance of either {}.'.format(
                    ', '.join(c.class_name() for c in constructors)
                ))
            builder.writeln('"""')
        builder.writeln('super().__init__()')
        # Functions have a result object and are confirmed by default
        if tlobject.is_function:
            builder.writeln('self.result = None')
            builder.writeln(
                'self.content_related = True')
        # Set the arguments
        if args:
            # Leave an empty line if there are any args
            builder.writeln()
        for arg in args:
            TLGenerator._write_self_assigns(builder, tlobject, arg, args)
        builder.end_block()
        # Write the to_dict(self) method
        builder.writeln('def to_dict(self, recursive=True):')
        if args:
            builder.writeln('return {')
        else:
            builder.write('return {')
        builder.current_indent += 1
        base_types = ('string', 'bytes', 'int', 'long', 'int128',
                      'int256', 'double', 'Bool', 'true', 'date')
        for arg in args:
            builder.write("'{}': ".format(arg.name))
            if arg.type in base_types:
                if arg.is_vector:
                    builder.write('[] if self.{0} is None else self.{0}[:]'
                                  .format(arg.name))
                else:
                    builder.write('self.{}'.format(arg.name))
            else:
                if arg.is_vector:
                    builder.write(
                        '([] if self.{0} is None else [None'
                        ' if x is None else x.to_dict() for x in self.{0}]'
                        ') if recursive else self.{0}'.format(arg.name)
                    )
                else:
                    builder.write(
                        '(None if self.{0} is None else self.{0}.to_dict())'
                        ' if recursive else self.{0}'.format(arg.name)
                    )
            builder.writeln(',')
        builder.current_indent -= 1
        builder.writeln("}")
        builder.end_block()
        # Write the .__bytes__() function
        builder.writeln('def __bytes__(self):')
        # Some objects require more than one flag parameter to be set
        # at the same time. In this case, add an assertion.
        repeated_args = defaultdict(list)
        for arg in tlobject.args:
            if arg.is_flag:
                repeated_args[arg.flag_index].append(arg)
        for ra in repeated_args.values():
            if len(ra) > 1:
                cnd1 = ('self.{}'.format(a.name) for a in ra)
                cnd2 = ('not self.{}'.format(a.name) for a in ra)
                # NOTE(review): the generated message ships the typo
                # "all me True-y"; it is a runtime string, left as-is.
                builder.writeln(
                    "assert ({}) or ({}), '{} parameters must all "
                    "be False-y (like None) or all me True-y'".format(
                        ' and '.join(cnd1), ' and '.join(cnd2),
                        ', '.join(a.name for a in ra)
                    )
                )
        builder.writeln("return b''.join((")
        builder.current_indent += 1
        # First constructor code, we already know its bytes
        builder.writeln('{},'.format(repr(struct.pack('<I', tlobject.id))))
        for arg in tlobject.args:
            if TLGenerator.write_to_bytes(builder, arg, tlobject.args):
                builder.writeln(',')
        builder.current_indent -= 1
        builder.writeln('))')
        builder.end_block()
        # Write the static from_reader(reader) function
        builder.writeln('@staticmethod')
        builder.writeln('def from_reader(reader):')
        for arg in tlobject.args:
            TLGenerator.write_read_code(
                builder, arg, tlobject.args, name='_' + arg.name
            )
        builder.writeln('return {}({})'.format(
            tlobject.class_name(), ', '.join(
                '{0}=_{0}'.format(a.name) for a in tlobject.sorted_args()
                if not a.flag_indicator and not a.generic_definition
            )
        ))
        builder.end_block()
        # Only requests can have a different response that's not their
        # serialized body, that is, we'll be setting their .result.
        if tlobject.is_function:
            builder.writeln('def on_response(self, reader):')
            TLGenerator.write_request_result_code(builder, tlobject)
            builder.end_block()
        # Write the __str__(self) and stringify(self) functions
        builder.writeln('def __str__(self):')
        builder.writeln('return TLObject.pretty_format(self)')
        builder.end_block()
        builder.writeln('def stringify(self):')
        builder.writeln('return TLObject.pretty_format(self, indent=0)')
        # builder.end_block()  # No need to end the last block
    @staticmethod
    def _write_self_assigns(builder, tlobject, arg, args):
        """Write the ``self.<arg> = ...`` line for one constructor
        argument: inferring a value for 'random_id', auto-casting
        well-known Input* types on functions, or plain assignment."""
        if arg.can_be_inferred:
            # Currently the only argument that can be
            # inferred are those called 'random_id'
            if arg.name == 'random_id':
                # Endianness doesn't really matter, and 'big' is shorter
                code = "int.from_bytes(os.urandom({}), 'big', signed=True)"\
                    .format(8 if arg.type == 'long' else 4)
                if arg.is_vector:
                    # Currently for the case of "messages.forwardMessages"
                    # Ensure we can infer the length from id:Vector<>
                    if not next(a for a in args if a.name == 'id').is_vector:
                        raise ValueError(
                            'Cannot infer list of random ids for ', tlobject
                        )
                    code = '[{} for _ in range(len(id))]'.format(code)
                builder.writeln(
                    "self.random_id = random_id if random_id "
                    "is not None else {}".format(code)
                )
            else:
                raise ValueError('Cannot infer a value for ', arg)
        # Well-known cases, auto-cast it to the right type
        elif arg.type == 'InputPeer' and tlobject.is_function:
            TLGenerator.write_get_input(builder, arg, 'get_input_peer')
        elif arg.type == 'InputChannel' and tlobject.is_function:
            TLGenerator.write_get_input(builder, arg, 'get_input_channel')
        elif arg.type == 'InputUser' and tlobject.is_function:
            TLGenerator.write_get_input(builder, arg, 'get_input_user')
        elif arg.type == 'InputMedia' and tlobject.is_function:
            TLGenerator.write_get_input(builder, arg, 'get_input_media')
        elif arg.type == 'InputPhoto' and tlobject.is_function:
            TLGenerator.write_get_input(builder, arg, 'get_input_photo')
        else:
            builder.writeln('self.{0} = {0}'.format(arg.name))
    @staticmethod
    def write_get_input(builder, arg, get_input_code):
        """Write the ``self.<arg> = get_input_*(<arg>)`` assignment,
        mapping each element when the argument is a vector and guarding
        with ``if <arg> else None`` when it is a flag.

        Returns None (the previous docstring incorrectly claimed a
        boolean return value).
        """
        if arg.is_vector:
            builder.write('self.{0} = [{1}(_x) for _x in {0}]'
                          .format(arg.name, get_input_code))
        else:
            builder.write('self.{0} = {1}({0})'
                          .format(arg.name, get_input_code))
        builder.writeln(
            ' if {} else None'.format(arg.name) if arg.is_flag else ''
        )
@staticmethod
def get_file_name(tlobject, add_extension=False):
"""Gets the file name in file_name_format.py for the given TLObject"""
# Courtesy of http://stackoverflow.com/a/1176023/4759433
s1 = re.sub('(.)([A-Z][a-z]+)', r'\1_\2', tlobject.name)
result = re.sub('([a-z0-9])([A-Z])', r'\1_\2', s1).lower()
if add_extension:
return result + '.py'
else:
return result
    @staticmethod
    def write_to_bytes(builder, arg, args, name=None):
        """
        Writes the .__bytes__() code for the given argument
        :param builder: The source code builder
        :param arg: The argument to write
        :param args: All the other arguments in TLObject same __bytes__.
                     This is required to determine the flags value
        :param name: The name of the argument. Defaults to "self.argname"
                     This argument is an option because it's required when
                     writing Vectors<>
        :return: True when something was written (the caller then emits a
                 trailing comma); None for skipped true-flags/generics.
        """
        if arg.generic_definition:
            return  # Do nothing, this only specifies a later type
        if name is None:
            name = 'self.{}'.format(arg.name)
        # The argument may be a flag, only write if it's not None AND
        # if it's not a True type.
        # True types are not actually sent, but instead only used to
        # determine the flags.
        if arg.is_flag:
            if arg.type == 'true':
                return  # Exit, since True type is never written
            elif arg.is_vector:
                # Vector flags are special since they consist of 3 values,
                # so we need an extra join here. Note that empty vector flags
                # should NOT be sent either!
                builder.write("b'' if not {} else b''.join((".format(name))
            else:
                builder.write("b'' if not {} else (".format(name))
        if arg.is_vector:
            if arg.use_vector_id:
                # vector code, unsigned 0x1cb5c415 as little endian
                builder.write(r"b'\x15\xc4\xb5\x1c',")
            builder.write("struct.pack('<i', len({})),".format(name))
            # Cannot unpack the values for the outer tuple through *[(
            # since that's a Python >3.5 feature, so add another join.
            builder.write("b''.join(")
            # Temporary disable .is_vector, not to enter this if again
            # Also disable .is_flag since it's not needed per element
            # NOTE(review): `arg` is mutated and restored here — this
            # method is not safe to call concurrently on the same arg.
            old_flag = arg.is_flag
            arg.is_vector = arg.is_flag = False
            TLGenerator.write_to_bytes(builder, arg, args, name='x')
            arg.is_vector = True
            arg.is_flag = old_flag
            builder.write(' for x in {})'.format(name))
        elif arg.flag_indicator:
            # Calculate the flags with those items which are not None
            builder.write("struct.pack('<I', {})".format(
                ' | '.join('({} if {} else 0)'.format(
                    1 << flag.flag_index, 'self.{}'.format(flag.name)
                ) for flag in args if flag.is_flag)
            ))
        elif 'int' == arg.type:
            # struct.pack is around 4 times faster than int.to_bytes
            builder.write("struct.pack('<i', {})".format(name))
        elif 'long' == arg.type:
            builder.write("struct.pack('<q', {})".format(name))
        elif 'int128' == arg.type:
            builder.write("{}.to_bytes(16, 'little', signed=True)".format(name))
        elif 'int256' == arg.type:
            builder.write("{}.to_bytes(32, 'little', signed=True)".format(name))
        elif 'double' == arg.type:
            builder.write("struct.pack('<d', {})".format(name))
        elif 'string' == arg.type:
            builder.write('TLObject.serialize_bytes({})'.format(name))
        elif 'Bool' == arg.type:
            # 0x997275b5 if boolean else 0xbc799737
            builder.write(
                r"b'\xb5ur\x99' if {} else b'7\x97y\xbc'".format(name)
            )
        elif 'true' == arg.type:
            pass  # These are actually NOT written! Only used for flags
        elif 'bytes' == arg.type:
            builder.write('TLObject.serialize_bytes({})'.format(name))
        elif 'date' == arg.type:  # Custom format
            # 0 if datetime is None else int(datetime.timestamp())
            builder.write(
                r"b'\0\0\0\0' if {0} is None else "
                r"struct.pack('<I', int({0}.timestamp()))".format(name)
            )
        else:
            # Else it may be a custom type
            builder.write('bytes({})'.format(name))
        if arg.is_flag:
            builder.write(')')
            if arg.is_vector:
                builder.write(')')  # We were using a tuple
        return True  # Something was written
@staticmethod
def write_read_code(builder, arg, args, name):
    """
    Writes the read code for the given argument, setting the
    arg.name variable to its read value.

    :param builder: The source code builder
    :param arg: The argument to write
    :param args: All the other arguments in TLObject same on_send.
                 This is required to determine the flags value
    :param name: The name of the argument. Defaults to "self.argname"
                 This argument is an option because it's required when
                 writing Vectors<>
    """
    if arg.generic_definition:
        return  # Do nothing, this only specifies a later type

    # The argument may be a flag, only write that flag was given!
    was_flag = False
    if arg.is_flag:
        # Treat 'true' flags as a special case, since they're true if
        # they're set, and nothing else needs to actually be read.
        if 'true' == arg.type:
            builder.writeln(
                '{} = bool(flags & {})'.format(name, 1 << arg.flag_index)
            )
            return

        was_flag = True
        builder.writeln('if flags & {}:'.format(
            1 << arg.flag_index
        ))
        # Temporary disable .is_flag not to enter this if
        # again when calling the method recursively
        arg.is_flag = False

    if arg.is_vector:
        if arg.use_vector_id:
            # We have to read the vector's constructor ID
            # (the 0x1cb5c415 prefix); its value is discarded.
            builder.writeln("reader.read_int()")
        builder.writeln('{} = []'.format(name))
        builder.writeln('for _ in range(reader.read_int()):')
        # Temporary disable .is_vector, not to enter this if again,
        # then recurse to emit the per-element read into '_x'.
        arg.is_vector = False
        TLGenerator.write_read_code(builder, arg, args, name='_x')
        builder.writeln('{}.append(_x)'.format(name))
        arg.is_vector = True

    elif arg.flag_indicator:
        # Read the flags, which will indicate what items we should read next
        builder.writeln('flags = reader.read_int()')
        builder.writeln()

    elif 'int' == arg.type:
        builder.writeln('{} = reader.read_int()'.format(name))

    elif 'long' == arg.type:
        builder.writeln('{} = reader.read_long()'.format(name))

    elif 'int128' == arg.type:
        builder.writeln(
            '{} = reader.read_large_int(bits=128)'.format(name)
        )

    elif 'int256' == arg.type:
        builder.writeln(
            '{} = reader.read_large_int(bits=256)'.format(name)
        )

    elif 'double' == arg.type:
        builder.writeln('{} = reader.read_double()'.format(name))

    elif 'string' == arg.type:
        builder.writeln('{} = reader.tgread_string()'.format(name))

    elif 'Bool' == arg.type:
        builder.writeln('{} = reader.tgread_bool()'.format(name))

    elif 'true' == arg.type:
        # Arbitrary not-None value, don't actually read "true" flags
        builder.writeln('{} = True'.format(name))

    elif 'bytes' == arg.type:
        builder.writeln('{} = reader.tgread_bytes()'.format(name))

    elif 'date' == arg.type:  # Custom format
        builder.writeln('{} = reader.tgread_date()'.format(name))

    else:
        # Else it may be a custom type
        if not arg.skip_constructor_id:
            builder.writeln('{} = reader.tgread_object()'.format(name))
        else:
            # Manually instantiate the known type, bypassing the
            # constructor-ID dispatch.
            builder.writeln('{} = types.{}.from_reader(reader)'.format(
                name, TLObject.class_name_for(arg.type)))

    # End vector and flag blocks if required (if we opened them before)
    if arg.is_vector:
        builder.end_block()

    if was_flag:
        # Close the 'if flags & N:' block and emit the 'else: name = None'
        # fallback for absent optional fields.
        builder.current_indent -= 1
        builder.writeln('else:')
        builder.writeln('{} = None'.format(name))
        builder.current_indent -= 1
        # Restore .is_flag
        arg.is_flag = True
@staticmethod
def write_request_result_code(builder, tlobject):
"""
Writes the receive code for the given function
:param builder: The source code builder
:param tlobject: The TLObject for which the 'self.result = '
will be written
"""
if tlobject.result.startswith('Vector<'):
# Vector results are a bit special since they can also be composed
# of integer values and such; however, the result of requests is
# not parsed as arguments are and it's a bit harder to tell which
# is which.
if tlobject.result == 'Vector<int>':
builder.writeln('reader.read_int() # Vector id')
builder.writeln('count = reader.read_int()')
builder.writeln(
'self.result = [reader.read_int() for _ in range(count)]'
)
elif tlobject.result == 'Vector<long>':
builder.writeln('reader.read_int() # Vector id')
builder.writeln('count = reader.read_long()')
builder.writeln(
'self.result = [reader.read_long() for _ in range(count)]'
)
else:
builder.writeln('self.result = reader.tgread_vector()')
else:
builder.writeln('self.result = reader.tgread_object()')
|
[
"totufals@hotmail.com"
] |
totufals@hotmail.com
|
e3405de232c07e7515801124b11d7fee65e18b3f
|
76af281542524f4bb2c80f928b6b5f0786ca1557
|
/code kata/hunterbcs.py
|
c05f27f4aa57fea8b70cbd59253c6abba2faf7c4
|
[] |
no_license
|
sharmishah/guvi
|
1b910042f101265c7628724f994eca1c23c0f59d
|
51d854e38c2013799b24af43501bc8190b150897
|
refs/heads/master
| 2021-07-02T04:20:03.629762
| 2021-06-28T03:08:10
| 2021-06-28T03:08:10
| 191,355,062
| 0
| 4
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,087
|
py
|
class Node:
    """A single binary-search-tree node holding one integer key."""

    def __init__(self, d):
        self.ddata = d    # key stored in this node
        self.left = None  # left child: keys <= ddata
        self.right = None # right child: keys > ddata
def insert(root, ins):
    """Insert node *ins* into the BST rooted at *root*.

    Keys equal to the current node's key go to the LEFT subtree, which
    preserves the original tie-breaking rule for duplicates.

    The original implementation compared ``ins.ddata`` against
    ``root.ddata`` twice (one if-chain to recurse, a second one to attach
    the leaf) and fell through the first chain after the recursive call
    returned; this version decides the side once.
    """
    if ins.ddata > root.ddata:
        # Belongs in the right subtree.
        if root.right is None:
            root.right = ins
        else:
            insert(root.right, ins)
    else:
        # Smaller-or-equal keys go left.
        if root.left is None:
            root.left = ins
        else:
            insert(root.left, ins)
def inorder(root):
if root is None:
return
else:
inorder(root.left)
print(root.ddata,end=" ")
inorder(root.right)
def lcafinder(root, nn1, nn2):
    """Return the key of the lowest common ancestor of keys *nn1* and
    *nn2* in the BST rooted at *root* (None for an empty tree).

    Walks down iteratively: while both targets lie strictly on the same
    side of the current node, descend that way; the first node that
    splits them (or equals one of them) is the LCA.
    """
    node = root
    while node is not None:
        if nn1 < node.ddata and nn2 < node.ddata:
            node = node.left
        elif nn1 > node.ddata and nn2 > node.ddata:
            node = node.right
        else:
            return node.ddata
    return None
# Driver: read node count, the keys, and the two query keys from stdin,
# build the BST, then print the lowest-common-ancestor key.
nn = int(input())
az = list(map(int, input().split()))
l, rr1 = map(int, input().split())
# Bug fix: the key list is bound to ``az`` above, but the original code
# referenced an undefined name ``aa`` here, raising NameError at runtime.
rr = Node(az[0])
for i in range(1, nn):
    nnn = Node(az[i])
    insert(rr, nnn)
# inorder(rr)
print(lcafinder(rr, l, rr1))
|
[
"noreply@github.com"
] |
noreply@github.com
|
63ee22b92961c1580d54c097f9c343ee6cb8c575
|
ea3f3e917698d341398caf63c3041be7cea481a5
|
/main/migrations/0002_auto_20200908_1312.py
|
f1fb3d4c092d0847b4bc90ec04d46e0b8d5be16d
|
[] |
no_license
|
Urbanino/cats_site
|
25c112b617843258f510acf50c2960ca4db942bb
|
b1a035d5f6ced6a6b66d0d1f78d54a58b9cea061
|
refs/heads/master
| 2022-12-14T18:26:38.040503
| 2020-09-12T18:12:07
| 2020-09-12T18:12:07
| 294,999,286
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 736
|
py
|
# Generated by Django 3.1 on 2020-09-08 10:12
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated migration: relaxes both parent foreign keys on the
    # ``cat`` model so a cat may be stored without known parents
    # (blank/null allowed) and deleting a parent leaves children intact
    # (DO_NOTHING at the database level).

    dependencies = [
        ('main', '0001_initial'),
    ]

    operations = [
        migrations.AlterField(
            model_name='cat',
            name='parent1',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='Mom', to='main.cat'),
        ),
        migrations.AlterField(
            model_name='cat',
            name='parent2',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='Dad', to='main.cat'),
        ),
    ]
|
[
"i_n.n.kalnitskaya@mpt.ru"
] |
i_n.n.kalnitskaya@mpt.ru
|
8cbe4395cfa14effb2b5c91d67fabb7bae51e987
|
d74fba6224b4a3035e5b2411538f2ed5c3636e8e
|
/packer_refresh.py
|
10a6b92d13cf365fbb4301ed37ba9af9a589c7e0
|
[] |
no_license
|
platform9-incubator/packer-refresh
|
e54ebb0a3ae305ca2f79325097e0266f5b009794
|
93ae4363937fe224ac5f67dfb041a2d9665afc49
|
refs/heads/master
| 2020-04-06T12:45:58.851057
| 2018-11-14T01:17:05
| 2018-11-14T01:17:05
| 157,469,700
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,238
|
py
|
import openstack
import json
from operator import itemgetter
from packerpy import PackerExecutable
# Rotate Packer-built OpenStack images: keep at most 4 historical images,
# rename the current "-latest" image to its creation date, then build and
# tag a fresh "-latest" image with Packer.
IMAGE_NAME='centos-7'
IMAGE_SSH_USERNAME='centos'
PACKER_EXECUTABLE_PATH='/usr/local/bin/packer'
PACKER_TEMPLATE_PATH='/root/packer/template.json'
PACKER_BUILD_PATH='/root/packer/build.json'

# Initialize cloud
conn = openstack.connect(cloud='cloud')

print("Getting all images.")
images = conn.image.images()
packer_images = []

print("Looping through images and finding packer created ones")
for image in images:
    if IMAGE_NAME in image['name']:
        if "packer_created" in image['tags']:
            print("Found a packer created image and adding it to the list (Name: {})".format(image['name']))
            packer_images.append(image)

# Newest first, so the oldest image sits at the end of the list.
sorted_images = sorted(packer_images, key=itemgetter('created_at'), reverse=True)
print("Found {} Image(s)".format(len(sorted_images)))

# Prune: repeatedly delete the oldest image until fewer than 5 remain.
while len(sorted_images) >= 5:
    print("There are too many images to save them all...")
    del_image = sorted_images[-1]
    print("Deleting image {} (ID: {})".format(del_image['name'], del_image['id']))
    conn.image.delete_image(del_image, ignore_missing=False)
    del sorted_images[-1]

for image in sorted_images:
    if "{}-latest".format(IMAGE_NAME) in image['name']:
        print("Renaming latest image to it's created date")
        new_image_name="{}-{}.img".format(IMAGE_NAME, image['created_at']).replace(':', '-')
        conn.image.update_image(image, name=new_image_name)

# NOTE(review): if no "-latest" image exists, new_image_name above is
# never assigned and the substitution below raises NameError — confirm a
# latest image is guaranteed present before the first run.
print("Seting up the Packer Template files")
with open(PACKER_TEMPLATE_PATH) as f:
    s = f.read()

# Safely write the changed content, if found in the file
with open(PACKER_BUILD_PATH, 'w+') as f:
    s = s.replace('__SOURCE_IMAGE_NAME__', new_image_name)
    s = s.replace('__IMAGE_NAME__', "{}-latest.img".format(IMAGE_NAME))
    s = s.replace('__IMAGE_SSH_USERNAME__', IMAGE_SSH_USERNAME)
    f.write(s)

print("Building a new Latest Image!")
print("This is very black box and takes a long time!")
PackerExecutable(config={'executable_path': PACKER_EXECUTABLE_PATH}).build(PACKER_BUILD_PATH)

# Tag the freshly built image so the next run's pruning loop finds it.
new_image = conn.image.find_image("{}-latest.img".format(IMAGE_NAME))
conn.image.add_tag(new_image,'packer_created')
print("New Image created! ID: {}".format(new_image['id']))
|
[
"cody@platform9.com"
] |
cody@platform9.com
|
d1e8e113daea1d6dbdb0cfd8f3c762d6ac1dd419
|
7948d5fc89b9df1ba61188a3eb19cb26558a196b
|
/python/check_url.py
|
507eed3db62b512858d770f3ca061b52ca38f558
|
[] |
no_license
|
kotgrek/sa
|
52d9bcf143f1455b61e3f79053c39c13f790ec70
|
e580f95720449b4101ea02abf1343a78e56620a3
|
refs/heads/master
| 2020-08-16T15:03:08.814243
| 2019-10-16T18:46:37
| 2019-10-16T18:46:37
| 215,515,493
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,179
|
py
|
#!/usr/bin/env python
# This script will get a specific URL and either return basic information
# about the site or will throw out an error that URL does not exist.
# Seems to work even when using a proxy server, but it times out on some
# sites, not sure why.
import sys, os, urllib2
from urllib2 import Request, urlopen, URLError
def usage(me):
    # Print usage text (me = the script name, argv[0]) and exit cleanly.
    print "Usage: %s <url>" % (me)
    exit(0)
# some sanity checking
def verifyurl(url):
    # Reject anything that does not start with an http:// or https://
    # scheme; exits with status 1 on failure.
    str1 = "http://" # http protocol
    str2 = "https://" # https protocol
    if not url.startswith(str1) and not url.startswith(str2):
        print "URL does not start with \"%s\" or \"%s\"" % (str1, str2)
        sys.exit(1)
    return
def getresp(url):
    # Fetch *url* and return the urllib2 response object; exit with a
    # message on connection failure or Ctrl-C.
    # NOTE(review): if URLError has neither 'reason' nor 'code',
    # execution falls through to the return with 'response' unbound —
    # confirm that case cannot occur for the URLs used here.
    try:
        response = urllib2.urlopen(url)
    except URLError, e:
        if hasattr(e, 'reason'):
            print "Couldn't connect to server. Reason:", e.reason
            sys.exit(1)
        elif hasattr(e, 'code'):
            print 'The server couldn\'t fulfill request. Code: ', e.code
            sys.exit(1)
    except KeyboardInterrupt: # let's add this for when it hangs
        print "Aborted by user."
        sys.exit(1)
    return response
def printinfo(resp):
    # Dump basic metadata about the fetched page: URL, HTTP status,
    # headers, body length, and finally the body line by line.
    print "URL: ", resp.geturl() # Get the URL
    print "Return code: ", resp.code # Get the code
    # Get the Headers. This returns a dictionary-like object that describes the
    # page fetched, particularly the headers sent by the server
    print "Headers: ", resp.info()
    print "Date: ", resp.info()['date'] # Get the date part of the header
    print "Server: ", resp.info()['server'] # Get the server part of the header
    html = resp.read()
    #print "Data: ", html # Get all data - lots of stuff
    print "Length :", len(html) # Get only the length
    # Show that the file object is iterable; rstrip strips the trailing
    # newlines and carriage returns before printing the output.
    # NOTE(review): resp.read() above already consumed the body, so this
    # loop will usually print nothing — confirm that is intended.
    for line in resp:
        print line.rstrip()
    return
def main():
    # Entry point: validate argv, sanity-check the URL, fetch it and
    # print the response details.
    if len(sys.argv) < 2: # Check args
        usage(sys.argv[0])
    url = sys.argv[1]
    verifyurl(url)
    resp = getresp(url) # get 'response' from URL
    printinfo(resp)

if __name__ == "__main__":
    main()
|
[
"test@test.ru"
] |
test@test.ru
|
e2f49aab034b503c5f4b1be009b004f8bdad9b56
|
f28498bc0d7e46d5b1c545191e43459268014700
|
/test.py
|
1bb57a8ba7e1303849eac3489f97d493b1e1e450
|
[] |
no_license
|
chuangg/stylenet-1
|
b82d95241135f0ab1d13b36f3af03b9f367852ba
|
693c00e683062289be5e878e6aa1951103b13549
|
refs/heads/master
| 2021-01-25T14:11:35.999127
| 2018-02-26T09:35:54
| 2018-02-26T09:35:54
| 123,667,903
| 1
| 0
| null | 2018-03-03T06:56:57
| 2018-03-03T06:56:57
| null |
UTF-8
|
Python
| false
| false
| 1,510
|
py
|
import pickle
import torch
from torch.autograd import Variable
from build_vocab import Vocab
from data_loader import get_data_loader
from data_loader import get_styled_data_loader
from models import EncoderCNN
from models import FactoredLSTM
from loss import masked_cross_entropy
def main():
    """Smoke-test the styled-caption pipeline: load the vocabulary, build
    the data loaders and models, push one styled batch through the
    FactoredLSTM decoder and print the masked cross-entropy loss.
    """
    with open("data/vocab.pkl", 'rb') as f:
        vocab = pickle.load(f)

    img_path = "data/flickr7k_images"
    cap_path = "data/factual_train.txt"
    styled_path = "data/humor/funny_train.txt"

    # Batch size 3 for both the factual and the humorous loaders.
    data_loader = get_data_loader(img_path, cap_path, vocab, 3)
    styled_data_loader = get_styled_data_loader(styled_path, vocab, 3)

    encoder = EncoderCNN(30)
    decoder = FactoredLSTM(30, 40, 40, len(vocab))

    if torch.cuda.is_available():
        encoder = encoder.cuda()
        decoder = decoder.cuda()

    # Only the styled (caption-only) loader is exercised here; the
    # image branch is left commented out.
    # for i, (images, captions, lengths) in enumerate(data_loader):
    for i, (captions, lengths) in enumerate(styled_data_loader):
        # images = Variable(images, volatile=True)
        captions = Variable(captions.long())

        if torch.cuda.is_available():
            # images = images.cuda()
            captions = captions.cuda()

        # features = encoder(images)
        outputs = decoder(captions, features=None, mode="humorous")
        print(lengths - 1)
        print(outputs)
        print(captions[:, 1:])
        # Targets are the captions shifted by one token; lengths shrink
        # accordingly.
        loss = masked_cross_entropy(outputs, captions[:, 1:].contiguous(), lengths - 1)
        print(loss)
        break  # one batch is enough for this smoke test

if __name__ == '__main__':
    main()
|
[
"kakiuchi@crimson.q.t.u-tokyo.ac.jp"
] |
kakiuchi@crimson.q.t.u-tokyo.ac.jp
|
228b7456673d4ffa88f06b9cb379fc6685f1cd98
|
864aa6017cfb5208731bea98b57f863f205895e0
|
/src/facedetect.py
|
fca8334a8538c468792bb9d6f0b2f3d0fce844ee
|
[] |
no_license
|
bharateshwq/youtube-playlist-framedl
|
52ae46a92545b794c482aad6d5ee7ad9007f88e1
|
d2d85e99642dc33121b9ed220c7168ef0cf5cf45
|
refs/heads/master
| 2022-12-01T02:35:15.520424
| 2020-08-10T06:23:17
| 2020-08-10T06:23:17
| 279,625,728
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,682
|
py
|
import cv2
import numpy as np
import sys
# Align and crop a face from ./frames/<name> so the detected right eye
# lands at a fixed reference point, then write the annotated result to
# ./finalframes/. Prints "1" on success and "0" on any failure (the
# caller consumes this on stdout).
input_image = (' '.join(sys.argv[1:]))
imagedir = './frames/'
inputdi = imagedir+input_image

face_cascade = cv2.CascadeClassifier("haarcascade_frontalface_default.xml")
eye_cascade = cv2.CascadeClassifier("haarcascade_eye.xml")

img = cv2.imread(inputdi)
gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
faces = face_cascade.detectMultiScale(gray, 1.1, 4)

# Reject frames containing any face narrower than 200px; x/y/w/h of the
# LAST detected face leak out of this loop and are used below.
# NOTE(review): loop extent reconstructed from context — confirm only the
# size check belongs inside the loop in the original file.
for (x, y, w, h) in faces:
    if w < 200:
        print("0")
        sys.exit()

try:
    roi_gray = gray[y:(y+h), x:(x+w)]
except:
    # Also triggers when no face was detected (x/y/w/h unbound).
    print("0")
    sys.exit()

try:
    eyes = eye_cascade.detectMultiScale(roi_gray, 1.3, 4)
except:
    print("0")
    sys.exit()

# Keep the first two detected eyes.
# NOTE(review): if fewer than two eyes are found, eye_1/eye_2 are unbound
# and the comparison below raises NameError outside any try — confirm.
count = 0
try:
    for (ex, ey, ew, eh) in eyes:
        if count == 0:
            eye_1 = (ex, ey, ew, eh)
        elif count == 1:
            eye_2 = (ex, ey, ew, eh)
        count = count + 1
except:
    print("0")
    sys.exit()

# Order the two eyes left/right by their x coordinate.
if eye_1[0] < eye_2[0]:
    lefteye = eye_1
    righteye = eye_2
else:
    lefteye = eye_2
    righteye = eye_1

# Eye centres in ROI coordinates.
left_eye_center = (int(lefteye[0] + (lefteye[2] / 2)),
                   int(lefteye[1] + (lefteye[3] / 2)))
lefteye_x = left_eye_center[0]
lefteye_y = left_eye_center[1]

right_eye_center = (
    int(righteye[0] + (righteye[2]/2)), int(righteye[1] + (righteye[3]/2)))
righteye_x, righteye_y = right_eye_center[:2]

height, width = img.shape[:2]

# Translation needed to move the right eye to the fixed 1920x960 frame
# reference point.
ref_y = 960/2
ref_x = 1920/2
trans_x = (ref_x-(x+righteye_x))
trans_y = (ref_y-(y+righteye_y))

# Roll angle between the two eyes, in degrees.
delta_x = (righteye_x) - (lefteye_x)
delta_y = (righteye_y) - (lefteye_y)
angle = np.arctan(delta_y/delta_x)
angle = (angle * 180) / np.pi

# Scale so the inter-eye distance becomes 130px.
dist_1 = np.sqrt((delta_x * delta_x) + (delta_y * delta_y))
ratio = (130/(dist_1))
dim = ((int(width * ratio)), (int(height * ratio)))

# Rotate about the right eye, resize, then translate into place.
center = (int(x+righteye_x), int(y+righteye_y))
M = cv2.getRotationMatrix2D(center, (angle), 1.0)
rotated = cv2.warpAffine(img, M, (width, height))
resized = cv2.resize(rotated, dim)
T = np.float32([[1, 0, (trans_x * ratio)], [0, 1, (trans_y * ratio)]])
translation = cv2.warpAffine(resized, T, dim)

# Crop the central half of the aligned image and scale up to 1920x960.
heightmar1 = int(int(height*ratio)/4)
heightmar2 = int(int((height*ratio*3)/4))
widthmar1 = int((width*ratio)/4)
widthmar2 = int((width*ratio*3)/4)
crop_img = translation[heightmar1:heightmar2, widthmar1:widthmar2]
final = cv2.resize(crop_img, (1920, 960))

# Stamp the frame name (extension stripped) in the bottom-left corner.
textpos = (10, 950)
color = (192, 192, 192)
fontface = cv2.FONT_HERSHEY_SIMPLEX
text = input_image[:-5]
finaltexted = cv2.putText(final, text, textpos, fontface,
                          1, color, 1, cv2.LINE_AA, False)

outputdir = './finalframes/'
outputdo = outputdir+input_image
writeStatus = cv2.imwrite(outputdo, finaltexted)
if writeStatus is True:
    print("1")
else:
    print("0")
|
[
"noreply@github.com"
] |
noreply@github.com
|
36fbbcc092ee36375e5a90dc5fc89d7530c3fe18
|
4736bb1e24b72f820f1097336ad2955483c7269c
|
/src/data/preprocessors/default_preprocessor.py
|
06e686b07b8308acea3c930bffc1837aaf054910
|
[
"MIT"
] |
permissive
|
paulwarkentin/tf-ssd-vgg
|
35d1ad0c7f827fde21e8ec5bcfa39241e4c60f94
|
f48e3ccbb8eb092d3cb82a9d90164c7328880477
|
refs/heads/master
| 2022-10-26T09:49:52.840484
| 2018-07-26T10:53:35
| 2018-07-26T10:53:35
| 142,419,537
| 0
| 1
|
MIT
| 2022-10-01T23:50:17
| 2018-07-26T09:30:31
|
Python
|
UTF-8
|
Python
| false
| false
| 2,310
|
py
|
##
## /src/data/preprocessors/default_preprocessor.py
##
## Created by Paul Warkentin <paul@warkentin.email> on 15/07/2018.
## Updated by Paul Warkentin <paul@warkentin.email> on 25/07/2018.
##
import os
import sys
import tensorflow as tf
__exec_dir = sys.path[0]
while os.path.basename(__exec_dir) != "src":
__exec_dir = os.path.dirname(__exec_dir)
sys.path.insert(0, __exec_dir)
class DefaultPreprocessor(object):
"""Handle the pre-processing step that decodes raw image data from the features.
The magic function `__call__(self, *args, **kwargs)` must be implemented in order to function as a valid pre-processing class.
"""
def __init__(self):
"""Initializes the class.
"""
super().__init__()
def __call__(self, inputs):
"""Handle the pre-processing step defined in this class.
The following input features are required:
'image/format',
'image/encoded',
'image/{height,width,channels}',
'image/object/bbox/{y_min,x_min,y_max,x_max}'.
The following output features are computed within this step:
'image',
'image/shape',
'image/object/bbox'.
Arguments:
inputs: Dictionary containing all available input features.
Returns:
Dictionary containing all input features and the new computed output features.
"""
output = {}
# decode image
image = tf.cond(
tf.equal(inputs["image/format"], tf.constant("jpeg", dtype=tf.string)),
true_fn = lambda image=inputs["image/encoded"]: tf.image.decode_jpeg(image, channels=3, dct_method="INTEGER_ACCURATE"),
false_fn = lambda image=inputs["image/encoded"]: tf.image.decode_image(image, channels=3)
)
image.set_shape((None, None, 3))
# build image shape
image_shape = tf.stack([inputs["image/height"], inputs["image/width"], inputs["image/channels"]])
# build bounding boxes
image_object_bbox = tf.stack([
inputs["image/object/bbox/y_min"],
inputs["image/object/bbox/x_min"],
inputs["image/object/bbox/y_max"],
inputs["image/object/bbox/x_max"]
], axis=1)
# change image dtype to float
if image.dtype != tf.float32:
image = tf.cast(image, tf.float32)
output["image"] = image
output["image/shape"] = image_shape
output["image/object/bbox"] = image_object_bbox
return output
|
[
"paul@warkentin.email"
] |
paul@warkentin.email
|
e50673c93f15ded18491c6c3004be70a2341f562
|
565cc3ca69edd22174e635ba3f6198bb8d3eadff
|
/noksfishes/views.py
|
288303b2b193a54ccff6799e7b782b9b5f3b7f76
|
[] |
no_license
|
Deer-WarLord/pyramid-backend
|
e3e29aaa85c6b97a87d30e04d8596103b8ce4a79
|
67b3375603a8534841e0fec4fe6e762d1d7b20dd
|
refs/heads/master
| 2022-12-17T21:59:32.550779
| 2019-04-21T15:57:29
| 2019-04-21T15:57:29
| 156,860,811
| 0
| 0
| null | 2022-12-08T02:27:03
| 2018-11-09T12:44:16
|
JavaScript
|
UTF-8
|
Python
| false
| false
| 3,545
|
py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db.models import Count
from django.http import HttpResponse
from noksfishes.serializers import *
from rest_framework import generics
from rest_framework import permissions
from rest_framework_csv import renderers as r
import csv
from noksfishes.tasks import async_get_ids_for_urls, async_get_ids_for_urls_from_json
# Use ';' as the column delimiter for every CSV export in this module.
r.CSVRenderer.writer_opts = {
    "delimiter": str(u';')
}


class AnalyzedInfoList(generics.ListAPIView):
    # Read-only list of all AnalyzedInfo rows; writes require auth.
    queryset = AnalyzedInfo.objects.all()
    serializer_class = AnalyzedInfoSerializer
    permission_classes = (permissions.IsAuthenticatedOrReadOnly,)


class AnalyzedInfoDetail(generics.RetrieveUpdateDestroyAPIView):
    # Retrieve/update/delete a single AnalyzedInfo row by pk.
    queryset = AnalyzedInfo.objects.all()
    serializer_class = AnalyzedInfoSerializer
    permission_classes = (permissions.IsAuthenticatedOrReadOnly,)


class PublicationsList(generics.ListAPIView):
    # Read-only list of all publications.
    queryset = Publication.objects.all()
    serializer_class = PublicationSerializer
    permission_classes = (permissions.IsAuthenticatedOrReadOnly,)


class PublicationsDetail(generics.RetrieveUpdateDestroyAPIView):
    # Retrieve/update/delete a single publication by pk.
    queryset = Publication.objects.all()
    serializer_class = PublicationSerializer
    permission_classes = (permissions.IsAuthenticatedOrReadOnly,)
class ExportPublicationsList(generics.ListAPIView):
    # CSV export of publications, optionally filtered by query params.
    # NOTE(review): query params are passed straight into .filter() as
    # field lookups — unknown fields raise FieldError and callers fully
    # control the filter; confirm this endpoint is trusted/internal.
    renderer_classes = (r.CSVRenderer, )
    queryset = Publication.objects
    serializer_class = ExportPublicationSerializer
    permission_classes = (permissions.IsAuthenticatedOrReadOnly,)

    def get(self, request, *args, **kwargs):
        if len(request.query_params):
            self.queryset = Publication.objects.filter(**dict(request.query_params.items()))

        return self.list(request, *args, **kwargs)


class PublicationTitleDateList(generics.ListAPIView):
    # Title/date aggregation: groups filtered publications by
    # (title, posted_date) and orders by descending occurrence count.
    queryset = Publication.objects
    serializer_class = PublicationTitleDateSerializer
    permission_classes = (permissions.IsAuthenticatedOrReadOnly,)

    def get(self, request, *args, **kwargs):
        if len(request.query_params):
            self.queryset = Publication.objects.filter(
                **dict(request.query_params.items())
            ).values('title', 'posted_date').annotate(count=Count("title")).order_by("-count")

        return self.list(request, *args, **kwargs)


class ExportTvPublicationsList(generics.ListAPIView):
    # CSV export with the TV-specific serializer; same raw query-param
    # filtering caveat as ExportPublicationsList.
    renderer_classes = (r.CSVRenderer, )
    queryset = Publication.objects.all()
    serializer_class = ExportTvPublicationSerializer
    permission_classes = (permissions.IsAuthenticatedOrReadOnly,)

    def get(self, request, *args, **kwargs):
        if len(request.query_params):
            self.queryset = Publication.objects.filter(**dict(request.query_params.items()))

        return self.list(request, *args, **kwargs)
class PublicationsWithoutKeysList(generics.ListAPIView):
    # Publications that have a URL but no linked shukach key yet,
    # newest first.
    queryset = Publication.objects.filter(shukachpublication__isnull=True).exclude(url__exact='').distinct().order_by("-posted_date")
    serializer_class = PublicationSerializer
    permission_classes = (permissions.IsAuthenticatedOrReadOnly,)


def get_publications_without_keys_count(request):
    # Plain-text count of publications still missing shukach keys.
    count = Publication.objects.filter(shukachpublication__isnull=True).exclude(url__exact='').distinct().count()
    return HttpResponse(count)


def get_keys(request):
    # Fire-and-forget Celery task to resolve shukach ids for all URLs.
    async_get_ids_for_urls.delay()
    return HttpResponse()


def get_keys_from_url(request):
    # Resolve shukach ids for one (url, title) pair, also via Celery.
    async_get_ids_for_urls_from_json.delay((request.GET["url"], request.GET["title"]))
    return HttpResponse()
|
[
"deerwarlord@gmail.com"
] |
deerwarlord@gmail.com
|
0012af356190121a5729e7e62534f30986409a4d
|
aa35721925cee3fa5a600310d3e8e30a6e553162
|
/helpers/locators.py
|
52733356190bebb79eaa55efff47c1642a9b80e7
|
[] |
no_license
|
AlexKysil/countCarDeprecation
|
fe9db67cd24599e7e0c4c3dcc9cbfd144cbe3948
|
8fb35bdeb25e22137a9cdd885860ce15619cf5dc
|
refs/heads/main
| 2023-01-24T09:49:16.686481
| 2020-11-12T21:38:14
| 2020-11-12T21:38:14
| 312,264,095
| 0
| 0
| null | 2020-11-13T15:05:49
| 2020-11-12T12:02:12
|
Python
|
UTF-8
|
Python
| false
| false
| 744
|
py
|
"""
Global locators holfer
"""
from selenium.webdriver.common.by import By
# Main Page
USED_BUTTON = (By.XPATH, "//label[text() = 'Вживані ']")
BRAND_SEARCH = (By.XPATH, "//input[@id='brandTooltipBrandAutocompleteInput-brand']")
BRAND_LIST_VALUE = (By.XPATH, "//a[text()='{}']")
MARK_SEARCH = (By.XPATH, "//input[@id='brandTooltipBrandAutocompleteInput-model']")
SELECT_YEAR_FROM = (By.XPATH, "//select[@id='yearFrom']")
YEAR_FROM_VALUE = (By.XPATH, "//select[@id='yearFrom']//option[@value='{}']")
SELECT_YEAR_TO = (By.XPATH, "//select[@id='yearTo']")
YEAR_TO_VALUE = (By.XPATH, "//select[@id='yearTo']//option[@value='{}']")
OPTION_VALUE = (By.XPATH, "//option[@value='{}']")
SEARCH_BUTTON = (By.XPATH, "//button[@type='submit']")
|
[
"38106741+AlexKysil@users.noreply.github.com"
] |
38106741+AlexKysil@users.noreply.github.com
|
652c07706ee22b08f26f4a2e233e757e87fb3595
|
952b6a1140482cb3dba794dd0dcebe5ae8ac3cfb
|
/virtual/bin/wheel
|
92f092126b71f8ffc4af807851ce11365dce7127
|
[
"MIT"
] |
permissive
|
roxaline/Personal-gallery
|
16cb8881934babd015e6c2e1231a4f7ccf084b91
|
58b245cce52059e740c0213ba7b22b58cda97b3f
|
refs/heads/master
| 2020-04-28T23:50:43.949921
| 2019-03-26T14:33:54
| 2019-03-26T14:33:54
| 175,671,278
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 251
|
#!/home/wecode/Desktop/Personal-gallery/virtual/bin/python
# -*- coding: utf-8 -*-
# Console-script shim generated by pip: normalize argv[0] (strip any
# "-script.py"/"-script.pyw"/".exe" suffix) and delegate to wheel's CLI.
import re
import sys

from wheel.cli import main

if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
|
[
"bayizerechristine4@gmail.com"
] |
bayizerechristine4@gmail.com
|
|
354d0413ca718b78e4f43af79d072d45addc922a
|
0bb58d5ee536b92631053fb8e18840b462475722
|
/PythonFlask/flask_projects/great_number_game/venv/bin/easy_install-2.7
|
1753b1b01698faec2d70c84fb4de831814489ab8
|
[] |
no_license
|
tutfakulunto/codingDojo
|
a0868a46229cc3b74ff72e5a8cc5b2d18d18168b
|
d04ac8b49dbf5f440287ce9f73d357ca7ff274e5
|
refs/heads/master
| 2021-01-12T15:53:51.032396
| 2017-02-22T05:33:41
| 2017-02-22T05:33:41
| 69,322,566
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 289
|
7
|
#!/Users/scott/Desktop/flask_projects/great_number_game/venv/bin/python2.7
# -*- coding: utf-8 -*-
# Console-script shim generated by setuptools: normalize argv[0] and
# delegate to the easy_install entry point.
import re
import sys

from setuptools.command.easy_install import main

if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
|
[
"c.scott.johnson@gmail.com"
] |
c.scott.johnson@gmail.com
|
1aa13187b581668799052bae234fe73c77b9b0d3
|
f0d713996eb095bcdc701f3fab0a8110b8541cbb
|
/JEt4kwPtY6CGPsT9t_6.py
|
184ee79d9dfae190511e21dc5f73d8aed48cb2d4
|
[] |
no_license
|
daniel-reich/turbo-robot
|
feda6c0523bb83ab8954b6d06302bfec5b16ebdf
|
a7a25c63097674c0a81675eed7e6b763785f1c41
|
refs/heads/main
| 2023-03-26T01:55:14.210264
| 2021-03-23T16:08:01
| 2021-03-23T16:08:01
| 350,773,815
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 954
|
py
|
"""
Create a function that takes a mathematical expression as a string, list of
numbers on which the mathematical expression is to be calculated and return
the result as a list of string.
### Examples
mathematical("f(y)=y+1",[1,2]) ➞ ["f(1)=2","f(2)=3"]
mathematical("f(y)=y^2",[1,2,3]) ➞ ["f(1)=1","f(2)=4","f(3)=9"]
mathematical("f(y)=yx3",[1,2,3]) ➞ ["f(1)=3","f(2)=6","f(3)=9"]
### Notes
* List of numbers are positive integers.
* In the algebraic expression x = `*`
"""
def mathematical(exp, numbers):
    """Evaluate the algebraic expression *exp* (e.g. ``"f(y)=y+1"``) for
    each value in *numbers* and return the results as formatted strings.

    Per the kata spec, ``x`` denotes multiplication and ``^``
    exponentiation; *numbers* are positive integers.

    Returns e.g. ``["f(1)=2", "f(2)=3"]`` and ``[]`` for an empty input.

    NOTE: uses ``eval()`` on the substituted right-hand side — only call
    with trusted expression strings, never user input.
    """
    answers = []
    for num in numbers:
        expression = exp.replace('y', str(num))
        # Split once (the original split twice); part 0 is the call label
        # like "f(1)", part 1 the arithmetic to evaluate.
        parts = expression.split('=')
        call, equation = parts[0], parts[1]
        # Translate kata operators into Python ones ('^' after 'x' so
        # '**' is never corrupted); no-ops when absent.
        equation = equation.replace('x', '*').replace('^', '**')
        answers.append('{0}={1:.0f}'.format(call, eval(equation)))
    # The original attached this return to a pointless for/else clause;
    # with no break in the loop a plain return is equivalent.
    return answers
|
[
"daniel.reich@danielreichs-MacBook-Pro.local"
] |
daniel.reich@danielreichs-MacBook-Pro.local
|
30b139363aba2efd31d5bd8f195133904a3d6410
|
e008299cb5bd93cb51adf4b095baacf8e6e46f9a
|
/Default/side_bar.py
|
4a75c0a3dac4106404b3f231003fc7b07ba373a4
|
[] |
no_license
|
alademann/SublimeText3-Packages
|
317616793b8134af4793925bae37fd84640fef62
|
6c704a21cc4a4bc01ad2e0b165979f91552143b9
|
refs/heads/master
| 2021-01-17T07:02:01.161606
| 2016-04-14T14:20:39
| 2016-04-14T14:20:39
| 11,374,134
| 2
| 2
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,821
|
py
|
import sublime, sublime_plugin
import os
import functools
class NewFileAtCommand(sublime_plugin.WindowCommand):
    # Side-bar command: open a new buffer whose default save directory is
    # the selected folder.
    def run(self, dirs):
        v = self.window.new_file()

        if len(dirs) == 1:
            v.settings().set('default_dir', dirs[0])

    def is_visible(self, dirs):
        return len(dirs) == 1


class DeleteFileCommand(sublime_plugin.WindowCommand):
    # Move the selected files to the OS trash, closing their open views
    # first; aborts if the user cancels closing a dirty view.
    def run(self, files):
        # Import send2trash on demand, to avoid initialising ctypes for as long as possible
        import Default.send2trash as send2trash

        for f in files:
            v = self.window.find_open_file(f)
            if v != None and not v.close():
                return

            send2trash.send2trash(f)

    def is_visible(self, files):
        return len(files) > 0


class NewFolderCommand(sublime_plugin.WindowCommand):
    # Prompt for a name and create that folder inside the selected one.
    def run(self, dirs):
        self.window.show_input_panel("Folder Name:", "", functools.partial(self.on_done, dirs[0]), None, None)

    def on_done(self, dir, name):
        os.makedirs(os.path.join(dir, name))

    def is_visible(self, dirs):
        return len(dirs) == 1


class DeleteFolderCommand(sublime_plugin.WindowCommand):
    # Confirm, then move the selected folders to the OS trash.
    def run(self, dirs):
        if sublime.ok_cancel_dialog("Delete Folder?", "Delete"):
            import Default.send2trash as send2trash
            try:
                for d in dirs:
                    send2trash.send2trash(d)
            except:
                sublime.status_message("Unable to delete folder")

    def is_visible(self, dirs):
        return len(dirs) > 0


class RenamePathCommand(sublime_plugin.WindowCommand):
    # Prompt for a new name (base name pre-selected, extension kept out of
    # the selection) and rename the path, retargeting any open view.
    def run(self, paths):
        branch, leaf = os.path.split(paths[0])
        v = self.window.show_input_panel("New Name:", leaf, functools.partial(self.on_done, paths[0], branch), None, None)
        name, ext = os.path.splitext(leaf)

        v.sel().clear()
        v.sel().add(sublime.Region(0, len(name)))

    def on_done(self, old, branch, leaf):
        new = os.path.join(branch, leaf)

        try:
            os.rename(old, new)

            v = self.window.find_open_file(old)
            if v:
                v.retarget(new)
        except:
            sublime.status_message("Unable to rename")

    def is_visible(self, paths):
        return len(paths) == 1


class OpenContainingFolderCommand(sublime_plugin.WindowCommand):
    # Reveal the selected file in the OS file manager.
    def run(self, files):
        branch,leaf = os.path.split(files[0])
        self.window.run_command("open_dir", {"dir": branch, "file": leaf})

    def is_visible(self, files):
        return len(files) > 0


class FindInFolderCommand(sublime_plugin.WindowCommand):
    # Open the Find-in-Files panel pre-scoped to the selected folders.
    def run(self, dirs):
        self.window.run_command("show_panel", {"panel": "find_in_files",
                                               "where": ",".join(dirs)})

    def is_visible(self, dirs):
        return len(dirs) > 0
|
[
"aaron.lademann@gmail.com"
] |
aaron.lademann@gmail.com
|
0ee27ac5c2bab74b50ad2464577cd9e7a785b147
|
2eef8688d9f928de1c9fa4de4a045fa0dae97eaa
|
/authentication/urls.py
|
a1547debf84ea68191354dcc0eae804b158ddae7
|
[] |
no_license
|
teefats/GlotiExpenses
|
3a98096642ef6df9008489c5db8c822703e43ab7
|
12aa1c9a37c5cf0148a7f55b114402a3044f4e60
|
refs/heads/master
| 2023-06-22T13:42:37.362537
| 2021-07-26T19:06:31
| 2021-07-26T19:06:31
| 389,741,004
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 219
|
py
|
from .views import RegistrationView,LoginView
from django.urls import path

# Authentication endpoints: user registration and login.
urlpatterns = [
    path('register', RegistrationView.as_view(), name='register'),
    path('login', LoginView.as_view(), name='login')
]
|
[
"tee_fats@yahoo.com"
] |
tee_fats@yahoo.com
|
5c29f20a9b8f5d728b26a08303d398211c83160d
|
4933ddce71f4bcf03863fdd4a6760879a0a54707
|
/testingarea/visualsarea1/histogramexample.py
|
951242b5f4b9271d410b6a57eb5f9949681e7d92
|
[] |
no_license
|
thisisyomans/FCPM
|
414fcaedd169b5982d5fd522c727c06c56f2c7ad
|
5babc3d26f7600163c4381281287444954219eec
|
refs/heads/master
| 2020-04-09T09:29:37.498951
| 2019-02-06T03:54:33
| 2019-02-06T03:54:33
| 160,235,514
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 328
|
py
|
import matplotlib.pyplot as pt
import pandas as pd  # NOTE(review): imported but unused in this script

# Plot an age-distribution histogram with fixed 10-year buckets.
age_data = [20, 30, 54, 66, 70, 10, 45, 65, 77, 99, 120, 130, 29, 40, 80, 75, 90]
bins = [0, 10, 20, 30, 40, 50, 60, 70, 80,90, 100, 110, 120]
pt.hist(age_data, bins, histtype = 'bar', rwidth = 0.8)
pt.title('Age Distribution')
pt.xlabel('Age')
pt.ylabel('People')
pt.show()
|
[
"tanejamm@gmail.com"
] |
tanejamm@gmail.com
|
be398d4900ead48a8c4f85677960d5cc62f5a9be
|
1a635b198973302850baeb1d5a3d583351fc50d3
|
/buglocalizer/test.py
|
b724a81795a3e1c2d4bc3b2947712fcb5e1dc5ea
|
[
"MIT"
] |
permissive
|
datnvhust/thesis
|
6ceb310c60f19e6179b4a7120641706c2ac94391
|
60a91c6f92171e48f02227b6d05ffdaf6be5fea8
|
refs/heads/master
| 2023-02-13T12:47:44.600783
| 2021-01-11T14:52:47
| 2021-01-11T14:52:47
| 314,499,492
| 0
| 0
|
MIT
| 2021-01-11T14:52:48
| 2020-11-20T09:00:29
|
Python
|
UTF-8
|
Python
| false
| false
| 8,943
|
py
|
import GA
import numpy
from datasets import DATASET
import json
import pickle
import numpy as np
import operator
from datetime import datetime
# import pygad
# Load the preprocessed corpus (source files + bug reports) and the
# per-algorithm ranking scores produced by earlier pipeline stages.
# NOTE(review): pickle files are assumed to be trusted local artifacts.
with open(DATASET.root / 'preprocessed_src.pickle', 'rb') as file:
    src_files = pickle.load(file)
with open(DATASET.root / 'preprocessed_reports.pickle', 'rb') as file:
    bug_reports = pickle.load(file)
with open(DATASET.root / 'token_matching.json', 'r') as file:
    token_matching_score = json.load(file)
with open(DATASET.root / 'vsm_similarity.json', 'r') as file:
    vsm_similarity_score = json.load(file)
with open(DATASET.root / 'stack_trace.json', 'r') as file:
    stack_trace_score = json.load(file)
with open(DATASET.root / 'semantic_similarity.json', 'r') as file:
    semantic_similarity_score = json.load(file)
with open(DATASET.root / 'fixed_bug_reports.json', 'r') as file:
    fixed_bug_reports_score = json.load(file)
def combine_rank_scores(coeffs, *rank_scores):
    """Combine the per-algorithm rank scores into one weighted score per bug.

    Each element of *rank_scores* is a per-bug list of score vectors; for
    every bug the algorithm scores are stacked and dotted with *coeffs*.
    """
    weights = np.asarray(coeffs)
    return [weights @ np.array(per_bug) for per_bug in zip(*rank_scores)]
def cost(coeffs, i):
    """Fitness function maximized by the GA driver.

    Combines the five per-algorithm score lists with *coeffs* and scores
    the resulting per-bug rankings by MRR + MAP over all bug reports.

    Parameters
    ----------
    coeffs : sequence of float
        One weight per ranking algorithm.
    i : int
        Solution index supplied by the GA driver; unused here but part of
        the required fitness-function signature.
    """
    print(coeffs)
    final_scores = combine_rank_scores(coeffs, vsm_similarity_score, token_matching_score,
                                       fixed_bug_reports_score, semantic_similarity_score,
                                       stack_trace_score)
    mrr = []
    mean_avgp = []
    # The loop variable was renamed from `i` so it no longer shadows the
    # GA solution-index parameter above.
    for report_idx, report in enumerate(bug_reports.items()):
        # Source files ordered by combined score, best first.
        src_ranks, _ = zip(*sorted(zip(src_files.keys(), final_scores[report_idx]),
                                   key=operator.itemgetter(1), reverse=True))
        # Ground-truth files fixed for this bug report.
        fixed_files = report[1].fixed_files
        # 1-based ranks of every ground-truth file in the predicted order.
        relevant_ranks = sorted(src_ranks.index(fixed) + 1
                                for fixed in fixed_files)
        # Mean Reciprocal Rank uses the best (smallest) relevant rank.
        min_rank = relevant_ranks[0]
        mrr.append(1 / min_rank)
        # Average precision over the relevant ranks (MAP contribution).
        mean_avgp.append(np.mean([len(relevant_ranks[:j + 1]) / rank
                                  for j, rank in enumerate(relevant_ranks)]))
    return np.mean(mrr) + np.mean(mean_avgp)
def evaluate(coeffs):
    """Evaluate the weighted score combination *coeffs* over all bug reports.

    Writes each bug's full predicted ranking to 'output.csv' and returns a
    tuple of (top-N hit counts, top-N accuracy, MRR, MAP, precision@N,
    recall@N, F-measure@N) for N in (1, 5, 10).
    """
    final_scores = combine_rank_scores(coeffs, vsm_similarity_score, token_matching_score,
                                       fixed_bug_reports_score, semantic_similarity_score,
                                       stack_trace_score)
    # Writer for the output file
    result_file = open('output.csv', 'w')
    top_n = (1, 5, 10)
    top_n_rank = [0] * len(top_n)
    mrr = []
    mean_avgp = []
    # One accumulator list per cutoff N; indexed by bug position `i` below.
    precision_at_n = [[] for _ in top_n]
    recall_at_n = [[] for _ in top_n]
    f_measure_at_n = [[] for _ in top_n]
    for i, (bug_id, report) in enumerate(bug_reports.items()):
        # Source files ordered by combined score, best first.
        src_ranks, _ = zip(*sorted(zip(src_files.keys(), final_scores[i]),
                                   key=operator.itemgetter(1), reverse=True))
        # Getting reported fixed files
        fixed_files = report.fixed_files
        # Iterating over top n
        for k, rank in enumerate(top_n):
            # Fixed files found within the first `rank` predictions.
            hit = set(src_ranks[:rank]) & set(fixed_files)
            # Computing top n rank
            if hit:
                top_n_rank[k] += 1
            # Computing precision and recall at n
            if not hit:
                precision_at_n[k].append(0)
            else:
                precision_at_n[k].append(len(hit) / len(src_ranks[:rank]))
            recall_at_n[k].append(len(hit) / len(fixed_files))
            # Harmonic mean of precision and recall; 0 when both are 0.
            if not (precision_at_n[k][i] + recall_at_n[k][i]):
                f_measure_at_n[k].append(0)
            else:
                f_measure_at_n[k].append(2 * (precision_at_n[k][i] * recall_at_n[k][i])
                                         / (precision_at_n[k][i] + recall_at_n[k][i]))
        # 1-based ranks of every reported fixed file in the predicted order.
        relevant_ranks = sorted(src_ranks.index(fixed) + 1
                                for fixed in fixed_files)
        # MRR
        min_rank = relevant_ranks[0]
        mrr.append(1 / min_rank)
        # MAP
        mean_avgp.append(np.mean([len(relevant_ranks[:j + 1]) / rank
                                  for j, rank in enumerate(relevant_ranks)]))
        result_file.write(bug_id + ',' + ','.join(src_ranks) + '\n')
    result_file.close()
    return (top_n_rank, [x / len(bug_reports) for x in top_n_rank],
            np.mean(mrr), np.mean(mean_avgp),
            np.mean(precision_at_n, axis=1).tolist(), np.mean(recall_at_n, axis=1).tolist(),
            np.mean(f_measure_at_n, axis=1).tolist())
# print(evaluate([0.14719488, 0.13018921, 0.08035653, 0.9556664, 6.26271194]))
# ----- GA hyperparameters -----
num_generations = 100 # Number of generations.
num_parents_mating = 7 # Number of solutions to be selected as parents in the mating pool.
sol_per_pop = 75 # Number of solutions in the population.
num_genes = 5 # One gene per combination coefficient.
init_range_low = 0
init_range_high = 1
parent_selection_type = "sss" # Type of parent selection.
keep_parents = 7 # Number of parents to keep in the next population. -1 means keep all parents and 0 means keep nothing.
crossover_type = "single_point" # Type of the crossover operator.
# Parameters of the mutation operation.
mutation_type = "random" # Type of the mutation operator.
mutation_percent_genes = 10 # Percentage of genes to mutate. This parameter has no action if the parameter mutation_num_genes exists or when mutation_type is None.
# Best fitness from the previous generation; updated by callback_generation.
last_fitness = 0
def callback_generation(ga_instance):
    """Per-generation hook: print fitness and its change since last call."""
    global last_fitness
    best_fitness = ga_instance.best_solution()[1]
    print(f"Generation = {ga_instance.generations_completed}")
    print(f"Fitness = {best_fitness}")
    print(f"Change = {best_fitness - last_fitness}")
    last_fitness = best_fitness
# Build the GA driver; `cost` is the fitness function maximized over the
# combination coefficients.
time1 = datetime.now()
print(time1)
ga_instance = GA.GA(num_generations=num_generations,
                    num_parents_mating=num_parents_mating,
                    fitness_func=cost,
                    sol_per_pop=sol_per_pop,
                    num_genes=num_genes,
                    init_range_low=init_range_low,
                    init_range_high=init_range_high,
                    parent_selection_type=parent_selection_type,
                    keep_parents=keep_parents,
                    mutation_by_replacement=True,
                    crossover_type=crossover_type,
                    mutation_type=mutation_type,
                    mutation_percent_genes=mutation_percent_genes,
                    mutation_probability=0.1,
                    callback_generation=callback_generation)
# Running the GA to optimize the parameters of the function.
ga_instance.run()
# Plot how the fitness values evolve over generations.
ga_instance.plot_result()
# Returning the details of the best solution.
print(datetime.now() - time1)
solution, solution_fitness, solution_idx = ga_instance.best_solution()
print("Parameters of the best solution : {solution}".format(solution=solution))
print("Fitness value of the best solution = {solution_fitness}".format(solution_fitness=solution_fitness))
print("Index of the best solution : {solution_idx}".format(solution_idx=solution_idx))
# Normalize the coefficients to sum to 1 before the final evaluation.
solution = solution / sum(solution)
prediction = evaluate(solution)
print("Predicted output based on the best solution : {prediction}".format(prediction=prediction))
if ga_instance.best_solution_generation != -1:
    print("Best fitness value reached after {best_solution_generation} generations.".format(best_solution_generation=ga_instance.best_solution_generation))
# Saving the GA instance.
filename = 'genetic' # The filename to which the instance is saved. The name is without extension.
ga_instance.save(filename=filename)
# Loading the saved GA instance.
loaded_ga_instance = GA.load(filename=filename)
loaded_ga_instance.plot_result()
|
[
"dat.nv160952@sis.hust.edu.vn"
] |
dat.nv160952@sis.hust.edu.vn
|
ee1d608f14e56fb7d473ff0f02a901191813393b
|
7fe6cfee4ab3c3b8df4f40976b57a93dc1735541
|
/StarMaker/TestSuitePackge/LogInSuite.py
|
810d04558f2f89f64e9bfd0690bdd111c379b0b1
|
[] |
no_license
|
idve/starmaker
|
a86661523503eeac9eec117a54fd09e8fa130daa
|
d14ca76c3a1c7fee51aedd56f1a07d061a00f2fa
|
refs/heads/master
| 2022-08-19T14:53:27.098580
| 2020-05-18T15:05:04
| 2020-05-18T15:05:04
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,585
|
py
|
# coding=utf-8
import unittest
from StarMaker.Utils.CreateTestReport import CreatTestReporter
Tester = "崔尧椋"  # Tester name recorded in the generated HTML reports.

# Login test-suite collection.
class LogInSuite(unittest.TestCase):
    # Suite: application start-up (P0 cases).
    @staticmethod
    def test_Suite_001_StarUpSuite():
        from StarMaker.Action import StartUpModular
        # Create a test suite.
        StarUpSuiteTest = unittest.TestSuite()
        # Add the test-case class to the suite.
        StarUpSuiteTest.addTest(unittest.makeSuite(
            StartUpModular.StarUpCase)
        )
        NameFile = "启动app"
        Title = "<启动app—P0>自动化测试用例执行结果"
        Describe = "启动appP0级用例——共6条"
        CreatTestReporter().HTMLReporter(NameFile, Title, Describe, StarUpSuiteTest, Tester)

    # Suite: log in with a phone number (P0 cases).
    @staticmethod
    def test_Suite_002_PhoneLoginSuite():
        from TestCase import PhoneLogInCase
        PhoneLoginSuiteTest = unittest.TestSuite()
        PhoneLoginSuiteTest.addTest(unittest.makeSuite(
            PhoneLogInCase.PhoneLogInCase)
        )
        NameFile = "手机号登录"
        T = "<手机号登录—P0>自动化测试用例执行结果"
        Des = "手机号登录P0级用例——共17条"
        CreatTestReporter().HTMLReporter(NameFile, T, Des, PhoneLoginSuiteTest, Tester)

    # Suite: log in with an e-mail address (P0 cases).
    @staticmethod
    def test_Suite_003_EmailLoginSuite():
        from StarMaker.Action import LogInModular
        EmailLogInSuiteTest = unittest.TestSuite()
        EmailLogInSuiteTest.addTest(unittest.makeSuite(
            LogInModular.EmailLogInCase)
        )
        NameFile = "邮箱登录"
        T = "<邮箱登录—P0>自动化测试用例执行结果"
        Des = "邮箱登录P0级用例——共8条"
        CreatTestReporter().HTMLReporter(NameFile, T, Des, EmailLogInSuiteTest, Tester)

    # Suite: profile-page verification (P1 cases).
    @staticmethod
    def test_Suite_004_ProfileSuite():
        from TestCase import ProfileCase
        # Create a test suite.
        ProfileSuiteTest = unittest.TestSuite()
        # Add the test-case class to the suite.
        ProfileSuiteTest.addTest(unittest.makeSuite(
            ProfileCase.ProfileCase)
        )
        NameFile = "Profile页"
        Title = "<Profile页—P1>自动化测试用例执行结果"
        Describe = "Profile页P1级用例——共19条"
        CreatTestReporter().HTMLReporter(NameFile, Title, Describe, ProfileSuiteTest, Tester)
# if __name__ == '__main__':
# unittest.main()
# LogInSuite = unittest.TestSuite()
# LogInSuite.addTest(unittest.makeSuite())
|
[
"yaoliang.cui@ushow.media"
] |
yaoliang.cui@ushow.media
|
799e41b356a34fc5f1c698547653b846f6f7b943
|
d8c7c35232d485a511d45852bb362c9ff1a9a0ec
|
/Python/teste.py
|
242449ccab6146448de5983f405a5e6224dcc0f2
|
[] |
no_license
|
GustavoArsenio/deAlunoParaAluno
|
1ff88c9b191ce56b570141b9cbd51c6c781975fd
|
6adfe87a9b6eee4a0f81362d299b5946503e98d8
|
refs/heads/master
| 2020-03-19T07:03:49.627000
| 2019-12-17T01:04:55
| 2019-12-17T01:04:55
| 136,081,626
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,201
|
py
|
# x = int( input("Digite um valor"))
#x = float(input("Digite a primeira nota "))
#y = float(input("Digite a segunda nota") )
#print("É numero? ", x.isnumeric())
#print("É inteiro? ", x.isinterger())
#print("É espaço? ", x.isspace())
#print("É só letra? ", x.isalpha())
#print("É alfanumerico? ",x.isalnum())
#print("Esta em Maiuscula? ", x.isupper())
#print("Esta em minuscula? ", x.islower())
#print("Esta captalizada?", x.istitle())
#print("O valor tem como dobro {}, tem como triplo {} e tem como raiz {}".format(x*2,x*3, math.sqrt(x)))
#print("A medida de {} e {} é {}".format(x,y,(x+y)/2))
#x = float( input("Digite a quantidade em metros: "))
#print("{} mm".format(x*1000))
##print("{} cm".format(x*100))
#print("{} dm".format(x/10))
#print("{} km".format(x/1000))
#altura = float(input("Digite a altura da parede: "))
#largura = float(input("Digite a largura da parede: "))
#print("A sua parede tem dimensão de {} por {} e a area é: {}".format(altura,largura,altura*largura))
#print("Serão necessarias {} latas de tinta".format(altura*largura/2))
#valor = float(input("Digite o preço do produto: "))
#print("O valor do produto com 5'%' de desconto é: {}".format( valor - (valor/100*5)))
#salario = float(input("Digite o salario do funcionario: "))
#print("O salario atual sera de R${:.2f} ".format(salario+salario/100*15 ))
#km = float(input("Quantos km rodados? "))
#dias = float(input("Quantos dias?"))
#print("R${}".format( (km*0.15) + (dias*60) ))
#salario = float( input("Digite seu salario: ") )
#if salario <= 1250 :
# novo = salario+salario*15/100
#else :
# novo = salario+ salario*10/100
# *** Esta dando errado ***
# import math
# x = input("Digite o angulo: ")
# print('Seno: {}'.format( math.sin( math.radians( x ) ) ) )
import math
#x = float(input("Digite um valor float: "))
#print("A parte inteira é: {}".format( math.floor(x) ) )
#cat = float( input ("Digite o comprimento do primeiro cateto: ") )
#cat2 = float( input ("Digite o comprimento do segundo cateto: ") )
#print("A hipotenusa do triangulo retangulo é: {:.2f}".format( math.sqrt( cat2**2+cat**2 ) ) )
#angulo = float( input( "Digite o valor do angulo: " ) )
#print("O seno de {} é {:.2f}".format( angulo ,math.sin( math.radians(angulo) ) ))
#print("O cosseno de {} é {:.2f}".format( angulo ,math.cos( math.radians(angulo) ) ))
#print("O tangente de {} é {:.2f}".format( angulo ,math.tan( math.radians(angulo) ) ))
# import random
#alunos = [ input("Digite o nome do 1º aluno: ") , input("Digite o 2º aluno: "), input("Digite o 3º aluno: "), input("Digite o 4º aluno: ") ]
#random.shuffle(alunos)
#print("O aluno escolhido foi: {}".format( random.choice(alunos) ))
#print("A ordem será:")
#print(alunos)
# frase = str( input("Digite uma string: ") )
#print("O tamanaho da string é: {}".format( len(frase) ) )
#contar = str( input("Digite uma string para contar: ") )
#print("A letra {} incide {}x".format( contar ,frase.count(contar) ))
# Error procurar = str( input("Digite uma string para procura: ") )
# Error print("A string começa na posição: {}".format( frase.find('curso') )
#print("tem a string? {} ".format( 'curso' in frase))
|
[
"gustavo.arsenio.sousa@Hotmail.com"
] |
gustavo.arsenio.sousa@Hotmail.com
|
5b294355d75b411aad4ae279c9563bf3590c2325
|
340211a59f6fa930e500b6f97e7000fd312ce677
|
/calc_deriv/110z-concat_xisob_heat_melting_tseries.py
|
282b15034f87531f10265636d8d5a3c86aa49a70
|
[
"MIT"
] |
permissive
|
ChanJeunlam/AntarcticaCircumpolarIntrusions
|
a755098dc4dbb7758e2f559e37677474be23fabd
|
55018cdd9bb79c71d6393a34ce2237ce49918a61
|
refs/heads/master
| 2022-01-20T17:47:22.608854
| 2018-10-02T03:04:57
| 2018-10-02T03:04:57
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,693
|
py
|
# -*- coding: utf-8 -*-
#
# Description: Concatenate *npz files with the chunks
# of the heat transport time series.
#
# Author: André Palóczy Filho
# E-mail: paloczy@gmail.com
# Date: December/2017
import numpy as np
from glob import glob
from datetime import datetime
from reproducibility import savez
# Isobath identifier (m) selecting which chunked time series to concatenate.
isob = 1000
# fnameglob = 'Tfmin_tseries%dm_????-????.npz'%isob
# fname_concat = 'Tfmin_tseries%dm.npz'%isob
fnameglob = 'hflxmelt_tseries%dm_????-????.npz'%isob
fname_concat = 'hflxmelt_tseries%dm.npz'%isob
# In [81]: Tfmins.min()
# Out[81]: -2.638742253002648
# aux_vars = []
# concat_vars = ['t', 'Tfmins']
# aux_vars are copied once from the first chunk; concat_vars are
# concatenated across all chunks along the time axis.
aux_vars = ['d', 'i', 'j', 'y', 'x', 'z', 'Tf0']
concat_vars = ['t', 'Ux', 'Qm', 'SHT', 'UQx', 'UQxe', 'UQxm', 'UQxm_100m', 'UQxe_100m', 'UQxm_100m_700m', 'UQxe_100m_700m', 'UQxm_700m_1000m', 'UQxe_700m_1000m', 'UQxm_circ', 'UQxe_circ']
fnames = glob(fnameglob)
fnames.sort()
# NOTE(review): variables are injected into the module namespace via
# vars().update(...) so they can be retrieved by name when building `ds`.
for avar in aux_vars:
    v = np.load(fnames[0])[avar]
    vars().update({avar:v})
for cvar in concat_vars:
    skel = None
    for fname in fnames: # Get the variable from all files.
        v = np.load(fname)[cvar]
        # 1-D arrays are concatenated end-to-end; higher-rank arrays are
        # stacked row-wise.
        if v.ndim==1:
            if skel is not None:
                skel = np.hstack((skel, v))
            else:
                skel = v
        else:
            if skel is not None:
                skel = np.vstack((skel, v))
            else:
                skel = v
    vars().update({cvar:skel})
# Make sure months are centered on the 15th.
t = np.array([datetime(ti.year, ti.month, 15) for ti in t.tolist()])
ds = dict(t=None)
allvars = aux_vars + concat_vars
for k in allvars:
    ds.update({k:vars()[k]})
savez(fname_concat, **ds)
|
[
"paloczy@gmail.com"
] |
paloczy@gmail.com
|
79740de4f30d468f2532d89dbfa91de323b563f4
|
868b9a336965184b28aa9a809145b0d2a693b662
|
/part4/4.31_seaborn_count.py
|
98c6d08f93e10049e8b41aade75bfe6a6ef132a5
|
[] |
no_license
|
gregoriusdev/Infopub_Pandas_okkam76
|
3f0f43652154602f9193daa8ba681d6190e29bb6
|
8480dd6332da6bf6fd19ae4b91bc5801d469c798
|
refs/heads/master
| 2022-11-18T08:48:36.561939
| 2020-07-20T02:37:12
| 2020-07-20T02:37:12
| 280,313,525
| 2
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,019
|
py
|
# -*- coding: utf-8 -*-
# 라이브러리 불러오기
import matplotlib.pyplot as plt
import seaborn as sns
# Load one of Seaborn's bundled example datasets.
titanic = sns.load_dataset('titanic')
# Style theme (5 options: darkgrid, whitegrid, dark, white, ticks).
sns.set_style('whitegrid')
# Create a figure holding three side-by-side subplots.
fig = plt.figure(figsize=(15, 5))
ax1 = fig.add_subplot(1, 3, 1)
ax2 = fig.add_subplot(1, 3, 2)
ax3 = fig.add_subplot(1, 3, 3)
# Default count plot.
sns.countplot(x='class', palette='Set1', data=titanic, ax=ax1)
# Split the bars by the 'who' column via the hue option.
sns.countplot(x='class', hue='who', palette='Set2', data=titanic, ax=ax2)
# dodge=False stacks the hue groups instead of placing them side by side.
sns.countplot(x='class', hue='who', palette='Set3', dodge=False, data=titanic, ax=ax3)
# Chart titles.
ax1.set_title('titanic class')
ax2.set_title('titanic class - who')
ax3.set_title('titanic class - who(stacked)')
plt.show()
|
[
"pungjusa@hotmail.com"
] |
pungjusa@hotmail.com
|
e21ae503aee3d9d8dc44fddb5ad53587c7e6223f
|
ac65b1182091c8a630036e89b95f340a6a19147f
|
/companies/migrations/0016_auto_20150626_1528.py
|
2c5bc9bfcf8ee5e33a29ef814bacbcf02bfa7ca3
|
[] |
no_license
|
nokafor/prism
|
4a72499cd4fda8f555c162b713c83e949ab40e21
|
47f547fc848aa3763b98b8ac55d8c9ec8377b371
|
refs/heads/master
| 2016-08-07T11:37:02.098154
| 2015-09-24T20:17:30
| 2015-09-24T20:17:30
| 34,232,112
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 590
|
py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Order Company rows by name and add a required `logo` image field."""

    dependencies = [
        ('companies', '0015_remove_rehearsal_is_scheduled'),
    ]

    operations = [
        migrations.AlterModelOptions(
            name='company',
            options={'ordering': ['name']},
        ),
        migrations.AddField(
            model_name='company',
            name='logo',
            # default=1 backfills existing rows during the migration;
            # preserve_default=False drops that default afterwards.
            # NOTE(review): 1 is an unusual default for an ImageField —
            # presumably a placeholder chosen at makemigrations time.
            field=models.ImageField(default=1, upload_to=b'companies'),
            preserve_default=False,
        ),
    ]
|
[
"nokafor@princeton.edu"
] |
nokafor@princeton.edu
|
38d2550ded5b9e90ade280fd154c84d20c0e92e2
|
b24d6a323ac7927e38f82749e1b4ae42cb5a2465
|
/randomwalker.py
|
3fd248c3bfb8ef7170eccf263247961f94b44afc
|
[] |
no_license
|
nickkreissler/fun
|
0f9bdfd62c3a2cafc9437128e5014ab919ff0b99
|
950db26db060a9f2482595ab0d7c5d84205cab08
|
refs/heads/master
| 2021-09-01T17:40:23.105819
| 2017-12-28T03:49:05
| 2017-12-28T03:49:05
| 115,582,320
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,206
|
py
|
def manhatten(int, inte):
    """Simulate a 10,000-step random walk on an int-by-inte wrapping grid
    and print the visited grid row by row.

    NOTE(review): the parameter names shadow the builtin `int`; they are
    kept for backward compatibility with existing keyword callers, but
    should eventually be renamed (e.g. rows/cols).
    """
    # Bug fix: `random` was used but never imported anywhere in this
    # module, so every call raised NameError.
    import random

    grid = getgrid(int, inte)
    steps = 0
    y_position = 0
    x_position = 0
    axes = ['y_position', 'x_position']
    deltas = [1, -1]
    while steps < 10000:
        steps += 1
        axis = random.choice(axes)
        delta = random.choice(deltas)
        if axis == 'y_position':
            if y_position == 0 and delta == -1:
                # Wrap from the top row to the bottom row.
                y_position = int - 1
                # NOTE(review): only this wrap case increments; all other
                # branches assign 1 — possibly a bug, behavior preserved.
                grid[y_position][x_position] += 1
            elif y_position == int - 1 and delta == 1:
                # Wrap from the bottom row to the top row.
                y_position = 0
                grid[y_position][x_position] = 1
            else:
                y_position += delta
                grid[y_position][x_position] = 1
        if axis == 'x_position':
            if x_position == 0 and delta == -1:
                # Wrap from the left column to the right column.
                x_position = inte - 1
                grid[y_position][x_position] = 1
            elif x_position == inte - 1 and delta == 1:
                # Wrap from the right column to the left column.
                x_position = 0
                grid[y_position][x_position] = 1
            else:
                x_position += delta
                grid[y_position][x_position] = 1
    # `grid` is a dict keyed by row index, so iterating yields the keys.
    for row in grid:
        print('{}'.format(grid[row]))
    print('\n')
def getgrid(x, y):
    """Return an x-row grid of zeros as a dict mapping row index -> list of y zeros."""
    return {row: [0] * y for row in range(x)}
|
[
"nkreissler@invisigo.org"
] |
nkreissler@invisigo.org
|
b420a3ada2d2d1084035ded8fedf94fab11c7246
|
6f05f7d5a67b6bb87956a22b988067ec772ba966
|
/data/test/python/c6ac49828b465d81fdafb56e8a05c0177a7ec6c2HelloZigguratApiView.py
|
c6ac49828b465d81fdafb56e8a05c0177a7ec6c2
|
[
"MIT"
] |
permissive
|
harshp8l/deep-learning-lang-detection
|
93b6d24a38081597c610ecf9b1f3b92c7d669be5
|
2a54293181c1c2b1a2b840ddee4d4d80177efb33
|
refs/heads/master
| 2020-04-07T18:07:00.697994
| 2018-11-29T23:21:23
| 2018-11-29T23:21:23
| 158,597,498
| 0
| 0
|
MIT
| 2018-11-21T19:36:42
| 2018-11-21T19:36:41
| null |
UTF-8
|
Python
| false
| false
| 786
|
py
|
# HelloZigguratApiView.py
# (C)2013
# Scott Ernst
from ziggurat.view.api.ApiRouterView import ApiRouterView
#___________________________________________________________________________________________________ HelloZigguratApiView
class HelloZigguratApiView(ApiRouterView):
    """Minimal example API view.

    Adds no behavior of its own; all request routing is inherited from
    ApiRouterView.
    """

#===================================================================================================
#                                                                                       C L A S S

#___________________________________________________________________________________________________ __init__
    def __init__(self, request, **kwargs):
        """Creates a new instance of HelloZigguratApiView."""
        super(HelloZigguratApiView, self).__init__(request, **kwargs)
|
[
"aliostad+github@gmail.com"
] |
aliostad+github@gmail.com
|
2341f1c9dc3b1cd134f884291570972dbc1c681b
|
a2264d085481d7efc7ed2b2edbe58947ea9229fd
|
/generator/generate.py
|
aa8d5b80843389fccf91068e1871d11696c7f34a
|
[] |
no_license
|
TestSubjector/QuadTreeMeshSolver
|
cd0d3d34e98fbc1ef0e199e0f80691821a49406d
|
d3b069c77dde1793c4b946607bcc341ff833dd5e
|
refs/heads/master
| 2023-07-21T09:43:43.329959
| 2023-06-06T17:52:13
| 2023-06-06T17:52:13
| 120,038,698
| 0
| 0
| null | 2023-07-06T21:41:15
| 2018-02-02T22:34:42
|
Python
|
UTF-8
|
Python
| false
| false
| 4,436
|
py
|
import argparse
import load
import boundary
import misc
import logging
log = logging.getLogger(__name__)
log.addHandler(logging.StreamHandler())
import sys, os
sys.path.insert(0, os.path.join(os.path.dirname(os.path.realpath(__file__)), os.pardir))
from core import core
def main():
    """Generate `preprocessorfile.txt` from neighbour data and wall geometries."""
    # Command Line Arguments
    parser = argparse.ArgumentParser()
    parser.add_argument("-n", "--neighbour", const=str, nargs="?")
    parser.add_argument("-w", "--wall", nargs="+")
    args = parser.parse_args()
    log.info("Loading Data")
    log.debug("Arguments Set")
    log.debug(args)
    try:
        file1 = open(args.neighbour or "neighbour.txt", "r")
        data = file1.read()
        file1.close()
    except Exception as error:
        log.critical(error)
        exit()
    # Normalize tabs to spaces and split the neighbour file into lines.
    data = data.replace("\t", " ")
    data = data.split("\n")
    data.pop(0) # Pops the first blank line
    core.setPrefix()
    wallarg = args.wall
    wallpoints = []
    hashtable = {}
    globaldata = []
    log.info("Found " + str(len(wallarg)) + " wall geometry files.")
    # Optional b-spline refinement data saved by an earlier stage; the
    # string "None" is used as a sentinel when the file is absent.
    try:
        bsplineWallData = dict(core.load_obj("wall"))
    except IOError:
        bsplineWallData = "None"
    for idx,itm in enumerate(wallarg):
        log.info("Loading Geometry " + str(itm))
        file2 = open(str(itm) or "airfoil_160.txt", "r")
        geometrydata = file2.read()
        file2.close()
        geometrydata = geometrydata.split("\n")
        geometrydataOrg = load.wallFloat(geometrydata)
        if bsplineWallData != "None":
            insertionKeys = list(bsplineWallData.keys())
            # NOTE(review): this inner loop variable shadows the outer
            # `itm` (the wall file name); harmless because the outer value
            # is not used again after this point, but worth renaming.
            for itm in insertionKeys:
                # Keys prefixed "pre" mean the new points are inserted
                # before the matched geometry point instead of after it.
                postInsert = True
                itm2 = itm
                if "pre" in itm2:
                    itm2 = itm2.replace("pre","")
                    postInsert = False
                # Keys encode a point as "x,y".
                itmx = float(itm2.split(",")[0])
                itmy = float(itm2.split(",")[1])
                itmCheck = str(itmx) +"\t" + str(itmy)  # NOTE(review): appears unused
                resultMan,insertionidx = load.checkIfInside(itmx,itmy,geometrydata,geometrydataOrg,bsplineWallData)
                if resultMan:
                    ptsToBeAdded = load.getItem(bsplineWallData,itm)
                    # Order insertion points by squared distance to the
                    # matched point; reversed when inserting after it.
                    ptsToBeAdded = sorted(ptsToBeAdded,key = lambda point: load.distance_squared(itmx,itmy,point[0],point[1]),reverse=postInsert)
                    for ptCordItm in ptsToBeAdded:
                        dataInsert = str(ptCordItm[0]) + "\t" + str(ptCordItm[1])
                        if postInsert == True:
                            geometrydata.insert(insertionidx + 1,dataInsert)
                        else:
                            geometrydata.insert(insertionidx,dataInsert)
        hashtable, wallpointsdata, globaldata = load.loadWall(geometrydata,hashtable,globaldata,idx + 1)
        wallpoints.append(wallpointsdata)
    log.info("Loading Interior and Outer Points")
    hashtable, globaldata = load.loadInterior(data, hashtable, globaldata, len(hashtable))
    globaldata = misc.cleanNeighbours(globaldata)
    hashtable, globaldata = boundary.detectOuter(hashtable, globaldata)
    globaldata = misc.generateReplacement(hashtable, globaldata)
    core.setKeyVal("globaldata", globaldata)
    # Write one point per line, fields separated by spaces.
    with open("preprocessorfile.txt", "w") as text_file:
        for item1 in globaldata:
            text_file.writelines(["%s " % item for item in item1])
            text_file.writelines("\n")
    log.info("Preprocessor File Generated")
    log.info("Done")
if __name__ == "__main__":
    import logging
    import os
    import json
    import logging.config
    import sys
    sys.path.insert(0, os.path.join(os.path.dirname(os.path.realpath(__file__)), os.pardir))
    from core import core
    default_path='logging.json'
    path = default_path
    # Map the configured level name to a logging constant; anything
    # unrecognised falls back to WARNING.
    level_name = core.getConfig()["global"]["logger"]["level"]
    level = {
        "DEBUG": logging.DEBUG,
        "INFO": logging.INFO,
        "WARNING": logging.WARNING,
        "ERROR": logging.ERROR,
    }.get(level_name, logging.WARNING)
    # Prefer a dictConfig from logging.json when present; otherwise fall
    # back to a basic file logger configured from the project settings.
    if os.path.exists(path):
        with open(path, 'rt') as f:
            config = json.load(f)
        logging.config.dictConfig(config)
    else:
        logging.basicConfig(level=level,filename=core.getConfig()["global"]["logger"]["logPath"],format="%(asctime)s %(name)s %(levelname)s: %(message)s", datefmt="%m/%d/%Y %I:%M:%S %p")
    main()
|
[
"NischayPro@gmail.com"
] |
NischayPro@gmail.com
|
556becf9474a1e70401b1bff5d9df9454d58084d
|
60a405684f1765f1bd33d7ad4edc80fea407f390
|
/CIFAR/train.py
|
a67658bbbc8061073c3021724e0aecb6d60cd9ea
|
[] |
no_license
|
qitianwu/SteinBridging
|
5dae3c13efcc47985c57737820575f74f902e94b
|
6fbccee4fe6053373c68dc200eab60e3cccf6a9a
|
refs/heads/master
| 2022-04-08T17:43:24.111570
| 2020-02-20T16:37:13
| 2020-02-20T16:37:13
| 241,910,946
| 6
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 8,181
|
py
|
import os
import numpy as np
from util import *
from model import *
from load import *
import pickle
from datetime import datetime
import time
import logging
# Log to file; each record is just the message text.
logging.basicConfig(level=logging.INFO, filename='new_max_tanh.log', format='%(message)s')
flags = tf.app.flags
flags.DEFINE_string("gpus", "0", "gpus")
FLAGS = flags.FLAGS
print(FLAGS.gpus)
# Restrict TensorFlow to the requested GPU(s) before any session exists.
os.environ["CUDA_VISIBLE_DEVICES"] = FLAGS.gpus
from metrics.metric import *
# ----- Hyperparameters -----
n_epochs = 500
lr_gan = 0.0002  # learning rate for the generator/discriminator optimizers
lr_est = 0.0002  # learning rate for the estimator-side optimizers
batch_size = 100
image_shape = [32,32,3]
dim_z = 100  # latent-noise dimensionality
dim_W1 = 256
dim_W2 = 128
dim_W3 = 64
dim_channel = 3
lambda_p = 100
n_critic = 1  # critic updates per generator update
visualize_dim=100  # number of samples in the periodic visualization grid
# CIFAR-10 training images.
trX = cifar10()
dcgan_model = DCGAN(
    batch_size=batch_size,
    image_shape=image_shape,
    dim_z=dim_z,
    dim_W1=dim_W1,
    dim_W2=dim_W2,
    dim_W3=dim_W3,
    lambda_p=lambda_p
)
# Build the model graph; L is the annealing-coefficient placeholder.
Z_tf, image_tf, dis2gen_cost_tf, gen2dis_cost_tf, gen2est_cost_tf, est2real_cost_tf, \
est2gen_cost_tf, l2, p_real, p_gen = dcgan_model.build_model()
L = tf.placeholder(tf.float32, shape=None)
# Collect trainable variables by name prefix. List comprehensions are
# used instead of `filter`: on Python 3 a filter object is a one-shot
# iterator, so the second reuse of `dis_vars` below would silently
# contribute an empty list to `dis2est_vars`.
dis_vars = [v for v in tf.trainable_variables() if v.name.startswith('dis')]
dis2gen_vars = [v for v in tf.trainable_variables() if v.name.startswith('dis2gen')]
gen_vars = [v for v in tf.trainable_variables() if v.name.startswith('gen')]
dis2est_vars = [v for v in tf.trainable_variables() if v.name.startswith('dis2est')]
est_vars = [v for v in tf.trainable_variables() if v.name.startswith('est')]
# NOTE: the 'dis' prefix also matches 'dis2gen'/'dis2est' names, so these
# unions contain duplicates — preserved from the original logic.
dis2gen_vars = dis2gen_vars + dis_vars
dis2est_vars = dis2est_vars + dis_vars
# Estimator losses: real-data term plus the annealed generated-data term.
est_cost_tf = est2real_cost_tf + L*est2gen_cost_tf
dis2est_cost_tf = -est_cost_tf #+ l2
gen2est_cost_tf = L*est2gen_cost_tf
# One Adam optimizer per objective, each restricted to its variable set.
train_op_dis2gen = tf.train.AdamOptimizer(lr_gan, beta1=0.5).minimize(dis2gen_cost_tf, var_list=dis2gen_vars)
train_op_dis2est = tf.train.AdamOptimizer(lr_est, beta1=0.9).minimize(dis2est_cost_tf , var_list=dis2est_vars)
train_op_gen2dis = tf.train.AdamOptimizer(lr_gan, beta1=0.5).minimize(gen2dis_cost_tf, var_list=gen_vars)
train_op_gen2est = tf.train.AdamOptimizer(lr_est, beta1=0.9).minimize(gen2est_cost_tf, var_list=gen_vars)
train_op_est = tf.train.AdamOptimizer(lr_est, beta1=0.9).minimize(est_cost_tf, var_list=est_vars)
Z_tf_sample, image_tf_sample = dcgan_model.samples_generator(batch_size=visualize_dim)
# Variables included in checkpoints.
model_var = [var for var in tf.trainable_variables() if var.name.startswith("dis2gen")] \
    + [var for var in tf.trainable_variables() if var.name.startswith("dis2est")] \
    + [var for var in tf.trainable_variables() if var.name.startswith("gen")] \
    + [var for var in tf.trainable_variables() if var.name.startswith("est")]
it = '0'
start_epoch = int(it)
sess = tf.Session()
saver = tf.train.Saver(model_var)
sess.run(tf.global_variables_initializer())
#saver.restore(sess, save_path='./model1/model_'+it)
# Fixed latent batch so the periodic sample grids are comparable.
Z_np_sample = np.random.uniform(-1, 1, size=(visualize_dim,dim_z))
step = 100  # logging interval in iterations
n_sample = trX.shape[0]
iterations = start_epoch * n_sample / batch_size
start_time = datetime.now()
k = 5  # estimator-side updates run every k-th iteration
# ----- Training loop -----
for epoch in range(start_epoch, n_epochs):
    # Reshuffle the training images every epoch.
    index = np.arange(n_sample)
    np.random.shuffle(index)
    trX = trX[index]
    for start, end in zip(
        range(0, n_sample, batch_size),
        range(batch_size, n_sample, batch_size)
    ):
        Image = trX[start:end]
        # Scale pixels into [-1, 1].
        Xs = Image.reshape( [-1, 32, 32, 3]) / 255.
        Xs = 2*Xs - 1
        Zs = np.random.uniform(-1, 1, size=[batch_size, dim_z]).astype(np.float32)
        n_iter = n_epochs * n_sample // batch_size
        # Annealing coefficient: ramps 0 -> 1 over the first quarter of
        # the total iterations, then stays at 1.
        l = min(4*iterations / n_iter, 1)
        # --- discriminator-vs-generator updates ---
        i_critic = 0
        while i_critic < n_critic:
            _, dis2gen_loss_val = sess.run(
                [train_op_dis2gen, dis2gen_cost_tf],
                feed_dict={
                    Z_tf:Zs,
                    image_tf:Xs
                })
            i_critic += 1
        # --- generator update against the discriminator ---
        _, gen2dis_loss_val = sess.run(
            [train_op_gen2dis, gen2dis_cost_tf],
            feed_dict={
                Z_tf:Zs,
            })
        # Estimator-side updates run only every k-th iteration.
        if iterations % k ==0:
            i_critic = 0
            while i_critic < n_critic:
                _, dis2est_loss_val = sess.run(
                    [train_op_dis2est, dis2est_cost_tf],
                    feed_dict={
                        Z_tf:Zs,
                        image_tf:Xs,
                        L:l
                    })
                i_critic += 1
            _, est_loss_val = sess.run(
                [train_op_est, est_cost_tf],
                feed_dict={
                    Z_tf:Zs,
                    image_tf:Xs,
                    L:l
                })
            _, gen2est_loss_val = sess.run(
                [train_op_gen2est, gen2est_cost_tf],
                feed_dict={
                    Z_tf:Zs,
                    L:l
                })
        # Current loss values for logging only (no training step).
        Wgr_loss, Sre_loss, Sge_loss = sess.run([dis2gen_cost_tf, est2real_cost_tf, est2gen_cost_tf], \
            feed_dict={Z_tf:Zs, image_tf:Xs})
        p_real_val, p_gen_val = sess.run([p_real, p_gen], feed_dict={Z_tf:Zs, image_tf:Xs})
        iterations += 1
        if np.mod(iterations, step) == 0:
            Wgr_loss_f = '{:.4f}'.format(float(Wgr_loss))
            Sre_f = '{:.4f}'.format(float(Sre_loss))
            Sge_f = '{:.4f}'.format(float(Sge_loss))
            # Rough ETA from the average time per iteration so far.
            cost_time = str((datetime.now() - start_time) / (iterations+1) * (n_iter - iterations)).split('.')[0]
            log = "Iter {:<6}: Wgr_loss {:<6} Sre_loss {:<6} Sge_f {:<6} (left: {})".\
                format(iterations, Wgr_loss_f, Sre_f, Sge_f, cost_time)
            logging.info(log)
            print(log)
    if epoch % 10 == 0:
        # Periodic sample grid from the fixed latent batch, a checkpoint,
        # and an Inception Score computed on freshly generated samples.
        generated_samples = sess.run(
            image_tf_sample,
            feed_dict={
                Z_tf_sample:Z_np_sample
            })
        generated_samples = (generated_samples + 1.)/2.
        save_visualization(generated_samples, (10,10), save_path='./vis/sample_%04d.jpg' % int(epoch))
        #save_visualization(Image, (10,10), save_path='./vis/true.jpg')
        saver = tf.train.Saver(model_var)
        saver.save(sess, save_path='./model/model_%03d' % epoch, write_meta_graph=False)
        print("====epoch {} finishes and model saved====".format(epoch))
        Image = np.zeros([len(trX), 32, 32, 3])
        for start, end in zip(
            range(0, len(trX)-batch_size, batch_size),
            range(batch_size, len(trX), batch_size)
        ):
            Zs = np.random.uniform(-1, 1, size=[batch_size, dim_z]).astype(np.float32)
            generated_samples = sess.run(image_tf_sample, feed_dict={Z_tf_sample:Zs})
            generated_samples = (generated_samples + 1.)/2. * 255
            Image[start:end, :, :, :] = generated_samples
        # Move the channel axis to position 1 for the IS metric.
        Image = Image.transpose([0, 3, 1, 2])
        IS_mean, IS_std = inception_score(Image)
        log = 'Epoch {} IS score: {}'.format(epoch, IS_mean)
        logging.info(log)
        print(log)
    if epoch > 490:
        # Near the end of training, checkpoint and score every epoch.
        saver = tf.train.Saver(model_var)
        saver.save(sess, save_path='./model1/model_%03d' % epoch, write_meta_graph=False)
        print("====epoch {} finishes and model saved====".format(epoch))
        Image = np.zeros([len(trX), 32, 32, 3])
        for start, end in zip(
            range(0, len(trX)-batch_size, batch_size),
            range(batch_size, len(trX), batch_size)
        ):
            Zs = np.random.uniform(-1, 1, size=[batch_size, dim_z]).astype(np.float32)
            generated_samples = sess.run(image_tf_sample, feed_dict={Z_tf_sample:Zs})
            generated_samples = (generated_samples + 1.)/2. * 255
            Image[start:end, :, :, :] = generated_samples
        Image = Image.transpose([0, 3, 1, 2])
        IS_mean, IS_std = inception_score(Image)
        log = 'Epoch {} IS score: {}'.format(epoch, IS_mean)
        logging.info(log)
        print(log)
|
[
"echo740@sjtu.edu.cn"
] |
echo740@sjtu.edu.cn
|
03e11eec763f1734c24e88054105c9cbe77a36d8
|
d7e875366f9ad9c97dfe84bce37b604f9b073746
|
/momo/manage.py
|
71dd5dcd8db20c43f542b40520d82fd81bdda3ff
|
[] |
no_license
|
thp101/Me
|
3a0a191396e8ae022a395f986da09adf231e5b9a
|
8a920a98afe407ff5a427c34d47386f718039a71
|
refs/heads/master
| 2020-05-27T00:23:42.325976
| 2019-05-24T13:25:11
| 2019-05-24T13:25:11
| 188,423,719
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 645
|
py
|
#!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
    """Django command-line entry point: dispatch to the requested
    management command (runserver, migrate, ...)."""
    # Fall back to this project's settings unless the caller already
    # exported DJANGO_SETTINGS_MODULE.
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'momo.settings')
    try:
        from django.core.management import execute_from_command_line
    except ImportError as exc:
        # Re-raise with a friendlier hint; the original ImportError is chained.
        raise ImportError(
            "Couldn't import Django. Are you sure it's installed and "
            "available on your PYTHONPATH environment variable? Did you "
            "forget to activate a virtual environment?"
        ) from exc
    execute_from_command_line(sys.argv)
if __name__ == '__main__':
    main()
|
[
"noreply@github.com"
] |
noreply@github.com
|
7cce7640c1b48f684c3e43713bb520c2e7da36da
|
009bd2e689b1ca8762bb337c8c24cd618738f7d2
|
/etl.py
|
353ec55ec2bd4a6134551450582fc5867150e1a3
|
[] |
no_license
|
HKasie/Data-Modelling-with-Postgres
|
41f38b41a504530426051985c65c44f81e1af4c4
|
ca0f680b340dc0b24ecfb83da1a0079b5c04f5ad
|
refs/heads/master
| 2022-11-20T13:39:14.339041
| 2020-07-27T15:12:22
| 2020-07-27T15:12:22
| 274,392,116
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,567
|
py
|
import os
import glob
import psycopg2
import pandas as pd
from sql_queries import *
def process_song_file(cur, filepath):
    """Load one song JSON file into the songs and artists tables.

    Args:
        cur: postgres cursor
        filepath: path to a single-record song JSON file

    Returns:
        None
    """
    # Each song file holds exactly one JSON record.
    song_df = pd.read_json(filepath, lines=True)
    record = song_df.iloc[0]

    # Insert the song row.
    song_columns = ['song_id', 'title', 'artist_id', 'year', 'duration']
    cur.execute(song_table_insert, record[song_columns].values)

    # Insert the artist row.
    artist_columns = ['artist_id', 'artist_name', 'artist_location',
                      'artist_latitude', 'artist_longitude']
    cur.execute(artist_table_insert, record[artist_columns].values)
def process_log_file(cur, filepath):
    """Load one log JSON file into the time, users and songplays tables.

    Extracts datetime components for the time table, user attributes for
    the users table, and song-play events for the songplays table. Song ID
    and artist ID are not in the log file, so they are looked up via the
    ``song_select`` query from sql_queries.py.

    Args:
        cur: postgres cursor
        filepath: path to a log JSON file (one event per line)

    Returns:
        None
    """
    # open log file
    df = pd.read_json(filepath, lines=True)

    # filter by NextSong action (only actual song plays)
    df = df[(df['page'] == 'NextSong')]

    # convert millisecond epoch timestamps to datetime once and reuse
    t = pd.to_datetime(df['ts'], unit='ms')
    df['ts'] = t

    # insert time data records
    # FIX: Series.dt.week was deprecated in pandas 1.1 and removed in 2.0;
    # isocalendar().week is the supported replacement.
    time_data = pd.concat([df['ts'],
                           t.dt.hour,
                           t.dt.day,
                           t.dt.isocalendar().week,
                           t.dt.month,
                           t.dt.year,
                           t.dt.weekday],
                          axis=1)
    column_labels = ('timestamp', 'hour', 'day', 'week of year', 'month', 'year', 'weekday')
    time_df = pd.DataFrame(data=time_data.values, columns=column_labels)

    for i, row in time_df.iterrows():
        cur.execute(time_table_insert, list(row))

    # load user table
    user_df = df[['userId', 'firstName', 'lastName', 'gender', 'level']]

    # insert user records
    for i, row in user_df.iterrows():
        cur.execute(user_table_insert, row)

    # insert songplay records
    for index, row in df.iterrows():
        # get songid and artistid from song and artist tables
        cur.execute(song_select, (row.song, row.artist, row.length))
        results = cur.fetchone()

        if results:
            songid, artistid = results
        else:
            songid, artistid = None, None

        # insert songplay record
        songplay_data = (pd.to_datetime(row.ts, unit='ms'),
                         int(row.userId),
                         row.level,
                         songid,
                         artistid,
                         row.sessionId,
                         row.location,
                         row.userAgent)
        cur.execute(songplay_table_insert, songplay_data)
def process_data(cur, conn, filepath, func):
    """Walk *filepath*, apply *func* to every JSON file found, committing
    after each file.

    Args:
        cur: postgres cursor
        conn: postgres connection
        filepath: root directory of files to be processed
        func: per-file processing function, called as func(cur, path)

    Returns:
        None
    """
    # Collect absolute paths of every *.json file in the directory tree.
    all_files = []
    for root, _dirs, _files in os.walk(filepath):
        for match in glob.glob(os.path.join(root, '*.json')):
            all_files.append(os.path.abspath(match))

    num_files = len(all_files)
    print('{} files found in {}'.format(num_files, filepath))

    # Process each file, committing after every one so progress survives
    # a mid-run failure.
    for i, datafile in enumerate(all_files, 1):
        func(cur, datafile)
        conn.commit()
        print('{}/{} files processed.'.format(i, num_files))
def main():
    """Connect to the sparkify database and run the full ETL pipeline
    (songs first so foreign keys exist, then logs)."""
    conn = psycopg2.connect("host=127.0.0.1 dbname=sparkifydb user=student password=student")
    cur = conn.cursor()
    process_data(cur, conn, filepath='data/song_data', func=process_song_file)
    process_data(cur, conn, filepath='data/log_data', func=process_log_file)
    conn.close()
if __name__ == "__main__":
    main()
|
[
"noreply@github.com"
] |
noreply@github.com
|
394e6f5b241413cc9c799014999c198da05585c9
|
26db935ce71c638a3ac34832b71a72dfd704af3e
|
/task/urls.py
|
445946276e4572c2384ac7ccbaa14e8c5604dd10
|
[
"Apache-2.0"
] |
permissive
|
suvajitsarkar/taskManagement
|
b3bbd9e56e36b9bf1e98954447663884b68ec5b2
|
0054c20fba8dd8eb3c4c83abdded8fc778a8b62b
|
refs/heads/master
| 2023-05-14T05:42:58.636676
| 2020-06-07T16:15:12
| 2020-06-07T16:15:12
| 270,276,881
| 0
| 0
|
Apache-2.0
| 2021-06-10T23:00:14
| 2020-06-07T10:55:59
|
Python
|
UTF-8
|
Python
| false
| false
| 436
|
py
|
from django.urls import path
from . import views
# Route table for the task app; the `name` values are reverse-lookup keys.
urlpatterns = [
    path('', views.index, name='index'),
    # AJAX endpoint returning task data.
    path('ajax/get_data', views.get_data, name='get_data'),
    path('create', views.create_tasks, name='create'),
    path('view_tasks', views.view_tasks, name='view_tasks'),
    # Class-based view: update an existing task selected by primary key.
    path('<int:pk>/update', views.UpdateTasks.as_view(), name='update_tasks'),
    path('view/<int:pk>', views.audit_detail_view, name='view_update')
]
|
[
"suvajit.sarkar@eternalrobotics.com"
] |
suvajit.sarkar@eternalrobotics.com
|
098b72e8cc03f6c1285665813d2e46094e10bc40
|
11b65941fe14f8f23d798b50f6d45487fd0ab234
|
/factorial.py
|
6a2ee8976b4f27a67828f9accc23354d5d3e3636
|
[] |
no_license
|
saahndongransom/hello-world
|
48426cf94f2511de473dbfd0215484ccdec6fed8
|
c148a6ee47dc7d6b8c1a7d1f4d9b13054685e69f
|
refs/heads/master
| 2021-05-21T13:00:25.865901
| 2020-04-06T16:51:15
| 2020-04-06T16:51:15
| 252,661,669
| 0
| 0
| null | 2020-04-03T07:47:48
| 2020-04-03T07:28:16
| null |
UTF-8
|
Python
| false
| false
| 174
|
py
|
def factorial(n):
    """Return n! for a non-negative integer n.

    Iterative rather than recursive, so large n cannot hit Python's
    recursion limit.

    Raises:
        ValueError: if n is negative (the recursive original never
        terminated for negative input).
    """
    if n < 0:
        raise ValueError("factorial() is not defined for negative values")
    result = 1
    for k in range(2, n + 1):
        result *= k
    return result
# Prompt the user and print the result (fixes the "factiorial" typo in the prompt).
n = int(input("enter a number to compute the factorial : "))
print(factorial(n))
|
[
"saahndongransom@gmail.com"
] |
saahndongransom@gmail.com
|
b4b174a311285bd98e728124a6b9d6eea7a0b1e0
|
255ef10344981ae49174a7eb7278997819b441ec
|
/marketgrab/migrations/0017_movement_series.py
|
22172155eaf594e4fa2d75f58c50913570a31309
|
[
"MIT"
] |
permissive
|
colinmcglone/window-time
|
2f60b8bc37079876e76969011fc0dcb836b40eb7
|
74ed90440b9bb93fa569534c7557972242569d3a
|
refs/heads/master
| 2021-01-10T13:12:31.778070
| 2016-03-17T15:04:40
| 2016-03-17T15:04:40
| 49,010,233
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 492
|
py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.1 on 2016-01-14 16:24
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated: add the required ``series`` CharField to ``movement``."""
    dependencies = [
        ('marketgrab', '0016_movement'),
    ]
    operations = [
        migrations.AddField(
            model_name='movement',
            name='series',
            # default='n' backfills existing rows; preserve_default=False means
            # the default applies only to this migration, not the model field.
            field=models.CharField(default='n', max_length=10),
            preserve_default=False,
        ),
    ]
|
[
"me@colinmcglone.ca"
] |
me@colinmcglone.ca
|
b1d3a9e6deb618f941b95fd15af5b73c26f2e958
|
1a4ca76087ae580b827a6455f483f4e6cc957a99
|
/shadow_program_inversion/experiments/contact/urscript/train_prior.py
|
14c3f6cc25dc91170c3de41b164bfe0730115696
|
[] |
no_license
|
benjaminalt/shadow-program-inversion
|
7170b4406f7ef605bf2fb25c418b57001a6723f9
|
05649f76a941092b3c2a829427b3dc980f712761
|
refs/heads/master
| 2023-03-22T22:54:13.396265
| 2021-03-11T11:09:19
| 2021-03-11T11:11:55
| 345,314,773
| 3
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 949
|
py
|
"""
Copyright (C) 2021 ArtiMinds Robotics GmbH
"""
import json
import os
from argparse import ArgumentParser
from shadow_program_inversion.priors.neural_prior import NeuralPrior
import shadow_program_inversion.utils.config as cfg
def main(args):
    """Train a neural prior for the chosen contact material.

    Loads the small autoregressive model config, trains on the simulated
    training split for ``args.material`` and writes the trained model
    under the trained-models directory.
    """
    config_path = os.path.join(cfg.REPO_DIR, "shadow_program_inversion", "model", "config",
                               "autoregressive_small.json")
    with open(config_path) as config_file:
        model_config = json.load(config_file)

    data_dir = os.path.join(cfg.DATA_DIR, "urscript", args.material, "sim_train")
    output_dir = os.path.join(cfg.TRAINED_MODELS_DIR, "urscript", args.material)

    prior = NeuralPrior("Move Linear", 7 + 2, model_config)
    prior.train(data_dir, output_dir)
if __name__ == '__main__':
    # CLI: the single positional argument selects which dataset/model
    # directories are used.
    parser = ArgumentParser()
    parser.add_argument("material", type=str, choices=["foam", "pcb", "rubber"])
    main(parser.parse_args())
|
[
"benjamin_alt@outlook.com"
] |
benjamin_alt@outlook.com
|
13b91b37dcfd67c4679d0faa3747a8011ef4b129
|
38d68c141e31d5c11ed2e471e49641581c7d13e3
|
/app.py
|
a933cadf372132cd13964335bbbe71a2dd6dc0a7
|
[
"MIT"
] |
permissive
|
SurabhiSuresh22/Iris_Flower_Prediction
|
4883f7e342da76c9da8bfd9764da6d078d75e7d5
|
b47bdbefc7f4c35b575572f3d5b8c06aa3e32be8
|
refs/heads/master
| 2022-12-25T07:15:49.379116
| 2020-10-06T18:32:51
| 2020-10-06T18:32:51
| 299,653,120
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 612
|
py
|
from flask import Flask, render_template, request
import pickle
import numpy as np
# Load the pre-trained iris classifier once at import time.
# NOTE(review): the file handle from open() is never closed; a with-block
# would be cleaner.
model = pickle.load(open('iris.pkl', 'rb'))
app = Flask(__name__)
@app.route('/')
def man():
    """Render the measurement input form."""
    return render_template('home.html')
@app.route('/predict', methods=['POST'])
def home():
    """Read the four iris measurements from the form, run the model,
    and render the prediction page."""
    # Form values arrive as strings; cast explicitly so the model always
    # receives a numeric array instead of relying on implicit coercion.
    features = [float(request.form[key]) for key in ('a', 'b', 'c', 'd')]
    arr = np.array([features])
    pred = model.predict(arr)
    return render_template('after.html', data=pred)
if __name__ == "__main__":
    # debug=True enables the reloader/debugger; development use only.
    app.run(debug=True)
|
[
"noreply@github.com"
] |
noreply@github.com
|
4756f1ab9f395d38c2dc002023bc87b08d00c0ce
|
fffda6e06cb979e83db15e9142db7c9994400e2f
|
/language/bert_extraction/steal_bert_qa/utils/evaluate_squad.py
|
ea9eef5d7871c98ee1bf95b0138c2334ed1dfee8
|
[
"LicenseRef-scancode-generic-cla",
"Apache-2.0"
] |
permissive
|
ZachT1711/language
|
b48e551555a41bf269cc20f22871a9e4c47aacc9
|
de84080fc8a239a7271aad1d447fcb38a895790b
|
refs/heads/master
| 2023-01-05T21:36:59.194404
| 2020-04-14T17:19:25
| 2020-04-14T17:30:20
| 250,185,870
| 1
| 0
|
Apache-2.0
| 2022-12-23T20:28:38
| 2020-03-26T07:09:01
|
Python
|
UTF-8
|
Python
| false
| false
| 6,410
|
py
|
# coding=utf-8
# Copyright 2018 The Google AI Language Team Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Official evaluation script for version 1.1 of the SQuAD dataset."""
from __future__ import print_function
import collections as cll
import json
import re
import string
import tensorflow.compat.v1 as tf
# Aliases into the TF1-compat namespaces used throughout this script.
app = tf.compat.v1.app
flags = tf.flags
gfile = tf.gfile
logging = tf.logging
# Either two dataset files, two prediction files, or one dataset file plus
# one prediction file may be supplied; see main() for the dispatch.
flags.DEFINE_string('dataset_file', None, 'Dataset file')
flags.DEFINE_string('dataset_file2', None, 'Dataset file #2')
flags.DEFINE_string('prediction_file', None, 'Prediction file')
flags.DEFINE_string('prediction_file2', None, 'Prediction file #2')
FLAGS = flags.FLAGS
def normalize_answer(s):
  """Lower text and remove punctuation, articles and extra whitespace."""
  punctuation = set(string.punctuation)
  # Same pipeline as before: lower -> strip punctuation -> drop articles
  # -> collapse whitespace.
  text = ''.join(ch for ch in s.lower() if ch not in punctuation)
  text = re.sub(r'\b(a|an|the)\b', ' ', text)
  return ' '.join(text.split())
def f1_score(prediction, ground_truth):
  """Calculate word level F1 score between two answer strings."""
  pred_tokens = normalize_answer(prediction).split()
  truth_tokens = normalize_answer(ground_truth).split()
  # Two empty (after normalization) answers count as a perfect match.
  if not pred_tokens and not truth_tokens:
    return 1.0
  overlap = cll.Counter(pred_tokens) & cll.Counter(truth_tokens)
  num_same = sum(overlap.values())
  if num_same == 0:
    return 0
  precision = num_same / len(pred_tokens)
  recall = num_same / len(truth_tokens)
  return (2 * precision * recall) / (precision + recall)
def f1_score_multiple(predictions):
  """Calculate word level F1 score for every unordered pair of predictions."""
  return [f1_score(predictions[i], predictions[j])
          for i in range(len(predictions) - 1)
          for j in range(i + 1, len(predictions))]
def exact_match_score(prediction, ground_truth):
  """True iff the two answers are identical after normalization."""
  normalized_prediction = normalize_answer(prediction)
  normalized_truth = normalize_answer(ground_truth)
  return normalized_prediction == normalized_truth
def metric_max_over_ground_truths(metric_fn, prediction, ground_truths):
  """Return the best metric value of *prediction* over all ground truths."""
  return max(metric_fn(prediction, ground_truth)
             for ground_truth in ground_truths)
def evaluate_preds_preds(preds1, preds2):
  """Evaluate word level metrics between two prediction dicts keyed by qa id.

  preds1 is treated as the reference; preds2 is scored against it.
  """
  total, exact_match, f1, any_match = 0, 0, 0, 0
  for qa_id, reference in preds1.items():
    total += 1
    candidate = preds2[qa_id]
    exact_match += metric_max_over_ground_truths(
        exact_match_score, candidate, [reference])
    qa_f1 = metric_max_over_ground_truths(f1_score, candidate, [reference])
    # any_match counts questions with any token overlap at all.
    if qa_f1 > 0:
      any_match += 1
    f1 += qa_f1
  return {
      'exact_match': 100.0 * exact_match / total,
      'f1': 100.0 * f1 / total,
      'any_match': 100.0 * any_match / total,
  }
def evaluate_dataset_preds(dataset, predictions):
  """Evaluate word level metrics."""
  # Score a predictions dict (qa id -> answer string) against the gold
  # answers of a SQuAD-format dataset.
  f1 = exact_match = total = 0
  for article in dataset:
    for paragraph in article['paragraphs']:
      for qa in paragraph['qas']:
        total += 1
        if qa['id'] not in predictions:
          message = 'Unanswered question ' + qa['id'] + ' will receive score 0.'
          print(message)
          continue
        # A question may have several acceptable gold answers; take the best.
        ground_truths = [x['text'] for x in qa['answers']]
        prediction = predictions[qa['id']]
        curr_exact_match = metric_max_over_ground_truths(
            exact_match_score, prediction, ground_truths)
        exact_match += curr_exact_match
        f1 += metric_max_over_ground_truths(f1_score, prediction, ground_truths)
  exact_match = 100.0 * exact_match / total
  f1 = 100.0 * f1 / total
  return {'exact_match': exact_match, 'f1': f1}
def evaluate_dataset_dataset(dataset, dataset2):
  """Evaluate word level metrics."""
  # Compare two parallel SQuAD-format datasets question-by-question; the
  # second dataset's first answer is treated as the "prediction".
  f1 = exact_match = total = 0
  for article, article2 in zip(dataset, dataset2):
    for para, para2 in zip(article['paragraphs'], article2['paragraphs']):
      # The datasets must be aligned: same contexts and question counts.
      assert para['context'].strip() == para2['context'].strip()
      assert len(para['qas']) == len(para2['qas'])
      for qa, qa2 in zip(para['qas'], para2['qas']):
        total += 1
        ground_truths = [x['text'] for x in qa['answers']]
        prediction = qa2['answers'][0]['text']
        exact_match += metric_max_over_ground_truths(exact_match_score,
                                                     prediction, ground_truths)
        f1 += metric_max_over_ground_truths(f1_score, prediction, ground_truths)
  exact_match = 100.0 * exact_match / total
  f1 = 100.0 * f1 / total
  return {'exact_match': exact_match, 'f1': f1}
def main(_):
  """Dispatch on which flag pair was supplied and print the metrics JSON."""
  def load_dataset_file(dataset_file):
    # Load a SQuAD-format file and return its 'data' list.
    with gfile.Open(dataset_file) as df:
      dataset_json = json.load(df)
    data = dataset_json['data']
    return data
  def load_preds_file(prediction_file):
    # Load a predictions dict (qa id -> answer string).
    with gfile.Open(prediction_file) as pf:
      preds = json.load(pf)
    return preds
  # Two datasets -> gold vs. gold; two prediction files -> predictions vs.
  # predictions; otherwise score predictions against the dataset.
  if FLAGS.dataset_file and FLAGS.dataset_file2:
    dataset1 = load_dataset_file(FLAGS.dataset_file)
    dataset2 = load_dataset_file(FLAGS.dataset_file2)
    print(json.dumps(evaluate_dataset_dataset(dataset1, dataset2)))
  elif FLAGS.prediction_file and FLAGS.prediction_file2:
    preds1 = load_preds_file(FLAGS.prediction_file)
    preds2 = load_preds_file(FLAGS.prediction_file2)
    print(json.dumps(evaluate_preds_preds(preds1, preds2)))
  else:
    dataset = load_dataset_file(FLAGS.dataset_file)
    preds = load_preds_file(FLAGS.prediction_file)
    print(json.dumps(evaluate_dataset_preds(dataset, preds)))
|
[
"kentonl@google.com"
] |
kentonl@google.com
|
ec81cbd08688727070f00eda34dc1f3c799b1764
|
1fbace94e62e6858df8cdcde73db85e2d1406abc
|
/tags/bitbake-1.4.4/lib/bb/fetch/git.py
|
49235c141e1624cb9f464483ec22f46b0f3fbb7e
|
[] |
no_license
|
BackupTheBerlios/bitbake-svn
|
f095cf4d895abf143e402b12c2b3def2f55c6769
|
9cb4874974f20d3f5da208c7e071ca49be893edb
|
refs/heads/master
| 2020-12-24T14:36:57.555135
| 2009-07-23T15:41:07
| 2009-07-23T15:41:07
| 40,669,141
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,250
|
py
|
#!/usr/bin/env python
# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
"""
BitBake 'Fetch' git implementation
Copyright (C) 2005 Richard Purdie
This program is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free Software
Foundation; either version 2 of the License, or (at your option) any later
version.
This program is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License along with
this program; if not, write to the Free Software Foundation, Inc., 59 Temple
Place, Suite 330, Boston, MA 02111-1307 USA.
"""
import os, re
import bb
from bb import data
from bb.fetch import Fetch
from bb.fetch import FetchError
def prunedir(topdir):
    """Delete everything reachable from *topdir*.

    CAUTION: This is dangerous! Walks bottom-up so directories are empty
    by the time they are removed; topdir itself is left in place (empty).
    """
    for dirpath, dirnames, filenames in os.walk(topdir, topdown=False):
        for filename in filenames:
            os.remove(os.path.join(dirpath, filename))
        for dirname in dirnames:
            os.rmdir(os.path.join(dirpath, dirname))
def rungitcmd(cmd,d):
    """Run *cmd* in a shell with PATH expanded from the metadata *d*.

    Raises FetchError if the command exits non-zero.
    """
    bb.debug(1, "Running %s" % cmd)
    # Need to export PATH as git is likely to be in metadata paths
    # rather than host provided
    pathcmd = 'export PATH=%s; %s' % (data.expand('${PATH}', d), cmd)
    myret = os.system(pathcmd)
    if myret != 0:
        raise FetchError("Git: %s failed" % pathcmd)
def gettag(parm):
    """Return the git tag from the URL parameters, defaulting to 'master'
    when absent or empty."""
    tag = parm['tag'] if 'tag' in parm else ""
    return tag if tag else "master"
def getprotocol(parm):
    """Return the transfer protocol from the URL parameters, defaulting to
    'rsync' when absent or empty."""
    proto = parm['protocol'] if 'protocol' in parm else ""
    return proto if proto else "rsync"
def localfile(url, d):
    """Return the filename to cache the checkout in"""
    (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(url, d))
    #if user sets localpath for file, use it instead.
    if "localpath" in parm:
        return parm["localpath"]
    tag = gettag(parm)
    # Encode host, path and tag into a unique tarball name.
    return data.expand('git_%s%s_%s.tar.gz' % (host, path.replace('/', '.'), tag), d)
class Git(Fetch):
    """Class to fetch a module or modules from git repositories"""
    def supports(url, d):
        """Check to see if a given url can be fetched with git.
        Expects supplied url in list form, as outputted by bb.decodeurl().
        """
        (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(url, d))
        return type in ['git']
    supports = staticmethod(supports)
    def localpath(url, d):
        # Cached checkout tarballs live under DL_DIR.
        return os.path.join(data.getVar("DL_DIR", d, 1), localfile(url, d))
    localpath = staticmethod(localpath)
    def go(self, d, urls = []):
        """Fetch urls"""
        # NOTE(review): mutable default argument is harmless here -- *urls*
        # is only read, and replaced by self.urls when empty.
        if not urls:
            urls = self.urls
        for loc in urls:
            (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(loc, d))
            tag = gettag(parm)
            proto = getprotocol(parm)
            gitsrcname = '%s%s' % (host, path.replace('/', '.'))
            repofilename = 'git_%s.tar.gz' % (gitsrcname)
            repofile = os.path.join(data.getVar("DL_DIR", d, 1), repofilename)
            repodir = os.path.join(data.expand('${GITDIR}', d), gitsrcname)
            coname = '%s' % (tag)
            codir = os.path.join(repodir, coname)
            cofile = self.localpath(loc, d)
            # tag=="master" must always update
            if (tag != "master") and Fetch.try_mirror(d, localfile(loc, d)):
                bb.debug(1, "%s already exists (or was stashed). Skipping git checkout." % cofile)
                continue
            # Clone (or unpack a mirrored tarball of) the repository, then
            # pull heads and tags to bring it up to date.
            if not os.path.exists(repodir):
                if Fetch.try_mirror(d, repofilename):
                    bb.mkdirhier(repodir)
                    os.chdir(repodir)
                    rungitcmd("tar -xzf %s" % (repofile),d)
                else:
                    rungitcmd("git clone -n %s://%s%s %s" % (proto, host, path, repodir),d)
            os.chdir(repodir)
            rungitcmd("git pull %s://%s%s" % (proto, host, path),d)
            rungitcmd("git pull --tags %s://%s%s" % (proto, host, path),d)
            rungitcmd("git prune-packed", d)
            # old method of downloading tags
            #rungitcmd("rsync -a --verbose --stats --progress rsync://%s%s/ %s" % (host, path, os.path.join(repodir, ".git", "")),d)
            os.chdir(repodir)
            bb.note("Creating tarball of git repository")
            rungitcmd("tar -czf %s %s" % (repofile, os.path.join(".", ".git", "*") ),d)
            # Materialize the requested tag into codir and tar it up as the
            # final cached checkout file.
            if os.path.exists(codir):
                prunedir(codir)
            bb.mkdirhier(codir)
            os.chdir(repodir)
            rungitcmd("git read-tree %s" % (tag),d)
            rungitcmd("git checkout-index -q -f --prefix=%s -a" % (os.path.join(codir, "git", "")),d)
            os.chdir(codir)
            bb.note("Creating tarball of git checkout")
            rungitcmd("tar -czf %s %s" % (cofile, os.path.join(".", "*") ),d)
|
[
"zecke123@fd90740e-a5ea-0310-a7a2-b3005cc77bd6"
] |
zecke123@fd90740e-a5ea-0310-a7a2-b3005cc77bd6
|
fdb16f133bfd05fb79ded62ee7decb86e81d819c
|
76fd6eec61ffbe955f4cbaa49b0b10dcc2bb1f94
|
/subcubic_matrix_multiplication.py
|
8c68a45b2ad6cec2d2b8a4870874e07724130f7a
|
[] |
no_license
|
xiaoying1990/algorithm_stanford_part1
|
577eb029b6b997a760d34197c37193cd55c70b94
|
cbc65b5ad46728ac8e76c83e9355986e681156fd
|
refs/heads/master
| 2021-01-10T01:19:29.716387
| 2016-01-27T07:49:56
| 2016-01-27T07:49:56
| 49,760,083
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 10,699
|
py
|
#!/usr/bin/python3
import random
import copy
import functools
import time
class Matrix:
    """
    Matrix operations:
    binary operators: + , -, *, * num,
    unary operator: ^T, -, abs()
    Matrix access:
    []([]), iterable Rows() & Cols(), len()
    """
    class SelectedArea: # todo: matrix[slice][slice], problem: is_regular_.., add, deepcopy, [][] too time consuming
        # Unfinished helper meant to support matrix[row_slice][col_slice] as a
        # live view; __getitem__/__setitem__ are stubs.
        def __init__(self, parent, row_slice, col_slice):
            self._parent = parent
            self._area = row_slice, col_slice
        def __str__(self):
            return str([r[self._area[1]] for r in self._parent.matrix[self._area[0]]])
        def __setitem__(self, key, value):
            pass
        def __getitem__(self, item):
            pass
    class SelectedRows:
        # Live view over a slice of the parent's rows; indexing the view again
        # selects columns (int) or a sub-matrix (slice).
        def __init__(self, parent, row_slice):
            self._row_slice = row_slice
            self._parent = parent
        def __getitem__(self, item):
            # int -> column values of the selected rows; slice -> new Matrix copy.
            if isinstance(item, int):
                return [r[item] for r in self._parent.matrix[self._row_slice]]
            elif isinstance(item, slice):
                # return Matrix.SelectedArea(self._parent, self._row_slice, item)
                return Matrix(r[item] for r in self._parent.matrix[self._row_slice])
            else:
                raise IndexError('Matrix column index must be a integer or a slice, not {}'.format(item))
        def __setitem__(self, key, value):
            # Write *value* rows into the selected area of the parent in place.
            for r, v in zip(self._parent.matrix[self._row_slice], value):
                r[key] = v
        def __str__(self):
            return str(self._parent[self._row_slice])
    def __init__(self, *args, **kwargs):
        # Matrix(iterable_of_rows) deep-copies the rows; Matrix(SelectedRows)
        # turns a column view back into rows via transposition;
        # Matrix(n_rows, n_cols, ran=True) builds a random (else zero) matrix,
        # where n_cols may be a tuple of per-row widths (ragged matrix).
        if len(args) == 1:
            if isinstance(args[0], Matrix.SelectedRows):
                self._matrix = Matrix(list(args[0])).transposition_matrix().matrix
            else:
                self._matrix = copy.deepcopy(list(args[0]))
        elif len(args) == 2:
            ran_num = kwargs.get('ran', False)
            n_rows, n_cols = args[0], args[1]
            if isinstance(n_cols, int):
                self._matrix = [[random.random() * 100 if ran_num else 0 * (r + c)
                                 for c in range(n_cols)] for r in range(n_rows)]
            elif isinstance(n_cols, tuple):
                self._matrix = [[random.random() * 100 if ran_num else 0 * (r + c)
                                 for c in range(n_cols[r])] for r in range(n_rows)]
            else:
                raise Exception('arguments wrong: {}'.format(args))
    def __getitem__(self, item):
        # m[int] -> raw row list; m[slice] -> SelectedRows view.
        if isinstance(item, slice):
            return self.SelectedRows(self, item)
        elif isinstance(item, int):
            return self._matrix[item]
        else:
            raise IndexError('Matrix row index must be a integer or a slice, not {}'.format(item))
    def __setitem__(self, key, value):
        self._matrix[key] = list(value)
    def __str__(self):
        return str(self._matrix)
    def rows(self):
        # Yield raw row lists.
        for row in self._matrix:
            yield row
    def cols(self):
        # Yield columns; for ragged matrices, short rows contribute None.
        n_c = self.num_cols
        if isinstance(n_c, int):
            for c in range(n_c):
                yield [r[c] for r in self._matrix]
        elif isinstance(n_c, tuple):
            for c in range(max(n_c)):
                yield [r[c] if c < len(r) else None for r in self._matrix]
        else:
            raise Exception('impossible')
    def transposition_matrix(self):
        if not self.is_regular_matrix():
            raise Exception('Can not do this, for {} is not a regular matrix.'.format(self))
        return Matrix(self.cols())
    def is_regular_matrix(self):
        # Regular: every row has the same length and every element is numeric.
        t = tuple(len(r) for r in self._matrix)
        assert len(t) == self.num_rows, 'impossible'
        return max(t) == min(t) and functools.reduce((lambda x, y: x and y),
                                                     ((lambda x: isinstance(x, (int, float)))(e)
                                                      for r in self._matrix for e in r)
                                                     )
    def len(self):
        # (num_rows, num_cols); num_cols is a tuple for ragged matrices.
        return self.num_rows, self.num_cols
    @property
    def matrix(self):
        return self._matrix
    @matrix.setter
    def matrix(self, value):
        self._matrix = value
    @property
    def num_rows(self):
        return len(self._matrix)
    @property
    def num_cols(self):
        # int when all rows have equal length, otherwise the per-row tuple.
        t = tuple(len(r) for r in self._matrix)
        return max(t) if max(t) == min(t) else t
    @staticmethod
    def add(ma: 'Matrix', mb: 'Matrix') -> 'Matrix':
        if ma.is_regular_matrix() and mb.is_regular_matrix() \
                and ma.num_rows == mb.num_rows and ma.num_cols == mb.num_cols:
            return Matrix([ca + cb for ca, cb in zip(ra, rb)] for ra, rb in zip(ma, mb))
        else:
            raise Exception('Matrix is not regular or cannot be added. \n{} \n{}'.format(ma, mb))
    def __eq__(self, other):
        # Element-wise comparison with absolute tolerance for floats; also
        # checks that other is at least as large in every dimension.
        return functools.reduce((lambda x, y: x and y),
                                (i < other.num_rows and (j < other.num_cols[i] if isinstance(other.num_cols, tuple)
                                                         else j < other.num_cols)
                                 and (abs(other[i][j] - e) < 0.0000001 if isinstance(e, float)
                                      else other[i][j] == e)
                                 for i, r in enumerate(self._matrix) for j, e in enumerate(r))
                                )
    def __add__(self, other):
        return self.add(self, other)
    @staticmethod
    def sub(ma: 'Matrix', mb: 'Matrix') -> 'Matrix':
        return Matrix.add(ma, Matrix.dm(mb, -1))
    def __sub__(self, other):
        return self.sub(self, other)
    def __abs__(self):
        return Matrix([abs(e) for e in r] for r in self._matrix)
    @staticmethod
    def dm(ma: ('Matrix', int), d: (int, float)) -> 'Matrix':
        # Scalar multiplication: d * ma.
        if ma.is_regular_matrix():
            return Matrix([d * v for v in r] for r in ma)
        else:
            raise Exception('matrix is not regular. \n{}'.format(ma))
    def __neg__(self):
        return Matrix.dm(self, -1)
    def __mul__(self, other):
        # Scalar or matrix product; small products take the naive O(n^3) path,
        # larger ones the divide-and-conquer path.
        if isinstance(other, (int, float)):
            return Matrix.dm(self, other)
        elif isinstance(other, Matrix):
            if self.num_cols * self.num_rows * other.num_cols * other.num_rows < 100000000:
                return Matrix.mul_simple(self, other)
            return Matrix.mul(self, other)
        else:
            raise Exception('{} is not a number or a matrix.'.format(other))
    @staticmethod
    def mul_simple(ma: 'Matrix', mb: 'Matrix') -> 'Matrix':  # O(n^3)
        # Textbook row-by-column product.
        if not ma.is_regular_matrix() or not mb.is_regular_matrix():
            raise Exception('at least one matrix is not regular.\n{}\n{}'
                            .format(ma, mb))
        if ma.num_cols != mb.num_rows:
            raise Exception('cannot multiply the two matrix, ' +
                            'because the number of columns {} of {} != the number of rows {} of {}'
                            .format(ma.num_cols, ma, mb.num_rows, mb)
                            )
        same_index = ma.num_cols
        return Matrix([sum(r[i] * c[i] for i in range(same_index)) for c in mb.cols()] for r in ma.rows())
    @staticmethod
    def mul(ma: 'Matrix', mb: 'Matrix') -> 'Matrix':
        # Divide-and-conquer product: when all four half-dimensions split
        # evenly, use Strassen's seven-product scheme; otherwise fall back to
        # the plain eight-product block recursion.
        if not ma.is_regular_matrix() or not mb.is_regular_matrix():
            raise Exception('at least one matrix is not regular.\n{}\n{}'
                            .format(ma, mb))
        if ma.num_cols != mb.num_rows:
            raise Exception('cannot multiply the two matrix, ' +
                            'because the number of columns {} of {} != the number of rows {} of {}'
                            .format(ma.num_cols, ma, mb.num_rows, mb)
                            )
        ar, ac, br, bc = ma.len() + mb.len()
        ans = Matrix(ar, bc)
        a, b, c, d = ma[:ar // 2][:ac // 2], ma[:ar // 2][ac // 2:], ma[ar // 2:][:ac // 2], ma[ar // 2:][ac // 2:]
        e, f, g, h = mb[:br // 2][:bc // 2], mb[:br // 2][bc // 2:], mb[br // 2:][:bc // 2], mb[br // 2:][bc // 2:]
        if ar % 2 == 0 and ac % 2 == 0 and br % 2 == 0 and bc % 2 == 0: # O(n^(log(2, 7))
            p1, p2, p3, p4, p5, p6, p7 = a * (f - h), (a + b) * h, (c + d) * e, \
                                         d * (g - e), (a + d) * (e + h), \
                                         (b - d) * (g + h), (a - c) * (e + f)
            part1, part2, part3, part4 = p5 + p4 - p2 + p6, \
                                         p1 + p2, \
                                         p3 + p4, \
                                         p1 + p5 - p3 - p7
        else:
            part1, part2, part3, part4 = a * e + b * g, \
                                         a * f + b * h, \
                                         c * e + d * g, \
                                         c * f + d * h
        ans[:ar // 2][:bc // 2], ans[:ar // 2][bc // 2:], \
        ans[ar // 2:][:bc // 2], ans[ar // 2:][bc // 2:] = part1, part2, part3, part4
        assert ans.len() == (ar, bc), 'impossible'
        # print('------{}'.format(ans.len()))
        return ans
def test():
    """Ad-hoc demo/benchmark of the Matrix class: slicing, arithmetic,
    transposition, equality, and a timing comparison of the recursive
    multiply against the simple O(n^3) multiply."""
    a = Matrix([[1, 2], [3, 0], [4, 3]])
    # Exercise Matrix's custom double-slice assignment and access.
    a[0:2][0:2] = [[3, -1], [2, 3]]
    print(a[0:2][0])
    print(a[:][:])
    print(type(a[:][:]))
    b = Matrix([[2, 4], [2, 1], [-4, 6]])
    # Deliberately irregular matrix (rows of different lengths).
    c = Matrix([[1, 2], [3, 0, 1.2], [4]])
    print('Matrix a: {}'.format(a))
    print('Matrix b: {}'.format(b))
    print('-a: {}'.format(-a))
    print('|b|: {}'.format(abs(b)))
    print('a + b: {}'.format(a + b))
    print('a - b: {}'.format(a - b))
    print('a * 3.1: {}'.format(a * 3.1))
    print('T of b: {}'.format(b.transposition_matrix()))
    bt = b.transposition_matrix()
    print('a * b^T: {}'.format(a * bt))
    # Cross-check the dispatching __mul__ against the simple algorithm.
    print('test multiply: {}'.format(a * bt == Matrix.mul_simple(a, bt)))
    print('Matrix c: {}, is it a regular matrix? {}'.format(c, c.is_regular_matrix()))
    print('len of a and c: \n{}\n{}'.format(a.len(), c.len()))
    print('build 0s Matrix with same len of a and c:\n{}\n{}'.format(Matrix(*a.len()), Matrix(*c.len())))
    # Matrix(c) is expected to deep-copy, producing an equal but distinct object.
    print('deep copy of matrix c: {}, is it "c"? {}'.format(Matrix(c), c is Matrix(c)))
    print('test == operator: {}'.format(c == Matrix(c)))
    # Timing comparison on random 50x50 matrices (presumably `ran=True`
    # fills with random values — confirm against the Matrix constructor).
    a = Matrix(50, 50, ran=True)
    b = Matrix(50, 50, ran=True)
    t1 = time.time()
    c = a * b
    print('time consuming for a * b: {} seconds'.format(time.time() - t1))
    t2 = time.time()
    d = Matrix.mul_simple(a, b)
    print('time consuming for mul_simple(a, b): {} seconds'.format(time.time() - t2))
    print(c == d)
if __name__ == '__main__':
    test()
|
[
"2451707668@qq.com"
] |
2451707668@qq.com
|
6cbda8076fef4a0ae328d7d96cbf2d90effc9054
|
0cab379418c935d7d9cb1d97682f736483d1690b
|
/decode/base64_decode/base64_file.py
|
ebfa97753db62ec992d2299bcbc6171fc4d05501
|
[] |
no_license
|
Maggie147/Python_tools
|
f11332bf825ea80d4acdd79117d9aafd8772513a
|
310902cae29bb51c0fd45e56a0b6f397056ea29f
|
refs/heads/master
| 2021-09-21T23:40:18.821966
| 2018-09-03T07:54:10
| 2018-09-03T07:54:10
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,530
|
py
|
#!/usr/bin/python
#-*-coding:utf-8-*-
import os
import base64
def get_file(filepath, rmflag=0):
    """Read a file's raw content, optionally deleting it afterwards.

    :param filepath: path of the file to read
    :param rmflag: when 1, remove the file after a successful read
    :returns: the file content as a byte string, or None if it could not be read
    """
    filebuf = None
    try:
        # "rb" is sufficient: the file is only read, never written
        # (the original used "rb+", which also demands write permission).
        with open(filepath, "rb") as fp:
            filebuf = fp.read()
        if rmflag == 1:
            try:
                os.remove(filepath)
            except OSError as e:
                # best effort: failing to delete is non-fatal
                print(e)
    except (IOError, OSError):
        # The original called DEBUGE(...), an undefined name that raised
        # NameError instead of reporting the failure.
        print("Open file [%s] failed!!!" % filepath)
    return filebuf
def write_data_toFile(tarPath, tarName, data):
    """Write binary data to tarPath + tarName, creating tarPath if needed.

    :param tarPath: target directory (callers pass it with a trailing slash)
    :param tarName: file name to create inside tarPath
    :param data: byte string to write
    :returns: True on success, False otherwise
    """
    if not os.path.exists(tarPath):
        os.makedirs(tarPath)
    try:
        with open(tarPath + tarName, "wb") as fp:
            fp.write(data)
        return True
    except (IOError, OSError):
        # Narrowed from a bare `except:`, which also swallowed
        # KeyboardInterrupt/SystemExit and hid programming errors.
        return False
def decode_base64(sdata):
    """Decode base64, padding being optional.

    :param sdata: Base64 data as an ASCII byte string
    :returns: The decoded byte string, or None on failure.
    """
    try:
        # -len % 4 is 0 when the input is already a multiple of 4;
        # the original `4 - len % 4` wrongly produced 4 in that case and
        # appended four spurious '=' characters.
        missing_padding = -len(sdata) % 4
        if missing_padding:
            sdata += b'=' * missing_padding
        # b64decode replaces base64.decodestring, which was removed in
        # Python 3.9; both discard characters outside the base64 alphabet.
        return base64.b64decode(sdata)
    except Exception as e:
        print(e)
        return None
def test():
    # Round-trip demo: read a base64-encoded text file, decode it, and
    # write the binary result alongside it.  Assumes ./test_file/picture.txt
    # exists relative to the working directory.
    s_path = "./test_file/picture.txt"
    # rmflag=0 keeps the source file in place after reading.
    s_buf = get_file(s_path, 0)
    decode_data = decode_base64(s_buf)
    if decode_data:
        ret = write_data_toFile('./test_file/', 'picture.bmp', decode_data)
        if not ret:
            print "save decode data failed!!!"
        else:
            print "decode success"
    else:
        print "decode failed!!!"
def main():
    # Entry point: run the base64 round-trip demo.
    test()
if __name__ == '__main__':
    main()
|
[
"tianxueol@outlook.com"
] |
tianxueol@outlook.com
|
d4cd42d3ae31fd16daad07315d0e65f6e0c9b818
|
bf73ff4441577074dee2225ac937fbbbf4e85fef
|
/pplbench/ppls/jags/inference.py
|
a09062be1785a08b5530f7b00494eb586446b37a
|
[
"MIT"
] |
permissive
|
rambam613/pplbench
|
632878a359945fe64cf24489aa7669040727c672
|
d69c652fc882ba50f56eb0cfaa3097d3ede295f9
|
refs/heads/master
| 2023-07-07T02:16:19.384357
| 2021-08-13T08:01:55
| 2021-08-13T08:03:49
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,413
|
py
|
# Copyright(C) Facebook, Inc. and its affiliates. All Rights Reserved.
from typing import Dict, Type, cast
import numpy as np
import pyjags
import xarray as xr
from ..base_ppl_impl import BasePPLImplementation
from ..base_ppl_inference import BasePPLInference
from .base_jags_impl import BaseJagsImplementation
class MCMC(BasePPLInference):
    """MCMC inference backed by JAGS (via pyjags)."""

    def __init__(
        self, impl_class: Type[BasePPLImplementation], model_attrs: Dict
    ) -> None:
        # Every implementation handed to this backend is a
        # BaseJagsImplementation; the cast records that for the type checker.
        jags_impl_class = cast(Type[BaseJagsImplementation], impl_class)
        self.impl_class = jags_impl_class
        self.impl = jags_impl_class(**model_attrs)

    def compile(self, seed: int, **compile_args):
        # JAGS has no separate compile phase: the model object is built
        # together with the data, so everything happens in infer().
        pass

    def infer(  # type: ignore
        self,
        data: xr.Dataset,
        iterations: int,
        num_warmup: int,
        seed: int,
        RNG_name: str = "base::Mersenne-Twister",
    ) -> xr.Dataset:
        """Run JAGS sampling and return the draws as an xarray Dataset.

        See https://phoenixnap.dl.sourceforge.net/project/mcmc-jags/Manuals/4.x/jags_user_manual.pdf
        for JAGS documentation.

        :param data: PPLBench dataset
        :param iterations: number of samples to create
        :param num_warmup: number of adaptive (warm-up) steps
        :param seed: seed for random number generator
        :param RNG_name: the name of the random number generator
        :returns: samples dataset
        """
        jags_model = pyjags.Model(
            code=self.impl.get_code(),
            data=self.impl.format_data_to_jags(data),
            chains=1,
            adapt=num_warmup,
            init={".RNG.seed": seed, ".RNG.name": RNG_name},
        )
        raw = jags_model.sample(iterations - num_warmup, vars=self.impl.get_vars())
        # Drop the trailing chain axis (we run a single chain).
        for name in raw.keys():
            raw[name] = raw[name].squeeze(-1)
        draws = self.impl.extract_data_from_jags(raw)
        # JAGS does not return warm-up samples; shift the draw coordinates
        # forward by num_warmup and pad the front with NaN so downstream
        # code sees a full-length trace.
        draws = draws.assign_coords(draw=draws.draw + num_warmup)
        warmup_pad = xr.Dataset(coords={"draw": np.arange(num_warmup)})
        return warmup_pad.merge(draws)
|
[
"facebook-github-bot@users.noreply.github.com"
] |
facebook-github-bot@users.noreply.github.com
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.