hexsha stringlengths 40 40 | size int64 7 1.04M | ext stringclasses 10 values | lang stringclasses 1 value | max_stars_repo_path stringlengths 4 247 | max_stars_repo_name stringlengths 4 125 | max_stars_repo_head_hexsha stringlengths 40 78 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 368k β | max_stars_repo_stars_event_min_datetime stringlengths 24 24 β | max_stars_repo_stars_event_max_datetime stringlengths 24 24 β | max_issues_repo_path stringlengths 4 247 | max_issues_repo_name stringlengths 4 125 | max_issues_repo_head_hexsha stringlengths 40 78 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 116k β | max_issues_repo_issues_event_min_datetime stringlengths 24 24 β | max_issues_repo_issues_event_max_datetime stringlengths 24 24 β | max_forks_repo_path stringlengths 4 247 | max_forks_repo_name stringlengths 4 125 | max_forks_repo_head_hexsha stringlengths 40 78 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k β | max_forks_repo_forks_event_min_datetime stringlengths 24 24 β | max_forks_repo_forks_event_max_datetime stringlengths 24 24 β | content stringlengths 1 1.04M | avg_line_length float64 1.77 618k | max_line_length int64 1 1.02M | alphanum_fraction float64 0 1 | original_content stringlengths 7 1.04M | filtered:remove_function_no_docstring int64 -102 942k | filtered:remove_class_no_docstring int64 -354 977k | filtered:remove_delete_markers int64 0 60.1k |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
722c4c63b3fd5d59bca4f70da65c4babd7f2a270 | 2,814 | py | Python | neptunecontrib/api/audio.py | piojanu/neptune-contrib | 7793c325af1c225cbda972bc0f89fa45f8da6cf3 | [
"MIT"
] | 22 | 2020-02-23T21:25:34.000Z | 2021-06-11T16:34:27.000Z | neptunecontrib/api/audio.py | piojanu/neptune-contrib | 7793c325af1c225cbda972bc0f89fa45f8da6cf3 | [
"MIT"
] | 29 | 2020-02-11T11:10:22.000Z | 2021-10-03T09:01:28.000Z | neptunecontrib/api/audio.py | piojanu/neptune-contrib | 7793c325af1c225cbda972bc0f89fa45f8da6cf3 | [
"MIT"
] | 7 | 2020-05-10T06:59:53.000Z | 2021-06-11T16:34:32.000Z | #
# Copyright (c) 2020, Neptune Labs Sp. z o.o.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
import neptune
__all__ = [
'log_audio',
]
def log_audio(path_to_file, audio_name=None, experiment=None):
"""Logs audio file to 'artifacts/audio' with player.
Logs audio file to the 'artifacts/audio' in the experiment, where you can play it directly from the browser.
You can also download raw audio file to the local machine.
Just use "three vertical dots" located to the right from the player.
Args:
path_to_file (:obj:`str`): Path to audio file.
audio_name (:obj:`str`, optional, default is ``None``): Name to be displayed in artifacts/audio.
| If `None`, file name is used.
experiment (:obj:`neptune.experiments.Experiment`, optional, default is ``None``):
| For advanced users only. Pass Neptune
`Experiment <https://docs.neptune.ai/neptune-client/docs/experiment.html#neptune.experiments.Experiment>`_
object if you want to control to which experiment data is logged.
| If ``None``, log to currently active, and most recent experiment.
Example:
.. code:: python3
log_audio('audio-file.wav')
log_audio('/full/path/to/some/other/audio/file.mp3')
log_audio('/full/path/to/some/other/audio/file.mp3', 'my_audio')
Note:
Check out how the logged audio file looks in Neptune:
`here <https://ui.neptune.ai/o/shared/org/showroom/e/SHOW-1485/artifacts?path=audio%2F>`_.
"""
import base64
from io import StringIO
_exp = experiment if experiment else neptune
name, file_ext = os.path.split(path_to_file)[1].split('.')
if audio_name is None:
audio_name = name
else:
assert isinstance(audio_name, str), 'audio_name must be string, got {}'.format(type(audio_name))
encoded_sound = base64.b64encode(open(path_to_file, 'rb').read())
html = """<!DOCTYPE html>
<html>
<body>
<audio controls>
<source src='data:audio/{};base64,{}'>
</audio>
</body>
</html>""".format(file_ext, encoded_sound.decode())
buffer = StringIO(html)
buffer.seek(0)
_exp.log_artifact(buffer, 'audio/{}.html'.format(audio_name))
| 33.903614 | 120 | 0.662758 | #
# Copyright (c) 2020, Neptune Labs Sp. z o.o.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
import neptune
__all__ = [
'log_audio',
]
def log_audio(path_to_file, audio_name=None, experiment=None):
"""Logs audio file to 'artifacts/audio' with player.
Logs audio file to the 'artifacts/audio' in the experiment, where you can play it directly from the browser.
You can also download raw audio file to the local machine.
Just use "three vertical dots" located to the right from the player.
Args:
path_to_file (:obj:`str`): Path to audio file.
audio_name (:obj:`str`, optional, default is ``None``): Name to be displayed in artifacts/audio.
| If `None`, file name is used.
experiment (:obj:`neptune.experiments.Experiment`, optional, default is ``None``):
| For advanced users only. Pass Neptune
`Experiment <https://docs.neptune.ai/neptune-client/docs/experiment.html#neptune.experiments.Experiment>`_
object if you want to control to which experiment data is logged.
| If ``None``, log to currently active, and most recent experiment.
Example:
.. code:: python3
log_audio('audio-file.wav')
log_audio('/full/path/to/some/other/audio/file.mp3')
log_audio('/full/path/to/some/other/audio/file.mp3', 'my_audio')
Note:
Check out how the logged audio file looks in Neptune:
`here <https://ui.neptune.ai/o/shared/org/showroom/e/SHOW-1485/artifacts?path=audio%2F>`_.
"""
import base64
from io import StringIO
_exp = experiment if experiment else neptune
name, file_ext = os.path.split(path_to_file)[1].split('.')
if audio_name is None:
audio_name = name
else:
assert isinstance(audio_name, str), 'audio_name must be string, got {}'.format(type(audio_name))
encoded_sound = base64.b64encode(open(path_to_file, 'rb').read())
html = """<!DOCTYPE html>
<html>
<body>
<audio controls>
<source src='data:audio/{};base64,{}'>
</audio>
</body>
</html>""".format(file_ext, encoded_sound.decode())
buffer = StringIO(html)
buffer.seek(0)
_exp.log_artifact(buffer, 'audio/{}.html'.format(audio_name))
| 0 | 0 | 0 |
7f715fc74a87d6223c95472dd37b95a2fe0a7ed2 | 122 | py | Python | example4/vehicles.py | python-spokane/getting-started-with-fastapi | b3ad03c03a391b3f569a6c881eb2d3035a9a1334 | [
"MIT"
] | null | null | null | example4/vehicles.py | python-spokane/getting-started-with-fastapi | b3ad03c03a391b3f569a6c881eb2d3035a9a1334 | [
"MIT"
] | null | null | null | example4/vehicles.py | python-spokane/getting-started-with-fastapi | b3ad03c03a391b3f569a6c881eb2d3035a9a1334 | [
"MIT"
] | null | null | null | from pydantic import BaseModel
| 13.555556 | 30 | 0.680328 | from pydantic import BaseModel
class Vehicle(BaseModel):
vehicle_id: int
year: int
make: str
model: str
| 0 | 67 | 23 |
9d1b6d427be95ea45d7c26c3d929358589c3f356 | 52,569 | py | Python | PUBGSearchbot.py | J-hoplin1/PUBG-player-search-bot | b3389cde41f4c3723b29885d355ac4491146934c | [
"MIT"
] | 6 | 2020-06-09T06:45:17.000Z | 2021-07-31T16:07:40.000Z | PUBGSearchbot.py | J-hoplin1/PUBG-player-search-bot | b3389cde41f4c3723b29885d355ac4491146934c | [
"MIT"
] | null | null | null | PUBGSearchbot.py | J-hoplin1/PUBG-player-search-bot | b3389cde41f4c3723b29885d355ac4491146934c | [
"MIT"
] | 5 | 2020-05-23T01:27:43.000Z | 2021-08-21T14:38:15.000Z | #This code and description is written by Hoplin
#This code is written with API version 1.0.0(Rewirte-V)
#No matter to use it as non-commercial.
import discord
import asyncio
import os
from discord.ext import commands
import urllib
from urllib.request import URLError
from urllib.request import HTTPError
from urllib.request import urlopen
from urllib.request import Request, urlopen
from bs4 import BeautifulSoup
from urllib.parse import quote
import re # Regex for youtube link
import warnings
import requests
import unicodedata
import json
import time
token = ''
client = discord.Client()
@client.event # Use these decorator to register an event.
@client.event
client.run(token)
| 65.793492 | 185 | 0.531739 | #This code and description is written by Hoplin
#This code is written with API version 1.0.0(Rewirte-V)
#No matter to use it as non-commercial.
import discord
import asyncio
import os
from discord.ext import commands
import urllib
from urllib.request import URLError
from urllib.request import HTTPError
from urllib.request import urlopen
from urllib.request import Request, urlopen
from bs4 import BeautifulSoup
from urllib.parse import quote
import re # Regex for youtube link
import warnings
import requests
import unicodedata
import json
import time
token = ''
client = discord.Client()
@client.event # Use these decorator to register an event.
async def on_ready(): # on_ready() event : when the bot has finised logging in and setting things up
await client.change_presence(status=discord.Status.online, activity=discord.Game("Type !help or !λμλ§ for help"))
print("New log in as {0.user}".format(client))
@client.event
async def on_message(message): # on_message() event : when the bot has recieved a message
#To user who sent message
# await message.author.send(msg)
print(message.content)
if message.author == client.user:
return
if message.content.startswith("!κ²½μμ 1"):#TabErrorTPP
baseURL = "https://dak.gg/profile/"
playerNickname = ''.join((message.content).split(' ')[1:])
URL = baseURL + quote(playerNickname)
try:
html = urlopen(URL)
bs = BeautifulSoup(html, 'html.parser')
if len(message.content.split(" ")) == 1:
embed = discord.Embed(title="λλ€μμ΄ μ
λ ₯λμ§ μμμ΅λλ€", description="", color=0x5CD1E5)
embed.add_field(name="Player nickname not entered",
value="To use command !κ²½μμ (1 : TPP or 2 : FPP) : !κ²½μμ (Nickname)", inline=False)
embed.set_footer(text='Service provided by Hoplin.',
icon_url='https://avatars2.githubusercontent.com/u/45956041?s=460&u=1caf3b112111cbd9849a2b95a88c3a8f3a15ecfa&v=4')
await message.channel.send("Error : Incorrect command usage ", embed=embed)
else:
accessors = bs.findAll('a', {'href': re.compile('\/statistics\/[A-Za-z]')})
# Season Information : ['PUBG',(season info),(Server),'overview']
seasonInfo = []
for si in bs.findAll('li', {'class': "active"}):
seasonInfo.append(si.text.strip())
serverAccessorAndStatus = []
# To prevent : Parsing Server Status, Make a result like Server:\nOnline. So I need to delete '\n'to get good result
for a in accessors:
serverAccessorAndStatus.append(re.sub(pattern='[\n]', repl="", string=a.text.strip()))
# Varaible serverAccessorAndStatus : [(accessors),(ServerStatus),(Don't needed value)]
# Varaibel rankElements : index 0: fpp 1 : tpp
rankElements = bs.findAll('div',{'class' : re.compile('squad ranked [A-Za-z0-9]')})
'''
-> ν΄λμ€ κ°μ κ°μ Έμμ νλ³νλ κ²λ μμ§λ§ μ΄ λ°©λ²μ μ¬μ©ν΄ λ³Έλ€.
-> λ§μ½ κΈ°λ‘μ΄ μ‘΄μ¬ νμ§ μλ κ²½μ° class κ° no_recordλΌλ κ°μ κ°μ§ <div>κ° μμ±λλ€. μ΄ νκ·Έλ‘ λ°μ΄ν° μ 무 νλ³νλ©΄λλ€.
print(rankElements[1].find('div',{'class' : 'no_record'}))
'''
if rankElements[0].find('div',{'class' : 'no_record'}) != None: # μΈλ±μ€ 0 : κ²½μμ fpp -> μ λ³΄κ° μλμ§ μλμ§ μ 무λ₯Ό νλ³νλ€.
embed = discord.Embed(title="Record not found", description="Rank TPP record not found.",color=0x5CD1E5)
embed.add_field(name="Player search from dak.gg", value=URL, inline=False)
embed.set_footer(text='Service provided by Hoplin.',
icon_url='https://avatars2.githubusercontent.com/u/45956041?s=460&u=1caf3b112111cbd9849a2b95a88c3a8f3a15ecfa&v=4')
await message.channel.send("PUBG player " + playerNickname + "'s TPP Ranking information",embed=embed)
else:
#Short of fpp Rank
fR = rankElements[0]
# Tier Information
# Get tier medal image
tierMedalImage = fR.find('div',{'class' : 'grade-info'}).img['src']
# Get tier Information
tierInfo = fR.find('div',{'class' : 'grade-info'}).img['alt']
# Rating Inforamtion
# RP Score
RPScore = fR.find('div',{'class' : 'rating'}).find('span',{'class' : 'caption'}).text
#Get top rate statistics
#λ±μ
topRatioRank = topRatio = fR.find('p',{'class' : 'desc'}).find('span',{'class' : 'rank'}).text
#μμ %
topRatio = fR.find('p',{'class' : 'desc'}).find('span',{'class' : 'top'}).text
# Main : Stats all in here.
mainStatsLayout = fR.find('div',{'class' : 'stats'})
#Stats Data Saved As List
statsList = mainStatsLayout.findAll('p',{'class' : 'value'})# [KDA,μΉλ₯ ,Top10,νκ· λλ, κ²μμ, νκ· λ±μ]
statsRatingList = mainStatsLayout.findAll('span',{'class' : 'top'})#[KDA, μΉλ₯ ,Top10 νκ· λλ, κ²μμ]
for r in range(0,len(statsList)):
# \nμΌλ‘ ν° μ¬λ°±μ΄ μμ΄ split μ²λ¦¬
statsList[r] = statsList[r].text.strip().split('\n')[0]
statsRatingList[r] = statsRatingList[r].text
# νκ· λ±μλ stats Ratingμ νμνμ§ μλλ€.
statsRatingList = statsRatingList[0:5]
embed = discord.Embed(title="Player Unkonw Battle Ground player search from dak.gg", description="",color=0x5CD1E5)
embed.add_field(name="Player search from dak.gg", value=URL, inline=False)
embed.add_field(name="Real Time Accessors and Server Status",
value="Accessors : " + serverAccessorAndStatus[0] + " | " "Server Status : " +
serverAccessorAndStatus[1].split(':')[-1], inline=False)
embed.add_field(name="Player located server", value=seasonInfo[2] + " Server", inline=False)
embed.add_field(name = "Tier / Top Rate / Average Rank",
value = tierInfo + " (" + RPScore + ") / "+topRatio + " / " + topRatioRank,inline=False)
embed.add_field(name="K/D", value=statsList[0] + "/" + statsRatingList[0], inline=True)
embed.add_field(name="μΉλ₯ ", value=statsList[1] + "/" + statsRatingList[1], inline=True)
embed.add_field(name="Top 10 λΉμ¨", value=statsList[2] + "/" + statsRatingList[2], inline=True)
embed.add_field(name="νκ· λλ", value=statsList[3] + "/" + statsRatingList[3], inline=True)
embed.add_field(name="κ²μμ", value=statsList[4] + "ν/" + statsRatingList[4], inline=True)
embed.add_field(name="νκ· λ±μ", value=statsList[5],inline=True)
embed.set_thumbnail(url=f'https:{tierMedalImage}')
embed.set_footer(text='Service provided by Hoplin.',
icon_url='https://avatars2.githubusercontent.com/u/45956041?s=460&u=1caf3b112111cbd9849a2b95a88c3a8f3a15ecfa&v=4')
await message.channel.send("PUBG player " + playerNickname + "'s TPP Ranking information", embed=embed)
except HTTPError as e:
embed = discord.Embed(title="Not existing plyer", description="Can't find player " + playerNickname + "'s information.\nPlease check player's nickname again",color=0x5CD1E5)
await message.channel.send("Error : Not existing player", embed=embed)
print(e)
except AttributeError as e:
embed = discord.Embed(title="Not existing plyer",
description="Can't find player " + playerNickname + "'s information.\nPlease check player's nickname again",
color=0x5CD1E5)
await message.channel.send("Error : Not existing player", embed=embed)
print(e)
if message.content.startswith("!κ²½μμ 2"):#FPP
baseURL = "https://dak.gg/profile/"
playerNickname = ''.join((message.content).split(' ')[1:])
URL = baseURL + quote(playerNickname)
try:
html = urlopen(URL)
bs = BeautifulSoup(html, 'html.parser')
if len(message.content.split(" ")) == 1:
embed = discord.Embed(title="λλ€μμ΄ μ
λ ₯λμ§ μμμ΅λλ€", description="", color=0x5CD1E5)
embed.add_field(name="Player nickname not entered",
value="To use command !κ²½μμ (1 : TPP or 2 : FPP) : !κ²½μμ (Nickname)", inline=False)
embed.set_footer(text='Service provided by Hoplin.',
icon_url='https://avatars2.githubusercontent.com/u/45956041?s=460&u=1caf3b112111cbd9849a2b95a88c3a8f3a15ecfa&v=4')
await message.channel.send("Error : Incorrect command usage ", embed=embed)
else:
accessors = bs.findAll('a', {'href': re.compile('\/statistics\/[A-Za-z]')})
# Season Information : ['PUBG',(season info),(Server),'overview']
seasonInfo = []
for si in bs.findAll('li', {'class': "active"}):
seasonInfo.append(si.text.strip())
serverAccessorAndStatus = []
# To prevent : Parsing Server Status, Make a result like Server:\nOnline. So I need to delete '\n'to get good result
for a in accessors:
serverAccessorAndStatus.append(re.sub(pattern='[\n]', repl="", string=a.text.strip()))
# Varaible serverAccessorAndStatus : [(accessors),(ServerStatus),(Don't needed value)]
# index 0: fpp 1 : tpp
rankElements = bs.findAll('div',{'class' : re.compile('squad ranked [A-Za-z0-9]')})
'''
-> ν΄λμ€ κ°μ κ°μ Έμμ νλ³νλ κ²λ μμ§λ§ μ΄ λ°©λ²μ μ¬μ©ν΄ λ³Έλ€.
-> λ§μ½ κΈ°λ‘μ΄ μ‘΄μ¬ νμ§ μλ κ²½μ° class κ° no_recordλΌλ κ°μ κ°μ§ <div>κ° μμ±λλ€. μ΄ νκ·Έλ‘ λ°μ΄ν° μ 무 νλ³νλ©΄λλ€.
print(rankElements[1].find('div',{'class' : 'no_record'}))
'''
if rankElements[1].find('div',{'class' : 'no_record'}) != None: # μΈλ±μ€ 0 : κ²½μμ fpp -> μ λ³΄κ° μλμ§ μλμ§ μ 무λ₯Ό νλ³νλ€a.
embed = discord.Embed(title="Record not found", description="Solo que record not found.",
color=0x5CD1E5)
embed.add_field(name="Player search from dak.gg", value=URL, inline=False)
embed.set_footer(text='Service provided by Hoplin.',
icon_url='https://avatars2.githubusercontent.com/u/45956041?s=460&u=1caf3b112111cbd9849a2b95a88c3a8f3a15ecfa&v=4')
await message.channel.send("PUBG player " + playerNickname + "'s FPP Ranking information",embed=embed)
else:
#Short of fpp Rank
fR = rankElements[1]
# Tier Information
# Get tier medal image
tierMedalImage = fR.find('div',{'class' : 'grade-info'}).img['src']
# Get tier Information
tierInfo = fR.find('div',{'class' : 'grade-info'}).img['alt']
# Rating Inforamtion
# RP Score
RPScore = fR.find('div',{'class' : 'rating'}).find('span',{'class' : 'caption'}).text
#Get top rate statistics
#λ±μ
topRatioRank = topRatio = fR.find('p',{'class' : 'desc'}).find('span',{'class' : 'rank'}).text
#μμ %
topRatio = fR.find('p',{'class' : 'desc'}).find('span',{'class' : 'top'}).text
# Main : Stats all in here.
mainStatsLayout = fR.find('div',{'class' : 'stats'})
#Stats Data Saved As List
statsList = mainStatsLayout.findAll('p',{'class' : 'value'})# [KDA,μΉλ₯ ,Top10,νκ· λλ, κ²μμ, νκ· λ±μ]
statsRatingList = mainStatsLayout.findAll('span',{'class' : 'top'})#[KDA, μΉλ₯ ,Top10 νκ· λλ, κ²μμ]
for r in range(0,len(statsList)):
# \nμΌλ‘ ν° μ¬λ°±μ΄ μμ΄ split μ²λ¦¬
statsList[r] = statsList[r].text.strip().split('\n')[0]
statsRatingList[r] = statsRatingList[r].text
# νκ· λ±μλ stats Ratingμ νμνμ§ μλλ€.
statsRatingList = statsRatingList[0:5]
embed = discord.Embed(title="Player Unkonw Battle Ground player search from dak.gg", description="",color=0x5CD1E5)
embed.add_field(name="Player search from dak.gg", value=URL, inline=False)
embed.add_field(name="Real Time Accessors and Server Status",
value="Accessors : " + serverAccessorAndStatus[0] + " | " "Server Status : " +
serverAccessorAndStatus[1].split(':')[-1], inline=False)
embed.add_field(name="Player located server", value=seasonInfo[2] + " Server", inline=False)
embed.add_field(name = "Tier / Top Rate / Average Rank",
value = tierInfo + " (" + RPScore + ") / "+topRatio + " / " + topRatioRank,inline=False)
embed.add_field(name="K/D", value=statsList[0] + "/" + statsRatingList[0], inline=True)
embed.add_field(name="μΉλ₯ ", value=statsList[1] + "/" + statsRatingList[1], inline=True)
embed.add_field(name="Top 10 λΉμ¨", value=statsList[2] + "/" + statsRatingList[2], inline=True)
embed.add_field(name="νκ· λλ", value=statsList[3] + "/" + statsRatingList[3], inline=True)
embed.add_field(name="κ²μμ", value=statsList[4] + "ν/" + statsRatingList[4], inline=True)
embed.add_field(name="νκ· λ±μ", value=statsList[5],inline=True)
embed.set_thumbnail(url=f'https:{tierMedalImage}')
embed.set_footer(text='Service provided by Hoplin.',
icon_url='https://avatars2.githubusercontent.com/u/45956041?s=460&u=1caf3b112111cbd9849a2b95a88c3a8f3a15ecfa&v=4')
await message.channel.send("PUBG player " + playerNickname + "'s FPP Ranking information", embed=embed)
except HTTPError as e:
embed = discord.Embed(title="Not existing plyer", description="Can't find player " + playerNickname + "'s information.\nPlease check player's nickname again",color=0x5CD1E5)
await message.channel.send("Error : Not existing player", embed=embed)
except AttributeError as e:
embed = discord.Embed(title="Not existing plyer",
description="Can't find player " + playerNickname + "'s information.\nPlease check player's nickname again",
color=0x5CD1E5)
await message.channel.send("Error : Not existing player", embed=embed)
if message.content.startswith("!λ°°κ·Έμλ‘1"):
baseURL = "https://dak.gg/profile/"
playerNickname = ''.join((message.content).split(' ')[1:])
URL = baseURL + quote(playerNickname)
try:
html = urlopen(URL)
bs = BeautifulSoup(html, 'html.parser')
if len(message.content.split(" ")) == 1:
embed = discord.Embed(title="λλ€μμ΄ μ
λ ₯λμ§ μμμ΅λλ€", description="", color=0x5CD1E5)
embed.add_field(name="Player nickname not entered",
value="To use command !λ°°κ·Έμλ‘ : !λ°°κ·Έμλ‘ (Nickname)", inline=False)
embed.set_footer(text='Service provided by Hoplin.',
icon_url='https://avatars2.githubusercontent.com/u/45956041?s=460&u=1caf3b112111cbd9849a2b95a88c3a8f3a15ecfa&v=4')
await message.channel.send("Error : Incorrect command usage ", embed=embed)
else:
accessors = bs.findAll('a', {'href': re.compile('\/statistics\/[A-Za-z]')})
# Season Information : ['PUBG',(season info),(Server),'overview']
seasonInfo = []
for si in bs.findAll('li', {'class': "active"}):
seasonInfo.append(si.text.strip())
serverAccessorAndStatus = []
# To prevent : Parsing Server Status, Make a result like Server:\nOnline. So I need to delete '\n'to get good result
for a in accessors:
serverAccessorAndStatus.append(re.sub(pattern='[\n]', repl="", string=a.text.strip()))
# Varaible serverAccessorAndStatus : [(accessors),(ServerStatus),(Don't needed value)]
soloQueInfo = bs.find('section', {'class': "solo modeItem"}).find('div', {'class': "mode-section tpp"})
if soloQueInfo == None:
embed = discord.Embed(title="Record not found", description="Solo que record not found.",
color=0x5CD1E5)
embed.add_field(name="Player search from dak.gg", value=URL, inline=False)
await message.channel.send("PUBG player " + playerNickname + "'s TPP solo que information", embed=embed)
else:
# print(soloQueInfo)
# Get total playtime
soloQueTotalPlayTime = soloQueInfo.find('span', {'class': "time_played"}).text.strip()
# Get Win/Top10/Lose : [win,top10,lose]
soloQueGameWL = soloQueInfo.find('em').text.strip().split(' ')
# RankPoint
rankPoint = soloQueInfo.find('span', {'class': 'value'}).text
# Tier image url, tier
tierInfos = soloQueInfo.find('img', {
'src': re.compile('\/\/static\.dak\.gg\/images\/icons\/tier\/[A-Za-z0-9_.]')})
tierImage = "https:" + tierInfos['src']
print(tierImage)
tier = tierInfos['alt']
# Comprehensive info
comInfo = []
# [K/D,μΉλ₯ ,Top10,νκ· λλ,κ²μμ, μ΅λ€ν¬μ,ν€λμ·,μ 격거리,μμ‘΄,νκ· μμ]
for ci in soloQueInfo.findAll('p', {'class': 'value'}):
comInfo.append(''.join(ci.text.split()))
embed = discord.Embed(title="Player Unkonw Battle Ground player search from dak.gg", description="",
color=0x5CD1E5)
embed.add_field(name="Player search from dak.gg", value=URL, inline=False)
embed.add_field(name="Real Time Accessors and Server Status",
value="Accessors : " + serverAccessorAndStatus[0] + " | " "Server Status : " +
serverAccessorAndStatus[1].split(':')[-1], inline=False)
embed.add_field(name="Player located server", value=seasonInfo[2] + " Server / Total playtime : " +soloQueTotalPlayTime, inline=False)
embed.add_field(name="Tier",
value=tier + " ("+rankPoint+"p)" , inline=False)
embed.add_field(name="K/D", value=comInfo[0], inline=True)
embed.add_field(name="μΉλ₯ ", value=comInfo[1], inline=True)
embed.add_field(name="Top 10 λΉμ¨", value=comInfo[2], inline=True)
embed.add_field(name="νκ· λλ", value=comInfo[3], inline=True)
embed.add_field(name="κ²μμ", value=comInfo[4] + "ν", inline=True)
embed.add_field(name="μ΅λ€ν¬μ", value=comInfo[5] + "ν¬", inline=True)
embed.add_field(name="ν€λμ· λΉμ¨", value=comInfo[6], inline=True)
embed.add_field(name="μ 격거리", value=comInfo[7], inline=True)
embed.add_field(name="νκ· μμ‘΄μκ°", value=comInfo[8], inline=True)
embed.set_footer(text='Service provided by Hoplin.',
icon_url='https://avatars2.githubusercontent.com/u/45956041?s=460&u=1caf3b112111cbd9849a2b95a88c3a8f3a15ecfa&v=4')
await message.channel.send("PUBG player " + playerNickname + "'s TPP solo que information", embed=embed)
except HTTPError as e:
embed = discord.Embed(title="Not existing plyer", description="Can't find player " + playerNickname + "'s information.\nPlease check player's nickname again",color=0x5CD1E5)
await message.channel.send("Error : Not existing player", embed=embed)
except AttributeError as e:
embed = discord.Embed(title="Not existing plyer",
description="Can't find player " + playerNickname + "'s information.\nPlease check player's nickname again",
color=0x5CD1E5)
await message.channel.send("Error : Not existing player", embed=embed)
if message.content.startswith("!λ°°κ·Έλμ€1"):
baseURL = "https://dak.gg/profile/"
playerNickname = ''.join((message.content).split(' ')[1:])
URL = baseURL + quote(playerNickname)
try:
html = urlopen(URL)
bs = BeautifulSoup(html, 'html.parser')
if len(message.content.split(" ")) == 1:
embed = discord.Embed(title="λλ€μμ΄ μ
λ ₯λμ§ μμμ΅λλ€", description="", color=0x5CD1E5)
embed.add_field(name="Player nickname not entered",
value="To use command !λ°°κ·Έμ€μΏΌλ : !λ°°κ·Έμ€μΏΌλ (Nickname)", inline=False)
embed.set_footer(text='Service provided by Hoplin.',
icon_url='https://avatars2.githubusercontent.com/u/45956041?s=460&u=1caf3b112111cbd9849a2b95a88c3a8f3a15ecfa&v=4')
await message.channel.send("Error : Incorrect command usage ", embed=embed)
else:
accessors = bs.findAll('a', {'href': re.compile('\/statistics\/[A-Za-z]')})
# Season Information : ['PUBG',(season info),(Server),'overview']
seasonInfo = []
for si in bs.findAll('li', {'class': "active"}):
seasonInfo.append(si.text.strip())
serverAccessorAndStatus = []
# To prevent : Parsing Server Status, Make a result like Server:\nOnline. So I need to delete '\n'to get good result
for a in accessors:
serverAccessorAndStatus.append(re.sub(pattern='[\n]', repl="", string=a.text.strip()))
# Varaible serverAccessorAndStatus : [(accessors),(ServerStatus),(Don't needed value)]
duoQueInfo = bs.find('section',{'class' : "duo modeItem"}).find('div',{'class' : "mode-section tpp"})
if duoQueInfo == None:
embed = discord.Embed(title="Record not found", description="Duo que record not found.",
color=0x5CD1E5)
embed.add_field(name="Player search from dak.gg", value=URL, inline=False)
await message.channel.send("PUBG player " + playerNickname + "'s TPP duo que information", embed=embed)
else:
# print(duoQueInfo)
# Get total playtime
duoQueTotalPlayTime = duoQueInfo.find('span', {'class': "time_played"}).text.strip()
# Get Win/Top10/Lose : [win,top10,lose]
duoQueGameWL = duoQueInfo.find('em').text.strip().split(' ')
# RankPoint
rankPoint = duoQueInfo.find('span', {'class': 'value'}).text
# Tier image url, tier
tierInfos = duoQueInfo.find('img', {
'src': re.compile('\/\/static\.dak\.gg\/images\/icons\/tier\/[A-Za-z0-9_.]')})
tierImage = "https:" + tierInfos['src']
tier = tierInfos['alt']
# Comprehensive info
comInfo = []
# [K/D,μΉλ₯ ,Top10,νκ· λλ,κ²μμ, μ΅λ€ν¬μ,ν€λμ·,μ 격거리,μμ‘΄,νκ· μμ]
for ci in duoQueInfo.findAll('p', {'class': 'value'}):
comInfo.append(''.join(ci.text.split()))
embed = discord.Embed(title="Player Unkonw Battle Ground player search from dak.gg", description="",
color=0x5CD1E5)
embed.add_field(name="Player search from dak.gg", value=URL, inline=False)
embed.add_field(name="Real Time Accessors and Server Status",
value="Accessors : " + serverAccessorAndStatus[0] + " | " "Server Status : " +
serverAccessorAndStatus[1].split(':')[-1], inline=False)
embed.add_field(name="Player located server and total playtime", value=seasonInfo[2] + " Server / Total playtime : " +duoQueTotalPlayTime, inline=False)
embed.add_field(name="Tier(Rank Point)",
value=tier + " ("+rankPoint+"p)", inline=False)
embed.add_field(name="K/D", value=comInfo[0], inline=True)
embed.add_field(name="μΉλ₯ ", value=comInfo[1], inline=True)
embed.add_field(name="Top 10 λΉμ¨", value=comInfo[2], inline=True)
embed.add_field(name="νκ· λλ", value=comInfo[3], inline=True)
embed.add_field(name="κ²μμ", value=comInfo[4] + "ν", inline=True)
embed.add_field(name="μ΅λ€ν¬μ", value=comInfo[5] + "ν¬", inline=True)
embed.add_field(name="ν€λμ· λΉμ¨", value=comInfo[6], inline=True)
embed.add_field(name="μ 격거리", value=comInfo[7], inline=True)
embed.add_field(name="νκ· μμ‘΄μκ°", value=comInfo[8], inline=True)
embed.set_footer(text='Service provided by Hoplin.',
icon_url='https://avatars2.githubusercontent.com/u/45956041?s=460&u=1caf3b112111cbd9849a2b95a88c3a8f3a15ecfa&v=4')
await message.channel.send("PUBG player " + playerNickname + "'s TPP duo que information", embed=embed)
except HTTPError as e:
embed = discord.Embed(title="Not existing plyer",
description="Can't find player " + playerNickname + "'s information.\nPlease check player's nickname again",
color=0x5CD1E5)
await message.channel.send("Error : Not existing player", embed=embed)
except AttributeError as e:
embed = discord.Embed(title="Not existing plyer",
description="Can't find player " + playerNickname + "'s information.\nPlease check player's nickname again",
color=0x5CD1E5)
await message.channel.send("Error : Not existing player", embed=embed)
    # --- Command "!λ°°κ·Έμ€μΏΌλ1": look up a PUBG player's TPP squad-queue stats by scraping dak.gg ---
    if message.content.startswith("!λ°°κ·Έμ€μΏΌλ1"):
        baseURL = "https://dak.gg/profile/"
        # Everything after the command word is joined and used as the nickname.
        playerNickname = ''.join((message.content).split(' ')[1:])
        URL = baseURL + quote(playerNickname)
        try:
            html = urlopen(URL)
            bs = BeautifulSoup(html, 'html.parser')
            # NOTE(review): this missing-nickname check runs only AFTER the page
            # has already been fetched; an empty nickname can therefore surface
            # as HTTPError below instead of this usage message. Consider
            # validating before urlopen().
            if len(message.content.split(" ")) == 1:
                # Title is mojibake-garbled Korean ("nickname was not entered").
                embed = discord.Embed(title="λλ€μμ΄ μλ ₯λμ§ μμμ΅λλ€", description="", color=0x5CD1E5)
                embed.add_field(name="Player nickname not entered",
                                value="To use command !λ°°κ·Έμλ‘ : !λ°°κ·Έμλ‘ (Nickname)", inline=False)
                embed.set_footer(text='Service provided by Hoplin.',
                                 icon_url='https://avatars2.githubusercontent.com/u/45956041?s=460&u=1caf3b112111cbd9849a2b95a88c3a8f3a15ecfa&v=4')
                await message.channel.send("Error : Incorrect command usage ", embed=embed)
            else:
                accessors = bs.findAll('a', {'href': re.compile('\/statistics\/[A-Za-z]')})
                # Season Information : ['PUBG',(season info),(Server),'overview']
                seasonInfo = []
                for si in bs.findAll('li', {'class': "active"}):
                    seasonInfo.append(si.text.strip())
                serverAccessorAndStatus = []
                # To prevent : Parsing Server Status, Make a result like Server:\nOnline. So I need to delete '\n'to get good result
                for a in accessors:
                    serverAccessorAndStatus.append(re.sub(pattern='[\n]', repl="", string=a.text.strip()))
                # Varaible serverAccessorAndStatus : [(accessors),(ServerStatus),(Don't needed value)]
                # TPP squad section of the profile page; None when the player
                # has no record for this mode in the current season.
                squadQueInfo = bs.find('section',{'class' : "squad modeItem"}).find('div',{'class' : "mode-section tpp"})
                if squadQueInfo == None:
                    embed = discord.Embed(title="Record not found", description="Squad que record not found.",
                                          color=0x5CD1E5)
                    embed.add_field(name="Player search from dak.gg", value=URL, inline=False)
                    await message.channel.send("PUBG player " + playerNickname + "'s TPP squad que information", embed=embed)
                else:
                    # print(duoQueInfo)
                    # Get total playtime
                    squadQueTotalPlayTime = squadQueInfo.find('span', {'class': "time_played"}).text.strip()
                    # Get Win/Top10/Lose : [win,top10,lose]
                    squadQueGameWL = squadQueInfo.find('em').text.strip().split(' ')  # NOTE(review): assigned but never used below
                    # RankPoint
                    rankPoint = squadQueInfo.find('span', {'class': 'value'}).text
                    # Tier image url, tier
                    tierInfos = squadQueInfo.find('img', {
                        'src': re.compile('\/\/static\.dak\.gg\/images\/icons\/tier\/[A-Za-z0-9_.]')})
                    tierImage = "https:" + tierInfos['src']  # NOTE(review): unused in this branch
                    tier = tierInfos['alt']
                    # Comprehensive info
                    comInfo = []
                    # [K/D,μΉλ₯ ,Top10,νκ· λλ,κ²μμ, μ΅λ€ν¬μ,ν€λμ·,μ 격거리,μμ‘΄,νκ· μμ]
                    for ci in squadQueInfo.findAll('p', {'class': 'value'}):
                        comInfo.append(''.join(ci.text.split()))
                    embed = discord.Embed(title="Player Unkonw Battle Ground player search from dak.gg", description="",
                                          color=0x5CD1E5)
                    embed.add_field(name="Player search from dak.gg", value=URL, inline=False)
                    embed.add_field(name="Real Time Accessors and Server Status",
                                    value="Accessors : " + serverAccessorAndStatus[0] + " | " "Server Status : " +
                                          serverAccessorAndStatus[1].split(':')[-1], inline=False)
                    embed.add_field(name="Player located server", value=seasonInfo[2] + " Server / Total playtime : " +squadQueTotalPlayTime, inline=False)
                    embed.add_field(name="Tier(Rank Point)",
                                    value=tier + " (" + rankPoint + "p)", inline=False)
                    embed.add_field(name="K/D", value=comInfo[0] , inline=True)
                    embed.add_field(name="μΉλ₯ ", value=comInfo[1] , inline=True)
                    embed.add_field(name="Top 10 λΉμ¨", value=comInfo[2] , inline=True)
                    embed.add_field(name="νκ· λλ", value=comInfo[3] , inline=True)
                    embed.add_field(name="κ²μμ", value=comInfo[4] + "ν", inline=True)
                    embed.add_field(name="μ΅λ€ν¬μ", value=comInfo[5] + "ν¬", inline=True)
                    embed.add_field(name="ν€λμ· λΉμ¨", value=comInfo[6], inline=True)
                    embed.add_field(name="μ 격거리", value=comInfo[7], inline=True)
                    embed.add_field(name="νκ· μμ‘΄μκ°", value=comInfo[8], inline=True)
                    embed.set_footer(text='Service provided by Hoplin.',
                                     icon_url='https://avatars2.githubusercontent.com/u/45956041?s=460&u=1caf3b112111cbd9849a2b95a88c3a8f3a15ecfa&v=4')
                    await message.channel.send("PUBG player " + playerNickname + "'s TPP squad que information", embed=embed)
        # dak.gg answers unknown profiles with an HTTP error page.
        except HTTPError as e:
            embed = discord.Embed(title="Not existing plyer",
                                  description="Can't find player " + playerNickname + "'s information.\nPlease check player's nickname again",
                                  color=0x5CD1E5)
            await message.channel.send("Error : Not existing player", embed=embed)
        # Raised when an expected element is missing from the parsed page.
        except AttributeError as e:
            embed = discord.Embed(title="Not existing plyer",
                                  description="Can't find player " + playerNickname + "'s information.\nPlease check player's nickname again",
                                  color=0x5CD1E5)
            await message.channel.send("Error : Not existing player", embed=embed)
    # --- Command "!λ°°κ·Έμλ‘2": look up a PUBG player's FPP solo-queue stats by scraping dak.gg ---
    if message.content.startswith("!λ°°κ·Έμλ‘2"):
        baseURL = "https://dak.gg/profile/"
        # Everything after the command word is joined and used as the nickname.
        playerNickname = ''.join((message.content).split(' ')[1:])
        URL = baseURL + quote(playerNickname)
        try:
            html = urlopen(URL)
            bs = BeautifulSoup(html, 'html.parser')
            # NOTE(review): missing-nickname check happens after the network
            # fetch; an empty nickname may raise HTTPError before reaching it.
            if len(message.content.split(" ")) == 1:
                # Title is mojibake-garbled Korean ("nickname was not entered").
                embed = discord.Embed(title="λλ€μμ΄ μλ ₯λμ§ μμμ΅λλ€", description="", color=0x5CD1E5)
                embed.add_field(name="Player nickname not entered",
                                value="To use command !λ°°κ·Έμλ‘ : !λ°°κ·Έμλ‘ (Nickname)", inline=False)
                embed.set_footer(text='Service provided by Hoplin.',
                                 icon_url='https://avatars2.githubusercontent.com/u/45956041?s=460&u=1caf3b112111cbd9849a2b95a88c3a8f3a15ecfa&v=4')
                await message.channel.send("Error : Incorrect command usage ", embed=embed)
            else:
                accessors = bs.findAll('a', {'href': re.compile('\/statistics\/[A-Za-z]')})
                # Season Information : ['PUBG',(season info),(Server),'overview']
                seasonInfo = []
                for si in bs.findAll('li', {'class': "active"}):
                    seasonInfo.append(si.text.strip())
                serverAccessorAndStatus = []
                # To prevent : Parsing Server Status, Make a result like Server:\nOnline. So I need to delete '\n'to get good result
                for a in accessors:
                    serverAccessorAndStatus.append(re.sub(pattern='[\n]', repl="", string=a.text.strip()))
                # Varaible serverAccessorAndStatus : [(accessors),(ServerStatus),(Don't needed value)]
                # FPP solo section of the profile page; None when no record exists.
                soloQueInfo = bs.find('section', {'class': "solo modeItem"}).find('div', {'class': "mode-section fpp"})
                if soloQueInfo == None:
                    embed = discord.Embed(title="Record not found", description="Solo que record not found.",
                                          color=0x5CD1E5)
                    embed.add_field(name="Player search from dak.gg", value=URL, inline=False)
                    await message.channel.send("PUBG player " + playerNickname + "'s FPP solo que information",
                                               embed=embed)
                else:
                    # print(soloQueInfo)
                    # Get total playtime
                    soloQueTotalPlayTime = soloQueInfo.find('span', {'class': "time_played"}).text.strip()
                    # Get Win/Top10/Lose : [win,top10,lose]
                    soloQueGameWL = soloQueInfo.find('em').text.strip().split(' ')  # NOTE(review): assigned but never used below
                    # RankPoint
                    rankPoint = soloQueInfo.find('span', {'class': 'value'}).text
                    # Tier image url, tier
                    tierInfos = soloQueInfo.find('img', {
                        'src': re.compile('\/\/static\.dak\.gg\/images\/icons\/tier\/[A-Za-z0-9_.]')})
                    tierImage = "https:" + tierInfos['src']
                    print(tierImage)  # NOTE(review): leftover debug print
                    tier = tierInfos['alt']
                    # Comprehensive info
                    comInfo = []
                    # [K/D,μΉλ₯ ,Top10,νκ· λλ,κ²μμ, μ΅λ€ν¬μ,ν€λμ·,μ 격거리,μμ‘΄,νκ· μμ]
                    for ci in soloQueInfo.findAll('p', {'class': 'value'}):
                        comInfo.append(''.join(ci.text.split()))
                    embed = discord.Embed(title="Player Unkonw Battle Ground player search from dak.gg", description="",
                                          color=0x5CD1E5)
                    embed.add_field(name="Player search from dak.gg", value=URL, inline=False)
                    embed.add_field(name="Real Time Accessors and Server Status",
                                    value="Accessors : " + serverAccessorAndStatus[0] + " | " "Server Status : " +
                                          serverAccessorAndStatus[1].split(':')[-1], inline=False)
                    embed.add_field(name="Player located server",
                                    value=seasonInfo[2] + " Server / Total playtime : " + soloQueTotalPlayTime,
                                    inline=False)
                    embed.add_field(name="Tier(Rank Point)",
                                    value=tier + " (" + rankPoint + "p)", inline=False)
                    embed.add_field(name="K/D", value=comInfo[0], inline=True)
                    embed.add_field(name="μΉλ₯ ", value=comInfo[1], inline=True)
                    embed.add_field(name="Top 10 λΉμ¨", value=comInfo[2], inline=True)
                    embed.add_field(name="νκ· λλ", value=comInfo[3], inline=True)
                    embed.add_field(name="κ²μμ", value=comInfo[4] + "ν" , inline=True)
                    embed.add_field(name="μ΅λ€ν¬μ", value=comInfo[5] + "ν¬" , inline=True)
                    embed.add_field(name="ν€λμ· λΉμ¨", value=comInfo[6] , inline=True)
                    embed.add_field(name="μ 격거리", value=comInfo[7], inline=True)
                    embed.add_field(name="νκ· μμ‘΄μκ°", value=comInfo[8] , inline=True)
                    embed.set_footer(text='Service provided by Hoplin.',
                                     icon_url='https://avatars2.githubusercontent.com/u/45956041?s=460&u=1caf3b112111cbd9849a2b95a88c3a8f3a15ecfa&v=4')
                    await message.channel.send("PUBG player " + playerNickname + "'s FPP solo que information",
                                               embed=embed)
        # dak.gg answers unknown profiles with an HTTP error page.
        except HTTPError as e:
            embed = discord.Embed(title="Not existing plyer",
                                  description="Can't find player " + playerNickname + "'s information.\nPlease check player's nickname again",
                                  color=0x5CD1E5)
            await message.channel.send("Error : Not existing player", embed=embed)
        # Raised when an expected element is missing from the parsed page.
        except AttributeError as e:
            embed = discord.Embed(title="Not existing plyer",
                                  description="Can't find player " + playerNickname + "'s information.\nPlease check player's nickname again",
                                  color=0x5CD1E5)
            await message.channel.send("Error : Not existing player", embed=embed)
    # --- Command "!λ°°κ·Έλμ€2": look up a PUBG player's FPP duo-queue stats by scraping dak.gg ---
    if message.content.startswith("!λ°°κ·Έλμ€2"):
        baseURL = "https://dak.gg/profile/"
        # Everything after the command word is joined and used as the nickname.
        playerNickname = ''.join((message.content).split(' ')[1:])
        URL = baseURL + quote(playerNickname)
        try:
            html = urlopen(URL)
            bs = BeautifulSoup(html, 'html.parser')
            # NOTE(review): missing-nickname check happens after the network
            # fetch; an empty nickname may raise HTTPError before reaching it.
            if len(message.content.split(" ")) == 1:
                # Title is mojibake-garbled Korean ("nickname was not entered").
                embed = discord.Embed(title="λλ€μμ΄ μλ ₯λμ§ μμμ΅λλ€", description="", color=0x5CD1E5)
                embed.add_field(name="Player nickname not entered",
                                value="To use command !λ°°κ·Έμ€μΏΌλ : !λ°°κ·Έμ€μΏΌλ (Nickname)", inline=False)
                embed.set_footer(text='Service provided by Hoplin.',
                                 icon_url='https://avatars2.githubusercontent.com/u/45956041?s=460&u=1caf3b112111cbd9849a2b95a88c3a8f3a15ecfa&v=4')
                await message.channel.send("Error : Incorrect command usage ", embed=embed)
            else:
                accessors = bs.findAll('a', {'href': re.compile('\/statistics\/[A-Za-z]')})
                # Season Information : ['PUBG',(season info),(Server),'overview']
                seasonInfo = []
                for si in bs.findAll('li', {'class': "active"}):
                    seasonInfo.append(si.text.strip())
                serverAccessorAndStatus = []
                # To prevent : Parsing Server Status, Make a result like Server:\nOnline. So I need to delete '\n'to get good result
                for a in accessors:
                    serverAccessorAndStatus.append(re.sub(pattern='[\n]', repl="", string=a.text.strip()))
                # Varaible serverAccessorAndStatus : [(accessors),(ServerStatus),(Don't needed value)]
                # FPP duo section of the profile page; None when no record exists.
                duoQueInfo = bs.find('section', {'class': "duo modeItem"}).find('div', {'class': "mode-section fpp"})
                if duoQueInfo == None:
                    embed = discord.Embed(title="Record not found", description="Duo que record not found.",
                                          color=0x5CD1E5)
                    embed.add_field(name="Player search from dak.gg", value=URL, inline=False)
                    await message.channel.send("PUBG player " + playerNickname + "'s FPP duo que information",
                                               embed=embed)
                else:
                    # print(duoQueInfo)
                    # Get total playtime
                    duoQueTotalPlayTime = duoQueInfo.find('span', {'class': "time_played"}).text.strip()
                    # Get Win/Top10/Lose : [win,top10,lose]
                    duoQueGameWL = duoQueInfo.find('em').text.strip().split(' ')  # NOTE(review): assigned but never used below
                    # RankPoint
                    rankPoint = duoQueInfo.find('span', {'class': 'value'}).text
                    # Tier image url, tier
                    tierInfos = duoQueInfo.find('img', {
                        'src': re.compile('\/\/static\.dak\.gg\/images\/icons\/tier\/[A-Za-z0-9_.]')})
                    tierImage = "https:" + tierInfos['src']  # NOTE(review): unused in this branch
                    tier = tierInfos['alt']
                    # Comprehensive info
                    comInfo = []
                    # [K/D,μΉλ₯ ,Top10,νκ· λλ,κ²μμ, μ΅λ€ν¬μ,ν€λμ·,μ 격거리,μμ‘΄,νκ· μμ]
                    for ci in duoQueInfo.findAll('p', {'class': 'value'}):
                        comInfo.append(''.join(ci.text.split()))
                    embed = discord.Embed(title="Player Unkonw Battle Ground player search from dak.gg", description="",
                                          color=0x5CD1E5)
                    embed.add_field(name="Player search from dak.gg", value=URL, inline=False)
                    embed.add_field(name="Real Time Accessors and Server Status",
                                    value="Accessors : " + serverAccessorAndStatus[0] + " | " "Server Status : " +
                                          serverAccessorAndStatus[1].split(':')[-1], inline=False)
                    embed.add_field(name="Player located server and total playtime",
                                    value=seasonInfo[2] + " Server / Total playtime : " + duoQueTotalPlayTime,
                                    inline=False)
                    embed.add_field(name="Tier(Rank Point)",
                                    value=tier + " (" + rankPoint + "p)", inline=False)
                    embed.add_field(name="K/D", value=comInfo[0] , inline=True)
                    embed.add_field(name="μΉλ₯ ", value=comInfo[1], inline=True)
                    embed.add_field(name="Top 10 λΉμ¨", value=comInfo[2], inline=True)
                    embed.add_field(name="νκ· λλ", value=comInfo[3], inline=True)
                    embed.add_field(name="κ²μμ", value=comInfo[4] + "ν", inline=True)
                    embed.add_field(name="μ΅λ€ν¬μ", value=comInfo[5] + "ν¬", inline=True)
                    embed.add_field(name="ν€λμ· λΉμ¨", value=comInfo[6] , inline=True)
                    embed.add_field(name="μ 격거리", value=comInfo[7] , inline=True)
                    embed.add_field(name="νκ· μμ‘΄μκ°", value=comInfo[8] , inline=True)
                    embed.set_footer(text='Service provided by Hoplin.',
                                     icon_url='https://avatars2.githubusercontent.com/u/45956041?s=460&u=1caf3b112111cbd9849a2b95a88c3a8f3a15ecfa&v=4')
                    await message.channel.send("PUBG player " + playerNickname + "'s FPP duo que information",
                                               embed=embed)
        # dak.gg answers unknown profiles with an HTTP error page.
        except HTTPError as e:
            embed = discord.Embed(title="Not existing plyer",
                                  description="Can't find player " + playerNickname + "'s information.\nPlease check player's nickname again",
                                  color=0x5CD1E5)
            await message.channel.send("Error : Not existing player", embed=embed)
        # Raised when an expected element is missing from the parsed page.
        except AttributeError as e:
            embed = discord.Embed(title="Not existing plyer",
                                  description="Can't find player " + playerNickname + "'s information.\nPlease check player's nickname again",
                                  color=0x5CD1E5)
            await message.channel.send("Error : Not existing player", embed=embed)
    # --- Command "!λ°°κ·Έμ€μΏΌλ2": look up a PUBG player's FPP squad-queue stats by scraping dak.gg ---
    if message.content.startswith("!λ°°κ·Έμ€μΏΌλ2"):
        baseURL = "https://dak.gg/profile/"
        # Everything after the command word is joined and used as the nickname.
        playerNickname = ''.join((message.content).split(' ')[1:])
        URL = baseURL + quote(playerNickname)
        try:
            html = urlopen(URL)
            bs = BeautifulSoup(html, 'html.parser')
            # NOTE(review): missing-nickname check happens after the network
            # fetch; an empty nickname may raise HTTPError before reaching it.
            if len(message.content.split(" ")) == 1:
                # Title is mojibake-garbled Korean ("nickname was not entered").
                embed = discord.Embed(title="λλ€μμ΄ μλ ₯λμ§ μμμ΅λλ€", description="", color=0x5CD1E5)
                embed.add_field(name="Player nickname not entered",
                                value="To use command !λ°°κ·Έμλ‘ : !λ°°κ·Έμλ‘ (Nickname)", inline=False)
                embed.set_footer(text='Service provided by Hoplin.',
                                 icon_url='https://avatars2.githubusercontent.com/u/45956041?s=460&u=1caf3b112111cbd9849a2b95a88c3a8f3a15ecfa&v=4')
                await message.channel.send("Error : Incorrect command usage ", embed=embed)
            else:
                accessors = bs.findAll('a', {'href': re.compile('\/statistics\/[A-Za-z]')})
                # Season Information : ['PUBG',(season info),(Server),'overview']
                seasonInfo = []
                for si in bs.findAll('li', {'class': "active"}):
                    seasonInfo.append(si.text.strip())
                serverAccessorAndStatus = []
                # To prevent : Parsing Server Status, Make a result like Server:\nOnline. So I need to delete '\n'to get good result
                for a in accessors:
                    serverAccessorAndStatus.append(re.sub(pattern='[\n]', repl="", string=a.text.strip()))
                # Varaible serverAccessorAndStatus : [(accessors),(ServerStatus),(Don't needed value)]
                # FPP squad section of the profile page; None when no record exists.
                squadQueInfo = bs.find('section', {'class': "squad modeItem"}).find('div',
                                                                                    {'class': "mode-section fpp"})
                if squadQueInfo == None:
                    embed = discord.Embed(title="Record not found", description="Squad que record not found.",
                                          color=0x5CD1E5)
                    embed.add_field(name="Player search from dak.gg", value=URL, inline=False)
                    await message.channel.send("PUBG player " + playerNickname + "'s FPP squad que information",
                                               embed=embed)
                else:
                    # print(duoQueInfo)
                    # Get total playtime
                    squadQueTotalPlayTime = squadQueInfo.find('span', {'class': "time_played"}).text.strip()
                    # Get Win/Top10/Lose : [win,top10,lose]
                    squadQueGameWL = squadQueInfo.find('em').text.strip().split(' ')  # NOTE(review): assigned but never used below
                    # RankPoint
                    rankPoint = squadQueInfo.find('span', {'class': 'value'}).text
                    # Tier image url, tier
                    tierInfos = squadQueInfo.find('img', {
                        'src': re.compile('\/\/static\.dak\.gg\/images\/icons\/tier\/[A-Za-z0-9_.]')})
                    tierImage = "https:" + tierInfos['src']  # NOTE(review): unused in this branch
                    tier = tierInfos['alt']
                    # Comprehensive info
                    comInfo = []
                    # [K/D,μΉλ₯ ,Top10,νκ· λλ,κ²μμ, μ΅λ€ν¬μ,ν€λμ·,μ 격거리,μμ‘΄,νκ· μμ]
                    for ci in squadQueInfo.findAll('p', {'class': 'value'}):
                        comInfo.append(''.join(ci.text.split()))
                    embed = discord.Embed(title="Player Unkonw Battle Ground player search from dak.gg", description="",
                                          color=0x5CD1E5)
                    embed.add_field(name="Player search from dak.gg", value=URL, inline=False)
                    embed.add_field(name="Real Time Accessors and Server Status",
                                    value="Accessors : " + serverAccessorAndStatus[0] + " | " "Server Status : " +
                                          serverAccessorAndStatus[1].split(':')[-1], inline=False)
                    embed.add_field(name="Player located server",
                                    value=seasonInfo[2] + " Server / Total playtime : " + squadQueTotalPlayTime,
                                    inline=False)
                    embed.add_field(name="Tier(Rank Point)",
                                    value=tier + " (" + rankPoint + "p)", inline=False)
                    embed.add_field(name="K/D", value=comInfo[0], inline=True)
                    embed.add_field(name="μΉλ₯ ", value=comInfo[1], inline=True)
                    embed.add_field(name="Top 10 λΉμ¨", value=comInfo[2], inline=True)
                    embed.add_field(name="νκ· λλ", value=comInfo[3], inline=True)
                    embed.add_field(name="κ²μμ", value=comInfo[4] + "ν", inline=True)
                    embed.add_field(name="μ΅λ€ν¬μ", value=comInfo[5] + "ν¬", inline=True)
                    embed.add_field(name="ν€λμ· λΉμ¨", value=comInfo[6] , inline=True)
                    embed.add_field(name="μ 격거리", value=comInfo[7], inline=True)
                    embed.add_field(name="νκ· μμ‘΄μκ°", value=comInfo[8], inline=True)
                    embed.set_footer(text='Service provided by Hoplin.',
                                     icon_url='https://avatars2.githubusercontent.com/u/45956041?s=460&u=1caf3b112111cbd9849a2b95a88c3a8f3a15ecfa&v=4')
                    await message.channel.send("PUBG player " + playerNickname + "'s FPP squad que information",
                                               embed=embed)
        # dak.gg answers unknown profiles with an HTTP error page.
        except HTTPError as e:
            embed = discord.Embed(title="Not existing plyer",
                                  description="Can't find player " + playerNickname + "'s information.\nPlease check player's nickname again",
                                  color=0x5CD1E5)
            await message.channel.send("Error : Not existing player", embed=embed)
        # Raised when an expected element is missing from the parsed page.
        except AttributeError as e:
            embed = discord.Embed(title="Not existing plyer",
                                  description="Can't find player " + playerNickname + "'s information.\nPlease check player's nickname again",
                                  color=0x5CD1E5)
            await message.channel.send("Error : Not existing player", embed=embed)
client.run(token)
| 53,556 | 0 | 44 |
b8808619cc1183cfb2b684a8df56450fbbbcf6bc | 1,913 | py | Python | test_scripts/functional_tests/pool/close_pool_ledger_after_reopen_pool_test.py | hyperledger/indy-post-install-automation | a19cb3c66f0adea6bb4c1fc20e1509cc97bd3d5f | [
"Apache-2.0"
] | 2 | 2021-08-23T15:20:22.000Z | 2021-12-03T01:58:02.000Z | test_scripts/functional_tests/pool/close_pool_ledger_after_reopen_pool_test.py | hyperledger-archives/indy-post-install-automation | a19cb3c66f0adea6bb4c1fc20e1509cc97bd3d5f | [
"Apache-2.0"
] | 1 | 2018-02-22T10:04:41.000Z | 2018-02-22T10:04:41.000Z | test_scripts/functional_tests/pool/close_pool_ledger_after_reopen_pool_test.py | hyperledger/indy-post-install-automation | a19cb3c66f0adea6bb4c1fc20e1509cc97bd3d5f | [
"Apache-2.0"
] | 7 | 2018-01-03T20:45:48.000Z | 2019-08-12T11:02:31.000Z | """
Created on Dec 8, 2017
@author: nhan.nguyen
Verify that user can close a reopened pool ledger.
"""
from indy import pool
from utilities import utils
from utilities import common, constant
from test_scripts.functional_tests.pool.pool_test_base import PoolTestBase
import pytest
| 37.509804 | 79 | 0.590695 | """
Created on Dec 8, 2017
@author: nhan.nguyen
Verify that user can close a reopened pool ledger.
"""
from indy import pool
from utilities import utils
from utilities import common, constant
from test_scripts.functional_tests.pool.pool_test_base import PoolTestBase
import pytest
class TestCloseReopenedPoolLedgerConfig(PoolTestBase):
    """Verify that a pool ledger can be closed again after being reopened.

    Flow: create/open -> close -> reopen -> close, then assert the final
    close succeeded (utils.perform returns None on success here).
    """

    @pytest.mark.asyncio
    async def test(self):
        # 1. Create pool ledger config.
        # 2. Open pool ledger.
        self.pool_handle = await \
            common.create_and_open_pool_ledger_for_steps(self.steps,
                                                         self.pool_name,
                                                         constant.
                                                         pool_genesis_txn_file)

        # 3. Close pool ledger.
        self.steps.add_step("Close pool ledger")
        await utils.perform(self.steps, pool.close_pool_ledger,
                            self.pool_handle)

        # 4. Reopen pool ledger.
        self.steps.add_step("Reopen pool ledger")
        self.pool_handle = await \
            utils.perform(self.steps, pool.open_pool_ledger,
                          self.pool_name, None)

        # 5. Close reopened pool ledger.
        # ignore_exception=True so a failure is captured in `result`
        # instead of aborting the test before the explicit check below.
        self.steps.add_step("Close reopened pool ledger")
        result = await utils.perform(self.steps, pool.close_pool_ledger,
                                     self.pool_handle, ignore_exception=True)

        # 6. Verify that reopened pool ledger is closed successfully.
        self.steps.add_step("Verify that reopened pool "
                            "ledger is closed successfully")
        error_message = "Cannot close reopened pool ledger"
        if utils.check(self.steps, error_message,
                       condition=lambda: result is None):
            # prevent post-condition close pool ledger again.
            self.pool_handle = None
| 1,521 | 85 | 23 |
5302279d5e263be1aa5efb48ba63ba98f8a24bf7 | 13,981 | py | Python | KerasMps.py | KanHarI/MatrixPowerSeries | d08f8761d32212f895a3cd0467c9a964015bc88e | [
"MIT"
] | null | null | null | KerasMps.py | KanHarI/MatrixPowerSeries | d08f8761d32212f895a3cd0467c9a964015bc88e | [
"MIT"
] | null | null | null | KerasMps.py | KanHarI/MatrixPowerSeries | d08f8761d32212f895a3cd0467c9a964015bc88e | [
"MIT"
] | null | null | null |
from keras.engine.topology import Layer
from keras import backend as K
import tensorflow as tf
import numpy as np
import random
import math
import ComplexTensor as ct
# This layer represets a power series
# a_0*I+a_1*X+a_2*X^2+...+a_n*X^n
# Where X is a complex input matrix, and the coefficients a_n are complex.
# The optimized weights of this layer are the coefficients.
# Input shape: [?(batch_size), 2(0=real\1=imag), n, n] (n is the size of input matrices)
# Output shape: same as input
MPS = MatrixPowerSeriesLayer
# This layer represets a power series
# A_0*I + A_1*X + A_2*X^2 + ... + A_n*X^n
# Where X is a complex input matrix, and the coefficients A_n are complex matrices.
# The optimized weights of this layer are the coefficients
MMPS = MatrixMPowerSeriesLayer
# This layer represets a power series
# A_0*I*B_0 + A_1*X*B_1 + A_2*X^2*B_2 + ... + A_n*X^n*B_n
# Where X is a complex input matrix, and the coefficients A_n, B_n are complex matrices.
# The optimized weights of this layer are the coefficients
MM2PS = MatrixM2PowerSeriesLayer
# This is the same as MatrixPowerSeriesLayer, only for multiple channels of input and output.
# Input shape: [?(batch_size), k(input channels), 2(0=real\1=imag), n, n]
# Output shape: [?(batch_size), j(output channel), k(input channels), 2(0=real\1=imag), n, n]
# Calculates the same computations for every input channel in parallel
MchMPS = MultichannelMatrixPowerSeriesLayer
# This is the same as MatrixMPowerSeriesLayer, only for multiple channels of input and output
MchMMPS = MultichannelMatrixMPowerSeriesLayer
# This is the same as MatrixM2PowerSeriesLayer, only for multiple channels of input and output
MchMM2PS = MultichannelMatrixM2PowerSeriesLayer
| 38.621547 | 118 | 0.609685 |
from keras.engine.topology import Layer
from keras import backend as K
import tensorflow as tf
import numpy as np
import random
import math
import ComplexTensor as ct
def factorial_decaying_random_init(shape):
    """Initializer for the complex scalar series coefficients.

    For each degree i, draws a complex number with uniformly random phase
    whose magnitude decays like 1/i! -- inspired by the Taylor expansion
    of the exponential function.

    Args:
        shape: tuple (degree, 2); index 0 of the last axis holds the real
            part, index 1 the imaginary part.

    Returns:
        numpy.ndarray of the given shape.
    """
    res = np.zeros(shape)
    for i in range(shape[0]):
        radius = random.random() / math.factorial(i)
        theta = random.random() * 2 * math.pi
        # BUG FIX: the original wrote res[0] / res[1], repeatedly
        # overwriting only the first two degree slots and leaving every
        # higher-degree coefficient at zero. Store the sample for degree i.
        res[i, 0] = radius * math.cos(theta)
        res[i, 1] = radius * math.sin(theta)
    return res
# This layer represets a power series
# a_0*I+a_1*X+a_2*X^2+...+a_n*X^n
# Where X is a complex input matrix, and the coefficients a_n are complex.
# The optimized weights of this layer are the coefficients.
# Input shape: [?(batch_size), 2(0=real\1=imag), n, n] (n is the size of input matrices)
# Output shape: same as input
class MatrixPowerSeriesLayer(Layer):
    """Power series a_0*I + a_1*X + ... + a_{d-1}*X^{d-1} with complex
    scalar coefficients a_i as the trainable weights.

    Input/output shape: [batch, 2(0=real/1=imag), n, n].
    """

    def __init__(self, degree, **kwrags):
        # A series needs at least the constant and the linear term.
        assert degree > 1
        self.degree = degree
        super().__init__(**kwrags)

    def build(self, input_shape):
        # One complex scalar per degree; last axis 2 = (real, imag).
        self.coefficients = self.add_weight(name='coefficients',
                                            shape=(self.degree,2),
                                            initializer=factorial_decaying_random_init,
                                            trainable=True)
        self.coefficients=ct.ComplexTensor(self.coefficients, split_axis=-1)
        # NOTE(review): self.unit is never read in call(), which builds its
        # own identity via ct.ComplexTensor.unit_like -- confirm and remove.
        self.unit = K.eye(input_shape[2])
        super().build(input_shape)

    def call(self, x):
        # Extract the real and imaginary parts of the input matrix
        x = ct.ComplexTensor(x)
        # tmp is used as the matrix raised to the n^th power
        tmp = ct.ComplexTensor.unit_like(x)
        # Initialize with the zeorth power of the series
        res = tmp*self.coefficients[0]
        for i in range(1, self.degree):
            # Calculate a raise by one of the current power of the matrix
            # Remainder: (a+bj)(c+dj)=(ac-bd)+(ad+bc)j, and the same
            # formulas hold for matrices
            # new_tmp_real = tf.einsum('ijk,ikl->ijl', tmp_real, x_real) - tf.einsum('ijk,ikl->ijl', tmp_imag, x_imag)
            # tmp_imag = tf.einsum('ijk,ikl->ijl', tmp_real, x_imag) + tf.einsum('ijk,ikl->ijl', tmp_imag, x_real)
            # tmp_real = new_tmp_real
            tmp = ct.compEinsum('ijk,ikl->ijl', tmp, x)
            # Update the result with the current element of the power series
            res += tmp*self.coefficients[i]
        # Unite real and complex parts
        res = tf.stack([res.real, res.imag], axis=1)
        return res

    def compute_output_shape(self, input_shape):
        return input_shape
MPS = MatrixPowerSeriesLayer
def factorial_decaying_random_initM(shape):
    """Initializer for matrix-valued series coefficients.

    Each degree d gets a random complex multiple of the identity whose
    magnitude decays like 1/d!, plus per-entry complex noise that decays
    like 1/(d!+1)^2.

    Args:
        shape: tuple (degree, 2, rows, cols); axis 1 is (real, imag).

    Returns:
        numpy.ndarray of the given shape.
    """
    out = np.zeros(shape)
    degree, rows, cols = shape[0], shape[2], shape[3]
    for d in range(degree):
        # Base: a random complex scalar (decaying magnitude) times identity.
        scale = random.random() / math.factorial(d)
        angle = random.random() * 2 * math.pi
        eye = np.identity(rows)
        out[d, 0, :, :] = scale * math.cos(angle) * eye
        out[d, 1, :, :] = scale * math.sin(angle) * eye
        # Perturb every entry with independently drawn complex noise.
        for r in range(rows):
            for c in range(cols):
                noise = random.random() / ((math.factorial(d) + 1) ** 2)
                phase = random.random() * 2 * math.pi
                out[d, 0, r, c] += noise * math.cos(phase)
                out[d, 1, r, c] += noise * math.sin(phase)
    return out
def multi_factorial_decaying_random_initM(shape):
    """Recursively apply factorial_decaying_random_initM over leading axes.

    Peels leading dimensions off `shape` until a 4-d (degree, 2, rows, cols)
    tail remains, which is handled by factorial_decaying_random_initM.
    """
    if len(shape) == 5:  # tail after the first axis is 4-d: base case
        return np.array([factorial_decaying_random_initM(shape[1:])
                         for _ in range(shape[0])])
    return np.array([multi_factorial_decaying_random_initM(shape[1:])
                     for _ in range(shape[0])])
# This layer represets a power series
# A_0*I + A_1*X + A_2*X^2 + ... + A_n*X^n
# Where X is a complex input matrix, and the coefficients A_n are complex matrices.
# The optimized weights of this layer are the coefficients
class MatrixMPowerSeriesLayer(Layer):
    """Power series A_0*I + A_1*X + ... + A_{d-1}*X^{d-1} with complex
    MATRIX coefficients A_i (left-multiplied) as the trainable weights.

    Input/output shape: [batch, 2(0=real/1=imag), n, n].
    """

    def __init__(self, degree, **kwrags):
        # A series needs at least the constant and the linear term.
        assert degree > 1
        self.degree = degree
        super().__init__(**kwrags)

    def build(self, input_shape):
        # One complex n-by-n coefficient matrix per degree.
        self.lcoefficients = self.add_weight(name='lrcoefficients',
                                             shape=(self.degree,*input_shape[1:]),
                                             initializer=factorial_decaying_random_initM,
                                             trainable=True)
        self.coefficients = ct.ComplexTensor(self.lcoefficients)
        # NOTE(review): self.unit appears unused in call() -- confirm and remove.
        self.unit = K.eye(input_shape[2])
        super().build(input_shape)

    def call(self, x):
        # Removed a dead local (`coefficients = ct.ComplexTensor(self.lcoefficients)`)
        # that was assigned and never read; self.coefficients from build() is used.
        x = ct.ComplexTensor(x)
        # tmp holds X^i, starting from the identity (X^0).
        tmp = ct.ComplexTensor.unit_like(x)
        res = ct.compEinsum('jk,ikl->ijl', self.coefficients[0], tmp)
        for i in range(1, self.degree):
            tmp = ct.compEinsum('ijk,ikl->ijl', tmp, x)
            # Multiply by left coefficient
            res += ct.compEinsum('jk,ikl->ijl', self.coefficients[i], tmp)
        # Re-pack real and imaginary parts along axis 1.
        res = tf.stack([res.real, res.imag], axis=1)
        return res

    def compute_output_shape(self, input_shape):
        return input_shape
MMPS = MatrixMPowerSeriesLayer
# This layer represets a power series
# A_0*I*B_0 + A_1*X*B_1 + A_2*X^2*B_2 + ... + A_n*X^n*B_n
# Where X is a complex input matrix, and the coefficients A_n, B_n are complex matrices.
# The optimized weights of this layer are the coefficients
class MatrixM2PowerSeriesLayer(Layer):
    """Power series A_0*I*B_0 + A_1*X*B_1 + ... + A_{d-1}*X^{d-1}*B_{d-1}
    with complex matrix coefficients A_i (left) and B_i (right) as the
    trainable weights.

    Input/output shape: [batch, 2(0=real/1=imag), n, n].
    """

    def __init__(self, degree, **kwrags):
        # A series needs at least the constant and the linear term.
        assert degree > 1
        self.degree = degree
        super().__init__(**kwrags)

    def build(self, input_shape):
        # Axis 1 selects the side: 0 = left (A_i), 1 = right (B_i).
        self.lrcoefficients = self.add_weight(name='lrcoefficients',
                                              shape=(self.degree,2,*input_shape[1:]),
                                              initializer=multi_factorial_decaying_random_initM,
                                              trainable=True)
        self.lcoefficients = ct.ComplexTensor(self.lrcoefficients[:,0])
        self.rcoefficients = ct.ComplexTensor(self.lrcoefficients[:,1])
        super().build(input_shape)

    def call(self, x):
        x = ct.ComplexTensor(x)
        # tmp holds X^i, starting from the identity (X^0).
        tmp = ct.ComplexTensor.unit_like(x)
        # The unit matrix is "transperent" in matrix multiplication, therefore - there
        # is no need for both left and right coefficients
        res = ct.compEinsum('ijk,kl->ijl', tmp, self.rcoefficients[0])
        for i in range(1, self.degree):
            tmp = ct.compEinsum('ijk,ikl->ijl', tmp, x)
            # Multiply by right coefficient
            # Temporary results of right multiplication only to keep line degree managable
            rmul = ct.compEinsum('ijk,kl->ijl', tmp, self.rcoefficients[i])
            # Multiply by left coefficient and add to result
            res += ct.compEinsum('jk,ikl->ijl', self.lcoefficients[i], rmul)
        # Re-pack real and imaginary parts along axis 1.
        res = tf.stack([res.real, res.imag], axis=1)
        return res

    def compute_output_shape(self, input_shape):
        return input_shape
MM2PS = MatrixM2PowerSeriesLayer
def multi_factorial_decaying_random_init(shape):
    """Recursively apply factorial_decaying_random_init over leading axes.

    Peels leading dimensions off `shape` until a 2-d (degree, 2) tail
    remains, which is handled by factorial_decaying_random_init.
    """
    if len(shape) == 3:  # tail after the first axis is 2-d: base case
        return np.array([factorial_decaying_random_init(shape[1:])
                         for _ in range(shape[0])])
    return np.array([multi_factorial_decaying_random_init(shape[1:])
                     for _ in range(shape[0])])
# This is the same as MatrixPowerSeriesLayer, only for multiple channels of input and output.
# Input shape: [?(batch_size), k(input channels), 2(0=real\1=imag), n, n]
# Output shape: [?(batch_size), j(output channel), k(input channels), 2(0=real\1=imag), n, n]
# Calculates the same computations for every input channel in parallel
class MultichannelMatrixPowerSeriesLayer(Layer):
    """Multichannel variant of MatrixPowerSeriesLayer.

    Evaluates the scalar-coefficient power series once per output channel,
    in parallel over all input channels.
    Input:  [batch, in_channels, 2(0=real/1=imag), n, n]
    Output: [batch, out_channels, in_channels, 2(0=real/1=imag), n, n]
    """

    def __init__(self, degree, out_channels, **kwrags):
        # A series needs at least the constant and the linear term.
        assert degree > 1
        self.degree = degree
        self.out_channels = out_channels
        super().__init__(**kwrags)

    def build(self, input_shape):
        # One complex scalar per (degree, output channel).
        self.coefficients = self.add_weight(name='coefficients',
                                            shape=(self.degree,self.out_channels,2),
                                            initializer=multi_factorial_decaying_random_init,
                                            trainable=True)
        self.coefficients = ct.ComplexTensor(self.coefficients, split_axis=-1)
        # self.coefficients is of shape [o,j]
        super().build(input_shape)

    def call(self, x):
        # convention:
        # i is batch size
        # j is output channels
        # k is input channels
        # l is 0=real\1=complex
        # m,n are elements of the input matrix
        # o - degree of polynomial
        x = ct.ComplexTensor(x)
        # x is a tensor of dimension [i,k,m,n]
        # tmp is used as the matrix raised to the n^th power
        tmp = ct.ComplexTensor.unit_like(x)
        # "tf.ones" is needed to raise dimension, this cannot be broadcasted later on
        res = ct.compEinsum('j,ikmn->ijkmn', self.coefficients[0], tmp)
        for o in range(1, self.degree):
            tmp = ct.compEinsum('ikmt,iktn->ikmn', tmp, x)
            # Update the result with the current element of the power series
            res += ct.compEinsum('j,ikmn->ijkmn', self.coefficients[o], tmp)
        # Unite real and complex parts
        res = tf.stack([res.real, res.imag], axis=3)
        return res

    def compute_output_shape(self, input_shape):
        return (input_shape[0], self.out_channels, *input_shape[1:])
MchMPS = MultichannelMatrixPowerSeriesLayer
# This is the same as MatrixMPowerSeriesLayer, only for multiple channels of input and output
class MultichannelMatrixMPowerSeriesLayer(Layer):
    """Multichannel variant of MatrixMPowerSeriesLayer.

    Evaluates A_0*I + A_1*X + ... + A_{d-1}*X^{d-1} with matrix
    coefficients, once per output channel, in parallel over input channels.
    Input:  [batch, in_channels, 2(0=real/1=imag), n, n]
    Output: [batch, out_channels, in_channels, 2(0=real/1=imag), n, n]
    """

    def __init__(self, degree, out_channels, **kwrags):
        # A series needs at least the constant and the linear term.
        assert degree > 1
        self.degree = degree
        self.out_channels = out_channels
        super().__init__(**kwrags)

    def build(self, input_shape):
        # One complex n-by-n coefficient matrix per (degree, output channel).
        self.lcoefficients = self.add_weight(name='coefficients',
                                             shape=(self.degree,self.out_channels,2,*input_shape[-2:]),
                                             initializer=multi_factorial_decaying_random_initM,
                                             trainable=True)
        self.lcoefficients = ct.ComplexTensor(self.lcoefficients)
        # NOTE(review): self.unit appears unused in call() -- confirm and remove.
        self.unit = K.eye(input_shape[-1])
        super().build(input_shape)

    def call(self, x):
        # index convention: i batch, j output channel, k input channel,
        # m,n,t matrix element indices, o polynomial degree.
        x = ct.ComplexTensor(x)
        # tmp holds X^o, starting from the identity (X^0).
        # BUG FIX: was a bare `unit_like(x)` (NameError); every sibling layer
        # uses ct.ComplexTensor.unit_like.
        tmp = ct.ComplexTensor.unit_like(x)
        # BUG FIX: the weight tensor is indexed by degree; the original fed the
        # whole [o,...] tensor to einsums that expect a single degree's matrix.
        res = ct.compEinsum('jmt,iktn->ijkmn', self.lcoefficients[0], tmp)
        for o in range(1, self.degree):
            tmp = ct.compEinsum('ikmt,iktn->ikmn', tmp, x)
            # NOTE(review): later terms multiply the coefficient on the right
            # (X^o * A_o) while degree 0 multiplies on the left; with the
            # identity at degree 0 the side is immaterial there.
            res += ct.compEinsum('ikmt,jtn->ijkmn', tmp, self.lcoefficients[o])
        # Re-pack real and imaginary parts along axis 3.
        res = tf.stack([res.real, res.imag], axis=3)
        return res

    def compute_output_shape(self, input_shape):
        return (input_shape[0], self.out_channels, *input_shape[1:])
MchMMPS = MultichannelMatrixMPowerSeriesLayer
# This is the same as MatrixM2PowerSeriesLayer, only for multiple channels of input and output
class MultichannelMatrixM2PowerSeriesLayer(Layer):
    """Multichannel variant of MatrixM2PowerSeriesLayer.

    Evaluates A_0*I*B_0 + A_1*X*B_1 + ... with left (A_o) and right (B_o)
    matrix coefficients, once per output channel, in parallel over input
    channels.
    Input:  [batch, in_channels, 2(0=real/1=imag), n, n]
    Output: [batch, out_channels, in_channels, 2(0=real/1=imag), n, n]
    """

    def __init__(self, degree, out_channels, **kwrags):
        # A series needs at least the constant and the linear term.
        assert degree > 1
        self.degree = degree
        self.out_channels = out_channels
        super().__init__(**kwrags)

    def build(self, input_shape):
        # Axis 2 selects the side: 0 = left (A_o), 1 = right (B_o).
        self.lrcoefficients = self.add_weight(name='coefficients',
                                              shape=(self.degree,self.out_channels,2,2,*input_shape[-2:]),
                                              initializer=multi_factorial_decaying_random_initM,
                                              trainable=True)
        self.lcoefficients = ct.ComplexTensor(self.lrcoefficients[:,:,0])
        self.rcoefficients = ct.ComplexTensor(self.lrcoefficients[:,:,1])
        # NOTE(review): self.unit appears unused in call() -- confirm and remove.
        self.unit = K.eye(input_shape[-1])
        super().build(input_shape)

    def call(self, x):
        # index convention: i batch, j output channel, k input channel,
        # m,n,t matrix element indices, o polynomial degree.
        x = ct.ComplexTensor(x)
        # tmp holds X^o, starting from the identity (X^0).
        tmp = ct.ComplexTensor.unit_like(x)
        # Degree 0: X^0 is the identity, so only the right coefficient is
        # needed. BUG FIX: the original einsum spec 'ikmt,jtn' lacked the
        # '->ijkmn' output subscripts and used the un-indexed weight tensor.
        res = ct.compEinsum('ikmt,jtn->ijkmn', tmp, self.rcoefficients[0])
        for o in range(1, self.degree):
            tmp = ct.compEinsum('ikmt,iktn->ikmn', tmp, x)
            # Right coefficient first (mirrors MatrixM2PowerSeriesLayer);
            # intermediate kept to keep line length manageable.
            rmul = ct.compEinsum('ikmt,jtn->ijkmn', tmp, self.rcoefficients[o])
            # BUG FIX: the left factor must come from lcoefficients; the
            # original reused rcoefficients[o], leaving lcoefficients dead.
            res += ct.compEinsum('jmt,iktn->ijkmn', self.lcoefficients[o], rmul)
        # BUG FIX: `res_real`/`res_imag` were undefined names; stack the
        # parts of the accumulated complex result instead.
        res = tf.stack([res.real, res.imag], axis=3)
        return res

    def compute_output_shape(self, input_shape):
        return (input_shape[0], self.out_channels, *input_shape[1:])
MchMM2PS = MultichannelMatrixM2PowerSeriesLayer
| 11,252 | 132 | 866 |
b0be46f19793dfd79ded724164d81438c56fa5b5 | 6,790 | py | Python | model_sandbox/model_nn_controller_service/train.py | surfertas/amr_core | 100ebaf149558611e1b406392ffd49e71dad9b69 | [
"MIT"
] | 4 | 2018-02-28T15:36:45.000Z | 2020-12-07T19:17:03.000Z | model_sandbox/model_nn_controller_service/train.py | surfertas/amr_core | 100ebaf149558611e1b406392ffd49e71dad9b69 | [
"MIT"
] | 2 | 2018-02-26T06:24:12.000Z | 2018-04-01T06:53:10.000Z | model_sandbox/model_nn_controller_service/train.py | surfertas/amr_core | 100ebaf149558611e1b406392ffd49e71dad9b69 | [
"MIT"
] | 1 | 2021-09-07T11:04:39.000Z | 2021-09-07T11:04:39.000Z | # @author Tasuku Miura
# @brief Training for controller to output steering and throttle commands given
# an image taken from a monocular camera. (Assumes CUDA enabled)
# python train.py --root-dir /home/ubuntu/ws/amr_core/model_sandbox/model_nn_controller_service/data
# put images and pickle file in ./data
# TODO: save model and reload model, test with ROS package
# http://pytorch.org/docs/master/notes/serialization.html#recommend-saving-models
import os
import pickle
import argparse
from skimage import io, transform
import numpy as np
import pandas as pd
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.autograd import Variable
from torch.utils.data import Dataset, DataLoader, ConcatDataset
from torch.utils.data.sampler import SubsetRandomSampler
from torchvision import transforms, utils
from data_loader import *
from transforms import *
import models
import utils
# used for logging to TensorBoard
from tensorboard_logger import configure, log_value
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='NN Controller')
parser.add_argument('--root-dir', type=str, default='.',
help='path to root')
parser.add_argument('--ckpt-file-name', type=str, default='checkpoint.pth.tar',
help='name of checkpoint file')
parser.add_argument('--train-data', type=str, default='predictions.pickle',
help='filename containing train data')
parser.add_argument('--train-valid-split', type=float, default='0.2',
help='x% valid split')
parser.add_argument('--batch-size', type=int, default=32, metavar='N',
help='input batch size for training (default: 32)')
parser.add_argument('--valid-batch-size', type=int, default=32, metavar='N',
help='input batch size for validation (default: 32)')
parser.add_argument('--epochs', type=int, default=10, metavar='N',
help='number of epochs to train (default: 10)')
parser.add_argument('--no-cuda', action='store_true', default=False,
help='disables CUDA training')
parser.add_argument('--seed', type=int, default=0, metavar='S',
help='random seed (default: 0)')
parser.add_argument('--log-interval', type=int, default=10, metavar='N',
help='how many batches to wait before logging training status')
args = parser.parse_args()
main(args)
| 35.736842 | 100 | 0.665832 | # @author Tasuku Miura
# @brief Training for controller to output steering and throttle commands given
# an image taken from a monocular camera. (Assumes CUDA enabled)
# python train.py --root-dir /home/ubuntu/ws/amr_core/model_sandbox/model_nn_controller_service/data
# put images and pickle file in ./data
# TODO: save model and reload model, test with ROS package
# http://pytorch.org/docs/master/notes/serialization.html#recommend-saving-models
import os
import pickle
import argparse
from skimage import io, transform
import numpy as np
import pandas as pd
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.autograd import Variable
from torch.utils.data import Dataset, DataLoader, ConcatDataset
from torch.utils.data.sampler import SubsetRandomSampler
from torchvision import transforms, utils
from data_loader import *
from transforms import *
import models
import utils
# used for logging to TensorBoard
from tensorboard_logger import configure, log_value
def train_one_epoch(epoch, model, loss_fn, optimizer, train_loader):
    """Run one CUDA training epoch and log the average loss to TensorBoard.

    Args:
        epoch: current epoch index (used for logging only).
        model: network being trained; assumed to already live on the GPU.
        loss_fn: criterion comparing predictions to steering/throttle targets.
        optimizer: optimizer updating ``model``'s parameters.
        train_loader: DataLoader yielding dicts with 'image' and 'commands'.
    """
    model.train()
    print("Epoch {} starting.".format(epoch))
    epoch_loss = 0
    for batch in train_loader:
        data, target = batch['image'].cuda(), batch['commands'].cuda()
        # Legacy (pre-0.4) PyTorch API: ``Variable`` wrapping and the
        # ``loss.data[0]`` scalar access below are ``loss.item()`` on
        # modern versions.
        data = Variable(data).type(torch.cuda.FloatTensor)
        target = Variable(target).type(torch.cuda.FloatTensor)
        predict = model(data)
        loss = loss_fn(predict, target)
        optimizer.zero_grad()
        loss.backward()
        optimizer.step()
        epoch_loss += loss.data[0]
    # NOTE(review): this sums per-batch *mean* losses but divides by the number
    # of samples, so the logged value is scaled down by ~batch_size — confirm
    # whether that is intended (validate() does the same, so curves compare).
    epoch_loss /= len(train_loader.dataset)
    print("Epoch {:.4f}: Train set: Average loss: {:.6f}\t".format(epoch, epoch_loss))
    log_value('train_loss', epoch_loss, epoch)
def validate(epoch, model, loss_fn, optimizer, valid_loader):
    """Evaluate ``model`` on the validation split; return the average loss.

    ``optimizer`` is accepted for signature symmetry with ``train_one_epoch``
    but is not used here.
    """
    model.eval()
    valid_loss = 0
    for batch in valid_loader:
        data, target = batch['image'].cuda(), batch['commands'].cuda()
        # ``volatile=True`` is the pre-0.4 way to disable autograd for
        # inference (``torch.no_grad()`` on modern PyTorch).
        data = Variable(data, volatile=True).type(torch.cuda.FloatTensor)
        target = Variable(target).type(torch.cuda.FloatTensor)
        predict = model(data)
        print("Predict: {} Target: {}".format(predict[0], target[0]))
        valid_loss += loss_fn(predict, target).data[0] # sum up batch loss
    valid_loss /= len(valid_loader.dataset)
    print('Valid set: Average loss: {:.6f}\n'.format(valid_loss))
    log_value('valid_loss', valid_loss, epoch)
    return valid_loss
def main(args):
    """Train the NN controller end to end.

    Builds the training set as the union of an augmented and a plain view of
    the same pickle file, splits it into train/validation samplers, trains
    for ``args.epochs`` epochs and checkpoints whenever validation improves.
    """
    args.cuda = not args.no_cuda and torch.cuda.is_available()
    # Seed both numpy and torch for reproducibility.
    np.random.seed(args.seed)
    torch.manual_seed(args.seed)
    # Output/log directories live under the data root.
    ckpt_path = os.path.join(args.root_dir, 'output')
    log_path = os.path.join(args.root_dir, 'log')
    utils.create_dir(ckpt_path)
    utils.create_dir(log_path)
    # Configure tensorboard log dir
    configure(os.path.join(args.root_dir, 'log'))
    train_pickle_file = args.train_data
    # Get transforms: training data is augmented + eval-preprocessed copies.
    transforms = imagenet_transforms()
    train_transforms = transforms['train_transforms']
    pre_process = transforms['eval_transforms']
    # Set up data
    train_data_aug = AMRControllerDataset(
        train_pickle_file,
        args.root_dir,
        train_transforms
    )
    train_data_orig = AMRControllerDataset(
        train_pickle_file,
        args.root_dir,
        pre_process
    )
    train_data = ConcatDataset([train_data_orig, train_data_aug])
    print("Train data size: {}".format(len(train_data)))
    # Create train and validation samplers (split by index, no pre-shuffle;
    # SubsetRandomSampler shuffles within each subset).
    indices = list(range(len(train_data)))
    n_train = int((1 - args.train_valid_split) * len(train_data))
    train_sampler = SubsetRandomSampler(indices[:n_train])
    valid_sampler = SubsetRandomSampler(indices[n_train:])
    # Create data loaders
    train_loader = DataLoader(
        train_data,
        batch_size=args.batch_size,
        sampler=train_sampler,
        num_workers=4
    )
    valid_loader = DataLoader(
        train_data,
        # FIX: the validation loader previously ignored --valid-batch-size
        # and reused args.batch_size (defaults are identical, so behavior
        # only changes when the flag is actually set).
        batch_size=args.valid_batch_size,
        sampler=valid_sampler,
        num_workers=4
    )
    # Initiate model.
    model = models.ResNet18FE().cuda()
    resume = False # set to false for now.
    if resume:
        print("Resuming from checkpoint")
        ckpt = torch.load(os.path.join(ckpt_path, args.ckpt_file_name))
        model.load_state_dict(ckpt['state_dict'])
    # Feature-extraction mode: only the final fully-connected layer is
    # trainable, so only those parameters are handed to the optimizer.
    parameters = model.resnet18fe.fc.parameters()
    optimizer = torch.optim.Adam(parameters)
    loss_fn = nn.MSELoss()
    print("Model setup...")
    # Train and validate, checkpointing on validation-loss improvement.
    best_valid_loss = float('inf')
    for epoch in range(args.epochs):
        train_one_epoch(epoch, model, loss_fn, optimizer, train_loader)
        ave_valid_loss = validate(epoch, model, loss_fn, optimizer, valid_loader)
        if ave_valid_loss < best_valid_loss:
            best_valid_loss = ave_valid_loss
            utils.save_checkpoint({
                'epoch': epoch,
                'state_dict': model.state_dict(),
                'optimizer': optimizer.state_dict()
            }, os.path.join(ckpt_path, 'checkpoint.pth.tar'))
if __name__ == "__main__":
    # CLI entry point; all paths are resolved relative to --root-dir.
    parser = argparse.ArgumentParser(description='NN Controller')
    parser.add_argument('--root-dir', type=str, default='.',
                        help='path to root')
    parser.add_argument('--ckpt-file-name', type=str, default='checkpoint.pth.tar',
                        help='name of checkpoint file')
    parser.add_argument('--train-data', type=str, default='predictions.pickle',
                        help='filename containing train data')
    # NOTE: argparse applies ``type`` to string defaults, so '0.2' becomes 0.2.
    parser.add_argument('--train-valid-split', type=float, default='0.2',
                        help='x% valid split')
    parser.add_argument('--batch-size', type=int, default=32, metavar='N',
                        help='input batch size for training (default: 32)')
    parser.add_argument('--valid-batch-size', type=int, default=32, metavar='N',
                        help='input batch size for validation (default: 32)')
    parser.add_argument('--epochs', type=int, default=10, metavar='N',
                        help='number of epochs to train (default: 10)')
    parser.add_argument('--no-cuda', action='store_true', default=False,
                        help='disables CUDA training')
    parser.add_argument('--seed', type=int, default=0, metavar='S',
                        help='random seed (default: 0)')
    parser.add_argument('--log-interval', type=int, default=10, metavar='N',
                        help='how many batches to wait before logging training status')
    args = parser.parse_args()
    main(args)
| 4,196 | 0 | 69 |
a767d24f1a2a569232594f410ebb5cd20e83c0f1 | 202 | py | Python | python_synth/exceptions.py | jtschoonhoven/python-synth | b77422461b0965ea845834f56f9711f8a9e90b26 | [
"MIT"
] | null | null | null | python_synth/exceptions.py | jtschoonhoven/python-synth | b77422461b0965ea845834f56f9711f8a9e90b26 | [
"MIT"
] | null | null | null | python_synth/exceptions.py | jtschoonhoven/python-synth | b77422461b0965ea845834f56f9711f8a9e90b26 | [
"MIT"
] | null | null | null | class SynthError(Exception):
"""
Generic error raised by library.
"""
pass
class SynthValidationError(SynthError):
    """Raised when attribute validation fails."""
| 15.538462 | 43 | 0.633663 | class SynthError(Exception):
"""
Generic error raised by library.
"""
pass
class SynthValidationError(SynthError):
"""
Raised on attribute validation failure.
"""
pass
| 0 | 0 | 0 |
90bc1b1cdc5c031399541ab62ec7874f6ce72ff0 | 434 | py | Python | obsim/config/observing_bands.py | fjfaggingerauer/obsim | a5a9a48841fc13e822fbbfbbd0fb0fd0953cdd5a | [
"MIT"
] | null | null | null | obsim/config/observing_bands.py | fjfaggingerauer/obsim | a5a9a48841fc13e822fbbfbbd0fb0fd0953cdd5a | [
"MIT"
] | null | null | null | obsim/config/observing_bands.py | fjfaggingerauer/obsim | a5a9a48841fc13e822fbbfbbd0fb0fd0953cdd5a | [
"MIT"
] | null | null | null | __all__ = ['observing_bands']
import astropy.units as u
# Photometric observing bands, keyed by band letter, covering the classic
# UBVRI optical set plus the near-infrared YJHKL filters.
# NOTE(review): each entry pairs two wavelengths; they look like
# (band centre, bandwidth) for the standard filter definitions — confirm
# whether the second element is the full width (FWHM) or a half-width.
observing_bands = {
    'U' : (365*u.nm, 66*u.nm),
    'B' : (445*u.nm, 94*u.nm),
    'G' : (464*u.nm, 128*u.nm),
    'V' : (551*u.nm, 88*u.nm),
    'R' : (658*u.nm, 138*u.nm),
    'I' : (806*u.nm, 149*u.nm),
    'Y' : (1020*u.nm, 120*u.nm),
    'J' : (1220*u.nm, 213*u.nm),
    'H' : (1630*u.nm, 307*u.nm),
    'K' : (2190*u.nm, 390*u.nm),
    'L' : (3450*u.nm, 472*u.nm),
    }
| 24.111111 | 32 | 0.467742 | __all__ = ['observing_bands']
import astropy.units as u
observing_bands = {
'U' : (365*u.nm, 66*u.nm),
'B' : (445*u.nm, 94*u.nm),
'G' : (464*u.nm, 128*u.nm),
'V' : (551*u.nm, 88*u.nm),
'R' : (658*u.nm, 138*u.nm),
'I' : (806*u.nm, 149*u.nm),
'Y' : (1020*u.nm, 120*u.nm),
'J' : (1220*u.nm, 213*u.nm),
'H' : (1630*u.nm, 307*u.nm),
'K' : (2190*u.nm, 390*u.nm),
'L' : (3450*u.nm, 472*u.nm),
}
| 0 | 0 | 0 |
b52ab56e96428cb2e4a00578d5eae43fd5a5677f | 3,159 | py | Python | Adventure-Project/Main.py | MatthewGraber/Portfolio | 333237de1efd7de4465614e15c1285a23cab0be7 | [
"MIT"
] | null | null | null | Adventure-Project/Main.py | MatthewGraber/Portfolio | 333237de1efd7de4465614e15c1285a23cab0be7 | [
"MIT"
] | null | null | null | Adventure-Project/Main.py | MatthewGraber/Portfolio | 333237de1efd7de4465614e15c1285a23cab0be7 | [
"MIT"
] | null | null | null | import GlobalVariables
import Character, Enemies, Encounters
import AdventureMap
import Actions
import gui
import NPCs
import tkinter
import Conditions
# Will change to initialize to 'None' after more testing and whatnot is done
# GlobalVariables.PC = Character.Rogue()
# GlobalVariables.Enemy = Enemies.Chimera()
# Resets the game
# Each .py file should have a reset function that gets called here
if (__name__ == "__main__"):
main() | 33.967742 | 145 | 0.716049 | import GlobalVariables
import Character, Enemies, Encounters
import AdventureMap
import Actions
import gui
import NPCs
import tkinter
import Conditions
# Will change to initialize to 'None' after more testing and whatnot is done
# GlobalVariables.PC = Character.Rogue()
# GlobalVariables.Enemy = Enemies.Chimera()
# Resets the game
# Each .py file should have a reset function that gets called here
def resetAll():
    """Reset all game state for a new playthrough.

    Placeholder: each module is expected to expose its own reset function
    that gets called from here (see the comment above); none exist yet.
    """
    pass
def restartGame():
    """Reset all game state and immediately start a fresh game."""
    resetAll()
    beginGame()
def beginGame():
    """Initialise global game state, queue the fixed encounters, start the GUI.

    Sets up the condition list and starting gems, appends the scripted
    encounter sequence to ``GlobalVariables.FixedEncounters`` (played in
    order), then creates and launches the GUI, which drives the game loop.
    """
    # NOTE(review): ``locations`` is currently unused — presumably intended
    # for the commented-out AdventureMap setup below.
    locations = ['Mountains', 'Swamp', 'Forest', 'City', 'Fortress']
    conditions = Conditions.List()
    GlobalVariables.conditionsList = conditions.allConditions
    GlobalVariables.currentGems = 80
    # mountains = AdventureMap.Map(3, 'Mountains')
    # swamp = AdventureMap.Map(3, 'Swamp')
    # forest = AdventureMap.Map(3, 'Forest')
    # city = AdventureMap.Map(3, 'City')
    # fortress = AdventureMap.Map(4, 'Fortress')
    #GlobalVariables.FixedEncounters.append(Encounters.GenericBattle(Enemies.BlackPudding(), "A Black Pudding appears and slides towards you!"))
    # GlobalVariables.FixedEncounters.append(Encounters.LoneChimera())
    # Conditions.SetCondition('MetBob', True)
    # Scripted encounter order for the campaign.
    GlobalVariables.FixedEncounters.append(Encounters.EncounterInTheFog())
    GlobalVariables.FixedEncounters.append(Encounters.RandomEncounter())
    GlobalVariables.FixedEncounters.append(Encounters.ChasedByChimeras())
    GlobalVariables.FixedEncounters.append(Encounters.WanderingMerchant())
    GlobalVariables.FixedEncounters.append(Encounters.RandomEncounter())
    GlobalVariables.FixedEncounters.append(Encounters.LookASign())
    GlobalVariables.FixedEncounters.append(Encounters.GenericBattle(Enemies.Mage(), 'A possessed mage approaches.')) # Just wanted to test this.
    GlobalVariables.FixedEncounters.append(Encounters.WanderingMerchant())
    GlobalVariables.FixedEncounters.append(Encounters.ExploreYsmayArea())
    GlobalVariables.FixedEncounters.append(Encounters.RandomEncounter())
    # GlobalVariables.FixedEncounters.append(Encounters.LoneChimera())
    GlobalVariables.FixedEncounters.append(Encounters.SearchingForFriends())
    GlobalVariables.FixedEncounters.append(Encounters.SearchingForAliveFriends())
    GlobalVariables.FixedEncounters.append(Encounters.RandomEncounter())
    GlobalVariables.FixedEncounters.append(Encounters.BoblinLovesYou())
    # GlobalVariables.FixedEncounters.append(Encounters.DualChimeras())
    # Launch the GUI; this blocks in the tkinter main loop.
    GlobalVariables.GUI = gui.gui()
    GlobalVariables.GUI.startGUI()
def main():
    """Program entry point: start a new game."""
    beginGame()
    # Eventually functions will be made for these button presses
    #    if (keyboard.w.GetPressed()):
    #        trigger2 = True
    #    else:
    #        trigger2 = Falses
    #    if (trigger2 and keyboard.w.GetPressed()):
    #        ui.ClearScreen()
    #        trigger2 = False
    # Then (once they have been through all scenarios in Map 1),
    # have them choose between the FIRST scenario for
    # Game Map 2 and Game Map 3.
if (__name__ == "__main__"):
main() | 2,600 | 0 | 97 |
b8e65fdc7e4cc3f501a1c406a54c562cc11c3153 | 48 | py | Python | racecar_gym/bullet/__init__.py | luigiberducci/racecar_gym | fd2ff7fb14e9319530786ef54a4a6864bf1f1c26 | [
"MIT"
] | 16 | 2020-11-27T02:55:24.000Z | 2022-03-24T01:27:29.000Z | racecar_gym/bullet/__init__.py | luigiberducci/racecar_gym | fd2ff7fb14e9319530786ef54a4a6864bf1f1c26 | [
"MIT"
] | 5 | 2020-08-24T15:59:39.000Z | 2020-10-20T19:45:46.000Z | racecar_gym/bullet/__init__.py | luigiberducci/racecar_gym | fd2ff7fb14e9319530786ef54a4a6864bf1f1c26 | [
"MIT"
] | 4 | 2020-10-08T16:14:19.000Z | 2021-12-26T18:19:53.000Z | from .providers import load_world, load_vehicle
| 24 | 47 | 0.854167 | from .providers import load_world, load_vehicle
| 0 | 0 | 0 |
868fb40951fb9bcc5e105ed27a289b96298fe4cb | 2,171 | py | Python | src/pysem/sense.py | lingpy/pysen | 2b4cb66ed2d56f05a73ef4d4940cebf435e6e5a7 | [
"MIT"
] | 1 | 2020-08-17T06:52:04.000Z | 2020-08-17T06:52:04.000Z | src/pysem/sense.py | lingpy/pysem | 2b4cb66ed2d56f05a73ef4d4940cebf435e6e5a7 | [
"MIT"
] | 4 | 2021-06-02T18:31:23.000Z | 2022-01-13T19:22:28.000Z | src/pysem/sense.py | lingpy/pysen | 2b4cb66ed2d56f05a73ef4d4940cebf435e6e5a7 | [
"MIT"
] | null | null | null | """
Sense manipulations following the framework of the STARLING package.
"""
# import networkx as nx
from pysem.data import SENSE
from collections import defaultdict
| 31.014286 | 79 | 0.436205 | """
Sense manipulations following the framework of the STARLING package.
"""
# import networkx as nx
from pysem.data import SENSE
from collections import defaultdict
class Sense(object):
    """Lookup helper over the STARLING ``SENSE`` mapping.

    Builds a bipartite graph linking concept keys to their gloss strings
    (gloss nodes are prefixed with ``"s:"``) plus a spelling-normalisation
    table so that variant spellings of a key resolve to the same entries.
    """
    def __init__(self):
        """
        Creates a sense graph upon initialization.
        """
        # G: bipartite adjacency — concept key <-> "s:"-prefixed gloss node.
        G = {key: set() for key in SENSE}
        for key, values in SENSE.items():
            for value in values:
                val = "s:" + value
                if not val in G:
                    G[val] = set()
                G[key].add(val)
                G[val].add(key)
        # L: normalised spelling -> candidate keys (most frequent first).
        L = defaultdict(list)
        for key in SENSE:
            L[key] += [key]
            # Keys may carry a verb marker; also index the bare form.
            # NOTE(review): the slices assume the marker is exactly " (V)" at
            # the end of the key — confirm against the SENSE data.
            if "(V)" in key:
                L[key[:-4]] += [key]
            if " " in key:
                if "(V)" in key:
                    L[key.replace(" ", "")[:-3]] += [key]
                elif key[-1].isdigit():
                    # Key ends in a disambiguating digit, e.g. "bank 2":
                    # also index the form without the digit.
                    L[key[:-1].strip()] += [key]
                    L[key[:-1].replace(" ", "")] += [key]
                else:
                    # Multi-word key: also index the space-free spelling.
                    L[key.replace(" ", "")] += [key]
        for k, vals in L.items():
            # Deduplicate, ranking keys by how often they were registered.
            L[k] = sorted(set(vals), key=lambda x: vals.count(x), reverse=True)
        self.G = G
        self.L = L
    def sense(self, word):
        """
        Return the senses of a word.

        Returns a list of ``(key, glosses)`` pairs, where ``glosses`` is a
        "; "-joined, sorted string of all gloss entries for that key.
        """
        out = []
        for key in self.L[word]:
            out += [(key, "; ".join(sorted(SENSE[key])))]
        return out
    def similar(self, word, threshold=2, maxitems=5):
        """
        Search for similar items in the dataset.

        Two keys count as similar when they share at least ``threshold``
        gloss nodes. Returns at most ``maxitems`` rows, each of the form
        ``[query key, neighbour key, shared glosses, number shared]``.
        """
        out = []
        for key in self.L[word]:
            # Second-degree neighbours (key -> gloss -> other key), together
            # with the gloss nodes they were reached through.
            neighbors = defaultdict(list)
            for node in self.G[key]:
                for next_node in self.G[node]:
                    if next_node != key:
                        neighbors[next_node] += [node]
            for k, v in neighbors.items():
                neighbors[k] = sorted(set(v))
            for k, v in sorted(
                neighbors.items(), key=lambda x: len(x[1]), reverse=True
            ):
                if len(v) >= threshold:
                    out += [[key, k, "; ".join(v), len(v)]]
        # NOTE(review): the maxitems cut happens only after collecting rows
        # for every candidate key, so earlier keys dominate the result.
        return out[:maxitems]
dc0e21f2d63875c49aabacfc838f0045189a5fef | 4,784 | py | Python | src/pages/SetNames.py | linjorejoy/json-testcase-generator | 872c5dd48e97746ee8f538c105611cb2395d7d22 | [
"MIT"
] | 1 | 2021-08-11T02:51:28.000Z | 2021-08-11T02:51:28.000Z | src/pages/SetNames.py | linjorejoy/json-testcase-generator | 872c5dd48e97746ee8f538c105611cb2395d7d22 | [
"MIT"
] | null | null | null | src/pages/SetNames.py | linjorejoy/json-testcase-generator | 872c5dd48e97746ee8f538c105611cb2395d7d22 | [
"MIT"
] | null | null | null | from tkinter import Frame, Label
from tkinter import StringVar
from tkinter import N, Y, SW, SE
from widgetclasses.MyLabelFrame import MyLabelFrame
from widgetclasses.MyOptionMenu import MyOptionMenu
from widgetclasses.MyButton import MyButton
from widgetclasses.MyLabel import MyLabel
from widgetclasses.DoubleScrolledFrame import DoubleScrolledFrame
from widgetclasses.MyEntry import MyEntry
from helpermodules.MyFonts import FONTS
import pages.ProcessVariables as ProcessVariables
import pages.PreviewVariables as PreviewVariables
import JSON_Test_Case_Generator
| 29.530864 | 101 | 0.57337 | from tkinter import Frame, Label
from tkinter import StringVar
from tkinter import N, Y, SW, SE
from widgetclasses.MyLabelFrame import MyLabelFrame
from widgetclasses.MyOptionMenu import MyOptionMenu
from widgetclasses.MyButton import MyButton
from widgetclasses.MyLabel import MyLabel
from widgetclasses.DoubleScrolledFrame import DoubleScrolledFrame
from widgetclasses.MyEntry import MyEntry
from helpermodules.MyFonts import FONTS
import pages.ProcessVariables as ProcessVariables
import pages.PreviewVariables as PreviewVariables
import JSON_Test_Case_Generator
class SetNames(Frame):
    """Wizard page on which the user composes test-case name patterns.

    A name is built as: entry + (variable + entry) repeated once per dropdown
    choice, where each dropdown offers "None", "Counter" and the variables
    detected earlier (``controller.VARIABLES_PRESENT``). The created entry
    widgets and StringVars are collected in
    ``controller.reference_arr_for_name_gen`` so the next page can assemble
    the final names.
    """
    def __init__(self, parent, controller:JSON_Test_Case_Generator.JsonTestCaseTracker):
        Frame.__init__(self, parent)
        self.parent = parent
        self.controller = controller
        # Dropdown choices; refreshed with detected variables in set_widgets().
        self.variables_for_dropdown = ["None", "Counter"]
        # Widgets created by set_widgets(), tracked so they can be destroyed
        # when the page is rebuilt.
        self.widgets_added = []
        # Static page chrome: header label frame ...
        self.header_label_frame = MyLabelFrame(
            self,
            controller,
            text="Info",
            height="50",
            expand=N
        )
        test_label = Label(self.header_label_frame, text="Set Names", font = FONTS["LARGE_FONT"])
        test_label.pack(padx=10, pady=10)
        # ... scrollable body for the dynamically generated rows ...
        self.body_label_frame = MyLabelFrame(
            self,
            controller,
            text="Body",
            height="500",
            expand=Y
        )
        self.body_scrollable = DoubleScrolledFrame(self.body_label_frame)
        # ... and a footer with back/next navigation buttons.
        self.footer_label_frame = MyLabelFrame(
            self,
            controller,
            text="Footer",
            height="50",
            expand=N
        )
        button_prev = MyButton(
            self.footer_label_frame,
            controller,
            text="Go Back",
            command=self.go_back,
            rely=1,
            relx=0,
            x=5,
            y=-5,
            anchor=SW
        )
        button_next = MyButton(
            self.footer_label_frame,
            controller,
            text="Preview Results",
            command=self.goto_next,
            rely=1.0,
            relx=1.0,
            x=-5,
            y=-5,
            anchor=SE
        )
    def set_ui(self):
        """Rebuild the dynamic widgets (called when the page is (re)shown)."""
        self.destroy_existing()
        self.set_widgets()
    def destroy_existing(self):
        """Destroy previously generated widgets and clear the shared refs."""
        for widget in self.widgets_added:
            widget.destroy()
        self.controller.reference_arr_for_name_gen = []
        self.widgets_added = []
    def set_widgets(self):
        """Create one (label, dropdown, label, entry) row per dropdown option.

        Every entry widget and dropdown StringVar is appended, in order, to
        ``controller.reference_arr_for_name_gen`` for later name assembly.
        """
        # print(f"The variables Present while generating Name : {self.controller.VARIABLES_PRESENT}")
        self.variables_for_dropdown = ["None", "Counter", *self.controller.VARIABLES_PRESENT]
        # Leading free-text entry (column 0 of the first row).
        entry_0 = MyEntry(
            self.body_scrollable,
            self.controller,
            grid=(0, 0)
        )
        self.widgets_added.append(entry_0)
        self.controller.reference_arr_for_name_gen.append(entry_0)
        for index in range(len(self.variables_for_dropdown)):
            # NOTE(review): dead guard — the list is set to at least two
            # elements just above, so it can never be empty here.
            if not self.variables_for_dropdown:
                self.variables_for_dropdown = [""]
            plus_label_0 = MyLabel(
                self.body_scrollable,
                self.controller,
                text="+",
                font=FONTS['FONT_PLUS_SIGN'],
                grid=(index, 1)
            )
            this_dropdown_var = StringVar()
            this_dropdown_var.set(None)
            this_dropdown = MyOptionMenu(
                self.body_scrollable,
                self.controller,
                this_dropdown_var,
                options=self.variables_for_dropdown,
                grid=(index, 2),
                padx=1,
                pady=3
            )
            self.controller.reference_arr_for_name_gen.append(this_dropdown_var)
            plus_label_1 = MyLabel(
                self.body_scrollable,
                self.controller,
                text="+",
                font=FONTS['FONT_PLUS_SIGN'],
                grid=(index, 3)
            )
            entry_n = MyEntry(
                self.body_scrollable,
                self.controller,
                grid=(index, 4),
                padx=1,
                pady=3
            )
            self.controller.reference_arr_for_name_gen.append(entry_n)
            self.widgets_added.append(plus_label_0)
            self.widgets_added.append(this_dropdown)
            self.widgets_added.append(plus_label_1)
            self.widgets_added.append(entry_n)
        self.body_scrollable.pack(side="top", fill="both", expand=True)
    def goto_next(self):
        """Advance to the results-preview page."""
        self.controller.show_frame(PreviewVariables.PreviewVariables)
        # self.controller.frames[PreviewVariables.PreviewVariables].set_ui()
    def go_back(self):
        """Return to the previous wizard page."""
        self.controller.go_back()
| 4,005 | 1 | 197 |
db5b3178214c83b369a1908d539a0187d4dfdc54 | 120 | py | Python | exercises/CursoemVideo/ex007.py | arthurguerra/cursoemvideo-python | 37f45ec25f422673fa9bbeee682e098f14d8ceab | [
"MIT"
] | null | null | null | exercises/CursoemVideo/ex007.py | arthurguerra/cursoemvideo-python | 37f45ec25f422673fa9bbeee682e098f14d8ceab | [
"MIT"
] | null | null | null | exercises/CursoemVideo/ex007.py | arthurguerra/cursoemvideo-python | 37f45ec25f422673fa9bbeee682e098f14d8ceab | [
"MIT"
] | null | null | null | n1 = float(input('Nota 1: '))
n2 = float(input('Nota 2: '))
m = (n1 + n2)/2
print('A mΓ©dia do aluno Γ© {:.2f}'.format(m)) | 30 | 44 | 0.566667 | n1 = float(input('Nota 1: '))
n2 = float(input('Nota 2: '))
m = (n1 + n2)/2
print('A mΓ©dia do aluno Γ© {:.2f}'.format(m)) | 0 | 0 | 0 |
6498d5fe8372397f7571e4984a1962dcb8798c9f | 6,956 | py | Python | test.py | yzyouzhang/AIR-ASVspoof | b26830d56d4baa6247a72955292ad9d2a336c6f6 | [
"MIT"
] | 40 | 2020-10-30T20:41:58.000Z | 2022-03-14T05:36:47.000Z | test.py | AirLabUR/AIR-ASVspoof | b26830d56d4baa6247a72955292ad9d2a336c6f6 | [
"MIT"
] | 13 | 2020-11-01T16:58:12.000Z | 2021-12-29T16:49:36.000Z | test.py | AirLabUR/AIR-ASVspoof | b26830d56d4baa6247a72955292ad9d2a336c6f6 | [
"MIT"
] | 18 | 2020-12-23T09:03:12.000Z | 2022-03-30T10:20:33.000Z | import argparse
import os
import torch
from torch.utils.data import DataLoader
import torch.nn as nn
import torch.nn.functional as F
from dataset import ASVspoof2019
from evaluate_tDCF_asvspoof19 import compute_eer_and_tdcf
from tqdm import tqdm
import eval_metrics as em
import numpy as np
if __name__ == "__main__":
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument('-m', '--model_dir', type=str, help="path to the trained model", default="./models/ocsoftmax")
parser.add_argument('-l', '--loss', type=str, default="ocsoftmax",
choices=["softmax", 'amsoftmax', 'ocsoftmax'], help="loss function")
parser.add_argument("--gpu", type=str, help="GPU index", default="0")
args = parser.parse_args()
os.environ["CUDA_VISIBLE_DEVICES"] = args.gpu
args.device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
test(args.model_dir, args.loss, args.device)
# eer_cm_lst, min_tDCF_lst = test_individual_attacks(os.path.join(args.model_dir, 'checkpoint_cm_score.txt'))
# print(eer_cm_lst)
# print(min_tDCF_lst)
| 46.684564 | 133 | 0.636141 | import argparse
import os
import torch
from torch.utils.data import DataLoader
import torch.nn as nn
import torch.nn.functional as F
from dataset import ASVspoof2019
from evaluate_tDCF_asvspoof19 import compute_eer_and_tdcf
from tqdm import tqdm
import eval_metrics as em
import numpy as np
def test_model(feat_model_path, loss_model_path, part, add_loss, device):
    """Score an ASVspoof2019 split with a trained countermeasure model.

    Writes per-utterance scores to ``checkpoint_cm_score.txt`` next to the
    model, then computes EER and min t-DCF from that file.

    Args:
        feat_model_path: path to the saved backbone (LFCC feature) model.
        loss_model_path: path to the saved loss model (used unless softmax).
        part: dataset partition to score, e.g. "eval".
        add_loss: one of "softmax", "amsoftmax", "ocsoftmax".
        device: torch device used for inference.

    Returns:
        (eer_cm, min_tDCF) for the produced countermeasure scores.
    """
    dirname = os.path.dirname
    # NOTE(review): ``basename`` is computed but never used.
    basename = os.path.splitext(os.path.basename(feat_model_path))[0]
    # Scores are written next to the model dir (one level up for checkpoints).
    if "checkpoint" in dirname(feat_model_path):
        dir_path = dirname(dirname(feat_model_path))
    else:
        dir_path = dirname(feat_model_path)
    model = torch.load(feat_model_path, map_location="cuda")
    # print(list(range(torch.cuda.device_count())))
    # model = nn.DataParallel(model, list(range(torch.cuda.device_count()))) # for multiple GPUs
    model = model.to(device)
    # Plain softmax needs no separate loss model.
    loss_model = torch.load(loss_model_path) if add_loss != "softmax" else None
    # if add_loss != "softmax":
    #     loss_model = nn.DataParallel(loss_model, list(range(torch.cuda.device_count())))
    # NOTE: dataset/protocol paths are hard-coded to the training machine.
    test_set = ASVspoof2019("LA", "/dataNVME/neil/ASVspoof2019LAFeatures/",
                            "/data/neil/DS_10283_3336/LA/ASVspoof2019_LA_cm_protocols/", part,
                            "LFCC", feat_len=750, padding="repeat")
    testDataLoader = DataLoader(test_set, batch_size=32, shuffle=False, num_workers=0,
                                collate_fn=test_set.collate_fn)
    model.eval()
    with open(os.path.join(dir_path, 'checkpoint_cm_score.txt'), 'w') as cm_score_file:
        for i, (lfcc, audio_fn, tags, labels) in enumerate(tqdm(testDataLoader)):
            lfcc = lfcc.unsqueeze(1).float().to(device)
            tags = tags.to(device)
            labels = labels.to(device)
            feats, lfcc_outputs = model(lfcc)
            # Default score: softmax probability of the bonafide class.
            # NOTE(review): this F.softmax call omits ``dim`` (deprecation
            # warning on modern PyTorch) — confirm the intended axis is 1.
            score = F.softmax(lfcc_outputs)[:, 0]
            # OC-/AM-softmax replace the score with their own similarity.
            if add_loss == "ocsoftmax":
                ang_isoloss, score = loss_model(feats, labels)
            elif add_loss == "amsoftmax":
                outputs, moutputs = loss_model(feats, labels)
                score = F.softmax(outputs, dim=1)[:, 0]
            # One line per utterance: <utt_id> A<tag> <spoof|bonafide> <score>.
            for j in range(labels.size(0)):
                cm_score_file.write(
                    '%s A%02d %s %s\n' % (audio_fn[j], tags[j].data,
                                          "spoof" if labels[j].data.cpu().numpy() else "bonafide",
                                          score[j].item()))
    eer_cm, min_tDCF = compute_eer_and_tdcf(os.path.join(dir_path, 'checkpoint_cm_score.txt'),
                                            "/data/neil/DS_10283_3336/")
    return eer_cm, min_tDCF
def test(model_dir, add_loss, device):
    """Evaluate the trained models stored in ``model_dir`` on the eval split."""
    feat_ckpt = os.path.join(model_dir, "anti-spoofing_lfcc_model.pt")
    loss_ckpt = os.path.join(model_dir, "anti-spoofing_loss_model.pt")
    test_model(feat_ckpt, loss_ckpt, "eval", add_loss, device)
def test_individual_attacks(cm_score_file):
    """Compute per-attack EER and min t-DCF for eval attacks A07-A19.

    For each attack, both score polarities are tried and the better one (the
    lower EER) is kept, so the metric is insensitive to score sign.

    Args:
        cm_score_file: path to a countermeasure score file in the format
            written by ``test_model`` (utt_id, attack tag, key, score).

    Returns:
        (eer_cm_lst, min_tDCF_lst): one entry per attack index 7..19.
    """
    asv_score_file = os.path.join('/data/neil/DS_10283_3336',
                                  'LA/ASVspoof2019_LA_asv_scores/ASVspoof2019.LA.asv.eval.gi.trl.scores.txt')
    # Fix tandem detection cost function (t-DCF) parameters
    Pspoof = 0.05
    cost_model = {
        'Pspoof': Pspoof, # Prior probability of a spoofing attack
        'Ptar': (1 - Pspoof) * 0.99, # Prior probability of target speaker
        'Pnon': (1 - Pspoof) * 0.01, # Prior probability of nontarget speaker
        'Cmiss_asv': 1, # Cost of ASV system falsely rejecting target speaker
        'Cfa_asv': 10, # Cost of ASV system falsely accepting nontarget speaker
        'Cmiss_cm': 1, # Cost of CM system falsely rejecting target speaker
        'Cfa_cm': 10, # Cost of CM system falsely accepting spoof
    }
    # Load organizers' ASV scores.
    # FIX: ``np.float`` was deprecated in NumPy 1.20 and removed in 1.24;
    # the builtin ``float`` is the documented drop-in replacement.
    asv_data = np.genfromtxt(asv_score_file, dtype=str)
    asv_sources = asv_data[:, 0]
    asv_keys = asv_data[:, 1]
    asv_scores = asv_data[:, 2].astype(float)
    # Load CM scores (column 0 is the utterance id, unused here).
    cm_data = np.genfromtxt(cm_score_file, dtype=str)
    cm_utt_id = cm_data[:, 0]
    cm_sources = cm_data[:, 1]
    cm_keys = cm_data[:, 2]
    cm_scores = cm_data[:, 3].astype(float)
    # Negated scores: the opposite polarity convention.
    other_cm_scores = -cm_scores
    eer_cm_lst, min_tDCF_lst = [], []
    for attack_idx in range(7, 20):
        # Extract target, nontarget, and spoof scores from the ASV scores
        tar_asv = asv_scores[asv_keys == 'target']
        non_asv = asv_scores[asv_keys == 'nontarget']
        spoof_asv = asv_scores[asv_sources == 'A%02d' % attack_idx]
        # Extract bona fide (real human) and spoof scores from the CM scores
        bona_cm = cm_scores[cm_keys == 'bonafide']
        spoof_cm = cm_scores[cm_sources == 'A%02d' % attack_idx]
        # EERs of the standalone systems and fix ASV operating point to EER threshold
        eer_asv, asv_threshold = em.compute_eer(tar_asv, non_asv)
        eer_cm = em.compute_eer(bona_cm, spoof_cm)[0]
        other_eer_cm = em.compute_eer(other_cm_scores[cm_keys == 'bonafide'], other_cm_scores[cm_sources == 'A%02d' % attack_idx])[0]
        [Pfa_asv, Pmiss_asv, Pmiss_spoof_asv] = em.obtain_asv_error_rates(tar_asv, non_asv, spoof_asv, asv_threshold)
        # Compute the t-DCF curve with whichever polarity scored better.
        if eer_cm < other_eer_cm:
            tDCF_curve, CM_thresholds = em.compute_tDCF(bona_cm, spoof_cm, Pfa_asv, Pmiss_asv, Pmiss_spoof_asv, cost_model,
                                                        True)
        else:
            tDCF_curve, CM_thresholds = em.compute_tDCF(other_cm_scores[cm_keys == 'bonafide'],
                                                        other_cm_scores[cm_sources == 'A%02d' % attack_idx],
                                                        Pfa_asv, Pmiss_asv, Pmiss_spoof_asv, cost_model, True)
        # Minimum t-DCF over all CM thresholds (deduplicated from both
        # branches of the original if/else).
        min_tDCF = tDCF_curve[np.argmin(tDCF_curve)]
        eer_cm_lst.append(min(eer_cm, other_eer_cm))
        min_tDCF_lst.append(min_tDCF)
    return eer_cm_lst, min_tDCF_lst
if __name__ == "__main__":
    # CLI: pick a trained model directory and the loss it was trained with.
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument('-m', '--model_dir', type=str, help="path to the trained model", default="./models/ocsoftmax")
    parser.add_argument('-l', '--loss', type=str, default="ocsoftmax",
                        choices=["softmax", 'amsoftmax', 'ocsoftmax'], help="loss function")
    parser.add_argument("--gpu", type=str, help="GPU index", default="0")
    args = parser.parse_args()
    # Restrict visible GPUs before any CUDA initialisation happens.
    os.environ["CUDA_VISIBLE_DEVICES"] = args.gpu
    args.device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
    test(args.model_dir, args.loss, args.device)
    # eer_cm_lst, min_tDCF_lst = test_individual_attacks(os.path.join(args.model_dir, 'checkpoint_cm_score.txt'))
    # print(eer_cm_lst)
    # print(min_tDCF_lst)
| 5,779 | 0 | 69 |
308b220de68d603d9b2db4d133efa6beac43dfb6 | 2,578 | py | Python | evaluation/utils.py | ETHmodlab/de_novo_design_RNN | 622628317c2ee6545eea9643767216904a7d7415 | [
"MIT"
] | 6 | 2022-01-02T12:49:17.000Z | 2022-02-27T17:10:48.000Z | evaluation/utils.py | molML/de_novo_design_RNN | e4e2dc1a791e0a7af92c8767cf6f12fb44f0f42c | [
"MIT"
] | null | null | null | evaluation/utils.py | molML/de_novo_design_RNN | e4e2dc1a791e0a7af92c8767cf6f12fb44f0f42c | [
"MIT"
] | 3 | 2021-03-16T19:05:05.000Z | 2021-11-11T07:24:42.000Z | # utilities to generate molecules with this repo.
# v1. Dec 2020, F. Grisoni
| 40.920635 | 169 | 0.687742 | # utilities to generate molecules with this repo.
# v1. Dec 2020, F. Grisoni
def make_config(model_type='BIMODAL', net_size=512, epochs=10, starting_point='random', fine_tuning='fine_tuning', n_sampling=1000, T_sampling=0.7, augmentation_level=1):
    """Write the fine-tuning configuration file for a pre-trained model.

    Args:
        model_type (str): 'BIMODAL' or 'ForwardRNN'.
        net_size (int): size of the network.
        epochs (int): number of fine-tuning epochs.
        starting_point (str): 'random' or 'fixed'.
        fine_tuning (str): name of the fine-tuning set.
        n_sampling (int): molecules to sample for each fine-tuning epoch.
        T_sampling (float): sampling temperature.
        augmentation_level (int): SMILES augmentation level of the
            pre-trained model (1 means no '_aug_' suffix).

    Returns:
        str: the experiment name; the config is written to
        '../experiments/<exp_name>.ini'.
    """
    import configparser

    # Name of the pre-trained reference model / the new experiment.
    # (Simplified from an if/else whose first branch re-assigned the same
    # string that was already computed.)
    reference_name = model_type + '_' + starting_point + '_' + str(net_size)
    if augmentation_level != 1:
        reference_name += '_aug_' + str(augmentation_level)
    exp_name = reference_name + '_FineTuning'

    # Template file to start from.
    # FIX: string comparisons previously used ``is`` (object identity, which
    # only works by accident of CPython string interning and emits a
    # SyntaxWarning on Python >= 3.8); use ``==`` instead.
    if model_type == 'BIMODAL':
        template_name = 'BIMODAL_random_512_FineTuning_template.ini'
    else:
        template_name = 'ForwardRNN_512_FineTuning_template.ini'

    # Location of the processed fine-tuning set.
    fine_tuning_preprocessed = fine_tuning + '_' + model_type + '_' + starting_point

    # Read the config template (a no-op ``config.sections()`` call was removed).
    config = configparser.ConfigParser()
    config.read('../experiments/' + template_name)

    # Change the fields based on the specified options. hidden_units follows
    # the template convention: net_size//4 for BIMODAL, net_size//2 otherwise.
    config['MODEL']['model'] = model_type
    if model_type == 'BIMODAL':
        config['MODEL']['hidden_units'] = str(net_size // 4)
    else:
        config['MODEL']['hidden_units'] = str(net_size // 2)
    config['DATA']['data'] = fine_tuning_preprocessed
    config['TRAINING']['epochs'] = str(epochs)
    config['EVALUATION']['samples'] = str(n_sampling)
    config['EVALUATION']['temp'] = str(T_sampling)

    # Picks one of the pre-trained models provided in the repo. If the SMILES
    # preprocessing changes, the pre-training has to be performed again.
    config['FINETUNING']['start_model'] = '../evaluation/' + reference_name + '/models/model_fold_1_epochs_9'

    # Write back the new options.
    with open('../experiments/' + exp_name + '.ini', 'w') as configfile:
        config.write(configfile)
    return exp_name
| 2,477 | 0 | 23 |
340c5b3222ddc720364f924508c5460c1a49982b | 2,590 | py | Python | event_chain/app/models/mobile.py | ArcBlock/event-chain | 50a37c76ab094386fc66c985f4174f8dabc98ad5 | [
"MIT"
] | null | null | null | event_chain/app/models/mobile.py | ArcBlock/event-chain | 50a37c76ab094386fc66c985f4174f8dabc98ad5 | [
"MIT"
] | null | null | null | event_chain/app/models/mobile.py | ArcBlock/event-chain | 50a37c76ab094386fc66c985f4174f8dabc98ad5 | [
"MIT"
] | null | null | null | import base64
import json
import logging
import base58
from forge_sdk import protos as forge_protos
from forge_sdk import utils as forge_utils
logger = logging.getLogger('model-mobile')
| 31.585366 | 78 | 0.584942 | import base64
import json
import logging
import base58
from forge_sdk import protos as forge_protos
from forge_sdk import utils as forge_utils
logger = logging.getLogger('model-mobile')
class WalletResponse:
    """Wrapper around a raw wallet response dict.

    Decodes the JWT-style ``userInfo`` payload on construction and exposes
    helpers for extracting the public key, signature, addresses and origin
    transaction from the first requested claim.
    """

    def __init__(self, response):
        self.response = response
        self.user_info = response.get('userInfo')
        self.decoded_info = self.decode_user_info()
        # Only the first requested claim is ever inspected.
        self.requested_claim = self.decoded_info.get('requestedClaims')[0]

    def get_user_pk(self):
        """Return the sender's public key as raw bytes."""
        pk = self.response.get('userPk')
        logger.debug(f"Got userpk from wallet {pk}")
        return forge_utils.multibase_b58decode(pk)

    def decode_user_info(self):
        """Decode the base64url JWT payload carried in ``userInfo``.

        Returns the payload as a dict, or ``None`` when ``userInfo`` is
        missing (an error is logged in that case).
        """
        if not self.user_info:
            logger.error(
                "Fail to parse user_info from this Response {}.".format(
                    self.response,
                ),
            )
            return None
        payload = self.user_info.split('.')[1]
        # Restore the base64 padding that JWTs strip.
        padded = (payload + '=' * (-len(payload) % 4)).encode()
        info = json.loads(base64.urlsafe_b64decode(padded).decode())
        logger.debug(f"User info is decoded successfully. {info}")
        return info

    def get_origin_tx(self):
        """Decode the base58-encoded origin transaction into a protobuf."""
        raw = str(self.requested_claim.get('origin'))
        logger.debug(f"Wallet Response:origin tx before decode: {raw}")
        # The first character is a multibase prefix; decode the remainder.
        tx = forge_protos.Transaction()
        tx.ParseFromString(base58.b58decode(raw[1:]))
        logger.debug(f"Wallet Response:origin tx after base58 decode: {tx}")
        return tx

    def get_address(self):
        """Return the wallet address, i.e. the last segment of the DID."""
        did = self.decoded_info.get('iss')
        logger.debug(f"Wallet Response:raw address: {did}")
        return str(did).split(':')[-1]

    def get_signature(self):
        """Return the claim signature as raw bytes."""
        sig = self.requested_claim.get('sig')
        logger.debug(f"Wallet Response:raw sig {sig}: ")
        # Strip the multibase prefix before base58-decoding.
        decoded = base58.b58decode(str(sig)[1:])
        logger.debug(f"Wallet Response:sig after base58 decode: {decoded}")
        return decoded

    def get_asset_address(self):
        """Return the asset address from the claim, or ``None`` when absent."""
        asset_address = self.requested_claim.get('did')
        if not asset_address:
            return None
        asset_address = str(asset_address)
        logger.debug(f"Wallet Response: asset_address: {asset_address}")
        return asset_address
| 2,189 | 0 | 211 |
3075f003af9b01017b421ee8ca69f17356c3e8a2 | 18,001 | py | Python | lampy/semantic.py | pyccel/lampy | 696f65218f251095054003e87f8c05324be02a29 | [
"MIT"
] | 1 | 2019-04-12T21:41:32.000Z | 2019-04-12T21:41:32.000Z | lampy/semantic.py | pyccel/lampy | 696f65218f251095054003e87f8c05324be02a29 | [
"MIT"
] | null | null | null | lampy/semantic.py | pyccel/lampy | 696f65218f251095054003e87f8c05324be02a29 | [
"MIT"
] | null | null | null | # coding: utf-8
import os
from os.path import join, dirname
from sympy import Symbol, Lambda, Function, Dummy
from sympy import Tuple, IndexedBase
from sympy.core.function import AppliedUndef
from sympy.core.function import UndefinedFunction
from sympy import Integer, Float
from sympy import sympify
from sympy import FunctionClass
from pyccel.codegen.utilities import random_string
from pyccel.ast.utilities import build_types_decorator
from pyccel.ast.datatypes import Int, Real, Complex, Bool
from pyccel.ast.core import Slice
from pyccel.ast.core import Variable, FunctionDef, Assign, AugAssign
from pyccel.ast.core import Return
from pyccel.ast.basic import Basic
from .datatypes import assign_type, BasicTypeVariable
from .datatypes import TypeVariable, TypeTuple, TypeList, TypeFunction
from .lexeme import _internal_map_functors
from .lexeme import _internal_functors
from .lexeme import _internal_zip_functions
from .lexeme import _internal_product_functions
from .lexeme import _internal_applications
from .lexeme import _elemental_math_functions
from .lexeme import _math_vector_functions
from .lexeme import _math_matrix_functions
from .lexeme import _math_functions
from .ast import Map, ProductMap, TensorMap, Zip, Product
from .ast import BasicReduce, AddReduce, MulReduce
from .ast import BasicMap
from .ast import PartialFunction
from .ast import LampyLambda
from .ast import FunctionSymbol
#=========================================================================
#=========================================================================
# TODO add some verifications before starting annotating L
| 30.355818 | 96 | 0.598745 | # coding: utf-8
import os
from os.path import join, dirname
from sympy import Symbol, Lambda, Function, Dummy
from sympy import Tuple, IndexedBase
from sympy.core.function import AppliedUndef
from sympy.core.function import UndefinedFunction
from sympy import Integer, Float
from sympy import sympify
from sympy import FunctionClass
from pyccel.codegen.utilities import random_string
from pyccel.ast.utilities import build_types_decorator
from pyccel.ast.datatypes import Int, Real, Complex, Bool
from pyccel.ast.core import Slice
from pyccel.ast.core import Variable, FunctionDef, Assign, AugAssign
from pyccel.ast.core import Return
from pyccel.ast.basic import Basic
from .datatypes import assign_type, BasicTypeVariable
from .datatypes import TypeVariable, TypeTuple, TypeList, TypeFunction
from .lexeme import _internal_map_functors
from .lexeme import _internal_functors
from .lexeme import _internal_zip_functions
from .lexeme import _internal_product_functions
from .lexeme import _internal_applications
from .lexeme import _elemental_math_functions
from .lexeme import _math_vector_functions
from .lexeme import _math_matrix_functions
from .lexeme import _math_functions
from .ast import Map, ProductMap, TensorMap, Zip, Product
from .ast import BasicReduce, AddReduce, MulReduce
from .ast import BasicMap
from .ast import PartialFunction
from .ast import LampyLambda
from .ast import FunctionSymbol
#=========================================================================
def sanitize(expr):
    """Normalize *expr* so every application node is an applied ``Function``.

    Lambdas are sanitized recursively; for functors (Map, Reduce, ...) the
    first argument is promoted from a Symbol to a Function, since it denotes
    the function being mapped/reduced.  Scalars and Symbols pass through
    unchanged.

    Raises:
        TypeError: for unsupported node types.
    """
    if isinstance(expr, Lambda):
        args = expr.variables
        expr = sanitize(expr.expr)
        return Lambda(args, expr)

    elif isinstance(expr, AppliedUndef):
        name = expr.__class__.__name__
        args = [sanitize(i) for i in expr.args]

        # first argument of Map & Reduce are functions
        if name in _internal_functors:
            first = args[0]
            if isinstance(first, Symbol):
                args[0] = Function(first.name)

        # BUGFIX: the functor branch previously fell through without a
        # return, so sanitized Map/Reduce applications became ``None``.
        # The rebuilt application must be returned in every case.
        return Function(name)(*args)

    elif isinstance(expr, (int, float, Integer, Float, Symbol)):
        return expr

    else:
        raise TypeError('Not implemented for {}'.format(type(expr)))
#=========================================================================
# TODO add some verifications before starting annotating L
class Parser(object):
    """Type-inference engine for a lambda expression.

    Walks the expression tree of a :class:`LampyLambda` and assigns type
    objects (``TypeVariable``/``TypeList``/``TypeFunction``) to every
    sub-expression, recording them in internal dictionaries.  Dispatch is
    dynamic: each node class ``X`` is handled by a ``_visit_X`` method.
    """

    def __init__(self, expr, **kwargs):
        """Store the expression and seed the type tables.

        Keyword arguments:
            typed_functions (dict): name -> FunctionDef for user functions
                whose argument/result types are already known.
            prefix (str): default scalar type, BLAS-style one of
                'i', 's', 'd', 'c', 'z' ('d' — double — by default).
        """
        assert(isinstance(expr, Lambda))

        self._expr = LampyLambda( expr )

        # ...
        self._d_types = {}
        self._d_domain_types = {} # for each codomain we store its associated domain type
        self._d_expr = {}
        self._tag = random_string( 8 )

        # TODO to be removed later?
        self._d_functions = {}

        # to store current typed expr
        # this must not be a private variable,
        # in order to modify it on the fly
        self.main = self.expr
        self.main_type = None
        # ...

        # ... add types for arguments and results
        # TODO use domain and codomain optional args for functions
        self._typed_functions = kwargs.pop('typed_functions', {})
        for f in self.typed_functions.values():
            type_domain = assign_type( f.arguments )
            type_codomain = assign_type( f.results )

            self._set_type( f, value = type_domain, domain = True )
            self._set_type( f, value = type_codomain, codomain = True )
            self._set_domain_type( type_domain, type_codomain )
            self._insert_function( f, type_domain, type_codomain )
        # ...

        # ... default Type
        prefix = kwargs.pop('prefix', 'd') # doubles as default

        dtype = None
        precision = None

        if prefix == 'i':
            dtype = Int
            precision = 4

        elif prefix == 's':
            dtype = Real
            precision = 4

        elif prefix == 'd':
            dtype = Real
            precision = 8

        elif prefix == 'c':
            dtype = Complex
            precision = 8

        elif prefix == 'z':
            dtype = Complex
            precision = 16

        else:
            raise ValueError('Wrong prefix. Available: i, s, d, c, z')

        var = Variable(dtype, 'dummy_' + self.tag, precision=precision)
        self._default_type = TypeVariable(var)
        # ...

        # ... get all functions
        functions = list(expr.atoms(FunctionSymbol))
        for f in functions:
            if f.name in _elemental_math_functions:
                type_domain = self.default_type
                type_codomain = self.default_type

                self._set_type(f, value=type_domain, domain=True)
                self._set_type(f, value=type_codomain, codomain=True)
                self._set_domain_type(type_domain, type_codomain)
                self._insert_function( str(f), type_domain, type_codomain )

            elif not str(f) in list(_internal_applications) + list(self.typed_functions.keys()):
                raise NotImplementedError('{} not available'.format(str(f)))
        # ...

    @property
    def expr(self):
        return self._expr

    @property
    def typed_functions(self):
        return self._typed_functions

    @property
    def default_type(self):
        return self._default_type

    @property
    def d_types(self):
        return self._d_types

    @property
    def d_domain_types(self):
        return self._d_domain_types

    @property
    def d_functions(self):
        return self._d_functions

    @property
    def d_expr(self):
        return self._d_expr

    @property
    def tag(self):
        return self._tag

    def inspect(self):
        """Print the type tables for debugging."""
        print(self.d_types)
        for k,v in self.d_types.items():
            print(' {k} = {v}'.format(k=k, v=v.view()))
        print('')
        print(self.d_domain_types)
        for k,v in self.d_domain_types.items():
            print(' {v} --> {k}'.format(k=k, v=v))

    def _get_label(self, target, domain=False, codomain=False):
        """Compute the dictionary key under which *target*'s type is stored.

        Domain types of known functions use a ``<name>_args`` key so that
        they do not collide with the codomain entry.
        """
        # TODO improve
        if codomain:
            assert(not domain)

            if (isinstance(target, FunctionClass)):
                name = str(target)
            else:
                name = str(target.name)

            return name

        if domain:
            assert(not codomain)

            if (isinstance(target, FunctionClass)):
                name = str(target)
            else:
                name = str(target.name)

            _avail_funcs = list(self.typed_functions.keys()) + _math_functions
            if name in _avail_funcs:
                return name + '_args'

        if isinstance(target, FunctionDef):
            return str(target.name) + '_args'

        elif isinstance(target, UndefinedFunction):
            return str(target)

        elif isinstance(target, BasicMap):
            return target.name

        elif isinstance(target, Lambda):
            if not hasattr(target, 'name'):
                raise ValueError('Expecting an attribut name')

            return target.name

        elif isinstance(target, Symbol):
            return target.name

        else:
            raise NotImplementedError('for {}'.format(type(target)))

    def _get_type(self, target, domain=False, codomain=False):
        """Look up a stored type for *target*, or None when unknown."""
        label = self._get_label(target, domain=domain, codomain=codomain)

        if label in self.d_types.keys():
            return self.d_types[label]

        return None

    def _set_type(self, target, value, domain=False, codomain=False):
        """Record *value* as the (domain or codomain) type of *target*."""
        label = self._get_label(target, domain=domain, codomain=codomain)
        self._d_types[label] = value
        self._set_expr(value, target)

    def _set_expr(self, t_var, expr):
        # remember which expression produced a given type variable
        self._d_expr[t_var.name] = expr

    def _set_domain_type(self, type_domain, type_codomain):
        # map a codomain type back to the domain type it came from
        self._d_domain_types[type_codomain] = type_domain

    def _insert_function(self, f, type_domain, type_codomain):
        """Register *f* as a function with the given domain/codomain types."""
        # ...
        if isinstance(f, FunctionDef):
            f_name = str(f.name)

        elif isinstance(f, str):
            f_name = f

        elif isinstance(f, PartialFunction):
            f_name = str(f.name)

        elif isinstance(f, Lambda):
            if not hasattr(f, 'name'):
                msg = 'Expecting {} to have a name'.format(f)
                raise AttributeError(msg)
            f_name = str(f.name)

        else:
            raise NotImplementedError('{} not available'.format(type(f)))
        # ...

        type_func = TypeFunction( type_domain, type_codomain )
        self._d_functions[f_name] = type_func

    def doit(self, verbose=False):
        """Run the inference loop until ``main`` becomes a type (max 2 passes)."""
        # ... compute type
        i_count = 0
        max_count = 2
        while(i_count < max_count and not isinstance(self.main, BasicTypeVariable)):
            if verbose:
                print('----> BEFORE ', self.main)

            self.main = self._visit(self.main)

            if verbose:
                print('<---- AFTER', self.main)

            i_count += 1
        # ...

        return self.main

    def _visit(self, stmt, value=None):
        """Dispatch *stmt* to its ``_visit_<ClassName>`` handler.

        Falls back to treating *stmt* as an application of a registered
        function, returning that function's codomain type.
        """
        cls = type(stmt)
        name = cls.__name__

        method = '_visit_{}'.format(name)
        if hasattr(self, method):
            return getattr(self, method)(stmt, value=value)

        elif name in self.d_functions.keys():
            # application case
            if not isinstance( stmt, AppliedUndef ):
                raise TypeError('Expecting an application')

            # ... in the case of a typed function, we check that the number of
            #     arguments is the same as the call
            if name in self.typed_functions.keys():
                f = self.typed_functions[name]
                f_args = f.arguments
                call_args = stmt.args
                assert(len(call_args) == len(f_args))
            # ...

            # get the type of the function
            type_func = self.d_functions[name]
            return type_func.codomain

        # Unknown object, we raise an error.
        raise TypeError('{node} not yet available'.format(node=type(stmt)))

    def _visit_Lambda(self, stmt, value=None):
        """Type a lambda from the (already typed) function it applies."""
        # TODO treat args

        # ... treat the expression of the lambda
        expr = self._visit(stmt.expr)
        # ...

        # ...
        if isinstance( stmt.expr, AppliedUndef ):
            func_name = stmt.expr.__class__.__name__

        elif isinstance( stmt.expr, PartialFunction ):
            func_name = stmt.expr.name

        else:
            msg = '{} not available yet'.format(type(stmt.expr))
            raise NotImplementedError(msg)
        # ...

        # ...
        type_func = self.d_functions[func_name]
        type_domain = type_func.domain
        type_codomain = type_func.codomain
        # ...

        # ...
        self._insert_function( stmt, type_domain, type_codomain )
        # ...

        return type_codomain

    def _visit_LampyLambda(self, stmt, value=None):
        self.main = self._visit(stmt.func.expr)
        if isinstance(self.main, BasicTypeVariable):
            self.main_type = self.main

        return self.main

    def _visit_TypeVariable(self, stmt, value=None):
        return stmt

    def _visit_TypeTuple(self, stmt, value=None):
        return stmt

    def _visit_TypeList(self, stmt, value=None):
        return stmt

    def _visit_Symbol(self, stmt, value=None):
        # a bare symbol is typed top-down: the caller supplies its type
        assert(not( value is None ))
        self._set_type(stmt, value)

    def _visit_Map(self, stmt, value=None):
        """Type a Map: lift the mapped function's types to TypeLists."""
        func = stmt.func
        target = stmt.target
        target = Zip(*target)

        type_codomain = self._get_type(func, codomain=True)
        type_domain = self._get_type(func, domain=True)

        if not type_codomain:
            expr = self._visit(func)
            type_func = self.d_functions[func.name]
            type_domain = type_func.domain
            type_codomain = type_func.codomain

        if not type_codomain:
            print('> Unable to compute type for {} '.format(stmt))
            raise NotImplementedError('')

        type_domain = TypeList(type_domain)
        type_codomain = TypeList(type_codomain)
        self._set_domain_type(type_domain, type_codomain)
        self._insert_function( stmt.name, type_domain, type_codomain )

        self._visit(target, value=type_domain)
        self._set_expr(type_codomain, stmt)

        return type_codomain

    def _visit_ProductMap(self, stmt, value=None):
        func = stmt.func
        target = stmt.target
        target = Product(*target)

        type_codomain = self._get_type(func, codomain=True)
        type_domain = self._get_type(func, domain=True)

        if not type_codomain:
            print('> Unable to compute type for {} '.format(stmt))
            raise NotImplementedError('')

        type_domain = TypeList(type_domain)
        type_codomain = TypeList(type_codomain)
        self._set_domain_type(type_domain, type_codomain)
        self._insert_function( stmt.name, type_domain, type_codomain )

        self._visit(target, value=type_domain)
        self._set_expr(type_codomain, stmt)

        return type_codomain

    def _visit_TensorMap(self, stmt, value=None):
        func = stmt.func
        target = stmt.target
        target = Product(*target)

        type_codomain = self._get_type(func, codomain=True)
        type_domain = self._get_type(func, domain=True)

        if not type_codomain:
            print('> Unable to compute type for {} '.format(stmt))
            raise NotImplementedError('')

        # TODO check that rank is the same for all domain
        # tensor maps nest one TypeList level per argument
        for i in range(0, len(target.arguments)):
            type_domain = TypeList(type_domain)
            type_codomain = TypeList(type_codomain)

        self._set_domain_type(type_domain, type_codomain)
        self._insert_function( stmt.name, type_domain, type_codomain )

        self._visit(target, value=type_domain)
        self._set_expr(type_codomain, stmt)

        return type_codomain

    def _visit_Zip(self, stmt, value=None):
        """Type a Zip by distributing the expected list type over its arguments."""
        arguments = stmt.arguments

        assert(not( value is None ))
        assert(isinstance(value, TypeList))

        # ...
        if isinstance(value.parent, TypeVariable):
            values = [value.parent]

        elif isinstance(value.parent, TypeTuple):
            values = value.types.types

        elif isinstance(value.parent, TypeList):
            values = [value.parent]

        else:
            msg = '{} not available yet'.format(type(value.parent))
            raise NotImplementedError(msg)
        # ...

        # ...
        for a,t in zip(arguments, values):
            type_domain = TypeList(t)
            self._visit(a, value=type_domain)
        # ...

        type_codomain = value
        self._set_domain_type(value, type_codomain)
        self._insert_function( stmt.name, value, type_codomain )

        # # update main expression
        # print(self.main)
        # print(stmt)
        # print(type_codomain)
        ## import sys; sys.exit(0)
        # self.main = self.main.xreplace({stmt: type_codomain})

        self._set_expr(type_codomain, stmt)

        return type_codomain

    def _visit_Product(self, stmt, value=None):
        arguments = stmt.arguments

        assert(not( value is None ))
        assert(isinstance(value, TypeList))

        # # TODO add this check only when using tmap
        # assert(len(value) == len(arguments))

        values = value.types.types
        for a,t in zip(arguments, values):
            type_domain = TypeList(t)
            self._visit(a, value=type_domain)

        type_codomain = value
        self._set_domain_type(value, type_codomain)
        self._insert_function( stmt.name, value, type_codomain )

        # update main expression
        self.main = self.main.xreplace({stmt: type_codomain})
        self._set_expr(type_codomain, stmt)

        return type_codomain

    def _visit_AddReduce(self, stmt, value=None):
        return self._visit_Reduce( stmt, value=value, op='+' )

    def _visit_MulReduce(self, stmt, value=None):
        return self._visit_Reduce( stmt, value=value, op='*' )

    def _visit_Reduce(self, stmt, value=None, op=None):
        """Type a reduction: a list codomain collapses back to its element type."""
        target = stmt.target

        type_codomain = self._visit(target)
        assert( isinstance( type_codomain, TypeList ) )
        type_codomain = type_codomain.types

        type_domain = self.d_domain_types[type_codomain]
        type_domain = TypeList(type_domain)
        type_codomain = type_codomain.duplicate()

        self._set_domain_type(type_domain, type_codomain)
        self._insert_function( stmt.name, type_domain, type_codomain )

        self._visit(target, value=type_domain)
        self._set_expr(type_codomain, stmt)

        return type_codomain

    def _visit_PartialFunction(self, stmt, value=None):
        """Type a partial application: drop the bound arguments from the domain."""
        func = stmt.func
        target = stmt.target

        # ...
        if not func.name in self.typed_functions.keys():
            raise ValueError('{} is not a typed function'.format(func.name))

        funcdef = self.typed_functions[func.name]
        func_args = funcdef.arguments

        stmt.set_definition(funcdef)
        # ...

        # ... get the codomain of the function
        if not func.name in self.d_functions.keys():
            raise ValueError('{} type not available'.format(func.name))

        t_func = self.d_functions[func.name]
        type_codomain = t_func.codomain
        # ...

        # ...
        target_arg_names = [i.name for i in list(target.keys())]
        newargs = [i for i in func_args if not i.name in target_arg_names]
        # ...

        # ... assign domain type from new arguments
        type_domain = assign_type( newargs )
        # ...

        # ...
        self._insert_function( stmt, type_domain, type_codomain )
        # ...

        return type_codomain
| 15,264 | 1,003 | 44 |
556ae4f320a23cf3338b934cc7b8fefbdd18b58d | 78 | py | Python | djangosqs/apps/website/tests/test_py.py | codehutlabs/django_sqs | 7ff113cec7017fef09a92f6797153fd9207df5fc | [
"MIT"
] | null | null | null | djangosqs/apps/website/tests/test_py.py | codehutlabs/django_sqs | 7ff113cec7017fef09a92f6797153fd9207df5fc | [
"MIT"
] | 9 | 2019-09-06T11:14:20.000Z | 2021-12-13T20:12:17.000Z | djangosqs/apps/website/tests/test_py.py | codehutlabs/django_sqs | 7ff113cec7017fef09a92f6797153fd9207df5fc | [
"MIT"
] | null | null | null | import pytest
| 11.142857 | 45 | 0.576923 | import pytest
def test_py():
    """Smoke test: basic arithmetic sanity check for the test harness."""
    result = 1 + 1
    assert result == 2, "1 + 1 should equal 2"
| 40 | 0 | 23 |
baa49fa64ab53b5b85149b7b0e2a33494d10098b | 2,604 | py | Python | process.py | ervbrian/GpxProcessor | 4f15ea0548b6d601b4eaaa3d38888b20e9c9ae28 | [
"MIT"
] | null | null | null | process.py | ervbrian/GpxProcessor | 4f15ea0548b6d601b4eaaa3d38888b20e9c9ae28 | [
"MIT"
] | null | null | null | process.py | ervbrian/GpxProcessor | 4f15ea0548b6d601b4eaaa3d38888b20e9c9ae28 | [
"MIT"
] | null | null | null | import argparse
import os
from multiprocessing.pool import ThreadPool
from utils.backend import HikeDBClient, update_db
from utils.combine_gpx import CombineGpx
from utils.gpx_import import GpxImport
from utils.plotting import plot_elevation, plot_coordinates, plot_heart_rate
from utils.report import render_html
GPX = "GPX"
if __name__ == "__main__":
main()
| 37.2 | 124 | 0.682028 | import argparse
import os
from multiprocessing.pool import ThreadPool
from utils.backend import HikeDBClient, update_db
from utils.combine_gpx import CombineGpx
from utils.gpx_import import GpxImport
from utils.plotting import plot_elevation, plot_coordinates, plot_heart_rate
from utils.report import render_html
GPX = "GPX"
def parse_args(argv=None):
    """Parse command-line arguments for the GPX processor.

    Args:
        argv: optional list of argument strings; defaults to ``sys.argv[1:]``.
            Accepting an explicit list makes the function testable without
            touching ``sys.argv`` (backward-compatible generalization).

    Returns:
        argparse.Namespace with ``path`` (str or None), ``render_only``
        (bool) and ``combine`` (list of str or None).
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("-p", "--path", action="store", help="list of files to process")
    parser.add_argument("-r", "--render_only", action="store_true", default=False, help="render html only")
    parser.add_argument("-c", "--combine", action="store", nargs='+', required=False, help="list of input files to combine")

    return parser.parse_args(argv)
def main():
    """Run the GPX pipeline: import files, plot, update DB, render HTML.

    Unless ``--render_only`` is given: optionally combines input files,
    filters out hikes already in the database, imports the remaining GPX
    files, generates plots and updates the database.  Always re-renders
    the HTML report at the end.
    """
    args = parse_args()
    client = HikeDBClient()

    # TODO Separate processing logic into separate methods
    if not args.render_only:
        file_list = [filename for filename in os.listdir(args.path) if filename.endswith(GPX)]
        print(f"Found {len(file_list)} GPX files in path...")

        if args.combine:
            print(f"Combining the following: {args.combine}")
            # NOTE(review): the original line was corrupted; reconstructed as
            # joining each input filename onto the base path — confirm.
            full_paths = [os.path.join(args.path, filename) for filename in args.combine]
            combined_filename = f"{args.combine[0]}_combined.GPX"
            CombineGpx(gpx_files=full_paths).write_to_file(args.path + combined_filename)
            file_list.append(combined_filename)
            # The combined file replaces its inputs in the processing list.
            for file in args.combine:
                file_list.remove(file)
                print(f"Removed {file} from processing in favor of {combined_filename}")

        # Skip hikes that are already stored in the database.
        file_list = client.filter_populated_hikes(file_list)
        print(f"Processing {len(file_list)} files after removing hikes already populated in HikeDB")

        hike_list = []
        for filename in file_list:
            print(f"Processing {filename}")
            processed_gpx = GpxImport(filename=os.path.join(args.path, filename))
            hike_list.append(processed_gpx.hike)

        print("Generating plot graphs")
        for hike in hike_list:
            plot_elevation(hike)
            plot_coordinates(hike)
            plot_heart_rate(hike)

        print("Populating database")
        update_db(client=client, hikes=hike_list)
        print(f"Total hikes stored in HikeDB database: {client.entry_count}")

    render_html(
        hikes=client.show_all_hikes(),
        coordinates={hike.name: client.show_all_points_for_hike(hike.name) for hike in client.show_all_hikes()})
    print("Generated HTML page: html/index.html")
# Run the GPX processing pipeline only when executed as a script.
if __name__ == "__main__":
    main()
| 2,187 | 0 | 46 |
eb5fe9d3e1d1c2e581c61e80ef61a5d0758ce9a1 | 375 | py | Python | python/processing/__init__.py | rwightman/pytorch-nips2017-adversarial | 1727494ea3bfcbc3b4754b35096e816e1269ff38 | [
"Apache-2.0"
] | 17 | 2018-02-05T15:09:01.000Z | 2022-03-15T06:27:07.000Z | python/processing/__init__.py | rwightman/pytorch-nips2017-adversarial | 1727494ea3bfcbc3b4754b35096e816e1269ff38 | [
"Apache-2.0"
] | 1 | 2019-03-03T05:30:38.000Z | 2019-03-08T04:44:39.000Z | python/processing/__init__.py | rwightman/pytorch-nips2017-adversarial | 1727494ea3bfcbc3b4754b35096e816e1269ff38 | [
"Apache-2.0"
] | 2 | 2019-07-26T07:17:09.000Z | 2019-10-16T03:44:02.000Z | from .augmentation_factory import *
from .blur import Blur, RandomBlur, RandomGaussianBlur
from .mirror import Mirror, RandomMirror
from .crop import RandomCrop, CentreCrop
from .normalize import Normalize, NormalizeDpn, NormalizeLe, NormalizeTorchvision
from .colour import RandomBrightnessContrast, RandomSaturation
from .shift import RandomShift
from .affine import Affine | 46.875 | 81 | 0.850667 | from .augmentation_factory import *
from .blur import Blur, RandomBlur, RandomGaussianBlur
from .mirror import Mirror, RandomMirror
from .crop import RandomCrop, CentreCrop
from .normalize import Normalize, NormalizeDpn, NormalizeLe, NormalizeTorchvision
from .colour import RandomBrightnessContrast, RandomSaturation
from .shift import RandomShift
from .affine import Affine | 0 | 0 | 0 |
ac31d083edca47c913001aa39d76107ee30d56cf | 865 | py | Python | tales/systems/system.py | MrTrustworthy/tales | 4a03bf502ad47a9cd720ebbffa3dcc72005a84e7 | [
"MIT"
] | null | null | null | tales/systems/system.py | MrTrustworthy/tales | 4a03bf502ad47a9cd720ebbffa3dcc72005a84e7 | [
"MIT"
] | null | null | null | tales/systems/system.py | MrTrustworthy/tales | 4a03bf502ad47a9cd720ebbffa3dcc72005a84e7 | [
"MIT"
] | null | null | null | from abc import abstractmethod
from enum import Enum
from typing import List, Type
from tales.components import Component
from tales.entities.entity import Entity
from tales.world import World
| 27.903226 | 90 | 0.695954 | from abc import abstractmethod
from enum import Enum
from typing import List, Type
from tales.components import Component
from tales.entities.entity import Entity
from tales.world import World
class SystemType(Enum):
    """Category of a system: gameplay logic or rendering."""

    GAMEPLAY = "Gameplay"
    RENDERING = "Rendering"
class System:
    """Base class for ECS systems.

    Subclasses declare which component types they operate on via
    ``COMPONENTS`` and their category via ``TYPE``; ``update_all`` then
    applies :meth:`update` to every matching entity in the world.
    """

    COMPONENTS: List[Type[Component]] = []
    TYPE: SystemType = SystemType.GAMEPLAY

    def __init__(self, world: World):
        """Bind the system to a world and copy the subclass configuration."""
        self.world: World = world
        # Both attributes below are configured via class attributes on the
        # concrete subclass.
        self.system_type = self.TYPE
        self.components: List[Type[Component]] = self.COMPONENTS

    def update_all(self, *args, **kwargs):
        """Run ``update`` on every entity that has this system's components."""
        matching = self.world.get_entities_with_components(self.components)
        for ent in matching:
            self.update(ent, *args, **kwargs)

    @abstractmethod
    def update(self, entity: Entity, *args, **kwargs):
        """Process a single entity; must be implemented by subclasses."""
        pass
| 388 | 235 | 46 |
2cc2d98d8fc65e515f91ef0809443ec0dc4e83f9 | 9,470 | py | Python | src/ugentaggregates/tests/test_aggregates.py | UGentPortaal/django-ugentaggregates | 7b7d1db1a4ae23f85be0dca2a020dc820a166cd7 | [
"BSD-3-Clause"
] | null | null | null | src/ugentaggregates/tests/test_aggregates.py | UGentPortaal/django-ugentaggregates | 7b7d1db1a4ae23f85be0dca2a020dc820a166cd7 | [
"BSD-3-Clause"
] | null | null | null | src/ugentaggregates/tests/test_aggregates.py | UGentPortaal/django-ugentaggregates | 7b7d1db1a4ae23f85be0dca2a020dc820a166cd7 | [
"BSD-3-Clause"
] | null | null | null | import unittest
class AggregateTestCase(unittest.TestCase):
    """Tests for the abstract ``Aggregate`` base class."""

    @classmethod
    def setUpClass(cls):
        """Bind the class under test onto the fixture."""
        from ugentaggregates.aggregates import Aggregate
        cls.Aggregate = Aggregate

    def setUp(self):
        """Create a fresh aggregate for each test."""
        self.aggr1 = self.Aggregate()

    def tearDown(self):
        """Release the aggregate created in ``setUp``."""
        self.aggr1 = None

    def test_creation(self):
        """An ``Aggregate`` instance can be constructed."""
        self.assertIsInstance(self.aggr1, self.Aggregate)

    def test_callable(self):
        """Calling the abstract base raises ``NotImplementedError``."""
        with self.assertRaises(NotImplementedError):
            self.aggr1("field1", [])
class FirstAggregateTestCase(unittest.TestCase):
    """Tests for class ``FirstAggregate``.
    """

    @classmethod
    def setUpClass(cls):
        """Set up the class fixture.
        """
        from ugentaggregates.aggregates import Aggregate
        from ugentaggregates.aggregates import FirstAggregate
        from ugentaggregates.aggregates import NO_DEFAULT

        cls.Aggregate = Aggregate
        cls.FirstAggregate = FirstAggregate
        cls.NO_DEFAULT = NO_DEFAULT

    def test_creation(self):
        """Test for creating a ``FirstAggregate`` object.
        """
        aggr1 = self.FirstAggregate()
        self.assertTrue(isinstance(aggr1, self.FirstAggregate))

    def test_inheritance(self):
        """Test for the inheritance of class ``FirstAggregate``.
        """
        self.assertTrue(issubclass(self.FirstAggregate, self.Aggregate))

    def test_callable(self):
        """Test for calling a ``FirstAggregate`` object.
        """
        aggr1 = self.FirstAggregate()
        self.assertEqual(aggr1("field1", [{"field1": 1},
                                          {"field1": 2}]),
                         1)
        # The first row lacks "field1", so the value comes from the second row.
        self.assertEqual(aggr1("field1", [{"field2": 1},
                                          {"field1": 2}]),
                         2)

    def test_missing(self):
        """Test for calling a ``FirstAggregate`` object for a missing
        attribute.
        """
        aggr1 = self.FirstAggregate()
        with self.assertRaises(AttributeError):
            aggr1("field1", [{"field2": 1},
                             {"field2": 2}])

    def test_empty(self):
        """Test for calling a ``FirstAggregate`` object with an empty list.
        """
        aggr1 = self.FirstAggregate()
        with self.assertRaises(AttributeError):
            aggr1("field1", [])

    def test_name(self):
        """Test for the attribute ``name`.
        """
        # A constructor-supplied name overrides the name passed at call time.
        aggr1 = self.FirstAggregate(name="field1")
        self.assertEqual(aggr1.name, "field1")
        self.assertEqual(aggr1("field2", [{"field1": 1,
                                           "field2": 2}]),
                         1)

    def test_valid(self):
        """Test for the attribute ``valid``.
        """
        # Only even values are accepted, so the first odd value is skipped.
        is_even = lambda i: i % 2 == 0
        aggr1 = self.FirstAggregate(valid=is_even)
        self.assertEqual(aggr1.valid, is_even)
        self.assertEqual(aggr1("field1", [{"field1": 1},
                                          {"field1": 2}]),
                         2)
        self.assertEqual(aggr1("field1", [{"field1": 2},
                                          {"field1": 4}]),
                         2)

    def test_format(self):
        """Test for the attribute ``format``.
        """
        double = lambda i: i * 2
        aggr1 = self.FirstAggregate(format=double)
        self.assertEqual(aggr1.format, double)
        self.assertEqual(aggr1("field1", [{"field1": 1},
                                          {"field1": 2}]),
                         2)

    def test_default(self):
        """Test for the attribute ``default``.
        """
        # Without default: NO_DEFAULT sentinel means "raise when nothing found".
        aggr1 = self.FirstAggregate()
        self.assertEqual(aggr1.default, self.NO_DEFAULT)

        # With default: returned for empty input or missing attribute.
        aggr2 = self.FirstAggregate(default=-1)
        self.assertEqual(aggr2.default, -1)
        self.assertEqual(aggr2("field1", []),
                         -1)
        self.assertEqual(aggr2("field1", [{"field2": 1},
                                          {"field2": 2}]),
                         -1)

    def test_callable_value(self):
        """Test for callable values.
        """
        # Values stored as callables are invoked by the aggregate.
        aggr1 = self.FirstAggregate()
        self.assertEqual(aggr1("field1", [{"field1": lambda: 1},
                                          {"field1": 2}]),
                         1)
        self.assertEqual(aggr1("field1", [{"field1": 1},
                                          {"field1": lambda: 2}]),
                         1)
class AllAggregateTestCase(unittest.TestCase):
    """Tests for class ``AllAggregate``.
    """

    @classmethod
    def setUpClass(cls):
        """Set up the class fixture.
        """
        from ugentaggregates.aggregates import Aggregate
        from ugentaggregates.aggregates import AllAggregate

        cls.Aggregate = Aggregate
        cls.AllAggregate = AllAggregate

    def test_creation(self):
        """Test for creating a ``AllAggregate`` object.
        """
        aggr1 = self.AllAggregate()
        self.assertTrue(isinstance(aggr1, self.AllAggregate))

    def test_inheritance(self):
        """Test for the inheritance of class ``AllAggregate``.
        """
        self.assertTrue(issubclass(self.AllAggregate, self.Aggregate))

    def test_callable(self):
        """Test for calling an ``AllAggregate`` object.
        """
        aggr1 = self.AllAggregate()
        self.assertEqual(aggr1("field1", [{"field1": 1},
                                          {"field1": 2}]),
                         [1, 2])
        # Rows without the attribute are simply skipped.
        self.assertEqual(aggr1("field1", [{"field2": 1},
                                          {"field1": 2}]),
                         [2])

    def test_list(self):
        """Test for calling an ``AllAggregate`` object with list
        attributes.
        """
        # List-valued attributes are flattened into the result.
        aggr1 = self.AllAggregate()
        self.assertEqual(aggr1("field1", [{"field1": [1, 2]},
                                          {"field1": [3, 4]}]),
                         [1, 2, 3, 4])
        self.assertEqual(aggr1("field1", [{"field1": 1},
                                          {"field1": [2, 3]}]),
                         [1, 2, 3])
        self.assertEqual(aggr1("field1", [{"field1": [1, 2]},
                                          {"field1": 3}]),
                         [1, 2, 3])

    def test_missing(self):
        """Test for calling an ``AllAggregate`` object for a missing
        attribute.
        """
        # Unlike FirstAggregate, a missing attribute yields an empty list
        # rather than raising.
        aggr1 = self.AllAggregate()
        self.assertEqual(aggr1("field1", [{"field2": 1},
                                          {"field2": 2}]),
                         [])

    def test_empty(self):
        """Test for calling an ``AllAggregate`` object with an empty list.
        """
        aggr1 = self.AllAggregate()
        self.assertEqual(aggr1("field1", []),
                         [])

    def test_name(self):
        """Test for the attribute ``name`.
        """
        # A constructor-supplied name overrides the name passed at call time.
        aggr1 = self.AllAggregate(name="field1")
        self.assertEqual(aggr1.name, "field1")
        self.assertEqual(aggr1("field2", [{"field1": 1, "field2": 2},
                                          {"field1": 3, "field2": 4}]),
                         [1, 3])

    def test_valid(self):
        """Test for the attribute ``valid``.
        """
        is_even = lambda i: i % 2 == 0
        aggr1 = self.AllAggregate(valid=is_even)
        self.assertEqual(aggr1.valid, is_even)
        self.assertEqual(aggr1("field1", [{"field1": 1},
                                          {"field1": 2}]),
                         [2])
        self.assertEqual(aggr1("field1", [{"field1": 2},
                                          {"field1": 4}]),
                         [2, 4])

    def test_format(self):
        """Test for the attribute ``format``.
        """
        double = lambda i: i * 2
        aggr1 = self.AllAggregate(format=double)
        self.assertEqual(aggr1.format, double)
        self.assertEqual(aggr1("field1", [{"field1": 1},
                                          {"field1": 2}]),
                         [2, 4])

    def test_unique(self):
        """Test for the attribute ``unique``.
        """
        # Unique is false: duplicates are kept.
        aggr1 = self.AllAggregate()
        self.assertEqual(aggr1.unique, False)
        self.assertEqual(aggr1("field1", [{"field1": 1},
                                          {"field1": 1}]),
                         [1, 1])

        # Unique is true: duplicates are collapsed.
        aggr2 = self.AllAggregate(unique=True)
        self.assertEqual(aggr2.unique, True)
        self.assertEqual(aggr2("field1", [{"field1": 1},
                                          {"field1": 1}]),
                         [1])

    def test_callable_value(self):
        """Test for callable values.
        """
        # Values stored as callables are invoked by the aggregate.
        aggr1 = self.AllAggregate()
        self.assertEqual(aggr1("field1", [{"field1": lambda: 1},
                                          {"field1": 2}]),
                         [1, 2])
        self.assertEqual(aggr1("field1", [{"field1": 1},
                                          {"field1": lambda: 2}]),
                         [1, 2])
| 34.064748 | 75 | 0.477614 | import unittest
class AggregateTestCase(unittest.TestCase):
"""Tests for class ``Aggregate``.
"""
@classmethod
def setUpClass(cls):
"""Set up the class fixture.
"""
from ugentaggregates.aggregates import Aggregate
cls.Aggregate = Aggregate
def setUp(self):
"""Set up the fixture.
"""
self.aggr1 = self.Aggregate()
def tearDown(self):
"""Tear down the fixture.
"""
self.aggr1 = None
def test_creation(self):
"""Test for creating an ``Aggregate`` object.
"""
self.assertTrue(isinstance(self.aggr1, self.Aggregate))
def test_callable(self):
"""Test for calling an ``Aggregate`` object.
"""
with self.assertRaises(NotImplementedError):
self.aggr1("field1", [])
class FirstAggregateTestCase(unittest.TestCase):
"""Tests for class ``FirstAggregate``.
"""
@classmethod
def setUpClass(cls):
"""Set up the class fixture.
"""
from ugentaggregates.aggregates import Aggregate
from ugentaggregates.aggregates import FirstAggregate
from ugentaggregates.aggregates import NO_DEFAULT
cls.Aggregate = Aggregate
cls.FirstAggregate = FirstAggregate
cls.NO_DEFAULT = NO_DEFAULT
def test_creation(self):
"""Test for creating a ``FirstAggregate`` object.
"""
aggr1 = self.FirstAggregate()
self.assertTrue(isinstance(aggr1, self.FirstAggregate))
def test_inheritance(self):
"""Test for the inheritance of class ``FirstAggregate``.
"""
self.assertTrue(issubclass(self.FirstAggregate, self.Aggregate))
def test_callable(self):
"""Test for calling a ``FirstAggregate`` object.
"""
aggr1 = self.FirstAggregate()
self.assertEqual(aggr1("field1", [{"field1": 1},
{"field1": 2}]),
1)
self.assertEqual(aggr1("field1", [{"field2": 1},
{"field1": 2}]),
2)
def test_missing(self):
"""Test for calling a ``FirstAggregate`` object for a missing
attribute.
"""
aggr1 = self.FirstAggregate()
with self.assertRaises(AttributeError):
aggr1("field1", [{"field2": 1},
{"field2": 2}])
def test_empty(self):
"""Test for calling a ``FirstAggregate`` object with an empty list.
"""
aggr1 = self.FirstAggregate()
with self.assertRaises(AttributeError):
aggr1("field1", [])
def test_name(self):
"""Test for the attribute ``name`.
"""
aggr1 = self.FirstAggregate(name="field1")
self.assertEqual(aggr1.name, "field1")
self.assertEqual(aggr1("field2", [{"field1": 1,
"field2": 2}]),
1)
def test_valid(self):
"""Test for the attribute ``valid``.
"""
is_even = lambda i: i % 2 == 0
aggr1 = self.FirstAggregate(valid=is_even)
self.assertEqual(aggr1.valid, is_even)
self.assertEqual(aggr1("field1", [{"field1": 1},
{"field1": 2}]),
2)
self.assertEqual(aggr1("field1", [{"field1": 2},
{"field1": 4}]),
2)
def test_format(self):
"""Test for the attribute ``format``.
"""
double = lambda i: i * 2
aggr1 = self.FirstAggregate(format=double)
self.assertEqual(aggr1.format, double)
self.assertEqual(aggr1("field1", [{"field1": 1},
{"field1": 2}]),
2)
def test_default(self):
"""Test for the attribute ``default``.
"""
# Without default.
aggr1 = self.FirstAggregate()
self.assertEqual(aggr1.default, self.NO_DEFAULT)
# With default.
aggr2 = self.FirstAggregate(default=-1)
self.assertEqual(aggr2.default, -1)
self.assertEqual(aggr2("field1", []),
-1)
self.assertEqual(aggr2("field1", [{"field2": 1},
{"field2": 2}]),
-1)
def test_callable_value(self):
"""Test for callable values.
"""
aggr1 = self.FirstAggregate()
self.assertEqual(aggr1("field1", [{"field1": lambda: 1},
{"field1": 2}]),
1)
self.assertEqual(aggr1("field1", [{"field1": 1},
{"field1": lambda: 2}]),
1)
class AllAggregateTestCase(unittest.TestCase):
"""Tests for class ``AllAggregate``.
"""
@classmethod
def setUpClass(cls):
"""Set up the class fixture.
"""
from ugentaggregates.aggregates import Aggregate
from ugentaggregates.aggregates import AllAggregate
cls.Aggregate = Aggregate
cls.AllAggregate = AllAggregate
def test_creation(self):
"""Test for creating a ``AllAggregate`` object.
"""
aggr1 = self.AllAggregate()
self.assertTrue(isinstance(aggr1, self.AllAggregate))
def test_inheritance(self):
"""Test for the inheritance of class ``AllAggregate``.
"""
self.assertTrue(issubclass(self.AllAggregate, self.Aggregate))
def test_callable(self):
"""Test for calling an ``AllAggregate`` object.
"""
aggr1 = self.AllAggregate()
self.assertEqual(aggr1("field1", [{"field1": 1},
{"field1": 2}]),
[1, 2])
self.assertEqual(aggr1("field1", [{"field2": 1},
{"field1": 2}]),
[2])
def test_list(self):
"""Test for calling an ``AllAggregate`` object with list
attributes.
"""
aggr1 = self.AllAggregate()
self.assertEqual(aggr1("field1", [{"field1": [1, 2]},
{"field1": [3, 4]}]),
[1, 2, 3, 4])
self.assertEqual(aggr1("field1", [{"field1": 1},
{"field1": [2, 3]}]),
[1, 2, 3])
self.assertEqual(aggr1("field1", [{"field1": [1, 2]},
{"field1": 3}]),
[1, 2, 3])
def test_missing(self):
"""Test for calling an ``AllAggregate`` object for a missing
attribute.
"""
aggr1 = self.AllAggregate()
self.assertEqual(aggr1("field1", [{"field2": 1},
{"field2": 2}]),
[])
def test_empty(self):
"""Test for calling an ``AllAggregate`` object with an empty list.
"""
aggr1 = self.AllAggregate()
self.assertEqual(aggr1("field1", []),
[])
def test_name(self):
"""Test for the attribute ``name`.
"""
aggr1 = self.AllAggregate(name="field1")
self.assertEqual(aggr1.name, "field1")
self.assertEqual(aggr1("field2", [{"field1": 1, "field2": 2},
{"field1": 3, "field2": 4}]),
[1, 3])
def test_valid(self):
"""Test for the attribute ``valid``.
"""
is_even = lambda i: i % 2 == 0
aggr1 = self.AllAggregate(valid=is_even)
self.assertEqual(aggr1.valid, is_even)
self.assertEqual(aggr1("field1", [{"field1": 1},
{"field1": 2}]),
[2])
self.assertEqual(aggr1("field1", [{"field1": 2},
{"field1": 4}]),
[2, 4])
def test_format(self):
"""Test for the attribute ``format``.
"""
double = lambda i: i * 2
aggr1 = self.AllAggregate(format=double)
self.assertEqual(aggr1.format, double)
self.assertEqual(aggr1("field1", [{"field1": 1},
{"field1": 2}]),
[2, 4])
def test_unique(self):
"""Test for the attribute ``unique``.
"""
# Unique is false.
aggr1 = self.AllAggregate()
self.assertEqual(aggr1.unique, False)
self.assertEqual(aggr1("field1", [{"field1": 1},
{"field1": 1}]),
[1, 1])
# Unique is true.
aggr2 = self.AllAggregate(unique=True)
self.assertEqual(aggr2.unique, True)
self.assertEqual(aggr2("field1", [{"field1": 1},
{"field1": 1}]),
[1])
def test_callable_value(self):
"""Test for callable values.
"""
aggr1 = self.AllAggregate()
self.assertEqual(aggr1("field1", [{"field1": lambda: 1},
{"field1": 2}]),
[1, 2])
self.assertEqual(aggr1("field1", [{"field1": 1},
{"field1": lambda: 2}]),
[1, 2])
| 0 | 0 | 0 |
469ec131a089da59dfea02fbcbd86bac2621a4f7 | 8,534 | py | Python | pipe-cli/src/utilities/pipe_shell.py | madmongoose/cloud-pipeline | e4e85faeee895373480f3bfa389eae1bed0db6e8 | [
"Apache-2.0"
] | null | null | null | pipe-cli/src/utilities/pipe_shell.py | madmongoose/cloud-pipeline | e4e85faeee895373480f3bfa389eae1bed0db6e8 | [
"Apache-2.0"
] | null | null | null | pipe-cli/src/utilities/pipe_shell.py | madmongoose/cloud-pipeline | e4e85faeee895373480f3bfa389eae1bed0db6e8 | [
"Apache-2.0"
] | null | null | null | # Copyright 2017-2019 EPAM Systems, Inc. (https://www.epam.com/)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This module is combined from the following sources:
# https://github.com/paramiko/paramiko/blob/master/demos/interactive.py@c091e756084ce017d8d872ffeaf95422f79140f1
# https://github.com/sirosen/paramiko-shell/blob/master/interactive_shell.py@5a743a4e1eccff2d88b273aa108d0d1bb7268771
# Corresponding license notices are available in the respective repositories
from __future__ import print_function
import paramiko
import sys
import os
import re
import select
import socket
import shutil
from paramiko.py3compat import u
DEFAULT_TERMINAL_COLUMNS = 100
DEFAULT_TERMINAL_LINES = 30
PYTHON3 = sys.version_info.major == 3
try:
import termios
import tty
has_termios = True
except ImportError:
has_termios = False
# Python < 3.4 does not have shutil.get_terminal_size
# If it's the case - use stty in posix and a fallback in Windows
| 35.707113 | 117 | 0.620342 | # Copyright 2017-2019 EPAM Systems, Inc. (https://www.epam.com/)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This module is combined from the following sources:
# https://github.com/paramiko/paramiko/blob/master/demos/interactive.py@c091e756084ce017d8d872ffeaf95422f79140f1
# https://github.com/sirosen/paramiko-shell/blob/master/interactive_shell.py@5a743a4e1eccff2d88b273aa108d0d1bb7268771
# Corresponding license notices are available in the respective repositories
from __future__ import print_function
import paramiko
import sys
import os
import re
import select
import socket
import shutil
from paramiko.py3compat import u
DEFAULT_TERMINAL_COLUMNS = 100
DEFAULT_TERMINAL_LINES = 30
PYTHON3 = sys.version_info.major == 3
try:
import termios
import tty
has_termios = True
except ImportError:
has_termios = False
def plain_shell(channel):
    """Mirror the channel's output to stdout without forwarding stdin."""
    open_shell(channel, is_interactive=False)
def interactive_shell(channel):
    """Run a fully interactive shell: forward stdin and mirror output."""
    open_shell(channel, is_interactive=True)
def open_shell(channel, is_interactive=True):
    """Dispatch to the platform-appropriate shell loop.

    Uses the POSIX implementation when termios imported successfully,
    otherwise falls back to the Windows implementation.
    """
    shell_loop = posix_shell if has_termios else windows_shell
    shell_loop(channel, is_interactive=is_interactive)
def transmit_to_std_out(channel, encoding='UTF-8'):
    """Read up to 1024 bytes from *channel* and echo them to stdout.

    Returns True while the channel is open, False once a zero-length
    read signals that the remote side has closed the connection.
    """
    # Read from the channel
    if PYTHON3:
        out = str(channel.recv(1024), encoding=encoding, errors='replace')
        # Strip out SO/SI control characters for windows
        if sys.platform.startswith('win'):
            out = re.sub(r'[\x0E-\x0F]+', '', out)
    else:
        out = channel.recv(1024)
    # Channel is closed - give up
    if len(out) == 0:
        return False
    # Write to stdout
    if PYTHON3:
        sys.stdout.write(out)
    else:
        print(out, end='')
    sys.stdout.flush()
    return True
# Python < 3.4 does not have shutil.get_terminal_size
# If it's the case - use stty in posix and a fallback in Windows
def get_term_size():
    """Return the local terminal size as a ``(columns, lines)`` tuple.

    Prefers ``shutil.get_terminal_size`` (Python >= 3.4).  On older
    interpreters it shells out to ``stty size`` and, if that fails for
    any reason, falls back to the module-level defaults.
    """
    if hasattr(shutil, 'get_terminal_size'):
        size = shutil.get_terminal_size(
            fallback=(DEFAULT_TERMINAL_COLUMNS, DEFAULT_TERMINAL_LINES))
        return (size[0], size[1])
    import subprocess
    try:
        stty_output = subprocess.check_output(
            ['stty', 'size'],
            stderr=subprocess.PIPE,
        ).decode('utf-8')
        height_str, width_str = stty_output.split()
        return (int(width_str), int(height_str))
    except:
        # stty failed - keep the defaults, matching the historical behaviour.
        return (DEFAULT_TERMINAL_COLUMNS, DEFAULT_TERMINAL_LINES)
def resize_pty(channel):
    """Resize the remote PTY to match the local terminal dimensions.

    An SSHException (e.g. because the connection already closed) is
    deliberately swallowed.
    """
    # resize to match terminal size
    tty_width, tty_height = get_term_size()
    # try to resize, and catch it if we fail due to a closed connection
    try:
        channel.resize_pty(width=int(tty_width), height=int(tty_height))
    except paramiko.ssh_exception.SSHException:
        pass
def posix_shell(channel, is_interactive=True):
    """select()-based shell loop for POSIX terminals.

    Puts the local TTY into raw/cbreak mode (interactive mode only),
    then multiplexes between remote output and local keystrokes until
    either side closes.  The original TTY attributes are always
    restored on exit via the enclosing try/finally.
    """
    # get the current TTY attributes to reapply after
    # the remote shell is closed
    oldtty_attrs = termios.tcgetattr(sys.stdin) if is_interactive else None
    stdout_encoding = sys.stdout.encoding if sys.stdout.encoding else "UTF-8"
    # wrap the whole thing in a try/finally construct to ensure
    # that exiting code for TTY handling runs
    try:
        if is_interactive:
            tty.setraw(sys.stdin.fileno())
            tty.setcbreak(sys.stdin.fileno())
        channel.settimeout(0.0)
        is_alive = True
        while is_alive:
            if is_interactive:
                # resize on every iteration of the main loop
                resize_pty(channel)
            # use a unix select call to wait until the remote shell
            # and stdin are ready for reading
            # this is the block until data is ready
            select_targets = [channel, sys.stdin] if is_interactive else [channel]
            read_ready, write_ready, exception_list = \
                select.select(select_targets, [], [])
            # if the channel is one of the ready objects, print
            # it out 1024 chars at a time
            if channel in read_ready:
                # try to do a read from the remote end and print to screen
                try:
                    is_alive = transmit_to_std_out(channel, encoding=stdout_encoding)
                # do nothing on a timeout, as this is an ordinary condition
                except socket.timeout:
                    pass
            # if stdin is ready for reading
            if is_interactive and sys.stdin in read_ready and is_alive:
                # send a single character out at a time
                # this is typically human input, so sending it one character at
                # a time is the only correct action we can take
                # use an os.read to prevent nasty buffering problem with shell
                # history
                char = os.read(sys.stdin.fileno(), 1)
                # if this side of the connection closes, shut down gracefully
                if len(char) == 0:
                    is_alive = False
                else:
                    channel.send(char)
        # close down the channel for send/recv
        # this is an explicit call most likely redundant with the operations
        # that caused an exit from the REPL, but unusual exit conditions can
        # cause this to be reached uncalled
        channel.shutdown(2)
    # regardless of errors, restore the TTY to working order
    # upon exit and print that connection is closed
    finally:
        if is_interactive:
            termios.tcsetattr(sys.stdin, termios.TCSAFLUSH, oldtty_attrs)
def windows_shell(channel, is_interactive=True):
    """Shell loop for Windows consoles (no termios available).

    Remote output is drained on a background thread; local keystrokes
    are read with msvcrt.getch() and function/arrow keys are translated
    to ANSI escape sequences before being sent to the channel.
    """
    # Map functional keys to the ansi escape sequences
    codes_to_ansi = {
        0x48 : '\x1b[A', # Up
        0x50 : '\x1b[B', # Down
        0x4D : '\x1b[C', # Right
        0x4B : '\x1b[D', # Left
        0x47 : '\x1b[H', # Home
        0x4F : '\x1b[F', # End
        0x53 : '\x1b[3~', # Delete
        0x49 : '\x1b[5~', # PageUp
        0x51 : '\x1b[6~', # PageDown
        0x52 : '\x1b[2~', # Insert
        0x3B : '\x1b[[A', # F1
        0x3C : '\x1b[[B', # F2
        0x3D : '\x1b[[C', # F3
        0x3E : '\x1b[[D', # F4
        0x3F : '\x1b[[E', # F5
        0x40 : '\x1b[17~', # F6
        0x41 : '\x1b[18~', # F7
        0x42 : '\x1b[19~', # F8
        0x43 : '\x1b[20~', # F9
        0x44 : '\x1b[21~', # F10
        0x85 : '\x1b[23~', # F11
        0x86 : '\x1b[24~', # F12
    }
    import threading
    import colorama
    import msvcrt
    colorama.init()
    def writeall(sock):
        """Drain remote output to stdout until the channel closes."""
        while transmit_to_std_out(sock):
            # resize on every iteration of the main loop
            resize_pty(sock)
    writer = threading.Thread(target=writeall, args=(channel,))
    writer.start()
    # Don't need stdin reading for the non-interactive shell
    if is_interactive:
        while True:
            # Using Windows-native getch() to skip cmd echoing (emulate tty.cbreak)
            # https://stackoverflow.com/questions/510357/python-read-a-single-character-from-the-user
            char = msvcrt.getch()
            # When reading a function key or an arrow key, each function must be called twice.
            # The first call returns 0 or 0xE0, and the second call returns the actual key code
            # https://docs.microsoft.com/en-us/previous-versions/visualstudio/visual-studio-2012/078sfkak(v=vs.110)
            char_code = ord(char)
            if char_code == 0x00 or char_code == 0xE0:
                char = msvcrt.getch()
                char_code = ord(char)
            # If we know how to translate the actual key code to the ANSI escape code - use ANSI
            if char_code in codes_to_ansi:
                char = codes_to_ansi[char_code]
            if not char or channel.closed:
                break
            channel.send(char)
    channel.shutdown(2)
| 6,890 | 0 | 183 |
e9faf4d2308d5743525c61401b5134006844edbd | 171 | py | Python | cgxsh_edit_config.py | ebob9/cgxsh | 0682922bae4354d2e306147e314dd309da968059 | [
"MIT"
] | null | null | null | cgxsh_edit_config.py | ebob9/cgxsh | 0682922bae4354d2e306147e314dd309da968059 | [
"MIT"
] | 3 | 2020-02-10T00:01:18.000Z | 2022-03-28T00:26:45.000Z | cgxsh_edit_config.py | ebob9/cgxsh | 0682922bae4354d2e306147e314dd309da968059 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
import sys
from cgxsh_lib.file_crypto import edit_config_file
if __name__ == '__main__':
sys.exit(edit_config_file())
| 19 | 50 | 0.71345 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
import sys
from cgxsh_lib.file_crypto import edit_config_file
if __name__ == '__main__':
    # Entry point: run the interactive config editor and propagate its exit code.
    sys.exit(edit_config_file())
| 0 | 0 | 0 |
ce26d3a18a902cb650143b53d2f13fcece06ffe4 | 536 | py | Python | node_manager/migrations/0005_auto_20190123_2041.py | Jennypies/catnet | 8c715e1ad638c9843e116b3c3926163b7dde1618 | [
"MIT"
] | null | null | null | node_manager/migrations/0005_auto_20190123_2041.py | Jennypies/catnet | 8c715e1ad638c9843e116b3c3926163b7dde1618 | [
"MIT"
] | null | null | null | node_manager/migrations/0005_auto_20190123_2041.py | Jennypies/catnet | 8c715e1ad638c9843e116b3c3926163b7dde1618 | [
"MIT"
] | null | null | null | # Generated by Django 2.1.4 on 2019-01-23 20:41
from django.conf import settings
from django.db import migrations, models
| 23.304348 | 71 | 0.585821 | # Generated by Django 2.1.4 on 2019-01-23 20:41
from django.conf import settings
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated Django migration (2.1.4).

    Deletes the standalone ``Contact`` model and re-points
    ``Node.contact`` at a many-to-many relation to the project's
    configured user model.
    """
    dependencies = [
        ('node_manager', '0004_auto_20190123_2035'),
    ]
    operations = [
        # Drop the standalone Contact model.
        migrations.DeleteModel(
            name='Contact',
        ),
        # Node.contact now links directly to user accounts.
        migrations.AlterField(
            model_name='node',
            name='contact',
            field=models.ManyToManyField(to=settings.AUTH_USER_MODEL),
        ),
    ]
| 0 | 382 | 25 |
6ec54a58a019798e86849181545ddb49465660f8 | 3,308 | py | Python | transformer_2/models/transformer_config.py | mingruimingrui/Transformer2 | 2b44289ee7c7312d699f2261c1e4ebccce0f21e2 | [
"MIT"
] | null | null | null | transformer_2/models/transformer_config.py | mingruimingrui/Transformer2 | 2b44289ee7c7312d699f2261c1e4ebccce0f21e2 | [
"MIT"
] | 1 | 2020-06-01T02:13:10.000Z | 2020-06-01T02:13:10.000Z | transformer_2/models/transformer_config.py | mingruimingrui/Transformer2 | 2b44289ee7c7312d699f2261c1e4ebccce0f21e2 | [
"MIT"
] | null | null | null | """
Configurables for Transformer
"""
from transformer_2.utils.config_system import ConfigSystem
__all__ = ['make_config']
_C = ConfigSystem(validate_config_fn=validate_config)
# --------------------------------------------------------------------------- #
# Start of configs
# --------------------------------------------------------------------------- #
# Required
# The vocab size of the encoder language
# Should be the number of tokens including <bos> <pad> <eos> <unk>
_C.encoder_vocab_size = None
# Required
# The index of the padding token
_C.encoder_padding_idx = None
# The size of the token/positional embeddings for the encoder
_C.encoder_embed_dim = 512
# The size of the hidden states embeddings in the encoder
_C.encoder_hidden_dim = 512
# The size of the hidden states in the encoder transistor
_C.encoder_transistor_dim = 1024
# The number of multi-head attention layers
_C.encoder_num_layers = 6
# The number of heads in multi-head attention
_C.encoder_num_heads = 4
# Should bias be used in the encoder
_C.encoder_use_bias = True
# The number of positional embeddings to use
_C.encoder_max_positions = 1024
# Should positional embeddings not be used?
_C.encoder_no_pos_embeds = False
# Should positional embeddings be learned? Default uses sinusoidal
_C.encoder_learned_pos_embeds = False
# Required
# The vocab size of the decoder language
# Should be the number of tokens including <bos> <pad> <eos> <unk>
_C.decoder_vocab_size = None
# Required
# The index of the padding token
_C.decoder_padding_idx = None
# The size of the token/positional embeddings for the encoder
_C.decoder_embed_dim = 512
# The size of the hidden states embeddings in the decoder
_C.decoder_hidden_dim = 512
# The size of the hidden states in the decoder transistor
_C.decoder_transistor_dim = 1024
# The number of multi-head attention layers
_C.decoder_num_layers = 6
# The number of heads in multi-head attention
_C.decoder_num_heads = 4
# Should bias be used in the decoder
_C.decoder_use_bias = True
# The number of positional embeddings to use
_C.decoder_max_positions = 1024
# Should positional embeddings not be used?
_C.decoder_no_pos_embeds = False
# Should positional embeddings be learned? Default uses sinusoidal
_C.decoder_learned_pos_embeds = False
# Should the decoder not attend to the encoder? Default the
# decoder will attend to the encoder.
_C.decoder_no_encoder_attn = False
# Dropout probability
_C.dropout = 0.0
# Dropout probability for attention weights
_C.attn_dropout = 0.0
# Dropout probability after attention in transistor
_C.activation_dropout = 0.0
# Activation function to use in transistor
_C.activation_fn = 'relu'
# Should layer norm be applied before multi-headed attention?
# Default is before
_C.normalize_before = True
# Should encoder input embeddings, decoder input embeddings and decoder output
# embeddings be the same tensor?
_C.share_all_embeddings = False
# Should decoder input and output embeddings be the same tensor?
_C.share_decoder_input_output_embed = True
# --------------------------------------------------------------------------- #
# End of configs
# --------------------------------------------------------------------------- #
_C.immutable(True)
make_config = _C.make_config
| 26.894309 | 79 | 0.713724 | """
Configurables for Transformer
"""
from transformer_2.utils.config_system import ConfigSystem
__all__ = ['make_config']
def validate_config(config):
    """Validation hook passed to ``ConfigSystem``.

    Currently a no-op placeholder: every config is accepted as-is.
    """
    pass
_C = ConfigSystem(validate_config_fn=validate_config)
# --------------------------------------------------------------------------- #
# Start of configs
# --------------------------------------------------------------------------- #
# Required
# The vocab size of the encoder language
# Should be the number of tokens including <bos> <pad> <eos> <unk>
_C.encoder_vocab_size = None
# Required
# The index of the padding token
_C.encoder_padding_idx = None
# The size of the token/positional embeddings for the encoder
_C.encoder_embed_dim = 512
# The size of the hidden states embeddings in the encoder
_C.encoder_hidden_dim = 512
# The size of the hidden states in the encoder transistor
_C.encoder_transistor_dim = 1024
# The number of multi-head attention layers
_C.encoder_num_layers = 6
# The number of heads in multi-head attention
_C.encoder_num_heads = 4
# Should bias be used in the encoder
_C.encoder_use_bias = True
# The number of positional embeddings to use
_C.encoder_max_positions = 1024
# Should positional embeddings not be used?
_C.encoder_no_pos_embeds = False
# Should positional embeddings be learned? Default uses sinusoidal
_C.encoder_learned_pos_embeds = False
# Required
# The vocab size of the decoder language
# Should be the number of tokens including <bos> <pad> <eos> <unk>
_C.decoder_vocab_size = None
# Required
# The index of the padding token
_C.decoder_padding_idx = None
# The size of the token/positional embeddings for the encoder
_C.decoder_embed_dim = 512
# The size of the hidden states embeddings in the decoder
_C.decoder_hidden_dim = 512
# The size of the hidden states in the decoder transistor
_C.decoder_transistor_dim = 1024
# The number of multi-head attention layers
_C.decoder_num_layers = 6
# The number of heads in multi-head attention
_C.decoder_num_heads = 4
# Should bias be used in the decoder
_C.decoder_use_bias = True
# The number of positional embeddings to use
_C.decoder_max_positions = 1024
# Should positional embeddings not be used?
_C.decoder_no_pos_embeds = False
# Should positional embeddings be learned? Default uses sinusoidal
_C.decoder_learned_pos_embeds = False
# Should the decoder not attend to the encoder? Default the
# decoder will attend to the encoder.
_C.decoder_no_encoder_attn = False
# Dropout probability
_C.dropout = 0.0
# Dropout probability for attention weights
_C.attn_dropout = 0.0
# Dropout probability after attention in transistor
_C.activation_dropout = 0.0
# Activation function to use in transistor
_C.activation_fn = 'relu'
# Should layer norm be applied before multi-headed attention?
# Default is before
_C.normalize_before = True
# Should encoder input embeddings, decoder input embeddings and decoder output
# embeddings be the same tensor?
_C.share_all_embeddings = False
# Should decoder input and output embeddings be the same tensor?
_C.share_decoder_input_output_embed = True
# --------------------------------------------------------------------------- #
# End of configs
# --------------------------------------------------------------------------- #
_C.immutable(True)
make_config = _C.make_config
| 16 | 0 | 23 |
c6ffbe15828fe37550081127ba99b2873f871df3 | 11,457 | py | Python | auctions/views.py | Avinash-Murugappan/AuctionsApp | d05e4511d2422de0e543562e2726b537d5baa54f | [
"MIT"
] | null | null | null | auctions/views.py | Avinash-Murugappan/AuctionsApp | d05e4511d2422de0e543562e2726b537d5baa54f | [
"MIT"
] | null | null | null | auctions/views.py | Avinash-Murugappan/AuctionsApp | d05e4511d2422de0e543562e2726b537d5baa54f | [
"MIT"
] | null | null | null | from django.contrib.auth import authenticate, login, logout
from django.db import IntegrityError
from django.http import HttpResponse, HttpResponseRedirect
from django.shortcuts import render
from django.urls import reverse
from django.contrib.auth.decorators import login_required
from django import forms
from django.utils.safestring import mark_safe
from django.contrib import messages
from django.core.exceptions import ObjectDoesNotExist
from .models import User, Category, Auction, Bid, Comment, Watchlist
from .forms import CreateForm, CommentForm, BidForm, SearchForm
@login_required(redirect_field_name='')
@login_required(redirect_field_name='')
@login_required(redirect_field_name='')
@login_required(redirect_field_name='')
@login_required(redirect_field_name='')
@login_required(redirect_field_name='')
@login_required(redirect_field_name='')
@login_required(redirect_field_name='') | 43.896552 | 133 | 0.634808 | from django.contrib.auth import authenticate, login, logout
from django.db import IntegrityError
from django.http import HttpResponse, HttpResponseRedirect
from django.shortcuts import render
from django.urls import reverse
from django.contrib.auth.decorators import login_required
from django import forms
from django.utils.safestring import mark_safe
from django.contrib import messages
from django.core.exceptions import ObjectDoesNotExist
from .models import User, Category, Auction, Bid, Comment, Watchlist
from .forms import CreateForm, CommentForm, BidForm, SearchForm
def index(request):
    """Render the landing page listing all active auctions, newest first."""
    active_auctions = Auction.objects.filter(active=True).order_by("-date")
    context = {
        "auctions": active_auctions,
        "searchform": SearchForm(),
    }
    return render(request, "auctions/index.html", context)
def login_view(request):
    """Log a user in.

    POST: authenticate the submitted credentials; on success redirect
    to the index, otherwise flash an error and return to the login page.
    GET: render the login form.
    """
    if request.method == "POST":
        # Attempt to sign user in
        username = request.POST["username"]
        password = request.POST["password"]
        user = authenticate(request, username=username, password=password)
        # Check if authentication successful
        if user is not None:
            login(request, user)
            return HttpResponseRedirect(reverse("auctions:index"))
        else:
            # Bugfix: the flash level lives on the imported ``messages``
            # module; ``message`` was an undefined name (NameError at runtime).
            messages.add_message(request, messages.ERROR, 'Invalid username and/or password.')
            return HttpResponseRedirect(reverse("auctions:login"))
    else:
        return render(request, "auctions/login.html", {
            "searchform": SearchForm()
        })
def logout_view(request):
    """Log the current user out and redirect to the index page."""
    logout(request)
    return HttpResponseRedirect(reverse("auctions:index"))
def register(request):
    """Register a new account.

    POST: create the user when the passwords match and the username is
    free, log them in, and redirect to the index; otherwise flash an
    error and return to the registration page.
    GET: render the registration form.
    """
    if request.method == "POST":
        username = request.POST["username"]
        email = request.POST["email"]
        # Ensure password matches confirmation
        password = request.POST["password"]
        confirmation = request.POST["confirmation"]
        if password != confirmation:
            # Bugfix: flash levels live on the imported ``messages`` module;
            # ``message`` was an undefined name (NameError at runtime).
            messages.add_message(request, messages.ERROR, 'Passwords must match.')
            return HttpResponseRedirect(reverse("auctions:register"))
        # Attempt to create new user
        try:
            user = User.objects.create_user(username, email, password)
            user.save()
        except IntegrityError:
            # Same ``message`` -> ``messages`` fix as above.
            messages.add_message(request, messages.ERROR, 'Username already taken.')
            return HttpResponseRedirect(reverse("auctions:register"))
        login(request, user)
        return HttpResponseRedirect(reverse("auctions:index"))
    else:
        return render(request, "auctions/register.html", {
            "searchform": SearchForm()
        })
@login_required(redirect_field_name='')
def create(request):
    """Create a new auction listing.

    POST: validate the submitted CreateForm; on success save the listing
    with the current user as author and redirect to the index, otherwise
    re-render the bound form so validation errors are shown.
    GET: render an empty form.
    """
    if request.method == "POST":
        form = CreateForm(request.POST)
        if form.is_valid():
            data = form.save(commit=False)
            # The listing belongs to the signed-in user.
            data.author = request.user
            data.save()
            return HttpResponseRedirect(reverse("auctions:index"))
        else:
            return render(request, "auctions/create.html", {
                "createform": form,
                "searchform": SearchForm()
            })
    else:
        return render(request, "auctions/create.html", {
            "createform": CreateForm(),
            # Consistency fix: every other render in this module passes the
            # search form to the template; the GET branch omitted it.
            "searchform": SearchForm()
        })
@login_required(redirect_field_name='')
def getlist(request, list_id):
    """Render the detail page for a single auction listing.

    Shows the listing itself, its comments (newest first), the current
    highest bid (None when there are no bids yet), and fresh comment,
    bid and search forms.
    """
    auction = Auction.objects.get(pk=list_id)
    comments = Comment.objects.filter(auction_id=list_id).order_by('-date')
    highestbid = Bid.objects.filter(auction_id=list_id).order_by('-amount').first()
    return render(request, "auctions/list.html", {
        "auction": auction,
        "comments": comments,
        "commentform": CommentForm(),
        "bidform": BidForm(),
        "highestbid": highestbid,
        "searchform": SearchForm()
    })
@login_required(redirect_field_name='')
def watchlist_add(request, list_id):
    """Add auction ``list_id`` to the current user's watchlist (POST only).

    Flashes a warning when the auction is already watched; creates the
    user's watchlist row on first use.  NOTE(review): a non-POST request
    falls through and returns None - presumably unreachable from the
    templates; confirm before relying on it.
    """
    if request.method == "POST":
        auction = Auction.objects.get(pk=list_id)
        # A watchlist row with this auction attached means it is already watched.
        check = Watchlist.objects.filter(id=request.user.id, product=list_id).first()
        if check is not None:
            messages.add_message(request, messages.WARNING, 'This auction is already in your watchlist.')
            return HttpResponseRedirect(reverse("auctions:list", args=(list_id,)))
        else:
            try:
                # Existing watchlist: just attach the auction.
                usrlist = Watchlist.objects.get(id=request.user.id)
                usrlist.product.add(auction)
                usrlist.save()
                messages.add_message(request, messages.SUCCESS, 'Successfully added to your watchlist.')
                return HttpResponseRedirect(reverse("auctions:list", args=(list_id,)))
            except ObjectDoesNotExist:
                # First use: create the watchlist row, then attach the auction.
                usrlist = Watchlist(id=request.user.id, owner_id=request.user.id)
                usrlist.save()
                usrlist.product.add(auction)
                messages.add_message(request, messages.SUCCESS, 'Successfully added to your watchlist.')
                return HttpResponseRedirect(reverse("auctions:list", args=(list_id,)))
@login_required(redirect_field_name='')
def watchlist_remove(request, list_id):
    """Remove auction ``list_id`` from the current user's watchlist (POST only).

    Flashes a warning when the auction is not in the watchlist.
    NOTE(review): a non-POST request falls through and returns None -
    presumably unreachable from the templates; confirm before relying on it.
    """
    if request.method == "POST":
        auction = Auction.objects.get(pk=list_id)
        check = Watchlist.objects.filter(id=request.user.id, product=list_id).first()
        if check is None:
            messages.add_message(request, messages.WARNING, 'This auction is not in your watchlist.')
            return HttpResponseRedirect(reverse("auctions:list", args=(list_id,)))
        else:
            try:
                usrlist = Watchlist.objects.get(id=request.user.id)
                usrlist.product.remove(auction)
                usrlist.save()
                # Uses the ERROR level for a successful removal, apparently to
                # style the flash differently - confirm this is intentional.
                messages.add_message(request, messages.ERROR, 'Successfully removed from your watchlist.')
                return HttpResponseRedirect(reverse("auctions:list", args=(list_id,)))
            except ObjectDoesNotExist:
                messages.add_message(request, messages.WARNING, 'This auction is not in your watchlist.')
                return HttpResponseRedirect(reverse("auctions:list", args=(list_id,)))
@login_required(redirect_field_name='')
def comment(request, list_id):
    """Post a comment on an auction (POST only).

    Fix: the original returned None for non-POST requests (Django error
    "view didn't return an HttpResponse"); such requests now redirect
    back to the listing page.
    """
    if request.method != "POST":
        return HttpResponseRedirect(reverse("auctions:list", args=(list_id,)))
    form = CommentForm(request.POST)
    if not form.is_valid():
        messages.add_message(request, messages.ERROR, "Your comment form is invalid.")
        return HttpResponseRedirect(reverse("auctions:list", args=(list_id,)))
    auction = Auction.objects.get(pk=list_id)
    data = form.save(commit=False)
    # Attach author and target auction before persisting.
    data.author = request.user
    data.auction = auction
    data.save()
    messages.add_message(request, messages.SUCCESS, "Your comment was successfully posted.")
    return HttpResponseRedirect(reverse("auctions:list", args=(list_id,)))
@login_required(redirect_field_name='')
def bid(request, list_id):
    """Place a bid on an auction (POST only).

    Fixes over the original:
    - the identical "accept the bid" code appeared twice (in both arms of
      the highest-bid check); the checks are now flattened into guard
      clauses with a single save path, behavior unchanged;
    - non-POST requests previously fell through and returned None (Django
      error "view didn't return an HttpResponse"); they now redirect back
      to the listing page.
    """
    if request.method != "POST":
        return HttpResponseRedirect(reverse("auctions:list", args=(list_id,)))
    form = BidForm(request.POST)
    if not form.is_valid():
        messages.add_message(request, messages.ERROR, "Your form is invalid.")
        return HttpResponseRedirect(reverse("auctions:list", args=(list_id,)))
    bidamount = form.cleaned_data["amount"]
    auction = Auction.objects.get(pk=list_id)
    if request.user.id == auction.author_id:
        messages.add_message(request, messages.ERROR, "You can't bid on your own auction.")
        return HttpResponseRedirect(reverse("auctions:list", args=(list_id,)))
    if not auction.active:
        messages.add_message(request, messages.ERROR, "You can't bid on a closed auction.")
        return HttpResponseRedirect(reverse("auctions:list", args=(list_id,)))
    if bidamount <= auction.starting_price:
        messages.add_message(request, messages.ERROR, "You must place a higher bid than the current price.")
        return HttpResponseRedirect(reverse("auctions:list", args=(list_id,)))
    # The bid must also beat the current highest bid, when one exists.
    higherbid = Bid.objects.filter(auction_id=list_id).order_by('-amount').first()
    if higherbid is not None and bidamount <= higherbid.amount:
        messages.add_message(request, messages.ERROR, "You must place a higher bid than the current price.")
        return HttpResponseRedirect(reverse("auctions:list", args=(list_id,)))
    # Accept the bid: record it and move the auction's current price up.
    data = form.save(commit=False)
    auction.current_price = bidamount
    data.author_id = request.user.id
    data.auction_id = list_id
    data.save()
    auction.save()
    messages.add_message(request, messages.SUCCESS, "Your bid was successfully placed :)")
    return HttpResponseRedirect(reverse("auctions:list", args=(list_id,)))
def categories(request):
    """List every auction category, ordered alphabetically by name."""
    all_categories = Category.objects.all().order_by("name")
    context = {
        "categories": all_categories,
        "searchform": SearchForm()
    }
    return render(request, "auctions/categories.html", context)
@login_required(redirect_field_name='')
def mylist(request):
    """Show the still-active auctions on the current user's watchlist."""
    watched_auctions = Auction.objects.filter(watchlist__id=request.user.id, active=True)
    context = {
        "auctions": watched_auctions,
        "searchform": SearchForm()
    }
    return render(request, "auctions/mylist.html", context)
def getcategory(request, cat_id):
    """List all auctions belonging to one category, newest first."""
    category = Category.objects.get(id=cat_id)
    listings = Auction.objects.filter(category_id=cat_id).order_by('-date')
    context = {
        "auctions": listings,
        "category": category,
        "searchform": SearchForm()
    }
    return render(request, "auctions/category.html", context)
@login_required(redirect_field_name='')
def close(request, list_id):
    """Close an auction; only its author may do so (POST only).

    Fix: the original returned None for non-POST requests (Django error
    "view didn't return an HttpResponse"); such requests now redirect
    back to the listing page.
    """
    if request.method != "POST":
        return HttpResponseRedirect(reverse("auctions:list", args=(list_id,)))
    auction = Auction.objects.get(pk=list_id)
    if request.user.id != auction.author_id:
        messages.add_message(request, messages.ERROR, "You did not create this auction.")
        return HttpResponseRedirect(reverse("auctions:list", args=(list_id,)))
    auction.active = False
    auction.save()
    messages.add_message(request, messages.SUCCESS, "Your auction was successfully closed.")
    return HttpResponseRedirect(reverse("auctions:list", args=(list_id,)))
def search(request):
    """Search auctions by title or category name, case-insensitively (POST only).

    Fix: the original returned None when the request was not a POST
    (Django error "view didn't return an HttpResponse"); such requests
    now redirect to the index page.
    """
    if request.method != "POST":
        return HttpResponseRedirect(reverse("auctions:index"))
    form = SearchForm(request.POST)
    if not form.is_valid():
        messages.add_message(request, messages.ERROR, "Your search is invalid.")
        return HttpResponseRedirect(reverse("auctions:index"))
    usrsearch = form.cleaned_data["search"]
    # Union of title matches and category-name matches.
    result = Auction.objects.filter(title__icontains=usrsearch) | Auction.objects.filter(category__name__icontains=usrsearch)
    return render(request, "auctions/search.html", {
        "auctions": result,
        "search_str": usrsearch,
        "searchform": SearchForm()
    })
760bd97489459ca7421ffb023b45e07766736779 | 55 | py | Python | djdwod/login.py | 17675647287/login | 55d0a444e4ca0dc549b0dfaaa006b49c6f0bd953 | [
"MIT"
] | null | null | null | djdwod/login.py | 17675647287/login | 55d0a444e4ca0dc549b0dfaaa006b49c6f0bd953 | [
"MIT"
] | null | null | null | djdwod/login.py | 17675647287/login | 55d0a444e4ca0dc549b0dfaaa006b49c6f0bd953 | [
"MIT"
] | null | null | null | number1 = 10
number2 = 20
number4 = 40
number3 = 30
| 6.875 | 12 | 0.654545 | number1 = 10
number2 = 20
number4 = 40
number3 = 30
| 0 | 0 | 0 |
bdad247f70b5f8b5d83ff4db4cbd7118ca39ee87 | 2,603 | py | Python | Section9-Build_the_Real_Robot/arduinobot_ws/src/arduinobot_controller/scripts/angles_converter.py | AntoBrandi/Arduino-Bot-Tutorial | 56ef08e3e60c6523ac4d2d2c9816d86c73f42d3a | [
"MIT",
"Unlicense"
] | 6 | 2021-09-24T06:38:29.000Z | 2022-02-25T20:00:31.000Z | arduinobot_ws/src/arduinobot_controller/scripts/angles_converter.py | surajbilung/Arduino-Bot | c12e6bfb07ff66b8ac28a616ba4f8c5d4951f091 | [
"Unlicense",
"MIT"
] | null | null | null | arduinobot_ws/src/arduinobot_controller/scripts/angles_converter.py | surajbilung/Arduino-Bot | c12e6bfb07ff66b8ac28a616ba4f8c5d4951f091 | [
"Unlicense",
"MIT"
] | 3 | 2021-09-09T16:55:42.000Z | 2022-03-16T06:58:24.000Z | #!/usr/bin/env python3
import rospy
import math
from arduinobot_controller.srv import AnglesConverter, AnglesConverterResponse
"""
arduinobot - angles_converter
This script implements two services on the topics
- radians_to_degrees
- degrees_to_radians
Both of them receives a request with the format:
float64 base
float64 shoulder
float64 elbow
float64 gripper
and sends a response in the same format to the client
The first service (radians_to_degrees) receives the angles in radians and convert
those in degrees according to the boundaries defined inthe URDF file
The second service (degrees_to_radians) receives the angles in degrees and convert
those in radians according to the boundaries defined inthe URDF file
This conversion is needed for the control of the real robot in order to convert the radians angle of each joint
as used in ROS in degrees angles as used in Arduino for the actuation of the Servo motors
Copyright (c) 2021 Antonio Brandi. All right reserved.
"""
if __name__ == "__main__":
# Inizialize a ROS node called angles_converter
rospy.init_node('angles_converter')
# Inizialize two services for the angle conversions
radians_to_degrees = rospy.Service('radians_to_degrees', AnglesConverter, convert_radians_to_degrees)
degrees_to_radians = rospy.Service('degrees_to_radians', AnglesConverter, convert_degrees_to_radians)
# keeps the node up and running
rospy.spin() | 40.671875 | 113 | 0.739531 | #!/usr/bin/env python3
import rospy
import math
from arduinobot_controller.srv import AnglesConverter, AnglesConverterResponse
"""
arduinobot - angles_converter
This script implements two services on the topics
- radians_to_degrees
- degrees_to_radians
Both of them receives a request with the format:
float64 base
float64 shoulder
float64 elbow
float64 gripper
and sends a response in the same format to the client
The first service (radians_to_degrees) receives the angles in radians and convert
those in degrees according to the boundaries defined inthe URDF file
The second service (degrees_to_radians) receives the angles in degrees and convert
those in radians according to the boundaries defined inthe URDF file
This conversion is needed for the control of the real robot in order to convert the radians angle of each joint
as used in ROS in degrees angles as used in Arduino for the actuation of the Servo motors
Copyright (c) 2021 Antonio Brandi. All right reserved.
"""
def convert_radians_to_degrees(req):
    """Service callback mapping joint angles from radians (ROS/URDF) to
    the 0-180 degree range used by the Arduino servos."""
    half_pi = math.pi / 2
    res = AnglesConverterResponse()
    # Shift by pi/2 so the URDF range [-pi/2, pi/2] lands on [0, 180];
    # the shoulder axis is mirrored, the gripper uses a half-range scale.
    res.base = int(((req.base + half_pi) * 180) / math.pi)
    res.shoulder = 180 - int(((req.shoulder + half_pi) * 180) / math.pi)
    res.elbow = int(((req.elbow + half_pi) * 180) / math.pi)
    res.gripper = int(((-req.gripper) * 180) / half_pi)
    return res
def convert_degrees_to_radians(req):
    """Service callback performing the inverse mapping: servo degrees back
    to the radian ranges used in ROS."""
    half_pi = math.pi / 2
    res = AnglesConverterResponse()
    # Exact algebraic inverse of convert_radians_to_degrees.
    res.base = ((math.pi * req.base) - (half_pi * 180)) / 180
    res.shoulder = (((180 - req.shoulder) * math.pi) - (half_pi * 180)) / 180
    res.elbow = ((math.pi * req.elbow) - (half_pi * 180)) / 180
    res.gripper = -(half_pi * req.gripper) / 180
    return res
if __name__ == "__main__":
    # Initialize a ROS node called angles_converter.
    rospy.init_node('angles_converter')
    # Register the two conversion services; each request is handled by the
    # matching callback defined above.
    radians_to_degrees = rospy.Service('radians_to_degrees', AnglesConverter, convert_radians_to_degrees)
    degrees_to_radians = rospy.Service('degrees_to_radians', AnglesConverter, convert_degrees_to_radians)
    # Block and keep the node serving requests until shutdown.
    rospy.spin()
6e78031705a76491b0ca78baedb4fa8dc8da32ca | 8,494 | py | Python | tests/python_optimizer.py | primitiv/primitiv-python | aebd0fbca84e1d7cb9c74a265d035a52c0bd745f | [
"Apache-2.0"
] | 16 | 2017-11-30T00:49:54.000Z | 2020-08-14T14:20:12.000Z | tests/python_optimizer.py | primitiv/primitiv-python | aebd0fbca84e1d7cb9c74a265d035a52c0bd745f | [
"Apache-2.0"
] | 25 | 2017-11-30T14:50:44.000Z | 2022-03-08T09:01:37.000Z | tests/python_optimizer.py | primitiv/primitiv-python | aebd0fbca84e1d7cb9c74a265d035a52c0bd745f | [
"Apache-2.0"
] | 2 | 2017-12-01T01:02:12.000Z | 2017-12-05T00:21:40.000Z | from primitiv import optimizers as O
from primitiv import Optimizer, Parameter, Device, Graph, Shape
from primitiv import initializers as I
from primitiv import devices as D
from primitiv import functions as F
from primitiv import tensor_functions as tF
import unittest
import tempfile
import numpy as np
| 38.089686 | 115 | 0.63068 | from primitiv import optimizers as O
from primitiv import Optimizer, Parameter, Device, Graph, Shape
from primitiv import initializers as I
from primitiv import devices as D
from primitiv import functions as F
from primitiv import tensor_functions as tF
import unittest
import tempfile
import numpy as np
class TestAdam(Optimizer):
    """Pure-Python Adam optimizer implementing the user-defined Optimizer
    interface; mirrors primitiv's built-in C++ Adam so the two can be
    compared step-for-step in the tests below."""
    def __init__(self, alpha, beta1, beta2, eps):
        """Store the Adam hyperparameters as float32 scalars (to match the
        C++ implementation's precision)."""
        super().__init__()
        self.alpha_ = np.float32(alpha)
        self.beta1_ = np.float32(beta1)
        self.beta2_ = np.float32(beta2)
        self.eps_ = np.float32(eps)
    def configure_parameter(self, param):
        """Attach zero-initialized first/second-moment statistic tensors to
        ``param`` if they are not present yet."""
        for name in ("testadam-m1", "testadam-m2"):
            if name not in param.stats:
                param.add_stats(name, param.shape())
                param.stats[name].reset(0)
    def update_parameter(self, scale, param):
        """Apply one bias-corrected Adam step to ``param`` in place."""
        epoch = self.get_epoch() + 1
        g = param.gradient
        # Exponential moving averages of the gradient and its square.
        param.stats["testadam-m1"] = self.beta1_ * param.stats["testadam-m1"] + (1 - self.beta1_) * g
        param.stats["testadam-m2"] = self.beta2_ * param.stats["testadam-m2"] + (1 - self.beta2_) * g * g
        # Bias correction compensating for the zero-initialized moments.
        mm1 = param.stats["testadam-m1"] / (1 - self.beta1_ ** epoch)
        mm2 = param.stats["testadam-m2"] / (1 - self.beta2_ ** epoch)
        param.value -= (scale * self.alpha_) * mm1 / (tF.sqrt(mm2) + self.eps_)
    def get_configs(self):
        """Return (uint_configs, float_configs) dicts used for serialization."""
        uint_configs = {}
        float_configs = {
            "TestAdam.alpha": self.alpha_,
            "TestAdam.beta1": self.beta1_,
            "TestAdam.beta2": self.beta2_,
            "TestAdam.eps": self.eps_,
        }
        return uint_configs, float_configs
    def set_configs(self, uint_configs, float_configs):
        """Restore hyperparameters from deserialized config dicts."""
        self.alpha_ = float_configs["TestAdam.alpha"]
        self.beta1_ = float_configs["TestAdam.beta1"]
        self.beta2_ = float_configs["TestAdam.beta2"]
        self.eps_ = float_configs["TestAdam.eps"]
class TestException(Exception):
    """Marker exception raised by ExceptionOptimizer's overrides so the
    tests can verify Python exceptions propagate through primitiv."""
    pass
class ExceptionOptimizer(Optimizer):
    """Optimizer whose every override raises TestException (with the method
    name as message), used to check exception propagation out of the
    library's driver code."""
    def configure_parameter(self, param):
        raise TestException("configure_parameter")
    def update_parameter(self, scale, param):
        raise TestException("update_parameter")
    def get_configs(self):
        raise TestException("get_configs")
    def set_configs(self, uint_configs, float_configs):
        raise TestException("set_configs")
class IncompleteOptimizer(Optimizer):
    """Optimizer overriding nothing; the tests below assert that driving it
    raises NotImplementedError from the base-class methods."""
    pass
def train_func(optimizer):
    """Train a tiny 2-8-1 tanh network on the XOR truth table for 10 steps
    using ``optimizer`` and return the final parameter values.

    The Naive device is seeded with a fixed value (12345) so that two
    optimizers performing identical updates produce identical parameters,
    which lets the tests compare Python and C++ implementations exactly.

    Returns a list of four nested lists: [w1, b1, w2, b2] values.
    """
    dev = D.Naive(12345)
    Device.set_default(dev)
    g = Graph()
    Graph.set_default(g)
    # Hidden layer (8 units) and output layer (1 unit) parameters.
    pw1 = Parameter([8, 2], I.XavierUniform())
    pb1 = Parameter([8], I.Constant(0))
    pw2 = Parameter([1, 8], I.XavierUniform())
    pb2 = Parameter([1], I.Constant(0))
    optimizer.add(pw1, pb1, pw2, pb2)
    # Four XOR-style input pairs and their +/-1 targets (minibatch of 4).
    input_data = [1, 1, 1, -1, -1, 1, -1, -1]
    output_data = [1, -1, -1, 1]
    for i in range(10):
        g.clear()
        x = F.raw_input(Shape([2], 4), input_data)
        w1 = F.parameter(pw1)
        b1 = F.parameter(pb1)
        w2 = F.parameter(pw2)
        b2 = F.parameter(pb2)
        h = F.tanh(w1 @ x + b1)
        y = w2 @ h + b2
        t = F.raw_input(Shape([], 4), output_data)
        diff = t - y
        # Mean squared error over the minibatch.
        loss = F.batch.mean(diff * diff)
        optimizer.reset_gradients()
        loss.backward()
        optimizer.update()
    return [pw1.value.to_list(),
            pb1.value.to_list(),
            pw2.value.to_list(),
            pb2.value.to_list()]
class PythonOptimizerTest(unittest.TestCase):
    """Tests for Optimizer subclasses implemented in Python (TestAdam and
    the deliberately broken optimizers above)."""
    @classmethod
    def setUpClass(cls):
        pass
    @classmethod
    def tearDownClass(cls):
        pass
    def setUp(self):
        # Fresh TestAdam with the standard Adam defaults for every test.
        self.t = TestAdam(alpha = 0.001, beta1 = 0.9, beta2 = 0.999, eps = 1e-8)
    def tearDown(self):
        pass
    def test_pyoptimizer_get_set_config(self):
        """get_configs/set_configs round-trip through the base Optimizer."""
        uint_configs, float_configs = Optimizer.get_configs(self.t)
        self.assertAlmostEqual(uint_configs['Optimizer.epoch'], 0)
        self.assertAlmostEqual(float_configs['TestAdam.alpha'], 0.001)
        self.assertAlmostEqual(float_configs['TestAdam.beta1'], 0.9)
        self.assertAlmostEqual(float_configs['TestAdam.beta2'], 0.999)
        self.assertAlmostEqual(float_configs['TestAdam.eps'], 1e-8, places=10)
        # Mutate one value and push it back; the instance must pick it up.
        float_configs['TestAdam.beta1'] = 200
        Optimizer.set_configs(self.t, uint_configs, float_configs)
        self.assertEqual(self.t.beta1_, 200)
    def test_pyoptimizer_parameter(self):
        """Adding a parameter triggers configure_parameter (stats created)."""
        dev = D.Naive()
        Device.set_default(dev)
        pw1 = Parameter([8, 2], I.XavierUniform())
        self.t.add(pw1)
        self.assertIn("testadam-m1", pw1.stats)
        self.assertIn("testadam-m2", pw1.stats)
    def test_pyoptimizer_compare_with_cpp(self):
        """The Python TestAdam must match primitiv's C++ Adam exactly:
        same configs and (numerically close) same trained parameters."""
        c_optimizer = O.Adam(alpha = 0.001, beta1 = 0.9, beta2 = 0.999, eps = 1e-8)
        py_params = train_func(self.t)
        c_params = train_func(c_optimizer)
        py_uint_configs, py_float_configs = Optimizer.get_configs(self.t)
        c_uint_configs, c_float_configs = c_optimizer.get_configs()
        self.assertEqual(py_uint_configs["Optimizer.epoch"], c_uint_configs["Optimizer.epoch"])
        self.assertEqual(py_float_configs["TestAdam.alpha"], c_float_configs["Adam.alpha"])
        self.assertEqual(py_float_configs["TestAdam.beta1"], c_float_configs["Adam.beta1"])
        self.assertEqual(py_float_configs["TestAdam.beta2"], c_float_configs["Adam.beta2"])
        self.assertEqual(py_float_configs["TestAdam.eps"], c_float_configs["Adam.eps"])
        self.assertEqual(py_float_configs["Optimizer.clip_threshold"], c_float_configs["Optimizer.clip_threshold"])
        self.assertEqual(py_float_configs["Optimizer.l2_strength"], c_float_configs["Optimizer.l2_strength"])
        self.assertEqual(py_float_configs["Optimizer.lr_scale"], c_float_configs["Optimizer.lr_scale"])
        self.assertTrue(np.isclose(py_params[0], c_params[0]).all())
        self.assertTrue(np.isclose(py_params[1], c_params[1]).all())
        self.assertTrue(np.isclose(py_params[2], c_params[2]).all())
        self.assertTrue(np.isclose(py_params[3], c_params[3]).all())
    def test_pyoptimizer_loadsave(self):
        """Hyperparameters written by save() are restored by load()."""
        t_loaded = TestAdam(alpha = 0, beta1 = 0, beta2 = 0, eps = 0)
        self.assertEqual(t_loaded.alpha_, 0)
        self.assertEqual(t_loaded.beta1_, 0)
        self.assertEqual(t_loaded.beta2_, 0)
        self.assertEqual(t_loaded.eps_, 0)
        with tempfile.NamedTemporaryFile() as fp:
            self.t.save(fp.name)
            t_loaded.load(fp.name)
        self.assertAlmostEqual(t_loaded.alpha_, 0.001)
        self.assertAlmostEqual(t_loaded.beta1_, 0.9)
        self.assertAlmostEqual(t_loaded.beta2_, 0.999)
        self.assertAlmostEqual(t_loaded.eps_, 1e-8, places=10)
    def test_pyoptimizer_propagate_exception(self):
        """Python exceptions raised inside overrides surface to the caller
        with their message intact."""
        dev = D.Naive()
        Device.set_default(dev)
        optimizer = ExceptionOptimizer()
        p = Parameter()
        with self.assertRaises(TestException) as ctx:
            optimizer.add(p)
        self.assertEqual(str(ctx.exception), "configure_parameter")
        with self.assertRaises(TestException) as ctx:
            optimizer.update()
        self.assertEqual(str(ctx.exception), "update_parameter")
        with self.assertRaises(TestException) as ctx:
            Optimizer.get_configs(optimizer)
        self.assertEqual(str(ctx.exception), "get_configs")
        with self.assertRaises(TestException) as ctx:
            Optimizer.set_configs(optimizer, {'Optimizer.epoch': 1},
                                  {'Optimizer.clip_threshold': 0.0,
                                   'Optimizer.lr_scale': 1.0,
                                   'Optimizer.l2_strength': 0.0})
        self.assertEqual(str(ctx.exception), "set_configs")
    def test_pyoptimizer_not_implemented(self):
        """Missing overrides raise NotImplementedError from the base class."""
        dev = D.Naive()
        Device.set_default(dev)
        optimizer = IncompleteOptimizer()
        p = Parameter()
        with self.assertRaises(NotImplementedError):
            optimizer.add(p)
        with self.assertRaises(NotImplementedError):
            optimizer.update()
        with self.assertRaises(NotImplementedError):
            Optimizer.get_configs(optimizer)
        with self.assertRaises(NotImplementedError):
            Optimizer.set_configs(optimizer, {'Optimizer.epoch': 1},
                                  {'Optimizer.clip_threshold': 0.0,
                                   'Optimizer.lr_scale': 1.0,
                                   'Optimizer.l2_strength': 0.0})
| 7,408 | 393 | 380 |
6e300bfbb9e8b185db58281a283ea05bbc9c1a0b | 630 | py | Python | train.py | bckho/gym | 54adba8c164814caa11a637b62bcc4d3c7ca4559 | [
"Python-2.0",
"OLDAP-2.7"
] | null | null | null | train.py | bckho/gym | 54adba8c164814caa11a637b62bcc4d3c7ca4559 | [
"Python-2.0",
"OLDAP-2.7"
] | null | null | null | train.py | bckho/gym | 54adba8c164814caa11a637b62bcc4d3c7ca4559 | [
"Python-2.0",
"OLDAP-2.7"
] | null | null | null | import gym
from gym.envs.box2d import CarRacing
from stable_baselines.common.policies import CnnPolicy
from stable_baselines.common.vec_env import DummyVecEnv
from stable_baselines import PPO2
if __name__ == '__main__':
env = lambda : CarRacing(
grayscale=1,
show_info_panel=0,
discretize_actions="hard",
frames_per_state=4,
num_lanes=1,
num_tracks=1,
)
env = DummyVecEnv([env])
model = PPO2('MlpPolicy', env, verbose=1, tensorboard_log='tensor_logs/ppo')
model.learn(total_timesteps=200000)
model.save('learned_models/car_racing_weights_200k')
| 27.391304 | 80 | 0.703175 | import gym
from gym.envs.box2d import CarRacing
from stable_baselines.common.policies import CnnPolicy
from stable_baselines.common.vec_env import DummyVecEnv
from stable_baselines import PPO2
if __name__ == '__main__':
    # Factory, not an instance: DummyVecEnv takes a list of callables and
    # constructs each environment itself.
    env = lambda : CarRacing(
        grayscale=1,
        show_info_panel=0,
        discretize_actions="hard",
        frames_per_state=4,
        num_lanes=1,
        num_tracks=1,
    )
    env = DummyVecEnv([env])
    # NOTE(review): CnnPolicy is imported at the top of the file, but the
    # string 'MlpPolicy' is passed here -- confirm which policy is intended.
    model = PPO2('MlpPolicy', env, verbose=1, tensorboard_log='tensor_logs/ppo')
    # Train for 200k timesteps, then persist the learned weights.
    model.learn(total_timesteps=200000)
    model.save('learned_models/car_racing_weights_200k')
| 0 | 0 | 0 |
d9c5557a1963aee8082460a979d07dda2b360bc4 | 491 | py | Python | dcms/content/permissions.py | yifei-fu/dcms | 568b727a58dd080f0dafc028d7723f865d9b7303 | [
"MIT"
] | 1 | 2021-04-03T20:07:11.000Z | 2021-04-03T20:07:11.000Z | dcms/content/permissions.py | yifei-fu/dcms | 568b727a58dd080f0dafc028d7723f865d9b7303 | [
"MIT"
] | null | null | null | dcms/content/permissions.py | yifei-fu/dcms | 568b727a58dd080f0dafc028d7723f865d9b7303 | [
"MIT"
] | null | null | null | from rest_framework import permissions
from rest_framework.permissions import BasePermission
| 35.071429 | 100 | 0.708758 | from rest_framework import permissions
from rest_framework.permissions import BasePermission
class IsAuthorOrAdminOtherwiseReadOnly(BasePermission):
    """Allow safe (read-only) methods for everyone; writes only for staff
    users or the object's author."""

    def has_object_permission(self, request, view, instance):
        # Reads are always permitted.
        if request.method in permissions.SAFE_METHODS:
            return True
        # Writes require a user on the request.
        if not request.user:
            return False
        if request.user.is_staff:
            return True
        return instance.author.id == request.user.id
| 313 | 34 | 50 |
81eea449c3dbc24f53d2564aa6a669ee95c4015d | 2,209 | py | Python | athena/transform/feats/write_wav.py | godjealous/athena | 5b7bf48ae7477196f2773108f19cf6ae5605f6bd | [
"Apache-2.0"
] | 119 | 2019-12-20T05:26:23.000Z | 2022-03-22T06:10:45.000Z | athena/transform/feats/write_wav.py | leixiaoning/athena-2 | 826c3bda241afd388e156bbefcd6ca2e8d88afc9 | [
"Apache-2.0"
] | 14 | 2019-12-20T07:10:28.000Z | 2022-02-20T01:14:35.000Z | athena/transform/feats/write_wav.py | leixiaoning/athena-2 | 826c3bda241afd388e156bbefcd6ca2e8d88afc9 | [
"Apache-2.0"
] | 44 | 2019-12-20T05:27:20.000Z | 2022-03-14T10:04:16.000Z | # Copyright (C) 2017 Beijing Didi Infinity Technology and Development Co.,Ltd.
# All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""The model write audio sample to wav file."""
import tensorflow as tf
from athena.utils.hparam import HParams
from athena.transform.feats.base_frontend import BaseFrontend
class WriteWav(BaseFrontend):
"""
Encode audio data (input) using sample rate (input),
return a write wav opration.
"""
@classmethod
def params(cls, config=None):
"""
Set params.
:param config: contains one optional parameters:sample_rate(int, default=16000).
:return: An object of class HParams, which is a set of hyperparameters as
name-value pairs.
"""
sample_rate = 16000
hparams = HParams(cls=cls)
hparams.add_hparam('sample_rate', sample_rate)
if config is not None:
hparams.override_from_dict(config)
return hparams
def call(self, filename, audio_data, sample_rate):
"""
Write wav using audio_data[tensor].
:param filename: filepath of wav.
:param audio_data: a tensor containing data of a wav.
:param sample_rate: the samplerate of the signal we working with.
:return: write wav opration.
"""
filename = tf.constant(filename)
with tf.name_scope('writewav'):
audio_data = tf.cast(audio_data, dtype=tf.float32)
contents = tf.audio.encode_wav(
tf.expand_dims(audio_data, 1), tf.cast(sample_rate, dtype=tf.int32))
w_op = tf.io.write_file(filename, contents)
return w_op
| 32.485294 | 87 | 0.683115 | # Copyright (C) 2017 Beijing Didi Infinity Technology and Development Co.,Ltd.
# All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""The model write audio sample to wav file."""
import tensorflow as tf
from athena.utils.hparam import HParams
from athena.transform.feats.base_frontend import BaseFrontend
class WriteWav(BaseFrontend):
    """
    Build a TensorFlow op that encodes audio samples (with a given sample
    rate) as a WAV file and writes it to disk.
    """
    def __init__(self, config: dict):
        super().__init__(config)
    @classmethod
    def params(cls, config=None):
        """
        Set params.
        :param config: contains one optional parameter: sample_rate (int, default=16000).
        :return: An object of class HParams, which is a set of hyperparameters as
                name-value pairs.
        """
        sample_rate = 16000
        hparams = HParams(cls=cls)
        hparams.add_hparam('sample_rate', sample_rate)
        if config is not None:
            # Values supplied by the caller override the defaults above.
            hparams.override_from_dict(config)
        return hparams
    def call(self, filename, audio_data, sample_rate):
        """
        Write wav using audio_data[tensor].
        :param filename: filepath of the wav file to create.
        :param audio_data: a 1-D tensor containing the samples of one wav
            (cast to float32 and expanded to a single channel below).
        :param sample_rate: the sample rate of the signal we are working with.
        :return: a TensorFlow write-file op; executing it performs the write.
        """
        filename = tf.constant(filename)
        with tf.name_scope('writewav'):
            audio_data = tf.cast(audio_data, dtype=tf.float32)
            # encode_wav expects (samples, channels); add the channel axis.
            contents = tf.audio.encode_wav(
                tf.expand_dims(audio_data, 1), tf.cast(sample_rate, dtype=tf.int32))
            w_op = tf.io.write_file(filename, contents)
            return w_op
ce2999e6ce3e5a354aab62b9e3dae21d351f884d | 2,400 | py | Python | zvm/zfilesystem.py | grayarea11235/zvm2 | 50afb650b730ab36918d3a4ac1b650d2d48e56bd | [
"BSD-3-Clause"
] | 27 | 2016-01-02T09:00:54.000Z | 2022-02-05T20:28:28.000Z | zvm/zfilesystem.py | grayarea11235/zvm2 | 50afb650b730ab36918d3a4ac1b650d2d48e56bd | [
"BSD-3-Clause"
] | 3 | 2019-08-26T13:18:59.000Z | 2021-04-11T15:58:07.000Z | zvm/zfilesystem.py | grayarea11235/zvm2 | 50afb650b730ab36918d3a4ac1b650d2d48e56bd | [
"BSD-3-Clause"
] | 11 | 2016-06-19T08:59:40.000Z | 2021-04-11T10:54:41.000Z | #
# A template class representing the interactions that the end-user has
# with the filesystem in a z-machine.
#
# Third-party programs are expected to subclass ZFilesystem and
# override all the methods, then pass an instance of their class to be
# driven by the main z-machine engine.
#
# For the license of this file, please consult the LICENSE file in the
# root directory of this distribution.
#
class ZFilesystem(object):
    """Abstract interface for the player-facing filesystem interactions.

    Third-party interpreters subclass this and override every method; each
    base implementation simply raises NotImplementedError.
    """
    def save_game(self, data, suggested_filename=None):
        """Ask the player for a filename (optionally seeded with
        *suggested_filename*) and write *data* out as a saved-game file.

        Returns True on success and False otherwise.  Disc problems such
        as 'disc corrupt' or 'disc full' must be reported to the player by
        this method and also yield False, as does the player cancelling
        the prompt.
        """
        raise NotImplementedError()
    def restore_game(self):
        """Ask the player for a filename and return that file's contents,
        which the interpreter will presumably use to restore a saved game.

        Returns None on any failure: disc problems (reported to the player
        by this method), a missing file, or the player cancelling the
        prompt.
        """
        raise NotImplementedError()
    def open_transcript_file_for_writing(self):
        """Ask the player where to store either a full game transcript or
        just a list of their commands, and return a standard writable
        Python file object.

        Returns None on error or if the player cancels the prompt.
        """
        raise NotImplementedError()
    def open_transcript_file_for_reading(self):
        """Ask the player for a file of recorded commands and return a
        standard readable Python file object suitable for driving the
        interpreter.

        Returns None on error or if the player cancels the prompt.
        """
        raise NotImplementedError()
| 36.363636 | 70 | 0.735417 | #
# A template class representing the interactions that the end-user has
# with the filesystem in a z-machine.
#
# Third-party programs are expected to subclass ZFilesystem and
# override all the methods, then pass an instance of their class to be
# driven by the main z-machine engine.
#
# For the license of this file, please consult the LICENSE file in the
# root directory of this distribution.
#
class ZFilesystem(object):
"""Encapsulates the interactions that the end-user has with the
filesystem."""
def save_game(self, data, suggested_filename=None):
"""Prompt for a filename (possibly using suggested_filename), and
attempt to write DATA as a saved-game file. Return True on
success, False on failure.
Note that file-handling errors such as 'disc corrupt' and 'disc
full' should be reported directly to the player by the method in
question method, and they should also cause this function to
return False. If the user clicks 'cancel' or its equivalent,
this function should return False."""
raise NotImplementedError()
def restore_game(self):
"""Prompt for a filename, and return file's contents. (Presumably
the interpreter will attempt to use those contents to restore a
saved game.) Returns None on failure.
Note that file-handling errors such as 'disc corrupt' and 'disc
full' should be reported directly to the player by the method in
question method, and they should also cause this function to
return None. The error 'file not found' should cause this function
to return None. If the user clicks 'cancel' or its equivalent,
this function should return None."""
raise NotImplementedError()
def open_transcript_file_for_writing(self):
"""Prompt for a filename in which to save either a full game
transcript or just a list of the user's commands. Return standard
python file object that can be written to.
If an error occurs, or if the user clicks 'cancel' or its
equivalent, return None."""
raise NotImplementedError()
def open_transcript_file_for_reading(self):
"""Prompt for a filename contain user commands, which can be used
to drive the interpreter. Return standard python file object that
can be read from.
If an error occurs, or if the user clicks 'cancel' or its
equivalent, return None."""
raise NotImplementedError()
| 0 | 0 | 0 |
54e319f380ed51382d970fc04e3a476fb0fb80be | 4,886 | py | Python | app.py | adrisj7/scouter-app | aaaf3d7cb9ac7bcd9dae0c2f9766e0dc5197012b | [
"MIT"
] | null | null | null | app.py | adrisj7/scouter-app | aaaf3d7cb9ac7bcd9dae0c2f9766e0dc5197012b | [
"MIT"
] | null | null | null | app.py | adrisj7/scouter-app | aaaf3d7cb9ac7bcd9dae0c2f9766e0dc5197012b | [
"MIT"
] | null | null | null | #!/usr/bin/python
import sys
import os.path
from functools import wraps
from flask import Flask, g, render_template, redirect, request
from flask_login import login_required, current_user, login_user, logout_user
from flask_bcrypt import Bcrypt
from util.database import *
# 5000 seems a bit... basic. Feel free to change later to something more
# interesting.
SITE_PORT = 5000
# If testing on localhost, set to True
# Otherwise if running on server, set to False
SERVER_LOCAL = True
# Init app
app = Flask(__name__)
# Setup bcrypt
bcrypt = Bcrypt(app)
# Initialize SQL database
app.config['SQLALCHEMY_DATABASE_URI'] = "sqlite:///database.db"
db.init_app(app)
# Setup login manage
login_manager.init_app(app)
##### Other constants
RESPONSE_SUCCESS = "success"
# Run before first request: We cannot create all inline
# (unless we do db = SQLAlchemy(app) )
@app.before_first_request
@app.route("/")
@app.route("/home")
@app.route("/scout")
@login_required
# Checks for login
@app.route("/login", methods=["POST"])
@app.route("/logout")
@login_required
# Registers user
@app.route("/register", methods=["POST"])
# Login page
@app.route("/login", methods=["GET"])
# Register page
@app.route("/register", methods=["GET"])
# Context Processor, automatically passing data to EVERY template
# Makes sure we don't have to manually pass data every time we render
@app.context_processor
# Login wrapper. If no user exists, redirect to '/login'
# Gets and sets the secret key from a file
if __name__ == "__main__":
if SERVER_LOCAL:
host = "127.0.0.1"
else:
host = "0.0.0.0"
set_secret_key("secret/secretkey")
app.run(host = host,
port = SITE_PORT,
debug = True
)
| 26.846154 | 77 | 0.647155 | #!/usr/bin/python
import sys
import os.path
from functools import wraps
from flask import Flask, g, render_template, redirect, request
from flask_login import login_required, current_user, login_user, logout_user
from flask_bcrypt import Bcrypt
from util.database import *
# 5000 seems a bit... basic. Feel free to change later to something more
# interesting.
SITE_PORT = 5000
# If testing on localhost, set to True
# Otherwise if running on server, set to False
SERVER_LOCAL = True
# Init app
app = Flask(__name__)
# Setup bcrypt
bcrypt = Bcrypt(app)
# Initialize SQL database
app.config['SQLALCHEMY_DATABASE_URI'] = "sqlite:///database.db"
db.init_app(app)
# Setup login manage
login_manager.init_app(app)
##### Other constants
RESPONSE_SUCCESS = "success"
# Run before first request: We cannot create all inline
# (unless we do db = SQLAlchemy(app) )
@app.before_first_request
def initialize_database():
    """Create all SQLAlchemy tables before the first request (no-op if they exist)."""
    print "Initialized"
    db.create_all()
@app.route("/")
def route_default():
    """Redirect the bare domain root to the home page."""
    return redirect("/home")
@app.route("/home")
def route_home():
    """Render the public home page."""
    return render_template("home.html")
@app.route("/scout")
@login_required
def route_scout():
    """Render the scouting page; only reachable by a logged-in user."""
    return render_template("scout.html")
# Checks for login
@app.route("/login", methods=["POST"])
def route_login_post():
    """Validate posted credentials and log the user in.

    Returns "<field>,<message>" on failure (the front end maps the error
    to the named form field), or RESPONSE_SUCCESS on success.
    """
    print "Users"
    print User.query.all()
    # Login user.
    validation = request.form
    user = load_user(validation["email"])
    # If no email or invalid password, return valid error
    print "logging in"
    if not user:
        # NOTE(review): distinguishing "email not found" from "bad password"
        # lets an attacker enumerate accounts -- confirm this is acceptable.
        return "email,Email not found"
    if not bcrypt.check_password_hash(user.password, validation["password"]):
        return "password,Invalid password"
    # Otherwise user login was successful
    user.authenticated = True
    db.session.add(user)
    db.session.commit()
    login_user(user, remember=True)
    print "SUCCESS"
    # Login success
    return RESPONSE_SUCCESS
@app.route("/logout")
@login_required
def route_logout():
    """Mark the current user as logged out, end the session, go home."""
    active_user = current_user
    active_user.authenticated = False
    db.session.add(active_user)
    db.session.commit()
    logout_user()
    return redirect("/home")
# Registers user
@app.route("/register", methods=["POST"])
def route_register():
    """Create a new User from the posted registration form.

    Returns "<field>,<message>" describing the first failed validation,
    or RESPONSE_SUCCESS once the user row has been committed.
    """
    form = request.form
    if ( form["email"] == "" ):
        return "email,Email required"
    if ( load_user( form["email"] ) ):
        return "email,Email already exists"
    # NOTE(review): substring checks are a very loose e-mail test and reject
    # every TLD other than ".com" -- confirm this is intentional.
    if ( form["email"].find("@") == -1 or form["email"].find(".com") == -1):
        return "email,Not a valid email"
    if ( form["password"] == "" ):
        return "password,Password is required"
    if ( form["name"] == "" ):
        return "name,Name is required"
    if ( form["team"] == "" ):
        return "team,Team is required"
    # NOTE(review): int() raises ValueError on non-numeric input; no guard here.
    if ( int(form["team"]) <= 0 ):
        return "team,Invalid team number"
    # If none of the checks returned an error, we're good.
    print "ok1"
    user = User(email=form["email"],
                password=bcrypt.generate_password_hash(form["password"]),
                name=form["name"],
                teamNumber=int(form["team"])
                )
    print "ok4"
    db.session.add(user)
    db.session.commit()
    print "ok5 we done"
    return RESPONSE_SUCCESS
# Login page
@app.route("/login", methods=["GET"])
def route_login_page():
    """Render the login form."""
    return render_template("login.html")
# Register page
@app.route("/register", methods=["GET"])
def route_register_page():
    """Render the account-registration page."""
    # Dropped the stray trailing semicolon -- not idiomatic Python.
    return render_template("register.html")
# Context Processor, automatically passing data to EVERY template
# Makes sure we don't have to manually pass data every time we render
@app.context_processor
def inject_data_for_all_templates():
    """Inject shared variables into every template render (currently none)."""
    return dict(
        #user=current_user
    )
# Login wrapper. If no user exists, redirect to '/login'
def login_required(f):
    """Decorator: redirect to /login when no user is attached to ``g``.

    NOTE(review): this shadows the ``login_required`` imported from
    flask_login at the top of the file, but it is defined *after* the route
    functions were decorated, so those routes still use the flask_login
    version.  It also reads ``g.user``, which nothing in this file ever
    sets -- confirm whether this helper is dead code.
    """
    @wraps(f)
    def decorated_function(*args,**kwargs):
        if g.user is None:
            return redirect('/login')
        return f(*args, **kwargs)
    return decorated_function
# Gets and sets the secret key from a file
def set_secret_key(fname):
    """Load Flask's SECRET_KEY from *fname*; print setup help and exit if missing."""
    try:
        # NOTE(review): the file handle is never closed explicitly.
        app.config["SECRET_KEY"] = unicode(open(fname, "rb").read())
    except IOError:
        print "Error: No secret key. Create it with:"
        if not os.path.isdir(os.path.dirname(fname)):
            print "mkdir", os.path.dirname(fname)
        print 'head -c 24 /dev/urandom >', fname
        print "And fill it in with the secret key"
        print "If flask user_login is giving you issues with this, \
just generate your own random key by mashing the keyboard \
until you have 32 random characters"
        sys.exit(1)
if __name__ == "__main__":
    # Bind only to localhost while developing; expose on all interfaces otherwise.
    if SERVER_LOCAL:
        host = "127.0.0.1"
    else:
        host = "0.0.0.0"
    set_secret_key("secret/secretkey")
    # NOTE(review): debug=True must not be left enabled on a public server.
    app.run(host = host,
            port = SITE_PORT,
            debug = True
            )
| 2,851 | 0 | 265 |
98858423b4f12c4129ca397e47bff9c4c29c4c76 | 2,608 | py | Python | mudparser/acl_entry.py | elmiomar/mudparser | 7bae239fe2e7e82746ef88150f62673793872973 | [
"MIT"
] | null | null | null | mudparser/acl_entry.py | elmiomar/mudparser | 7bae239fe2e7e82746ef88150f62673793872973 | [
"MIT"
] | null | null | null | mudparser/acl_entry.py | elmiomar/mudparser | 7bae239fe2e7e82746ef88150f62673793872973 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
from mudparser.matches import (IPv4Match, IPv6Match, TCPMatch, UDPMatch, EthMatch, MUDMatch)
__all__ = ['AccessListEntry']
| 37.257143 | 92 | 0.522239 | #!/usr/bin/env python
from mudparser.matches import (IPv4Match, IPv6Match, TCPMatch, UDPMatch, EthMatch, MUDMatch)
__all__ = ['AccessListEntry']
class AccessListEntry:
    """A single access-control entry (ACE) parsed from MUD ACL JSON.

    Exposes the entry ``name`` plus two dicts: ``matches`` (keyed by the
    protocol-layer key, e.g. 'tcp', holding *Match wrapper objects) and
    ``actions`` (keyed by action type, e.g. 'forwarding').
    """
    def __init__(self, json_obj):
        self._json_obj = json_obj
        self.name = ''
        self.matches = {}  # layer key -> Match wrapper instance
        self.actions = {}  # action type -> action name (e.g. 'forwarding' -> 'accept')
        self.__parse()
    def __parse(self):
        """Populate name/matches/actions from the raw ACE JSON object."""
        obj = self._json_obj
        self.name = obj['name']
        matches_obj = obj['matches']
        matches_keys = matches_obj.keys()
        for matches_key in matches_keys:
            self.__add_match(match_on=matches_key, json_obj=matches_obj[matches_key])
        actions_obj = obj['actions']
        actions_keys = actions_obj.keys()
        for actions_key in actions_keys:
            self.__add_action(action_type=actions_key, action_name=actions_obj[actions_key])
    def __add_action(self, action_type, action_name):
        """Record one action, e.g. 'forwarding' -> 'accept'."""
        self.actions[action_type] = action_name
    def __add_match(self, match_on, json_obj):
        """Wrap one match clause in its typed Match class, keyed by layer.

        Unrecognised keys are silently ignored.
        """
        if match_on == 'ipv4':
            self.matches['ipv4'] = IPv4Match(json_obj)
        elif match_on == 'ipv6':
            self.matches['ipv6'] = IPv6Match(json_obj)
        elif match_on == 'udp':
            self.matches['udp'] = UDPMatch(json_obj)
        elif match_on == 'tcp':
            self.matches['tcp'] = TCPMatch(json_obj)
        elif match_on == 'eth':
            self.matches['eth'] = EthMatch(json_obj)
        elif match_on == 'ietf-mud:mud':
            self.matches['mud'] = MUDMatch(json_obj)
    def print_rules(self, direction):
        """Print one firewall-style rule line for this entry.

        *direction* is 'to' or 'from' (device-relative).
        NOTE(review): 'tcp from'/'udp from' have no space before the source
        port, so output reads e.g. 'tcp from80 to any' -- confirm intended.
        """
        rule = "[" + direction + "] "  # "[" + self.name + "] "
        for k, a in self.actions.items():
            if k == 'logging':
                pass
            elif k == 'forwarding':
                rule += a + " "
        for k, match in self.matches.items():
            if k == 'tcp':
                if direction == 'to':
                    rule += "tcp from" + str(match.src_port.port) + " to any"
                elif direction == 'from':
                    rule += "tcp from any to " + str(match.dst_port.port)
            elif k == 'udp':
                if direction == 'to':
                    rule += "udp from" + str(match.src_port.port) + " to any"
                elif direction == 'from':
                    rule += "udp from any to " + str(match.dst_port.port)
            elif k == 'eth':
                rule += "ethertype " + match.ethertype
        print(rule)
# TODO: is __str__() better in this case?
| 2,300 | 1 | 157 |
4d9dd9a332f0dd6dac4ae2140de6807bf424df60 | 8,529 | py | Python | tests/test_stow.py | harkabeeparolus/dploy | 0545a22dc80c3eb8103c61306be2fa726fb6a0e8 | [
"MIT"
] | null | null | null | tests/test_stow.py | harkabeeparolus/dploy | 0545a22dc80c3eb8103c61306be2fa726fb6a0e8 | [
"MIT"
] | null | null | null | tests/test_stow.py | harkabeeparolus/dploy | 0545a22dc80c3eb8103c61306be2fa726fb6a0e8 | [
"MIT"
] | null | null | null | """
Tests for the stow stub command
"""
# pylint: disable=missing-docstring
# disable lint errors for function names longer that 30 characters
# pylint: disable=invalid-name
import os
import pytest
import dploy
from dploy import error
from tests import utils
SUBCMD = "stow"
| 35.098765 | 87 | 0.709579 | """
Tests for the stow stub command
"""
# pylint: disable=missing-docstring
# disable lint errors for function names longer that 30 characters
# pylint: disable=invalid-name
import os
import pytest
import dploy
from dploy import error
from tests import utils
SUBCMD = "stow"
def test_stow_with_simple_senario(source_only_files, dest):
    """Stowing a flat source dir should symlink its file into dest."""
    dploy.stow([source_only_files], dest)
    link_path = os.path.join(dest, "aaa")
    expected_target = os.path.join("..", "source_only_files", "aaa")
    assert os.readlink(link_path) == expected_target
def test_stow_with_basic_senario(source_a, dest):
dploy.stow([source_a], dest)
assert os.readlink(os.path.join(dest, "aaa")) == os.path.join(
"..", "source_a", "aaa"
)
def test_stow_with_the_same_tree_twice(source_a, dest):
dploy.stow([source_a], dest)
dploy.stow([source_a], dest)
assert os.readlink(os.path.join(dest, "aaa")) == os.path.join(
"..", "source_a", "aaa"
)
def test_stow_with_existing_file_conflicts(source_a, source_c, dest):
    """A second stow over an already-linked file must raise a conflict."""
    dploy.stow([source_a], dest)
    offending_source = os.path.join(source_c, "aaa", "aaa")
    occupied_dest = os.path.join(dest, "aaa", "aaa")
    expected_error = error.ConflictsWithExistingFile(
        subcmd=SUBCMD, source=offending_source, dest=occupied_dest
    )
    with pytest.raises(error.ConflictsWithExistingFile, match=str(expected_error)):
        dploy.stow([source_c], dest)
def test_stow_with_existing_broken_link(source_a, dest):
conflicting_link = os.path.join(dest, "aaa")
os.symlink("non_existant_source", conflicting_link)
source_file = os.path.join(source_a, "aaa")
message = str(
error.ConflictsWithExistingLink(
subcmd=SUBCMD, source=source_file, dest=conflicting_link
)
)
with pytest.raises(error.ConflictsWithExistingLink):
dploy.stow([source_a], dest)
def test_stow_with_source_conflicts(source_a, source_c, dest):
conflicting_source_files = [
os.path.join(source_a, "aaa", "aaa"),
os.path.join(source_c, "aaa", "aaa"),
]
message = str(
error.ConflictsWithAnotherSource(subcmd=SUBCMD, files=conflicting_source_files)
)
with pytest.raises(error.ConflictsWithAnotherSource, match=message):
dploy.stow([source_a, source_c], dest)
def test_stow_with_non_existant_source(dest):
non_existant_source = "source"
message = str(error.NoSuchDirectory(subcmd=SUBCMD, file=non_existant_source))
with pytest.raises(error.NoSuchDirectory, match=message):
dploy.stow([non_existant_source], dest)
def test_stow_with_duplicate_source(source_a, dest):
message = str(error.DuplicateSource(subcmd=SUBCMD, file=source_a))
with pytest.raises(error.DuplicateSource, match=message):
dploy.stow([source_a, source_a], dest)
def test_stow_with_non_existant_dest(source_a):
non_existant_dest = "dest"
message = str(
error.NoSuchDirectoryToSubcmdInto(subcmd=SUBCMD, file=non_existant_dest)
)
with pytest.raises(error.NoSuchDirectoryToSubcmdInto, match=message):
dploy.stow([source_a], "dest")
def test_stow_with_file_as_source(file_a, dest):
message = str(error.NoSuchDirectory(subcmd=SUBCMD, file=file_a))
with pytest.raises(error.NoSuchDirectory, match=message):
dploy.stow([file_a], dest)
def test_stow_with_file_as_dest(source_a, file_a):
message = str(error.NoSuchDirectoryToSubcmdInto(subcmd=SUBCMD, file=file_a))
with pytest.raises(error.NoSuchDirectoryToSubcmdInto, match=message):
dploy.stow([source_a], file_a)
def test_stow_with_file_as_dest_and_source(file_a, file_b):
message = str(error.NoSuchDirectoryToSubcmdInto(subcmd=SUBCMD, file=file_b))
with pytest.raises(error.NoSuchDirectoryToSubcmdInto, match=message):
dploy.stow([file_a], file_b)
def test_stow_with_same_directory_used_as_source_and_dest(source_a):
message = str(error.SourceIsSameAsDest(subcmd=SUBCMD, file=source_a))
with pytest.raises(error.SourceIsSameAsDest, match=message):
dploy.stow([source_a], source_a)
def test_stow_with_same_simple_directory_used_as_source_and_dest(source_only_files):
message = str(error.SourceIsSameAsDest(subcmd=SUBCMD, file=source_only_files))
with pytest.raises(error.SourceIsSameAsDest, match=message):
dploy.stow([source_only_files], source_only_files)
def test_stow_with_read_only_dest(source_a, dest):
utils.remove_write_permission(dest)
message = str(error.InsufficientPermissionsToSubcmdTo(subcmd=SUBCMD, file=dest))
with pytest.raises(error.InsufficientPermissionsToSubcmdTo, match=message):
dploy.stow([source_a], dest)
def test_stow_with_write_only_source(source_a, source_c, dest):
utils.remove_read_permission(source_a)
message = str(
error.InsufficientPermissionsToSubcmdFrom(subcmd=SUBCMD, file=source_a)
)
with pytest.raises(error.InsufficientPermissionsToSubcmdFrom, match=message):
dploy.stow([source_a, source_c], dest)
utils.add_read_permission(source_a) # cleanup
def test_stow_with_source_with_no_executue_permissions(source_a, source_c, dest):
utils.remove_execute_permission(source_a)
message = str(
error.InsufficientPermissionsToSubcmdFrom(subcmd=SUBCMD, file=source_a)
)
with pytest.raises(error.InsufficientPermissionsToSubcmdFrom, match=message):
dploy.stow([source_a, source_c], dest)
def test_stow_with_source_dir_with_no_executue_permissions(source_a, source_c, dest):
source_dir = os.path.join(source_a, "aaa")
utils.remove_execute_permission(source_dir)
message = str(
error.InsufficientPermissionsToSubcmdFrom(subcmd=SUBCMD, file=source_dir)
)
with pytest.raises(error.InsufficientPermissionsToSubcmdFrom, match=message):
dploy.stow([source_a, source_c], dest)
def test_stow_with_write_only_source_file(source_a, dest):
source_file = os.path.join(source_a, "aaa")
utils.remove_read_permission(source_file)
dploy.stow([source_a], dest)
def verify_unfolded_source_a_and_source_b(dest):
    """Assert that dest/aaa is a real dir whose links point into both sources."""
    common_dest_dir = os.path.join(dest, "aaa")
    layout = {
        "source_a": ("aaa", "bbb", "ccc"),
        "source_b": ("ddd", "eee", "fff"),
    }
    assert os.path.isdir(os.path.join(common_dest_dir))
    for source_name, entries in layout.items():
        for entry in entries:
            link_path = os.path.join(common_dest_dir, entry)
            target = os.path.join("..", "..", source_name, "aaa", entry)
            assert os.readlink(link_path) == target
def test_stow_unfolding_with_two_invocations(source_a, source_b, dest):
dploy.stow([source_a], dest)
assert os.readlink(os.path.join(dest, "aaa")) == os.path.join(
"..", "source_a", "aaa"
)
dploy.stow([source_b], dest)
verify_unfolded_source_a_and_source_b(dest)
def test_stow_unfolding_with_mutliple_sources(source_a, source_b, dest):
dploy.stow([source_a, source_b], dest)
verify_unfolded_source_a_and_source_b(dest)
def test_stow_unfolding_with_first_sources_execute_permission_removed(
source_a, source_b, dest
):
dploy.stow([source_a], dest)
utils.remove_execute_permission(source_a)
dest_dir = os.path.join(dest, "aaa")
message = str(error.PermissionDenied(subcmd=SUBCMD, file=dest_dir))
with pytest.raises(error.PermissionDenied, match=message):
dploy.stow([source_b], dest)
def test_stow_unfolding_with_write_only_source_file(source_a, source_b, dest):
source_file = os.path.join(source_a, "aaa")
utils.remove_read_permission(source_file)
message = str(
error.InsufficientPermissionsToSubcmdFrom(subcmd=SUBCMD, file=source_file)
)
with pytest.raises(error.InsufficientPermissionsToSubcmdFrom):
dploy.stow([source_a, source_b], dest)
| 7,676 | 0 | 552 |
0ed9476799122925feec3ca93850df67c13e570a | 360 | py | Python | bindings/pydeck-carto/tests/test_layer.py | ehtick/deck.gl | ac59a28a6ff03000072f11c9a5520eb87f11944c | [
"MIT"
] | null | null | null | bindings/pydeck-carto/tests/test_layer.py | ehtick/deck.gl | ac59a28a6ff03000072f11c9a5520eb87f11944c | [
"MIT"
] | null | null | null | bindings/pydeck-carto/tests/test_layer.py | ehtick/deck.gl | ac59a28a6ff03000072f11c9a5520eb87f11944c | [
"MIT"
] | null | null | null | import pydeck as pdk
from pydeck_carto import register_carto_layer
| 25.714286 | 63 | 0.652778 | import pydeck as pdk
from pydeck_carto import register_carto_layer
def test_register_carto_layer():
    """Registering the Carto layer should append exactly one library entry."""
    assert pdk.settings.custom_libraries == []
    register_carto_layer()
    expected_entry = {
        "libraryName": "CartoLayerLibrary",
        "resourceUri": "http://127.0.0.1:8888/dist.min.js",
    }
    assert pdk.settings.custom_libraries == [expected_entry]
| 269 | 0 | 23 |
556b56f576d5301f23ff64c2b9ada4f26a4824a3 | 2,487 | py | Python | blendernc/nodes/outputs/BlenderNC_NT_preloader.py | StephanSiemen/blendernc | 590c252dc3d6e1092bd713bbf9111ffe8ca04999 | [
"MIT"
] | 39 | 2020-06-15T15:58:43.000Z | 2022-02-02T01:11:57.000Z | blendernc/nodes/outputs/BlenderNC_NT_preloader.py | netgodz/blendernc | 09ed7c7791da46abb2c5fd3ee83286ef0bf82302 | [
"MIT"
] | 137 | 2020-06-19T15:29:06.000Z | 2022-03-30T11:18:55.000Z | blendernc/nodes/outputs/BlenderNC_NT_preloader.py | netgodz/blendernc | 09ed7c7791da46abb2c5fd3ee83286ef0bf82302 | [
"MIT"
] | 8 | 2020-06-17T09:33:12.000Z | 2022-01-21T00:26:19.000Z | #!/usr/bin/env python3
# Imports
import bpy
class BlenderNC_NT_preloader(bpy.types.Node):
# === Basics ===
# Description string
"""A datacube node"""
# Optional identifier string. If not explicitly defined,
# the python class name is used.
bl_idname = "datacubePreloadNode"
# Label for nice name display
bl_label = "Load datacube"
# Icon identifier
bl_icon = "SOUND"
blb_type = "NETCDF"
# TODO: This node will receive a datacube as
# input and store all the images in disk for easier import and animation.
# === Optional Functions ===
# Initialization function, called when a new node is created.
# This is the most common place to create the sockets for a node,
# as shown below.
# Copy function to initialize a copied node from an existing one.
# Free function to clean up on removal.
# Additional buttons displayed on the node.
# Detail buttons in the sidebar.
# If this function is not defined,
# the draw_buttons function is used instead
# Optional: custom label
# Explicit user label overrides this,
# but here we can define a label dynamically
| 32.298701 | 77 | 0.60394 | #!/usr/bin/env python3
# Imports
import bpy
class BlenderNC_NT_preloader(bpy.types.Node):
    # === Basics ===
    # Description string
    """A datacube node"""
    # Optional identifier string. If not explicitly defined,
    # the python class name is used.
    bl_idname = "datacubePreloadNode"
    # Label for nice name display
    bl_label = "Load datacube"
    # Icon identifier
    bl_icon = "SOUND"
    blb_type = "NETCDF"
    # TODO: This node will receive a datacube as
    # input and store all the images in disk for easier import and animation.
    # === Optional Functions ===
    # Initialization function, called when a new node is created.
    # This is the most common place to create the sockets for a node,
    # as shown below.
    def init(self, context):
        """Node creation hook; no sockets are defined yet (work in progress)."""
        pass
    # Copy function to initialize a copied node from an existing one.
    def copy(self, node):
        """Called when this node is duplicated from *node*."""
        print("Copying from node ", node)
    # Free function to clean up on removal.
    def free(self):
        """Called when the node is removed from the tree."""
        print("Removing node ", self, ", Goodbye!")
    # Additional buttons displayed on the node.
    def draw_buttons(self, context, layout):
        """Draw the node body; currently only a work-in-progress notice."""
        # scene = context.scene
        layout.label(text="INFO: Work in progress", icon="INFO")
        # if scene.blendernc_dict:
        #     layout.prop(self, "file_name")
        # else:
        #     layout.label(text="No datacube loaded")
        # if self.file_name:
        #     layout.prop(self, "var_name")
        #     if self.var_name:
        #         layout.prop(self, "frame_start")
        #         layout.prop(self, "frame_end")
        #         if self.frame_end > self.frame_start:
        #             op = layout.operator("blendernc.preloader",
        #                   icon="FILE_REFRESH",)
        #             op.file_name = self.file_name
        #             op.var_name = self.var_name
        #             op.frame_start = self.frame_start
        #             op.frame_end = self.frame_end
        #         else:
        #             layout.label(text="Cannot preload!")
    # Detail buttons in the sidebar.
    # If this function is not defined,
    # the draw_buttons function is used instead
    def draw_buttons_ext(self, context, layout):
        """Sidebar drawing; intentionally empty for now."""
        pass
    # Optional: custom label
    # Explicit user label overrides this,
    # but here we can define a label dynamically
    def draw_label(self):
        """Label shown in the node header."""
        return "Load datacube"
    def update_value(self, context):
        """Property-update callback: delegate to update()."""
        self.update()
    def update(self):
        """Node-graph update hook; no behaviour yet."""
        pass
| 1,113 | 0 | 210 |
9affda41ae10365c43f3ff4b169d6d2f0e233bc0 | 1,480 | py | Python | examples/echoss.py | RalphWalters/thredo | ea109c693036764dd192527f9b6bba18d3b18042 | [
"MIT"
] | 340 | 2018-07-23T18:21:56.000Z | 2021-12-11T05:50:58.000Z | examples/echoss.py | RalphWalters/thredo | ea109c693036764dd192527f9b6bba18d3b18042 | [
"MIT"
] | 6 | 2018-07-31T11:52:56.000Z | 2019-11-25T19:52:32.000Z | examples/echoss.py | RalphWalters/thredo | ea109c693036764dd192527f9b6bba18d3b18042 | [
"MIT"
] | 25 | 2018-07-27T06:09:05.000Z | 2022-03-13T12:53:23.000Z | # Echo server implemented using socket server and
# a ThredoMixIn class. This class replaces the normal
# socket with one that can be cancelled. Also uses spawn()
# internally to launch threads.
from thredo.socket import *
import thredo
import socketserver
import signal
thredo.run(main)
| 29.6 | 66 | 0.660135 | # Echo server implemented using socket server and
# a ThredoMixIn class. This class replaces the normal
# socket with one that can be cancelled. Also uses spawn()
# internally to launch threads.
from thredo.socket import *
import thredo
import socketserver
import signal
class EchoHandler(socketserver.BaseRequestHandler):
    """Echo every received chunk back to the client until EOF or cancellation."""
    def handle(self):
        print("Connection from", self.client_address)
        try:
            # Empty recv() means the peer closed the connection.
            while chunk := self.request.recv(100000):
                self.request.sendall(chunk)
        except thredo.ThreadCancelled:
            print('Handler Cancelled')
        print('Connection closed')
class EchoStreamHandler(socketserver.StreamRequestHandler):
    """Line-buffered echo: write each received line straight back."""
    def handle(self):
        print("Stream Connection from", self.client_address)
        try:
            self.wfile.writelines(self.rfile)
        except thredo.ThreadCancelled:
            print('Stream Handler Cancelled')
        print('Stream Connection closed')
class ThredoTCPServer(thredo.ThredoMixIn, socketserver.TCPServer):
    """TCP server whose handler threads are cancellable thredo threads."""
    pass
    # NOTE(review): the original indentation was ambiguous -- this may have
    # been intended as a module-level assignment rather than a class
    # attribute; main() also sets allow_reuse_address on the instance.
    allow_reuse_address = True
def main():
    """Run the echo server in a thredo thread until SIGINT, then cancel it."""
    # serv = ThredoTCPServer(('', 25000), EchoHandler)
    serv = ThredoTCPServer(('', 25000), EchoStreamHandler)
    serv.allow_reuse_address = True
    t = thredo.spawn(serv.serve_forever)
    # Block until Ctrl-C, then cancel the serving thread cleanly.
    thredo.SignalEvent(signal.SIGINT).wait()
    print('Cancelling')
    t.cancel()
thredo.run(main)
| 883 | 153 | 144 |
0f7b748e205f47f53854aa928e93bf09651b81f6 | 1,697 | py | Python | backend/tutors/serializers.py | ProgrammingLanguageLeader/TutorsApp | f2d5968b5c29ce75f5f634d6076a6e66efc76801 | [
"MIT"
] | 3 | 2019-02-24T23:30:19.000Z | 2019-03-27T20:06:53.000Z | backend/tutors/serializers.py | ProgrammingLanguageLeader/TutorsApp | f2d5968b5c29ce75f5f634d6076a6e66efc76801 | [
"MIT"
] | 1 | 2019-03-30T08:58:06.000Z | 2019-03-30T08:58:06.000Z | backend/tutors/serializers.py | ProgrammingLanguageLeader/TutorsApp | f2d5968b5c29ce75f5f634d6076a6e66efc76801 | [
"MIT"
] | 1 | 2019-03-01T20:10:19.000Z | 2019-03-01T20:10:19.000Z | from django.utils.translation import ugettext_lazy as _
from rest_framework import serializers
from tutors.models import StudentRequest, TutorStudents
from users.models import User
from users.serializers import UserSerializer
| 29.258621 | 72 | 0.655863 | from django.utils.translation import ugettext_lazy as _
from rest_framework import serializers
from tutors.models import StudentRequest, TutorStudents
from users.models import User
from users.serializers import UserSerializer
class StudentRequestSerializer(serializers.ModelSerializer):
    """Serializer for a student's request to join a tutor.

    The requesting student is taken implicitly from the request context;
    the tutor is supplied by primary key.
    """
    # Hidden: always the authenticated user, never client-supplied.
    student = serializers.HiddenField(
        default=serializers.CurrentUserDefault()
    )
    tutor = serializers.PrimaryKeyRelatedField(
        queryset=User.objects.all()
    )
    class Meta:
        model = StudentRequest
        fields = '__all__'
        read_only_fields = ('creation_time', )
    def run_validators(self, value):
        """Reject a duplicate (student, tutor) request before other validators.

        NOTE(review): overriding run_validators for a uniqueness check is
        unusual -- a UniqueTogetherValidator in Meta would be the idiomatic
        DRF approach; confirm before changing.
        """
        try:
            StudentRequest.objects.get(
                student=value.get('student'),
                tutor=value.get('tutor')
            )
        except StudentRequest.DoesNotExist:
            return super().run_validators(value)
        raise serializers.ValidationError(
            _('request already exists')
        )
    def validate(self, attrs):
        """Cross-field checks: distinct users, student not already enrolled."""
        student = attrs.get('student')
        tutor = attrs.get('tutor')
        if student == tutor:
            raise serializers.ValidationError(
                _('tutor and student must be different users')
            )
        tutor, created = TutorStudents.objects.get_or_create(user=tutor)
        if student in tutor.students.all():
            raise serializers.ValidationError(
                _('student already exists in a list of students')
            )
        return attrs
class ReadStudentRequestSerializer(StudentRequestSerializer):
    """Read-side variant: expand student/tutor into nested user objects."""
    student = UserSerializer()
    tutor = UserSerializer()
class AcceptStudentRequestSerializer(serializers.Serializer):
    """Empty-payload serializer for the accept-request endpoint."""
    pass
| 848 | 548 | 69 |
ffef01f595ce788224a97b8b12c5b56d821de0de | 125 | py | Python | commons/math_utils.py | bertl4398/semaphore-demo-python-pants | bd6e44804f71542bbc89474c527cc61bbc5ba099 | [
"MIT"
] | 4 | 2021-05-21T13:43:08.000Z | 2021-12-29T02:36:34.000Z | commons/math_utils.py | bertl4398/semaphore-demo-python-pants | bd6e44804f71542bbc89474c527cc61bbc5ba099 | [
"MIT"
] | null | null | null | commons/math_utils.py | bertl4398/semaphore-demo-python-pants | bd6e44804f71542bbc89474c527cc61bbc5ba099 | [
"MIT"
] | 4 | 2021-06-25T02:32:49.000Z | 2022-03-11T19:06:40.000Z | """Shared math functions."""
def math_sum(num1, num2):
"""Returns the sum of two parameters."""
return num1 + num2
| 17.857143 | 44 | 0.64 | """Shared math functions."""
def math_sum(num1, num2):
"""Returns the sum of two parameters."""
return num1 + num2
| 0 | 0 | 0 |
2081c4db723b72973994b2f51131c2fbe6dd94e7 | 3,913 | py | Python | training/training/report/pending_list_kvs/pending_list_kvs.py | vhrsramya/Training | 0415109c66fae8a58e48bcad0ae8cd210e8a95bb | [
"MIT"
] | null | null | null | training/training/report/pending_list_kvs/pending_list_kvs.py | vhrsramya/Training | 0415109c66fae8a58e48bcad0ae8cd210e8a95bb | [
"MIT"
] | null | null | null | training/training/report/pending_list_kvs/pending_list_kvs.py | vhrsramya/Training | 0415109c66fae8a58e48bcad0ae8cd210e8a95bb | [
"MIT"
] | 1 | 2020-02-14T12:56:53.000Z | 2020-02-14T12:56:53.000Z | # Copyright (c) 2013, Minda Sai Pvt LTd and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe import _
import math
from calendar import monthrange
from datetime import datetime,timedelta,date
from dateutil.rrule import *
# def get_conditions(filters):
# conditions = ""
# # if filters.get("employee"):conditions += "AND att.employee = '%s'" % filters["employee"]
# if filters.get("from_date"): conditions += "and c.date_of_skill_evaluatation >= %(from_date)s"
# if filters.get("to_date"): conditions += " and c.date_of_skill_evaluatation <= %(to_date)s"
# return conditions, filters
| 34.628319 | 232 | 0.576795 | # Copyright (c) 2013, Minda Sai Pvt LTd and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe import _
import math
from calendar import monthrange
from datetime import datetime,timedelta,date
from dateutil.rrule import *
def execute(filters=None):
    """Frappe report entry point: build (columns, data) for the KVS pending list.

    One row per active employee who joined inside the filter window, showing
    whether a KVS type is assigned and the status of the three observations.
    """
    if not filters:
        filters = {}
    columns = get_columns()
    data = []
    row = []
    employee = employee_details(filters)
    for e in employee:
        row = [e.biometric_id,e.employee_name,e.line,e.shift,e.date_of_joining]
        if frappe.db.exists("KVS Type Assignment",{"employee":e.biometric_id}):
            doc = frappe.get_doc("KVS Type Assignment",{"employee":e.biometric_id})
            row += ["Assigned",doc.knowledge_verification_type]
            if doc.knowledge_verification_type:
                kvs_type = doc.knowledge_verification_type
                emp_id = e.biometric_id
                # training()/production()/quality() return True when the
                # corresponding observation document exists for this employee.
                t = training(emp_id,kvs_type)
                if t == 1:
                    row += ["Completed"]
                else:
                    row += ["Pending"]
                t = production(emp_id,kvs_type)
                if t == 1:
                    row += ["Completed"]
                else:
                    row += ["Pending"]
                t = quality(emp_id,kvs_type)
                if t == 1:
                    row += ["Completed"]
                else:
                    row += ["Pending"]
            # NOTE(review): when knowledge_verification_type is empty the three
            # observation columns are never appended, leaving a short row --
            # confirm whether that is intended.
        else:
            row += ["Not Assigned","-","-","-"]
        data.append(row)
    return columns, data
def get_columns():
    """Column definitions for the report in Frappe's "Label:Type:Width" format."""
    specs = [
        ("Employee", "Link/Employee"),
        ("Employee Name", "Data"),
        ("Line", "Link/Line"),
        ("Shift", "Link/Shift"),
        ("Date of Joining", "Date"),
        ("Status", "Link/IT Status"),
        ("KVS Type", ""),
        ("Training Observation", "Link/IT Status"),
        ("Production Observation", "Link/IT Status"),
        ("Quality Observation", "Link/IT Status"),
    ]
    # Every column uses the same 150px width.
    return [_(label) + ":" + fieldtype + ":150" for label, fieldtype in specs]
# def get_conditions(filters):
# conditions = ""
# # if filters.get("employee"):conditions += "AND att.employee = '%s'" % filters["employee"]
# if filters.get("from_date"): conditions += "and c.date_of_skill_evaluatation >= %(from_date)s"
# if filters.get("to_date"): conditions += " and c.date_of_skill_evaluatation <= %(to_date)s"
# return conditions, filters
def employee_details(filters):
    """Fetch active employees whose joining date lies in [filters.from, filters.to].

    Uses a parameterized query, so the filter values are safely escaped.
    """
    employee = frappe.db.sql(
        """select biometric_id,employee_name,shift,department,designation,date_of_joining,line from `tabEmployee` where status = "Active" and date_of_joining between %s and %s """,(filters.get("from"),filters.get("to")),as_dict = 1)
    return employee
def training(emp_id, kvs_type):
    """True/False: does a "<kvs_type> Training Observation" named *emp_id* exist?

    Returns None (implicitly) when *kvs_type* is not one of the known KVS
    operator types, matching the original fall-through behaviour.
    """
    known_types = (
        "Komax Operator",
        "Parts fitting Operator",
        "Tapping Operator",
        "Checker Board Operator",
        "Sub assy process",
    )
    if kvs_type in known_types:
        return bool(frappe.db.exists(kvs_type + " Training Observation", emp_id))
def production(emp_id, kvs_type):
    """True/False: does a "<kvs_type> Production Observation" named *emp_id* exist?

    Returns None (implicitly) when *kvs_type* is not one of the known KVS
    operator types, matching the original fall-through behaviour.
    """
    known_types = (
        "Komax Operator",
        "Parts fitting Operator",
        "Tapping Operator",
        "Checker Board Operator",
        "Sub assy process",
    )
    if kvs_type in known_types:
        return bool(frappe.db.exists(kvs_type + " Production Observation", emp_id))
def quality(emp_id, kvs_type):
    """True/False: does a "<kvs_type> Quality Observation" named *emp_id* exist?

    Returns None (implicitly) when *kvs_type* is not one of the known KVS
    operator types, matching the original fall-through behaviour.
    """
    known_types = (
        "Komax Operator",
        "Parts fitting Operator",
        "Tapping Operator",
        "Checker Board Operator",
        "Sub assy process",
    )
    if kvs_type in known_types:
        return bool(frappe.db.exists(kvs_type + " Quality Observation", emp_id))
| 3,065 | 0 | 146 |
127e78027d3a86dbedef35a4008738c8f31c259c | 2,116 | py | Python | tests/testDQN.py | 1696012928/RoomAI | 37be09590489ab5f7c85083173e83ea31c40b76c | [
"MIT"
] | 1 | 2018-03-02T00:49:31.000Z | 2018-03-02T00:49:31.000Z | tests/testDQN.py | 1696012928/RoomAI | 37be09590489ab5f7c85083173e83ea31c40b76c | [
"MIT"
] | null | null | null | tests/testDQN.py | 1696012928/RoomAI | 37be09590489ab5f7c85083173e83ea31c40b76c | [
"MIT"
] | null | null | null |
import unittest
from models.dqn.dqnalgorithm import DqnAlgorithm
import random
import roomai
import roomai.sevenking
from models.dqn.dqnalgorithm import DqnPlayer
from models.dqn.sevenking import SevenKingModel_ThreePlayers
import roomai.common
| 33.0625 | 147 | 0.664934 |
import unittest
from models.dqn.dqnalgorithm import DqnAlgorithm
import random
import roomai
import roomai.sevenking
from models.dqn.dqnalgorithm import DqnPlayer
from models.dqn.sevenking import SevenKingModel_ThreePlayers
class ExamplePlayer(DqnPlayer):
    """Minimal DqnPlayer stub for exercising the DQN training loop.

    Feature vectors are constant placeholders; actions are chosen uniformly
    at random from the currently available actions.
    """
    def terminal_info_feat(self):
        return [1]
    def terminal_action_feat(self):
        return [0]
    def gen_info_feat(self, info):
        return [1]
    def gen_action_feat(self, info, action):
        return [0]
    def update_model(self, experiences):
        print ("update_model")
    def reset(self):
        pass
    def receive_info(self, info):
        # Stash the latest game info for take_action().
        self.info = info
    def take_action(self):
        """Return a uniformly random available action."""
        info = self.info
        action_list = list(info.person_state.available_actions.values())
        idx = int(random.random() * len(action_list))
        return action_list[idx]
import roomai.common
class DQNTester(unittest.TestCase):
    """Smoke tests for DqnAlgorithm against the SevenKing environment."""
    def setUp(self):
        import logging
        roomai.set_loglevel(logging.DEBUG)
    def test_dqn(self):
        """Train and eval with the stub ExamplePlayer plus random opponents."""
        import roomai.sevenking
        env = roomai.sevenking.SevenKingEnv()
        player = ExamplePlayer()
        dqn = DqnAlgorithm()
        opponents = [roomai.common.RandomPlayer() for i in range(2)]
        dqn.train(env=env, players = [player] + opponents + [roomai.common.RandomPlayerChance()], params={})
        dqn.eval(env=env, players = [player] + opponents + [roomai.common.RandomPlayerChance()], params={})
    def test_sevenking_dqn(self):
        """Train the three-player SevenKing model for a single iteration."""
        import logging
        roomai.set_loglevel(logging.DEBUG)
        env = roomai.sevenking.SevenKingEnv()
        player = SevenKingModel_ThreePlayers()
        algo = DqnAlgorithm()
        opponents = [roomai.common.RandomPlayer() for i in range(2)]
        algo.train(env=env, players = [player] + opponents + [roomai.common.RandomPlayerChance()], params={"num_normal_players": 3, "num_iters":1})
        opponents = [roomai.common.RandomPlayer() for i in range(2)]
        #scores = algo.eval(players = [player] + opponents + [roomai.common.RandomPlayerChance()], env=env)
        #print(scores)
| 1,505 | 24 | 338 |
144dce5682a278cd2617daaba66905d0960fc317 | 3,425 | py | Python | stanCode_Projects/my_drawing/my_drawing.py | LeeHsuanHsuan/MystanCodeProjects | 229f75cdb8717637bea2d83e41e84776dbb9a7c8 | [
"MIT"
] | 1 | 2022-01-26T06:04:35.000Z | 2022-01-26T06:04:35.000Z | stanCode_Projects/my_drawing/my_drawing.py | LeeHsuanHsuan/MystanCodeProjects | 229f75cdb8717637bea2d83e41e84776dbb9a7c8 | [
"MIT"
] | null | null | null | stanCode_Projects/my_drawing/my_drawing.py | LeeHsuanHsuan/MystanCodeProjects | 229f75cdb8717637bea2d83e41e84776dbb9a7c8 | [
"MIT"
] | null | null | null | """
File: my_drawing
Name: LEE HSUAN HSUAN
----------------------
TODO:
"""
from campy.graphics.gobjects import GOval, GRect, GArc, GPolygon ,GLabel
from campy.graphics.gwindow import GWindow
def main():
    """
    Sometimes little things in daily life that bring us happiness.
    Hope everyone can enjoy every moment of your life:)
    """
    window = GWindow(500, 500, title='The little things')

    def add_oval(w, h, x, y, color):
        # Build a filled, self-colored oval and place it on the window.
        shape = GOval(w, h, x=x, y=y)
        shape.filled = True
        shape.fill_color = color
        shape.color = color
        window.add(shape)

    def add_triangle(vertices, color):
        # Build a filled triangle from three vertex pairs.
        shape = GPolygon()
        for vx, vy in vertices:
            shape.add_vertex((vx, vy))
        shape.filled = True
        shape.fill_color = color
        shape.color = color
        window.add(shape)

    def add_label(text, color, font, x, y):
        caption = GLabel(text)
        caption.color = color
        caption.font = font
        window.add(caption, x, y)

    # Tail (two overlapping ovals).
    add_oval(50, 30, 140, 380, 'lightgray')
    add_oval(80, 35, 130, 365, 'white')
    # Body (two triangles).
    add_triangle([(180, 425), (300, 425), (230, 320)], 'lightgrey')
    add_triangle([(170, 440), (270, 440), (200, 290)], 'lightgrey')
    # Ears.
    add_oval(100, 120, 120, 175, 'lightgray')
    add_oval(110, 100, 110, 240, 'lightgrey')
    # Nose.
    add_oval(80, 110, 260, 190, 'lightgrey')
    add_oval(60, 90, 260, 175, 'white')
    # Head and eye.
    add_oval(150, 150, 150, 190, 'lightgrey')
    add_oval(30, 30, 233, 240, 'darkgray')
    add_oval(10, 10, 248, 242, 'white')
    # Mouth: an arc with a small oval laid over it.
    mouth = GArc(30, 50, 180, 180, x=248, y=289)
    mouth.filled = True
    mouth.fill_color = 'darkgray'
    mouth.color = 'darkgray'
    window.add(mouth)
    add_oval(32, 12, 247, 297, 'lightgrey')
    # Bubbles.
    add_oval(90, 90, 285, 138, 'skyblue')
    add_oval(10, 25, 295, 160, 'snow')
    add_oval(10, 10, 295, 193, 'snow')
    # Captions.
    add_label('"What makes you happy?"', 'slategray', 'Times New Roman-18-bold', 125, 80)
    add_label('"Blowing bubbles."', 'slategray', 'Times New Roman-18-bold', 150, 110)
    add_label('"The little things."', 'slategray', 'Times-14-bold-italic', 350, 445)


if __name__ == '__main__':
    main()
| 24.640288 | 72 | 0.635036 | """
File: my_drawing
Name: LEE HSUAN HSUAN
----------------------
TODO:
"""
from campy.graphics.gobjects import GOval, GRect, GArc, GPolygon ,GLabel
from campy.graphics.gwindow import GWindow
def main():
    """
    Sometimes little things in daily life that bring us happiness.
    Hope everyone can enjoy every moment of your life:)
    """
    window = GWindow(500, 500, title='The little things')

    def add_oval(w, h, x, y, color):
        # Build a filled, self-colored oval and place it on the window.
        shape = GOval(w, h, x=x, y=y)
        shape.filled = True
        shape.fill_color = color
        shape.color = color
        window.add(shape)

    def add_triangle(vertices, color):
        # Build a filled triangle from three vertex pairs.
        shape = GPolygon()
        for vx, vy in vertices:
            shape.add_vertex((vx, vy))
        shape.filled = True
        shape.fill_color = color
        shape.color = color
        window.add(shape)

    def add_label(text, color, font, x, y):
        caption = GLabel(text)
        caption.color = color
        caption.font = font
        window.add(caption, x, y)

    # Tail (two overlapping ovals).
    add_oval(50, 30, 140, 380, 'lightgray')
    add_oval(80, 35, 130, 365, 'white')
    # Body (two triangles).
    add_triangle([(180, 425), (300, 425), (230, 320)], 'lightgrey')
    add_triangle([(170, 440), (270, 440), (200, 290)], 'lightgrey')
    # Ears.
    add_oval(100, 120, 120, 175, 'lightgray')
    add_oval(110, 100, 110, 240, 'lightgrey')
    # Nose.
    add_oval(80, 110, 260, 190, 'lightgrey')
    add_oval(60, 90, 260, 175, 'white')
    # Head and eye.
    add_oval(150, 150, 150, 190, 'lightgrey')
    add_oval(30, 30, 233, 240, 'darkgray')
    add_oval(10, 10, 248, 242, 'white')
    # Mouth: an arc with a small oval laid over it.
    mouth = GArc(30, 50, 180, 180, x=248, y=289)
    mouth.filled = True
    mouth.fill_color = 'darkgray'
    mouth.color = 'darkgray'
    window.add(mouth)
    add_oval(32, 12, 247, 297, 'lightgrey')
    # Bubbles.
    add_oval(90, 90, 285, 138, 'skyblue')
    add_oval(10, 25, 295, 160, 'snow')
    add_oval(10, 10, 295, 193, 'snow')
    # Captions.
    add_label('"What makes you happy?"', 'slategray', 'Times New Roman-18-bold', 125, 80)
    add_label('"Blowing bubbles."', 'slategray', 'Times New Roman-18-bold', 150, 110)
    add_label('"The little things."', 'slategray', 'Times-14-bold-italic', 350, 445)


if __name__ == '__main__':
    main()
| 0 | 0 | 0 |
1381e31bdb3f07d1c3f3d70d86b35a122964b40b | 2,581 | py | Python | source/scripts/char_to_bin.py | flamechain/ComputerSimulator | b345b6553d202c166acbe6f254371b8fcad2a574 | [
"MIT"
] | 1 | 2021-02-15T18:52:44.000Z | 2021-02-15T18:52:44.000Z | source/scripts/char_to_bin.py | flamechain/ComputerSimulator | b345b6553d202c166acbe6f254371b8fcad2a574 | [
"MIT"
] | null | null | null | source/scripts/char_to_bin.py | flamechain/ComputerSimulator | b345b6553d202c166acbe6f254371b8fcad2a574 | [
"MIT"
] | null | null | null | symbol_table = {
'00000000': 'A',
'00000001': 'B',
'00000010': 'C',
'00000011': 'D',
'00000100': 'E',
'00000101': 'F',
'00000110': 'G',
'00000111': 'H',
'00001000': 'I',
'00001001': 'J',
'00001010': 'K',
'00001011': 'L',
'00001100': 'M',
'00001101': 'N',
'00001110': 'O',
'00001111': 'P',
'00010000': 'Q',
'00010001': 'R',
'00010010': 'S',
'00010011': 'T',
'00010100': 'U',
'00010101': 'V',
'00010110': 'W',
'00010111': 'X',
'00011000': 'Y',
'00011001': 'Z',
'00011010': 'a',
'00011011': 'b',
'00011100': 'c',
'00011101': 'd',
'00011110': 'e',
'00011111': 'f',
'00100000': 'g',
'00100001': 'h',
'00100010': 'i',
'01011100': 'j',
'00100011': 'k',
'00100100': 'l',
'00100101': 'm',
'00100110': 'n',
'00100111': 'o',
'00101000': 'p',
'00101001': 'q',
'00101010': 'r',
'00101011': 's',
'00101100': 't',
'01011101': 'u',
'00101101': 'v',
'00101110': 'w',
'00101111': 'x',
'00110000': 'y',
'00110001': 'z',
'00110010': '0',
'00110011': '1',
'00110100': '2',
'00110101': '3',
'00110110': '4',
'00110111': '5',
'00111000': '6',
'00111001': '7',
'00111010': '8',
'00111011': '9',
'00111100': ';',
'00111101': ':',
'00111110': "'",
'00111111': '"',
'01000000': '/',
'01000001': '\\',
'01000010': '?',
'01000011': '.',
'01000100': ',',
'01000101': '<',
'01000110': '>',
'01000111': '|',
'01001000': ']',
'01001001': '[',
'01001010': '(',
'01001011': ')',
'01001100': '{',
'01001101': '}',
'01001110': '=',
'01001111': '+',
'01010000': '-',
'01010001': '_',
'01010010': '*',
'01010011': '^',
'01010100': '&',
'01010101': '%',
'01010110': '$',
'01010111': '#',
'01011000': '@',
'01011001': '!',
'01011010': '~',
'01011011': '`',
'01011100': ' ',
'01011101': '',
'01011110': ''
} | 21.155738 | 53 | 0.404882 | symbol_table = {
'00000000': 'A',
'00000001': 'B',
'00000010': 'C',
'00000011': 'D',
'00000100': 'E',
'00000101': 'F',
'00000110': 'G',
'00000111': 'H',
'00001000': 'I',
'00001001': 'J',
'00001010': 'K',
'00001011': 'L',
'00001100': 'M',
'00001101': 'N',
'00001110': 'O',
'00001111': 'P',
'00010000': 'Q',
'00010001': 'R',
'00010010': 'S',
'00010011': 'T',
'00010100': 'U',
'00010101': 'V',
'00010110': 'W',
'00010111': 'X',
'00011000': 'Y',
'00011001': 'Z',
'00011010': 'a',
'00011011': 'b',
'00011100': 'c',
'00011101': 'd',
'00011110': 'e',
'00011111': 'f',
'00100000': 'g',
'00100001': 'h',
'00100010': 'i',
'01011100': 'j',
'00100011': 'k',
'00100100': 'l',
'00100101': 'm',
'00100110': 'n',
'00100111': 'o',
'00101000': 'p',
'00101001': 'q',
'00101010': 'r',
'00101011': 's',
'00101100': 't',
'01011101': 'u',
'00101101': 'v',
'00101110': 'w',
'00101111': 'x',
'00110000': 'y',
'00110001': 'z',
'00110010': '0',
'00110011': '1',
'00110100': '2',
'00110101': '3',
'00110110': '4',
'00110111': '5',
'00111000': '6',
'00111001': '7',
'00111010': '8',
'00111011': '9',
'00111100': ';',
'00111101': ':',
'00111110': "'",
'00111111': '"',
'01000000': '/',
'01000001': '\\',
'01000010': '?',
'01000011': '.',
'01000100': ',',
'01000101': '<',
'01000110': '>',
'01000111': '|',
'01001000': ']',
'01001001': '[',
'01001010': '(',
'01001011': ')',
'01001100': '{',
'01001101': '}',
'01001110': '=',
'01001111': '+',
'01010000': '-',
'01010001': '_',
'01010010': '*',
'01010011': '^',
'01010100': '&',
'01010101': '%',
'01010110': '$',
'01010111': '#',
'01011000': '@',
'01011001': '!',
'01011010': '~',
'01011011': '`',
'01011100': ' ',
'01011101': '',
'01011110': ''
}
def text_to_bin():
    """Read one line from stdin and print 8-bit codes for its characters.

    For every input character the matching code from ``symbol_table`` is
    emitted (one code per line), followed by two zero-padded 8-bit counter
    lines ``x`` and ``y``. Characters without a table entry still get the
    counter lines.
    """
    # Invert the table once so each character is an O(1) lookup instead of
    # scanning every table entry per input character.
    char_to_code = {char: code for code, char in symbol_table.items()}
    lines = []
    x = 2
    # NOTE(review): y is initialised like x but never incremented, so the
    # second counter line is always '00000010'. This looks unintentional,
    # but it is kept as-is to preserve the original output -- confirm with
    # the author before changing.
    y = 2
    for ch in input():
        code = char_to_code.get(ch)
        if code is not None:
            lines.append(code)
        # format(..., '08b') zero-pads to 8 binary digits, replacing the
        # original manual while-loop padding.
        lines.append(format(x, '08b'))
        lines.append(format(y, '08b'))
        x += 1
    print(''.join(line + '\n' for line in lines))
25d87b22ed75cef3afaf9f4577526ead230181c3 | 10,346 | py | Python | src/MAT/build/MAT_distutils.py | wake-forest-ctsi/mist-toolkit | 857e91976fa3b75ef2cad08612fa79cf2f743615 | [
"BSD-3-Clause"
] | 2 | 2015-10-28T17:58:31.000Z | 2021-10-12T10:34:39.000Z | scrubber/MIST_2_0_4/src/MAT/build/MAT_distutils.py | manaswini18/DmD | dd1e865ddb7b43c8478b2b5733385143b1980951 | [
"Apache-2.0"
] | null | null | null | scrubber/MIST_2_0_4/src/MAT/build/MAT_distutils.py | manaswini18/DmD | dd1e865ddb7b43c8478b2b5733385143b1980951 | [
"Apache-2.0"
] | 9 | 2016-12-17T22:50:37.000Z | 2020-09-26T01:08:06.000Z | # Copyright (C) 2010 The MITRE Corporation. See the toplevel
# file LICENSE for license terms.
# This file contains utilities which may be used either by
# build_tarball.py, install.py, or any of the dist.py files in
# the individual tasks.
# THIS FILE DOES NOT RELY ON THE MAT PYTHON MODULE.
import subprocess
import os
# We want to be able to save out task-specific config files.
import ConfigParser
# We're going to introduce the possibility of having multiple
# versions, if we have an additional search path.
import re
#
# Better, more integrated version of the executable chooser.
#
# The candidates are considered first, then the name in the usual path,
# then the name in the extra dirs. If there's a version checker, the
# argument of any acceptability test is the checker and the version, and
# the choice function is the newest acceptable version. Otherwise, the
# argument of the acceptability test is the full pathname, and
# the choice function gets all the paths. If there's no choice function,
# the first acceptable one is chosen. If there's no acceptability
# test, the first element that exists and is a file is returned.
import sys
| 32.949045 | 100 | 0.556737 | # Copyright (C) 2010 The MITRE Corporation. See the toplevel
# file LICENSE for license terms.
# This file contains utilities which may be used either by
# build_tarball.py, install.py, or any of the dist.py files in
# the individual tasks.
# THIS FILE DOES NOT RELY ON THE MAT PYTHON MODULE.
import subprocess
def shellOutput(scmd):
    """Run *scmd* through the shell and hand back everything it wrote to stdout."""
    pipe = subprocess.Popen(scmd, shell=True, stdout=subprocess.PIPE).stdout
    output = pipe.read()
    pipe.close()
    return output
import os
class MATManifest(dict):
    """Key/value manifest backed by the MANIFEST file under a bundle root.

    Ordinary entries are stored in the dict itself; the special
    "mat_tasks" entry is kept separately as a list of task names.
    """

    def __init__(self, bundleRoot):
        self.bundleRoot = bundleRoot
        self.manifestFile = os.path.join(self.bundleRoot, "MANIFEST")
        self.taskEntries = None

    def setTaskEntries(self, taskEntries):
        self.taskEntries = taskEntries

    def getTaskEntries(self):
        return self.taskEntries

    def load(self):
        """Populate the dict (and taskEntries) from the MANIFEST file."""
        with open(self.manifestFile, "r") as fp:
            for line in fp:
                # Each line is "<key> : <value>"; split only on the first separator.
                key, val = line.split(" : ", 1)
                if key == "mat_tasks":
                    self.taskEntries = val.strip().split()
                else:
                    self[key] = val.strip()

    def save(self):
        """Write the dict (and taskEntries, if set) back to the MANIFEST file."""
        with open(self.manifestFile, "w") as fp:
            for key, val in self.items():
                fp.write("%s : %s\n" % (key, val))
            if self.taskEntries is not None:
                fp.write("mat_tasks : " + " ".join(self.taskEntries))
def parseTaskFeatures(features):
    """Parse a comma-separated feature string into a dict.

    "a, b=c" -> {"a": True, "b": "c"}: bare names map to True, "name=value"
    pairs map to the (stripped) value. Empty/None input yields {}.
    """
    if not features:
        return {}
    result = {}
    for chunk in features.split(","):
        chunk = chunk.strip()
        name, sep, value = chunk.partition("=")
        if sep:
            result[name.strip()] = value.strip()
        else:
            result[chunk] = True
    return result
# We want to be able to save out task-specific config files.
import ConfigParser
def writeTaskSettings(taskDir, d):
    """Merge the key/value pairs in *d* into <taskDir>/MAT_settings.config.

    The settings go in a section named after the task directory's basename;
    any other sections already in the file are preserved.
    """
    path = os.path.join(taskDir, "MAT_settings.config")
    p = ConfigParser.RawConfigParser()
    # Keep option names case-sensitive (default optionxform lowercases).
    p.optionxform = str
    prefix = os.path.basename(taskDir)
    if os.path.exists(path):
        p.read([path])
    # Bug fix: an existing config file may not yet contain this task's
    # section, and set() raises NoSectionError on a missing section, so
    # always ensure the section exists before writing into it.
    if not p.has_section(prefix):
        p.add_section(prefix)
    for k, v in d.items():
        p.set(prefix, k, v)
    # try/finally so the file handle is closed even if write() fails.
    fp = open(path, "wb")
    try:
        p.write(fp)
    finally:
        fp.close()
def readTaskSettings(taskDir):
    """Return a RawConfigParser loaded from <taskDir>/MAT_settings.config.

    If the file does not exist, an empty parser is returned.
    (Fix: dropped the unused 'prefix' local the original computed.)
    """
    path = os.path.join(taskDir, "MAT_settings.config")
    p = ConfigParser.RawConfigParser()
    # Keep option names case-sensitive, matching writeTaskSettings.
    p.optionxform = str
    if os.path.exists(path):
        p.read([path])
    return p
# We're going to introduce the possibility of having multiple
# versions, if we have an additional search path.
import re
class VersionExtractor:
    """Pull a version number out of a command's output and compare versions.

    ``matchRe`` is searched against the output of ``cmdSubstString % seed``;
    the named groups in ``groupNames`` become the integer components of the
    version list.
    """

    def __init__(self, matchRe, cmdSubstString, groupNames):
        self.matchRe = matchRe
        self.cmdSubstString = cmdSubstString
        self.groupNames = groupNames

    def extractVersion(self, seed):
        """Return the version components for *seed*, or None if no match."""
        output = shellOutput(self.cmdSubstString % seed)
        m = re.search(self.matchRe, output)
        if m is None:
            return None
        # Optional groups that did not participate in the match are dropped.
        return [int(m.group(name)) for name in self.groupNames
                if m.group(name) is not None]

    def atLeastVersion(self, reqTuple, foundTuple, excludeEndpoint = False):
        """True when foundTuple >= reqTuple (strictly > if excludeEndpoint)."""
        for required, found in zip(reqTuple, foundTuple):
            if required > found:
                return False
            if required < found:
                return True
        # Equal over the common prefix. A longer requirement (e.g. 3.79.1
        # vs. found 3.79) is not satisfied; exact equality only counts when
        # the endpoint is allowed.
        if len(reqTuple) > len(foundTuple):
            return False
        return not excludeeEndpoint if False else not excludeEndpoint

    def atMostVersion(self, reqTuple, foundTuple, excludeEndpoint = False):
        # Symmetric: found <= req is req >= found.
        return self.atLeastVersion(foundTuple, reqTuple, excludeEndpoint = excludeEndpoint)
#
# Better, more integrated version of the executable chooser.
#
# The candidates are considered first, then the name in the usual path,
# then the name in the extra dirs. If there's a version checker, the
# argument of any acceptability test is the checker and the version, and
# the choice function is the newest acceptable version. Otherwise, the
# argument of the acceptability test is the full pathname, and
# the choice function gets all the paths. If there's no choice function,
# the first acceptable one is chosen. If there's no acceptability
# test, the first element that exists and is a file is returned.
import sys
def chooseExecutable(category, seed = None, execName = None, execExtraDirs = None,
execCandidates = None, versionChecker = None,
filterFn = None, choiceFn = None,
failureString = None, execPrompt = None,
promptIntro = None,
execFailureString = None, exitOnFailure = False):
print "Checking for", category, "..."
# seed, if present, can be either a name or a full path.
if seed is not None:
if os.path.isabs(seed):
if execCandidates is None:
execCandidates = [seed]
else:
execCandidates[0:0] = [seed]
elif os.path.dirname(seed):
print "Seed is neither a full path nor an executable name; ignoring."
else:
# It's an executable name. If execName is present, warn and discard.
if execName is not None:
print "Using", seed, "instead of execName"
execName = seed
if failureString is None:
failureString = "failed."
allCandidates = []
versionHash = {}
# Possible executables may appear in multiple search options.
pathChecked = {}
if versionChecker is not None:
matchRe, cmdSubstString, groupNames, minVersion, maxVersion = versionChecker
versionChecker = VersionExtractor(matchRe, cmdSubstString, groupNames)
def checkVersions(cand):
mVersion = versionHash[cand]
if minVersion is not None and (not versionChecker.atLeastVersion(minVersion, mVersion)):
return False
if maxVersion is not None and (not versionChecker.atMostVersion(maxVersion, mVersion)):
return False
return True
def chooseNewest(allPaths):
curCandidate = allPaths[0]
curVersion = versionHash[curCandidate]
for candidate in allPaths[1:]:
mVersion = versionHash[candidate]
if versionChecker.atLeastVersion(curVersion, mVersion, excludeEndpoint = True):
# If it's newer, use it.
curCandidate = candidate
curVersion = mVersion
return curCandidate
choiceFn = chooseNewest
filterFn = checkVersions
# If there's a filterFn, don't check that it's an executable.
def checkCandidate(c):
print "Checking", c, "...",
if versionChecker is not None:
mVersion = versionChecker.extractVersion(c)
if mVersion is None:
print "not a version."
return False
versionHash[c] = mVersion
elif filterFn is not None:
if not filterFn(c):
print failureString
return False
else:
# Gotta at least make sure it's an executable.
if (not os.path.isfile(c)) or (not os.access(c, os.X_OK)):
print "not an executable."
return False
print "ok."
allCandidates.append(c)
return True
if execCandidates is not None:
for cand in execCandidates:
if not os.path.isabs(cand):
print "%s is not a full pathname; skipping." % cand
try:
pathChecked[cand]
continue
except KeyError:
pathChecked[cand] = True
if checkCandidate(cand) and (choiceFn is None):
print "Chose", cand
return cand
if execName is not None:
if sys.platform == "win32":
if not execName.endswith(".exe"):
execName += ".exe"
envPath = os.environ["PATH"].split(";")
else:
envPath = os.environ["PATH"].split(":")
if execExtraDirs is not None:
envPath += execExtraDirs
for d in envPath:
p = os.path.join(d, execName)
try:
pathChecked[p]
continue
except KeyError:
pathChecked[p] = True
if os.path.exists(p):
if checkCandidate(p) and (choiceFn is None):
print "Chose", p
return p
# If we're still here, we may have a bunch of things in
# allCandidates and choiceFn exists.
if allCandidates:
p = choiceFn(allCandidates)
print "Chose", p
return p
else:
if execPrompt is not None:
if promptIntro is not None:
print promptIntro
def cleanValue(prompt):
v = raw_input(prompt)
if v is None:
return v
else:
v = v.strip()
# Strip trailing slash, in either direction
if v and v[-1] in "/\\":
v = v[:-1]
return v
while True:
v = cleanValue(execPrompt)
if v == "":
if execFailureString is not None:
print execFailureString
if exitOnFailure:
sys.exit(1)
else:
return None
elif checkCandidate(v):
return v
else:
# This may also be a failure - don't ignore the failure cases.
if execFailureString is not None:
print execFailureString
if exitOnFailure:
sys.exit(1)
else:
return None
| 8,762 | 5 | 416 |
4155b51b326b1c6008762de469a32741ddafff32 | 644 | py | Python | fetch_spacex.py | mogubudu/api-lesson-4 | 1b700a6ddd7e04ed1d6a008758b95a90683ca18d | [
"MIT"
] | null | null | null | fetch_spacex.py | mogubudu/api-lesson-4 | 1b700a6ddd7e04ed1d6a008758b95a90683ca18d | [
"MIT"
] | null | null | null | fetch_spacex.py | mogubudu/api-lesson-4 | 1b700a6ddd7e04ed1d6a008758b95a90683ca18d | [
"MIT"
] | null | null | null | import os
import requests
from file_handler import download_image, get_filename
if __name__ == "__main__":
main()
| 24.769231 | 73 | 0.725155 | import os
import requests
from file_handler import download_image, get_filename
def fetch_spacex_last_launch(path_to_save='images/', index_of_launch=13):
    """Download the Flickr photos of one SpaceX launch into *path_to_save*."""
    launches_url = 'https://api.spacexdata.com/v4/launches/'
    os.makedirs(path_to_save, exist_ok=True)
    response = requests.get(launches_url)
    response.raise_for_status()
    launch = response.json()[index_of_launch]
    if 'links' not in launch:
        return
    for photo_url in launch['links']['flickr']['original']:
        download_image(get_filename(photo_url), photo_url, path_to_save)
def main():
    """Entry point: download the photos of the default SpaceX launch."""
    fetch_spacex_last_launch()
if __name__ == "__main__":
    main()
| 476 | 0 | 46 |
b2e1919b70d39ba432c1acf53fcbe5674e29eac5 | 472 | py | Python | snippets/set_line_join.py | iubica/wx-portfolio | 12101986db72bcaffd9b744d514d6f9f651ad5a1 | [
"MIT"
] | 3 | 2018-03-19T07:57:10.000Z | 2021-07-05T08:55:14.000Z | snippets/set_line_join.py | iubica/wx-portfolio | 12101986db72bcaffd9b744d514d6f9f651ad5a1 | [
"MIT"
] | 6 | 2020-03-24T15:40:18.000Z | 2021-12-13T19:46:09.000Z | snippets/set_line_join.py | iubica/wx-portfolio | 12101986db72bcaffd9b744d514d6f9f651ad5a1 | [
"MIT"
] | 4 | 2018-03-29T21:59:55.000Z | 2019-12-16T14:56:38.000Z | snippet_normalize (cr, width, height)
cr.set_line_width (0.16)
cr.move_to (0.3, 0.33)
cr.rel_line_to (0.2, -0.2)
cr.rel_line_to (0.2, 0.2)
cr.set_line_join (cairo.LINE_JOIN_MITER) #/* default */
cr.stroke ()
cr.move_to (0.3, 0.63)
cr.rel_line_to (0.2, -0.2)
cr.rel_line_to (0.2, 0.2)
cr.set_line_join (cairo.LINE_JOIN_BEVEL)
cr.stroke ()
cr.move_to (0.3, 0.93)
cr.rel_line_to (0.2, -0.2)
cr.rel_line_to (0.2, 0.2)
cr.set_line_join (cairo.LINE_JOIN_ROUND)
cr.stroke ()
| 21.454545 | 55 | 0.697034 | snippet_normalize (cr, width, height)
cr.set_line_width (0.16)
cr.move_to (0.3, 0.33)
cr.rel_line_to (0.2, -0.2)
cr.rel_line_to (0.2, 0.2)
cr.set_line_join (cairo.LINE_JOIN_MITER) #/* default */
cr.stroke ()
cr.move_to (0.3, 0.63)
cr.rel_line_to (0.2, -0.2)
cr.rel_line_to (0.2, 0.2)
cr.set_line_join (cairo.LINE_JOIN_BEVEL)
cr.stroke ()
cr.move_to (0.3, 0.93)
cr.rel_line_to (0.2, -0.2)
cr.rel_line_to (0.2, 0.2)
cr.set_line_join (cairo.LINE_JOIN_ROUND)
cr.stroke ()
| 0 | 0 | 0 |
76a5c4d6bb95c79b539e7d45481182b236126beb | 5,490 | py | Python | item_engine/lin_lin_network/test.py | GabrielAmare/TextEngine | 39ceb323a63af35e32c4be34ae35a77e811bc973 | [
"MIT"
] | null | null | null | item_engine/lin_lin_network/test.py | GabrielAmare/TextEngine | 39ceb323a63af35e32c4be34ae35a77e811bc973 | [
"MIT"
] | null | null | null | item_engine/lin_lin_network/test.py | GabrielAmare/TextEngine | 39ceb323a63af35e32c4be34ae35a77e811bc973 | [
"MIT"
] | null | null | null | from typing import Tuple
from item_engine.constants import *
from item_engine.lin_lin_network import Model as LL_Model
from item_engine.textbase import make_characters, Char, Token
if __name__ == '__main__':
calls_to = {}
@memorize
def function(token: Token, char) -> Tuple[ACTION, STATE]:
"""
parser for :
VAR = 'abcdefghijklmnopqrstuvwxyz'+
NUM = '0123456789'+
VAR_NUM = 'abcdefghijklmnopqrstuvwxyz'+ '0123456789'+
EQUAL = '='
PLUS = '+'
PLUS_EQUAL = '+='
PLUS_PLUS = '++'
LP = '('
RP = ')'
"""
if token.value == 0:
if char.value == '=':
return INCLUDE, 'EQUAL'
elif char.value == '+':
return INCLUDE, 7
elif char.value == '(':
return INCLUDE, 'LP'
elif char.value == ')':
return INCLUDE, 'RP'
elif char.value == '/':
return INCLUDE, 'SLASH'
elif char.value == '-':
return INCLUDE, 'DASH'
elif char.value == ' ':
return INCLUDE, 6
elif char.value in 'abcefghijklmnopqrstuvwxyz':
return INCLUDE, 1
elif char.value in 'd':
return INCLUDE, 3
elif char.value in '0123456789':
return INCLUDE, 8
else:
return EXCLUDE, '!'
elif token.value == 1:
if char.value in 'abcdefghijklmnopqrstuvwxyz':
return INCLUDE, 1
elif char.value in '0123456789':
return INCLUDE, 2
else:
return EXCLUDE, 'VAR'
elif token.value == 2:
if char.value in '0123456789':
return INCLUDE, 2
else:
return EXCLUDE, 'VAR_NUM'
elif token.value == 3:
if char.value == 'e':
return INCLUDE, 4
elif char.value in 'abcdfghijklmnopqrstuvwxyz':
return INCLUDE, 1
elif char.value in '0123456789':
return INCLUDE, 2
else:
return EXCLUDE, 'VAR'
elif token.value == 4:
if char.value == 'f':
return INCLUDE, 5
elif char.value in 'abcdeghijklmnopqrstuvwxyz':
return INCLUDE, 1
elif char.value in '0123456789':
return INCLUDE, 2
else:
return EXCLUDE, 'VAR'
elif token.value == 5:
if char.value in 'abcdefghijklmnopqrstuvwxyz':
return INCLUDE, 1
elif char.value in '0123456789':
return INCLUDE, 2
else:
return EXCLUDE, 'KW_DEF'
elif token.value == 6:
if char.value == ' ':
return INCLUDE, 6
else:
return EXCLUDE, 'SPACE'
elif token.value == 7:
if char.value == '+':
return INCLUDE, 'PLUS_PLUS'
elif char.value == '=':
return INCLUDE, 'PLUS_EQUAL'
else:
return EXCLUDE, 'PLUS'
elif token.value == 8:
if char.value in '0123456789':
return INCLUDE, 8
else:
return EXCLUDE, 'NUM'
else:
raise Exception(f"invalid value : {token.value!r}")
net = LL_Model(
input_cls=Char,
output_cls=Token,
function=function,
skips=["SPACE"]
)
alphabet = "abcdefghijklmnopqrstuvwxyz0123456789+/=() "
import time
import random
size = 10_000
text = ''.join(random.choice(alphabet) for _ in range(size))
t = time.time()
try:
tokens = net.generate(make_characters(text))
d = time.time() - t
print(f"{round((1e6 * d) / len(text))} ΞΌs/char [{len(text)} chars]")
print(f"{round((1e6 * d) / len(tokens))} ΞΌs/token [{len(tokens)} tokens]")
print(f"total time : {round(d, 3)} seconds")
print()
except SyntaxError as e:
print(repr(text))
print('|' + ''.join('^' if e.args[0].start == index else ' ' for index in range(len(text))) + '|')
raise e
len_keys = len(calls_to.keys())
max_call = max(calls_to.values())
sum_call = sum(calls_to.values())
print(f"memorize\n"
f"number of cases : {len_keys}\n"
f"maximum calls to a single case : {max_call}\n"
f"mean calls to a single case : {sum_call / max_call if max_call != 0 else '?'}")
for key, val in calls_to.items():
if val >= 0.75 * max_call:
print(f"{key} occured {val} times")
text = "abcdef12345 = (x / 120)"
from tools37 import ReprTable
print(ReprTable.from_items(items=net.generate(make_characters(text)), config=dict(
span=lambda token: f"{token.start} β {token.end}",
type=lambda token: token.value,
content=lambda token: token.content
)))
| 31.551724 | 106 | 0.501093 | from typing import Tuple
from item_engine.constants import *
from item_engine.lin_lin_network import Model as LL_Model
from item_engine.textbase import make_characters, Char, Token
if __name__ == '__main__':
calls_to = {}
def memorize(function):
cache = {}
def wrapper(token, char):
key = (token.value, char.value)
global calls_to
calls_to.setdefault(key, 0)
calls_to[key] += 1
if key in cache:
return cache[key]
else:
cache[key] = result = function(token, char)
return result
return wrapper
@memorize
def function(token: Token, char) -> Tuple[ACTION, STATE]:
"""
parser for :
VAR = 'abcdefghijklmnopqrstuvwxyz'+
NUM = '0123456789'+
VAR_NUM = 'abcdefghijklmnopqrstuvwxyz'+ '0123456789'+
EQUAL = '='
PLUS = '+'
PLUS_EQUAL = '+='
PLUS_PLUS = '++'
LP = '('
RP = ')'
"""
if token.value == 0:
if char.value == '=':
return INCLUDE, 'EQUAL'
elif char.value == '+':
return INCLUDE, 7
elif char.value == '(':
return INCLUDE, 'LP'
elif char.value == ')':
return INCLUDE, 'RP'
elif char.value == '/':
return INCLUDE, 'SLASH'
elif char.value == '-':
return INCLUDE, 'DASH'
elif char.value == ' ':
return INCLUDE, 6
elif char.value in 'abcefghijklmnopqrstuvwxyz':
return INCLUDE, 1
elif char.value in 'd':
return INCLUDE, 3
elif char.value in '0123456789':
return INCLUDE, 8
else:
return EXCLUDE, '!'
elif token.value == 1:
if char.value in 'abcdefghijklmnopqrstuvwxyz':
return INCLUDE, 1
elif char.value in '0123456789':
return INCLUDE, 2
else:
return EXCLUDE, 'VAR'
elif token.value == 2:
if char.value in '0123456789':
return INCLUDE, 2
else:
return EXCLUDE, 'VAR_NUM'
elif token.value == 3:
if char.value == 'e':
return INCLUDE, 4
elif char.value in 'abcdfghijklmnopqrstuvwxyz':
return INCLUDE, 1
elif char.value in '0123456789':
return INCLUDE, 2
else:
return EXCLUDE, 'VAR'
elif token.value == 4:
if char.value == 'f':
return INCLUDE, 5
elif char.value in 'abcdeghijklmnopqrstuvwxyz':
return INCLUDE, 1
elif char.value in '0123456789':
return INCLUDE, 2
else:
return EXCLUDE, 'VAR'
elif token.value == 5:
if char.value in 'abcdefghijklmnopqrstuvwxyz':
return INCLUDE, 1
elif char.value in '0123456789':
return INCLUDE, 2
else:
return EXCLUDE, 'KW_DEF'
elif token.value == 6:
if char.value == ' ':
return INCLUDE, 6
else:
return EXCLUDE, 'SPACE'
elif token.value == 7:
if char.value == '+':
return INCLUDE, 'PLUS_PLUS'
elif char.value == '=':
return INCLUDE, 'PLUS_EQUAL'
else:
return EXCLUDE, 'PLUS'
elif token.value == 8:
if char.value in '0123456789':
return INCLUDE, 8
else:
return EXCLUDE, 'NUM'
else:
raise Exception(f"invalid value : {token.value!r}")
net = LL_Model(
input_cls=Char,
output_cls=Token,
function=function,
skips=["SPACE"]
)
alphabet = "abcdefghijklmnopqrstuvwxyz0123456789+/=() "
import time
import random
size = 10_000
text = ''.join(random.choice(alphabet) for _ in range(size))
t = time.time()
try:
tokens = net.generate(make_characters(text))
d = time.time() - t
print(f"{round((1e6 * d) / len(text))} ΞΌs/char [{len(text)} chars]")
print(f"{round((1e6 * d) / len(tokens))} ΞΌs/token [{len(tokens)} tokens]")
print(f"total time : {round(d, 3)} seconds")
print()
except SyntaxError as e:
print(repr(text))
print('|' + ''.join('^' if e.args[0].start == index else ' ' for index in range(len(text))) + '|')
raise e
len_keys = len(calls_to.keys())
max_call = max(calls_to.values())
sum_call = sum(calls_to.values())
print(f"memorize\n"
f"number of cases : {len_keys}\n"
f"maximum calls to a single case : {max_call}\n"
f"mean calls to a single case : {sum_call / max_call if max_call != 0 else '?'}")
for key, val in calls_to.items():
if val >= 0.75 * max_call:
print(f"{key} occured {val} times")
text = "abcdef12345 = (x / 120)"
from tools37 import ReprTable
print(ReprTable.from_items(items=net.generate(make_characters(text)), config=dict(
span=lambda token: f"{token.start} β {token.end}",
type=lambda token: token.value,
content=lambda token: token.content
)))
| 394 | 0 | 27 |
5483f8927d2bbfc5ed8cdc8e4bd363e75303c372 | 13,978 | py | Python | src/synth.py | viniciuspjardim/synth-test | 754afce502b1788ad9900eb095ef090acf1b1f26 | [
"Apache-2.0"
] | null | null | null | src/synth.py | viniciuspjardim/synth-test | 754afce502b1788ad9900eb095ef090acf1b1f26 | [
"Apache-2.0"
] | null | null | null | src/synth.py | viniciuspjardim/synth-test | 754afce502b1788ad9900eb095ef090acf1b1f26 | [
"Apache-2.0"
] | null | null | null | """
Author: VinΓcius Jardim
Email: viniciuspjardim@gmail.com
Date: 3/2016
"""
import math
import random
import re
import wave
import matplotlib.pyplot as plt
import numpy as np
import pyaudio as pa
from scipy.io.wavfile import write
from scipy import signal
from src.musics import *
class Notes:
"""Musical notes represents sounds with definite pitches (sound
frequency). Definite pitches come from instruments like piano,
guitar, vocals, etc. Notes often come from the chromatic scale,
with 12 pitches, each a semitone above or below another. The notes
are:
`C, C#, D, D#, E, F, F#, G, G#, A, A#, B`
In this class each of this notes is represented by a number from 0
(C note) to 11 (B note). This 12 notes represents an octave. Each
octave ends in a B note, then a new octave starts (C note).
For example: as we said, the the note number 11 is a B so the number
12 will be another C. This C will be one octave higher the other C.
So we can call the first C, C0, the second C will be C1 and so on.
The letter is the note name and the number is the octave.
Each note in this class can be represented as a number or by the
note name followed by the octave number. Example:
| Note | Name | Frequency (Hz) | Wavelength (m)|
|:----:|:----:|---------------:|--------------:|
| 0 | C0 | 16.351597 | 21.098855 |
| 1 | C#0 | 17.323914 | 19.914667 |
| 2 | D0 | 18.354047 | 18.796943 |
|... | | | |
| 12 | C1 | 32.703195 | 10.549427 |
| 13 | C#1 | 34.647828 | 9.957333 |
|... | | | |
We can see that the C1 is twice C0 frequency. Although C1 is more
acute, it produces a harmonic sound to C0. C2 will be twice the
frequency of C1, and it keeps doubling. The human ear can listen to
frequencies from 20 to 20000 Hz.
"""
names = [
# 0 1 2 3 4 5 6 7 8 9 10 11
'C', 'C#', 'D', 'D#', 'E', 'F', 'F#', 'G', 'G#', 'A', 'A#', 'B']
"""Notes names"""
notes_dict = {
'C': 0, 'C#': 1, 'D': 2, 'D#': 3, 'E': 4, 'F': 5, 'F#': 6,
'G': 7, 'G#': 8, 'A': 9, 'A#': 10, 'B': 11}
"""Notes name dictionary. We can get the number of the note by
passing the note name
"""
scales = [
{
'name': 'Major Diatonic',
'notes': [0, 2, 4, 5, 7, 9, 11]},
{
'name': 'Minor Diatonic',
'notes': [0, 2, 3, 5, 7, 8, 10]},
{
'name': 'Major Pentatonic',
'notes': [0, 2, 4, 7, 9]},
{
'name': 'Minor Pentatonic',
'notes': [0, 3, 5, 7, 10]},
{
'name': 'Major Hexatonic',
'notes': [0, 2, 3, 4, 7, 9],
'blue_note': 3},
{
'name': 'Minor Hexatonic',
'notes': [0, 3, 5, 6, 7, 10],
'blue_note': 6}
]
"""A list of scales with C as the tonic note.
Each scale is a dictionary itself, with name, notes and blue note
when applicable.
"""
note_rgx = re.compile(r"^([CDEFGAB]{1}[#]?)([0-9]*)$")
"""Matches a string note like C, G#, B4, D#3
More about regex can be found on
https://docs.python.org/3/library/re.html
"""
@staticmethod
@staticmethod
@staticmethod
@staticmethod
@staticmethod
@staticmethod
@staticmethod
def frequency(self, note_num):
"""Returns the note frequency of the note represented by note
num. The math formula is T * 2 ^((N -15/12)), where T is the A4
default tune (usually 440 Hz) and N is the number of the note
(starting from C0 = 0).
"""
if note_num is None:
return 0
return self.a4Tune * 2 ** ((note_num - 57) / 12)
if __name__ == "__main__":
main()
| 28.584867 | 72 | 0.5093 | """
Author: VinΓcius Jardim
Email: viniciuspjardim@gmail.com
Date: 3/2016
"""
import math
import random
import re
import wave
import matplotlib.pyplot as plt
import numpy as np
import pyaudio as pa
from scipy.io.wavfile import write
from scipy import signal
from src.musics import *
class Notes:
"""Musical notes represents sounds with definite pitches (sound
frequency). Definite pitches come from instruments like piano,
guitar, vocals, etc. Notes often come from the chromatic scale,
with 12 pitches, each a semitone above or below another. The notes
are:
`C, C#, D, D#, E, F, F#, G, G#, A, A#, B`
In this class each of this notes is represented by a number from 0
(C note) to 11 (B note). This 12 notes represents an octave. Each
octave ends in a B note, then a new octave starts (C note).
For example: as we said, the the note number 11 is a B so the number
12 will be another C. This C will be one octave higher the other C.
So we can call the first C, C0, the second C will be C1 and so on.
The letter is the note name and the number is the octave.
Each note in this class can be represented as a number or by the
note name followed by the octave number. Example:
| Note | Name | Frequency (Hz) | Wavelength (m)|
|:----:|:----:|---------------:|--------------:|
| 0 | C0 | 16.351597 | 21.098855 |
| 1 | C#0 | 17.323914 | 19.914667 |
| 2 | D0 | 18.354047 | 18.796943 |
|... | | | |
| 12 | C1 | 32.703195 | 10.549427 |
| 13 | C#1 | 34.647828 | 9.957333 |
|... | | | |
We can see that the C1 is twice C0 frequency. Although C1 is more
acute, it produces a harmonic sound to C0. C2 will be twice the
frequency of C1, and it keeps doubling. The human ear can listen to
frequencies from 20 to 20000 Hz.
"""
names = [
# 0 1 2 3 4 5 6 7 8 9 10 11
'C', 'C#', 'D', 'D#', 'E', 'F', 'F#', 'G', 'G#', 'A', 'A#', 'B']
"""Notes names"""
notes_dict = {
'C': 0, 'C#': 1, 'D': 2, 'D#': 3, 'E': 4, 'F': 5, 'F#': 6,
'G': 7, 'G#': 8, 'A': 9, 'A#': 10, 'B': 11}
"""Notes name dictionary. We can get the number of the note by
passing the note name
"""
scales = [
{
'name': 'Major Diatonic',
'notes': [0, 2, 4, 5, 7, 9, 11]},
{
'name': 'Minor Diatonic',
'notes': [0, 2, 3, 5, 7, 8, 10]},
{
'name': 'Major Pentatonic',
'notes': [0, 2, 4, 7, 9]},
{
'name': 'Minor Pentatonic',
'notes': [0, 3, 5, 7, 10]},
{
'name': 'Major Hexatonic',
'notes': [0, 2, 3, 4, 7, 9],
'blue_note': 3},
{
'name': 'Minor Hexatonic',
'notes': [0, 3, 5, 6, 7, 10],
'blue_note': 6}
]
"""A list of scales with C as the tonic note.
Each scale is a dictionary itself, with name, notes and blue note
when applicable.
"""
note_rgx = re.compile(r"^([CDEFGAB]{1}[#]?)([0-9]*)$")
"""Matches a string note like C, G#, B4, D#3
More about regex can be found on
https://docs.python.org/3/library/re.html
"""
@staticmethod
def get_note_name(note_num, octave_num=True):
octave = str(note_num // 12)
note_name = Notes.names[note_num % 12]
if octave_num:
return note_name + octave
else:
return note_name
@staticmethod
def get_note_num(note_name):
if isinstance(note_name, int):
return note_name
match = Notes.note_rgx.match(note_name)
if not match:
raise ValueError("note_name arg is not a valid note name")
note = match.group(1)
octave_str = match.group(2)
if not octave_str:
octave_str = "0"
octave = int(octave_str)
return Notes.notes_dict[note] + (octave * 12)
@staticmethod
def print_notes(notes, octave_num=True, scale_tonic=0):
tonic = Notes.get_note_num(scale_tonic)
for i, item in enumerate(notes):
print("%s" % Notes.get_note_name(
item + tonic, octave_num), end="")
if i < len(notes)-1:
print(", ", end="")
@staticmethod
def print_scale(scale, scale_tonic):
print("%s in %s: " % (scale['name'], scale_tonic), end="")
Notes.print_notes(scale['notes'], False, scale_tonic)
tonic = Notes.get_note_num(scale_tonic)
if 'blue_note' in scale:
print(". Blue note: %s" % Notes.get_note_name(
scale['blue_note'] + tonic, False))
@staticmethod
def print_guitar(tune=None, scale=None, scale_tonic=None,
octave_num=False, fret_num=12):
fret_num += 1
if tune is None:
tune = [28, 33, 38, 43, 47, 52]
if scale is not None:
Notes.print_scale(scale, scale_tonic)
print()
for y in range(0, fret_num):
print("%4d " % y, end="")
print()
for x in range(len(tune)-1, -1, -1):
for y in range(0, fret_num):
note = tune[x] + y
fret_divisor = "|"
if y == 0:
fret_divisor = "||"
if scale is None:
print(" %3s " %
Notes.get_note_name(note, octave_num), end="")
print(fret_divisor, end="")
else:
tonic = Notes.get_note_num(scale_tonic)
has_note = False
for scale_note in scale['notes']:
if note % 12 == (scale_note + tonic) % 12:
has_note = True
break
if has_note:
print(" %3s " % Notes.get_note_name(
note, octave_num), end="")
print(fret_divisor, end="")
else:
print(" --- ", end="")
print(fret_divisor, end="")
print()
@staticmethod
def print_guitar_scales():
print("All notes in guitar neck")
Notes.print_guitar(fret_num=22, octave_num=True)
print()
for tonic in Notes.names:
for scale in Notes.scales:
Notes.print_guitar(
scale=scale, scale_tonic=tonic,
fret_num=22, octave_num=True)
print()
@staticmethod
def volume(time, max_time, shape):
if time > max_time:
return 0
# Max volume all time - causes glitches in sound
if shape == 0:
r = 1
# -(x-1)^2 + 1 from 0 to 2
elif shape == 1:
x = time / max_time * 2
r = -(x-1) ** 2 + 1
# interpolate{{0, 0}, {1, 0.98}, {2, 0.5}, {3, 0.3}, {4, 0}}
# -0.0883333 x^4+0.82 x^3-2.57167 x^2+2.82 x
elif shape == 2:
x = time / max_time * 4
r = - 0.0883333 * x ** 4 + 0.82 * x ** 3 - 2.57167 *\
x ** 2 + 2.82 * x
elif shape == 3:
x = time / max_time
# interpolate{{0, 0}, {0.1, 1}}
if x <= 0.1:
r = 10 * x
elif 0.1 < x <= 0.9:
r = 1
# interpolate{{0.9, 1}, {1, 0}}
else:
r = 10 - 10 * x
return r
def __init__(self, a4_tune=440, sound_speed=345):
self.a4Tune = a4_tune
self.sound_speed = sound_speed
def frequency(self, note_num):
"""Returns the note frequency of the note represented by note
num. The math formula is T * 2 ^((N -15/12)), where T is the A4
default tune (usually 440 Hz) and N is the number of the note
(starting from C0 = 0).
"""
if note_num is None:
return 0
return self.a4Tune * 2 ** ((note_num - 57) / 12)
def wavelength(self, note_num):
return self.sound_speed / self.frequency(note_num)
def print_notes_table(self, start=0, end=120):
cont = 0
print('| Note | Name | Frequency | Wavelength')
for x in range(start, end):
print("| %4d | %4s | %14.8f | %11.8f " % (
cont, Notes.get_note_name(x), self.frequency(x),
self.wavelength(x)))
cont += 1
class WavFile:
# length of data to read.
chunk = 1024
def __init__(self, duration):
self.duration = duration
self.samplesPerSecond = 44100
self.samples = int(self.duration * self.samplesPerSecond)
self.data = np.ones(self.samples)
self.scaled = None
self.it = np.nditer(self.data, flags=['f_index'],
op_flags=['readwrite'])
def scale(self):
max_val = np.max(np.abs(self.data))
if max_val == 0:
max_val = 1
self.scaled = np.int16(self.data/max_val * 32767)
def write(self, file_name):
write(file_name, self.samplesPerSecond, self.scaled)
def play(self, file_name):
# open the file for reading.
wf = wave.open(file_name, 'rb')
# create an audio object
p = pa.PyAudio()
# open stream based on the wave object which has been input.
stream = p.open(format=p.get_format_from_width(
wf.getsampwidth()),
channels=wf.getnchannels(),
rate=wf.getframerate(),
output=True)
# read data (based on the chunk size)
data = wf.readframes(self.chunk)
# play stream (looping from beginning of file to the end)
while len(data) > 0:
# writing to the stream is what *actually* plays the sound.
stream.write(data)
data = wf.readframes(self.chunk)
# stop stream
stream.stop_stream()
stream.close()
# close PyAudio
p.terminate()
class Music:
SIN = 1
SQUARE = 2
SAWTOOTH = 3
def __init__(self, notes, music, def_time=4, bpm=120, tone_shift=0,
waveform=SIN):
self.notes = notes
self.music = music
self.waveform = waveform
self.fileName = None
self.bpm = bpm
self.defTime = def_time
self.toneShift = tone_shift
self.randomShifts = [tone_shift]
self.duration = 0
self.duration_calc()
self.wav = WavFile(self.duration)
def duration_calc(self):
# Beat time in seconds
b_time = 60 / self.bpm
for note in self.music:
if isinstance(note, list):
max_tone_time = b_time * self.defTime / note[1]
else:
max_tone_time = b_time
self.duration += max_tone_time
def parse(self):
tone_start = 0
# tone index in the music array
i = 0
# Beat time in seconds
b_time = 60 / self.bpm
for x in self.wav.it:
music_time = self.wav.it.index / self.wav.samplesPerSecond
tone_time = music_time - tone_start
if isinstance(self.music[i], list):
tone = Notes.get_note_num(self.music[i][0]) +\
self.toneShift if self.music[i][0]\
is not None else None
max_tone_time = b_time * self.defTime / self.music[i][1]
else:
tone = Notes.get_note_num(self.music[i]) +\
self.toneShift if self.music[i]\
is not None else None
max_tone_time = b_time
volume = Notes.volume(tone_time, max_tone_time, 3)
par = self.wav.it.index * 2 * np.pi / \
self.wav.samplesPerSecond
if self.waveform == Music.SIN:
x[...] = math.sin(
par * self.notes.frequency(tone)) * volume
elif self.waveform == Music.SQUARE:
x[...] = signal.square(
par * self.notes.frequency(tone)) * volume
elif self.waveform == Music.SAWTOOTH:
x[...] = signal.sawtooth(
par * self.notes.frequency(tone)) * volume
# If the tone time has ended go to the next tone
if tone_time >= max_tone_time and i < len(self.music) - 1:
i += 1 # increment tone index
tone_start = music_time
self.toneShift = random.choice(self.randomShifts)
def save(self, file_name):
self.fileName = file_name
self.wav.scale()
self.wav.write(self.fileName)
def play(self):
self.wav.play(self.fileName)
@staticmethod
def random():
scale = Notes.scales[0]['notes']
scale.append(None)
times = [4/1.5, 4, 8, 4, 8, 4]
octaves = [4]
music = []
for i in range(60):
note = random.choice(scale)
note = note + (12 * random.choice(octaves))\
if note is not None else None
time = random.choice(times)
music.append([note, time])
return music
def main():
notes = Notes()
# This notes come from the file musics.py that contain notes samples
music_notes = smoke_on_the_water
music = Music(notes, music_notes, bpm=112, tone_shift=44)
music.parse()
music.save('generated\music1.wav')
print('Playing...')
music.play()
print('End Playing')
plt.plot(music.wav.data)
plt.ylabel('Air Pressure')
plt.xlabel('Time')
plt.show()
if __name__ == "__main__":
main()
| 9,327 | 365 | 332 |
c4cc7cbeda8d27d81365c8e30a7d91223c604b47 | 710 | py | Python | warpworks/model/base.py | storborg/warpworks | a41a0a5bab8b826157309f7d0bafbdcdff66505b | [
"MIT"
] | null | null | null | warpworks/model/base.py | storborg/warpworks | a41a0a5bab8b826157309f7d0bafbdcdff66505b | [
"MIT"
] | null | null | null | warpworks/model/base.py | storborg/warpworks | a41a0a5bab8b826157309f7d0bafbdcdff66505b | [
"MIT"
] | null | null | null | from __future__ import (absolute_import, division, print_function,
unicode_literals)
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import scoped_session, sessionmaker
from zope.sqlalchemy import ZopeTransactionExtension
__all__ = ['Base', 'Session']
Session = scoped_session(sessionmaker(extension=ZopeTransactionExtension()))
Base = declarative_base(cls=_Base)
| 22.1875 | 76 | 0.673239 | from __future__ import (absolute_import, division, print_function,
unicode_literals)
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import scoped_session, sessionmaker
from zope.sqlalchemy import ZopeTransactionExtension
__all__ = ['Base', 'Session']
Session = scoped_session(sessionmaker(extension=ZopeTransactionExtension()))
class _Base(object):
@classmethod
def get(cls, id):
"""
Get an instance of this class by primary key.
:param id:
Primary key value.
:return:
Instance of the class.
"""
return Session.query(cls).get(id)
Base = declarative_base(cls=_Base)
| 0 | 262 | 23 |
e1690ac57e028e29d21726a6aeb67218a5883687 | 1,105 | py | Python | src/controlmanual/core/typings.py | ControlManual/ControlManual | 3c4bae24dbf27059381b997e535ec05aab37a0c0 | [
"MIT"
] | null | null | null | src/controlmanual/core/typings.py | ControlManual/ControlManual | 3c4bae24dbf27059381b997e535ec05aab37a0c0 | [
"MIT"
] | 12 | 2021-12-27T23:52:55.000Z | 2022-03-09T00:26:23.000Z | src/controlmanual/core/typings.py | ControlManual/ControlManual | 3c4bae24dbf27059381b997e535ec05aab37a0c0 | [
"MIT"
] | null | null | null | from typing import (
Protocol,
Awaitable,
TypeVar,
Callable,
Union,
runtime_checkable,
Dict,
TypedDict,
Generic,
Type,
TYPE_CHECKING,
)
from typing_extensions import ParamSpec
if TYPE_CHECKING:
from .object import Object
T = TypeVar("T", covariant=True)
A = TypeVar("A", bound="Object")
P = ParamSpec("P")
@runtime_checkable
MaybeAwaitable = Union[Awaitable[T], T]
MaybeCoroutine = Callable[P, MaybeAwaitable[T]]
CommandSchema = Dict[str, CommandArgument[A]]
| 19.732143 | 63 | 0.58733 | from typing import (
Protocol,
Awaitable,
TypeVar,
Callable,
Union,
runtime_checkable,
Dict,
TypedDict,
Generic,
Type,
TYPE_CHECKING,
)
from typing_extensions import ParamSpec
if TYPE_CHECKING:
from .object import Object
T = TypeVar("T", covariant=True)
A = TypeVar("A", bound="Object")
P = ParamSpec("P")
class UI(Protocol):
async def print(self, *values: str) -> None:
...
async def write(self, *values: str) -> None:
...
async def success(self, *values: str) -> None:
...
async def error(self, *values: str) -> None:
...
async def input(self) -> str:
...
class CommandArgument(TypedDict, Generic[A]):
required: bool
description: str
type: Type[A]
@runtime_checkable
class EngineCallable(Protocol[P, T]):
def cm_call(self, *args: P.args, **kwargs: P.kwargs) -> T:
...
MaybeAwaitable = Union[Awaitable[T], T]
MaybeCoroutine = Callable[P, MaybeAwaitable[T]]
CommandSchema = Dict[str, CommandArgument[A]]
| 217 | 99 | 243 |
9f61c97ea77ba61d6d50535c2987c323c8c2b7e6 | 3,041 | py | Python | nittymcpick/cls/linter.py | priv-kweihmann/nittymcpick | 4ec0eeca0f888aa9c67a39ca11cc1fe5bb905477 | [
"BSD-2-Clause"
] | 1 | 2020-02-23T17:31:17.000Z | 2020-02-23T17:31:17.000Z | nittymcpick/cls/linter.py | priv-kweihmann/nittymcpick | 4ec0eeca0f888aa9c67a39ca11cc1fe5bb905477 | [
"BSD-2-Clause"
] | 7 | 2020-02-16T20:41:44.000Z | 2021-10-05T18:08:00.000Z | nittymcpick/cls/linter.py | priv-kweihmann/nittymcpick | 4ec0eeca0f888aa9c67a39ca11cc1fe5bb905477 | [
"BSD-2-Clause"
] | null | null | null | import json
import subprocess
import re
import sys
from nittymcpick.cls.comment import Comment
| 37.54321 | 120 | 0.507399 | import json
import subprocess
import re
import sys
from nittymcpick.cls.comment import Comment
class Linter():
def __init__(self, _config, _appargs):
self.__name = _config["linter"]["name"]
self.__exec = _config["linter"]["path"]
self.__args = _config["linter"]["args"]
self.__appargs = _appargs
self.__retregex = _config["linter"]["ret_regex"]
self.__pattern = _config["matches"]["pattern"]
self.__lineadjust = _config["linter"]["tweaks"]["line_count_adjust"]
self.__singlefileexec = _config["linter"]["tweaks"]["single_file_exec"]
def __eval(self, _in, _files):
res = []
for m in re.finditer(self.__retregex, _in, re.MULTILINE):
_tpl = {
"file": "Unknown file",
"severity": "issue",
"line": 1,
"message": ""
}
for k, v in _tpl.items():
if isinstance(v, int):
try:
_t = int(m.group(k).strip())
except (IndexError, ValueError):
_t = _tpl[k]
_tpl[k] = _t + self.__lineadjust
else:
try:
_tpl[k] = m.group(k).strip()
except IndexError:
pass
_f = [x for x in _files if x.file == _tpl["file"]]
if _f:
_f = _f[0]
# if finding is not in change set, don't issue anything
if (_tpl["line"] in _f.affectedlines and self.__appargs.onlynew) or (not self.__appargs.onlynew):
_msg = "{} found a potential {} - {}".format(
self.__name, _tpl["severity"], _tpl["message"])
res.append(Comment(
_msg, _tpl["line"], _f.relpath, _f.base_sha, _f.start_sha, _f.head_sha, self.__appargs.botname))
return res
def Run(self, _files):
_matchfiles = [x for x in _files if re.match(self.__pattern, x.file)]
if self.__singlefileexec:
_loops = [[x] for x in _matchfiles]
else:
_loops = _matchfiles
out = ""
for l in _loops:
try:
out += subprocess.check_output([self.__exec] + self.__args + [x.file for x in l],
universal_newlines=True, stderr=subprocess.STDOUT)
except subprocess.CalledProcessError as e:
out += e.stdout or ""
return self.__eval(out, _matchfiles)
@staticmethod
def SetupLinter(_args):
res = []
__config = []
try:
with open(_args.config) as i:
__config = json.load(i)
except (json.JSONDecodeError, FileNotFoundError, PermissionError) as e:
sys.stderr.write("Can't decode config: {}\n".format(e))
sys.exit(-1)
for c in __config:
res.append(Linter(c, _args))
return res
| 2,801 | 120 | 23 |
cbd9852948ed7e1b902fcc21a2a9dcfff85378db | 1,635 | py | Python | experimental/conversation_go_awry/feature_extraction/user_features/user_toxicity.py | CyberFlameGO/wikidetox | 60ee914c8bb81bada0847a3676e0bf24a6e35221 | [
"Apache-2.0"
] | 66 | 2017-09-10T12:47:37.000Z | 2022-03-18T01:33:10.000Z | experimental/conversation_go_awry/feature_extraction/user_features/user_toxicity.py | CyberFlameGO/wikidetox | 60ee914c8bb81bada0847a3676e0bf24a6e35221 | [
"Apache-2.0"
] | 82 | 2017-09-12T13:01:59.000Z | 2021-11-10T19:40:01.000Z | experimental/conversation_go_awry/feature_extraction/user_features/user_toxicity.py | CyberFlameGO/wikidetox | 60ee914c8bb81bada0847a3676e0bf24a6e35221 | [
"Apache-2.0"
] | 20 | 2017-11-02T21:23:35.000Z | 2022-03-09T01:30:58.000Z | """
Copyright 2017 Google Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import json
import pandas as pd
from collections import defaultdict
import csv
import numpy as np
folder = 'last_comments'
with open('/scratch/wiki_dumps/expr_with_matching/%s/mapping.json'%(folder)) as f:
mapping = json.load(f)
history_toxicity = {}
for ind in range(13):
with open('/scratch/wiki_dumps/toxicity_scores/toxicity_scored_0%02d.csv'%(ind)) as f:
df = pd.read_csv(f, encoding = 'utf-8', index_col=None, quoting=csv.QUOTE_ALL)
print(ind, len(df))
for index, row in df.iterrows():
conv_id, user = mapping[str(row['id'])]
if not(conv_id in history_toxicity):
history_toxicity[conv_id] = defaultdict(list)
history_toxicity[conv_id][user].append(row['TOXICITY'])
print(ind, 'finished')
output = []
for conv_id, conv in history_toxicity.items():
out = {}
for user, toxicity in conv.items():
out[user] = np.mean(toxicity)
output.append((conv_id, out))
with open('/scratch/wiki_dumps/expr_with_matching/user_features/history_toxicity.json', 'w') as w:
json.dump(output, w)
| 34.0625 | 98 | 0.719266 | """
Copyright 2017 Google Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import json
import pandas as pd
from collections import defaultdict
import csv
import numpy as np
folder = 'last_comments'
with open('/scratch/wiki_dumps/expr_with_matching/%s/mapping.json'%(folder)) as f:
mapping = json.load(f)
history_toxicity = {}
for ind in range(13):
with open('/scratch/wiki_dumps/toxicity_scores/toxicity_scored_0%02d.csv'%(ind)) as f:
df = pd.read_csv(f, encoding = 'utf-8', index_col=None, quoting=csv.QUOTE_ALL)
print(ind, len(df))
for index, row in df.iterrows():
conv_id, user = mapping[str(row['id'])]
if not(conv_id in history_toxicity):
history_toxicity[conv_id] = defaultdict(list)
history_toxicity[conv_id][user].append(row['TOXICITY'])
print(ind, 'finished')
output = []
for conv_id, conv in history_toxicity.items():
out = {}
for user, toxicity in conv.items():
out[user] = np.mean(toxicity)
output.append((conv_id, out))
with open('/scratch/wiki_dumps/expr_with_matching/user_features/history_toxicity.json', 'w') as w:
json.dump(output, w)
| 0 | 0 | 0 |
71ffbec5cd20308b2d2dc18b8082641b84ae620e | 2,970 | py | Python | app/data.py | ASLive/webserver | af36d194a4e4a5880df9c046518e75a07abfea22 | [
"MIT"
] | null | null | null | app/data.py | ASLive/webserver | af36d194a4e4a5880df9c046518e75a07abfea22 | [
"MIT"
] | 11 | 2018-11-07T21:05:00.000Z | 2022-02-10T00:05:36.000Z | app/data.py | ASLive/webserver | af36d194a4e4a5880df9c046518e75a07abfea22 | [
"MIT"
] | 1 | 2019-06-18T03:32:55.000Z | 2019-06-18T03:32:55.000Z | import os
import numpy as np
import math
import scipy
import pickle
TRAIN_DATA_PATH = "/Users/evanradcliffe/Senior Design/webserver/app/asl-alphabet/asl_alphabet_train"
split_arr = lambda arr: arr[int(len(arr)/7):]
def read_hand3d():
"""read data from files (run setup_asl.py to generate)"""
images = pickle.load( open("./pickle/images.pickle","rb") )
labels = pickle.load( open("./pickle/labels.pickle","rb") )
classes = pickle.load( open("./pickle/classes.pickle","rb") )
return np.array(images), np.array(labels), classes
def read_data():
"""read data from files"""
print("loading data...",end="")
ret_images = []
ret_labels = []
ret_class_names = []
count = 0
for label in list(os.walk(TRAIN_DATA_PATH)): # walk directory
full_path, image_list = label[0], label[2]
letter = full_path[len(TRAIN_DATA_PATH)+1:] # get letter class
if len(letter) > 0:
# get list of file paths to each image
image_path_list = [TRAIN_DATA_PATH+"/"+letter+"/"+file for file in image_list]
ret_class_names.append(letter)
# print(letter, count)
print(".",end="")
if len(image_path_list) > 0:
# iterate each image
for i in range(len(image_path_list)):
# add image, letter to ret array
image = scipy.misc.imread(image_path_list[i])
image = scipy.misc.imresize(image, (28, 28))
image = rgb2gray(image)
# image = np.expand_dims((image.astype('float') / 255.0) - 0.5, 0)
ret_images.append(image)
ret_labels.append(count)
count += 1
print()
return np.array(ret_images), np.array(ret_labels), ret_class_names
def split_data(images, labels):
"""split training and testing data"""
train_percent = 0.7
count = math.floor(len(images)*train_percent)
images, labels = unison_shuffled_copies(images, labels)
train_images, test_images = images[:count], images[count:]
train_labels, test_labels = labels[:count], labels[count:]
return (train_images, train_labels), (test_images, test_labels)
| 38.076923 | 100 | 0.63064 | import os
import numpy as np
import math
import scipy
import pickle
TRAIN_DATA_PATH = "/Users/evanradcliffe/Senior Design/webserver/app/asl-alphabet/asl_alphabet_train"
split_arr = lambda arr: arr[int(len(arr)/7):]
def load_data():
images, labels, class_names = read_data()
(train_images, train_labels), (test_images, test_labels) = split_data(images, labels)
train_images = split_arr(train_images)
train_labels = split_arr(train_labels)
test_images = split_arr(test_images)
test_labels = split_arr(test_labels)
return (train_images, train_labels), (test_images, test_labels), class_names
# return [train_images, train_labels, test_images, test_labels, class_names]
def rgb2gray(rgb):
r, g, b = rgb[:,:,0], rgb[:,:,1], rgb[:,:,2]
gray = 0.3333 * r + 0.3333 * g + 0.3333 * b
return gray
def read_hand3d():
"""read data from files (run setup_asl.py to generate)"""
images = pickle.load( open("./pickle/images.pickle","rb") )
labels = pickle.load( open("./pickle/labels.pickle","rb") )
classes = pickle.load( open("./pickle/classes.pickle","rb") )
return np.array(images), np.array(labels), classes
def read_data():
"""read data from files"""
print("loading data...",end="")
ret_images = []
ret_labels = []
ret_class_names = []
count = 0
for label in list(os.walk(TRAIN_DATA_PATH)): # walk directory
full_path, image_list = label[0], label[2]
letter = full_path[len(TRAIN_DATA_PATH)+1:] # get letter class
if len(letter) > 0:
# get list of file paths to each image
image_path_list = [TRAIN_DATA_PATH+"/"+letter+"/"+file for file in image_list]
ret_class_names.append(letter)
# print(letter, count)
print(".",end="")
if len(image_path_list) > 0:
# iterate each image
for i in range(len(image_path_list)):
# add image, letter to ret array
image = scipy.misc.imread(image_path_list[i])
image = scipy.misc.imresize(image, (28, 28))
image = rgb2gray(image)
# image = np.expand_dims((image.astype('float') / 255.0) - 0.5, 0)
ret_images.append(image)
ret_labels.append(count)
count += 1
print()
return np.array(ret_images), np.array(ret_labels), ret_class_names
def unison_shuffled_copies(a, b):
assert len(a) == len(b)
p = np.random.permutation(len(a))
return a[p], b[p]
def split_data(images, labels):
"""split training and testing data"""
train_percent = 0.7
count = math.floor(len(images)*train_percent)
images, labels = unison_shuffled_copies(images, labels)
train_images, test_images = images[:count], images[count:]
train_labels, test_labels = labels[:count], labels[count:]
return (train_images, train_labels), (test_images, test_labels)
| 671 | 0 | 69 |
2e9f3722170c3a07c50da57bbee3e541b84730ba | 10,222 | py | Python | riscvmodel/isa.py | mss10/riscv-python-model | b48786dd18f61a0a25cb0928bd592dc95f7fd80c | [
"MIT"
] | null | null | null | riscvmodel/isa.py | mss10/riscv-python-model | b48786dd18f61a0a25cb0928bd592dc95f7fd80c | [
"MIT"
] | null | null | null | riscvmodel/isa.py | mss10/riscv-python-model | b48786dd18f61a0a25cb0928bd592dc95f7fd80c | [
"MIT"
] | null | null | null | from .insn import *
from .variant import RV64I,Extensions
@isa("lui", 0x37)
class InstructionLUI(InstructionUType):
"""
The Load Upper Immediate (LUI) instruction loads the given immediate (unsigned 20 bit) to the upper 20 bit
of the destination register. The lower bits are set to zero in the destination register. This instruction
can be used to efficiently form constants, as a sequence of LUI and ORI for example.
"""
@isa("auipc", 0x17)
@isa("jal", 0x6F)
@isa("jalr", 0x67, 0)
@isa("beq", 0x63, 0)
@isa("bne", 0x63, 1)
@isa("blt", 0x63, 4)
@isa("bge", 0x63, 5)
@isa("bltu", 0x63, 6)
@isa("bgeu", 0x63, 7)
@isa("lb", 0x03, 0)
@isa("lh", 0x03, 1)
@isa("lw", 0x03, 2)
@isa("lbu", 0x03, 4)
@isa("lhu", 0x03, 5)
@isa("sb", 0x23, 0)
@isa("sh", 0x23, 1)
@isa("sw", 0x23, 2)
@isa("addi", 0x13, 0)
@isa("slti", 0x13, 2)
@isa("sltiu", 0x13, 3)
@isa("xori", 0x13, 4)
@isa("ori", 0x13, 6)
@isa("andi", 0x13, 7)
@isa("slli", 0x13, 1, 0x00)
@isa("srli", 0x13, 5, 0x00)
@isa("srai", 0x13, 5, 0x20)
@isa("add", 0x33, 0, 0x00)
@isa("sub", 0x33, 0, 0x20)
@isa("sll", 0x33, 1, 0x00)
@isa("slt", 0x33, 2, 0x00)
@isa("sltu", 0x33, 3, 0x00)
@isa("xor", 0x33, 4, 0x00)
@isa("srl", 0x33, 5, 0x00)
@isa("sra", 0x33, 5, 0x20)
@isa("or", 0x33, 6, 0x00)
@isa("and", 0x33, 7, 0x00)
@isa("fence", 0xF, 0, 0x00)
@isa("fence.i", 0xF, 1, 0x00)
@isa("ecall", 0x73, 0)
@isa("ebreak", 0x73, 0)
@isa("csrrw", 0x73, 1)
@isa("csrrs", 0x73, 2)
@isa("csrrc", 0x73, 3)
@isa("csrrwi", 0x73, 5)
@isa("csrrsi", 0x73, 6)
@isa("csrrci", 0x73, 7)
@isa("lwu", 0x3, 6, variant=RV64I)
@isa("ld", 0x3, 3, variant=RV64I)
@isa("sd", 0x23, 3, variant=RV64I)
@isa_pseudo()
@isaC("c.addi", 1, funct3=0)
@isaC("c.andi", 1, funct3=4)
@isaC("c.swsp", 2, funct3=6)
@isaC("c.li", 1, funct3=2)
@isaC("c.mv", 2, funct4=8)
| 26.970976 | 110 | 0.654862 | from .insn import *
from .variant import RV64I,Extensions
@isa("lui", 0x37)
class InstructionLUI(InstructionUType):
"""
The Load Upper Immediate (LUI) instruction loads the given immediate (unsigned 20 bit) to the upper 20 bit
of the destination register. The lower bits are set to zero in the destination register. This instruction
can be used to efficiently form constants, as a sequence of LUI and ORI for example.
"""
def execute(self, model: State):
model.intreg[self.rd] = (self.imm << 12)
@isa("auipc", 0x17)
class InstructionAUIPC(InstructionUType):
def execute(self, model: State):
model.intreg[self.rd] = model.pc + (self.imm << 12)
@isa("jal", 0x6F)
class InstructionJAL(InstructionJType):
def execute(self, model: State):
model.intreg[self.rd] = model.pc + 4
model.pc = self.imm
@isa("jalr", 0x67, 0)
class InstructionJALR(InstructionIType):
def execute(self, model: State):
model.intreg[self.rd] = model.pc + 4
model.pc = model.intreg[self.rs1] + self.imm
@isa("beq", 0x63, 0)
class InstructionBEQ(InstructionBType):
def execute(self, model: State):
# todo: problem with __cmp__
if model.intreg[self.rs1].value == model.intreg[self.rs2].value:
model.pc = model.pc + self.imm
@isa("bne", 0x63, 1)
class InstructionBNE(InstructionBType):
def execute(self, model: State):
if model.intreg[self.rs1].value != model.intreg[self.rs2].value:
model.pc = model.pc + self.imm
@isa("blt", 0x63, 4)
class InstructionBLT(InstructionBType):
def execute(self, model: State):
if model.intreg[self.rs1].value < model.intreg[self.rs2].value:
model.pc = model.pc + self.imm
@isa("bge", 0x63, 5)
class InstructionBGE(InstructionBType):
def execute(self, model: State):
if model.intreg[self.rs1].value >= model.intreg[self.rs2].value:
model.pc = model.pc + self.imm
@isa("bltu", 0x63, 6)
class InstructionBLTU(InstructionBType):
def execute(self, model: State):
if model.intreg[self.rs1].unsigned() < model.intreg[self.rs2].unsigned():
model.pc = model.pc + self.imm
@isa("bgeu", 0x63, 7)
class InstructionBGEU(InstructionBType):
def execute(self, model: State):
if model.intreg[self.rs1].unsigned() >= model.intreg[self.rs2].unsigned():
model.pc = model.pc + self.imm
@isa("lb", 0x03, 0)
class InstructionLB(InstructionILType):
def execute(self, model: State):
data = model.lb((model.intreg[self.rs1] + self.imm).unsigned())
if (data >> 7) & 0x1:
data |= 0xFFFFFF00
model.intreg[self.rd] = data
@isa("lh", 0x03, 1)
class InstructionLH(InstructionILType):
def execute(self, model: State):
data = model.lh((model.intreg[self.rs1] + self.imm).unsigned())
if (data >> 15) & 0x1:
data |= 0xFFFF0000
model.intreg[self.rd] = data
@isa("lw", 0x03, 2)
class InstructionLW(InstructionILType):
def execute(self, model: State):
data = model.lw((model.intreg[self.rs1] + self.imm).unsigned())
model.intreg[self.rd] = data
@isa("lbu", 0x03, 4)
class InstructionLBU(InstructionILType):
def execute(self, model: State):
model.intreg[self.rd] = model.lb((model.intreg[self.rs1] + self.imm).unsigned())
@isa("lhu", 0x03, 5)
class InstructionLHU(InstructionILType):
def execute(self, model: State):
model.intreg[self.rd] = model.lh((model.intreg[self.rs1] + self.imm).unsigned())
@isa("sb", 0x23, 0)
class InstructionSB(InstructionSType):
def execute(self, model: State):
model.sb((model.intreg[self.rs1] + self.imm).unsigned(), model.intreg[self.rs2])
@isa("sh", 0x23, 1)
class InstructionSH(InstructionSType):
def execute(self, model: State):
model.sh((model.intreg[self.rs1] + self.imm).unsigned(), model.intreg[self.rs2])
@isa("sw", 0x23, 2)
class InstructionSW(InstructionSType):
def execute(self, model: State):
model.sw((model.intreg[self.rs1] + self.imm).unsigned(), model.intreg[self.rs2])
@isa("addi", 0x13, 0)
class InstructionADDI(InstructionIType):
def execute(self, model: State):
model.intreg[self.rd] = model.intreg[self.rs1] + self.imm
@isa("slti", 0x13, 2)
class InstructionSLTI(InstructionIType):
def execute(self, model: State):
if model.intreg[self.rs1] < self.imm:
model.intreg[self.rd] = 1
else:
model.intreg[self.rd] = 0
@isa("sltiu", 0x13, 3)
class InstructionSLTIU(InstructionIType):
def execute(self, model: State):
if model.intreg[self.rs1].unsigned() < int(self.imm):
model.intreg[self.rd] = 1
else:
model.intreg[self.rd] = 0
@isa("xori", 0x13, 4)
class InstructionXORI(InstructionIType):
def execute(self, model: State):
model.intreg[self.rd] = model.intreg[self.rs1] ^ self.imm
@isa("ori", 0x13, 6)
class InstructionORI(InstructionIType):
def execute(self, model: State):
model.intreg[self.rd] = model.intreg[self.rs1] | self.imm
@isa("andi", 0x13, 7)
class InstructionANDI(InstructionIType):
def execute(self, model: State):
model.intreg[self.rd] = model.intreg[self.rs1] & self.imm
@isa("slli", 0x13, 1, 0x00)
class InstructionSLLI(InstructionISType):
def execute(self, model: State):
model.intreg[self.rd] = model.intreg[self.rs1] << self.shamt
@isa("srli", 0x13, 5, 0x00)
class InstructionSRLI(InstructionISType):
def execute(self, model: State):
model.intreg[self.rd] = model.intreg[self.rs1].unsigned() >> int(self.shamt)
@isa("srai", 0x13, 5, 0x20)
class InstructionSRAI(InstructionISType):
def execute(self, model: State):
model.intreg[self.rd] = model.intreg[self.rs1] >> self.shamt
@isa("add", 0x33, 0, 0x00)
class InstructionADD(InstructionRType):
def execute(self, model: State):
model.intreg[self.rd] = model.intreg[self.rs1] + model.intreg[self.rs2]
@isa("sub", 0x33, 0, 0x20)
class InstructionSUB(InstructionRType):
def execute(self, model: State):
model.intreg[self.rd] = model.intreg[self.rs1] - model.intreg[self.rs2]
@isa("sll", 0x33, 1, 0x00)
class InstructionSLL(InstructionRType):
def execute(self, model: State):
model.intreg[self.rd] = model.intreg[self.rs1] << (model.intreg[self.rs2] & 0x1f)
@isa("slt", 0x33, 2, 0x00)
class InstructionSLT(InstructionRType):
def execute(self, model: State):
if model.intreg[self.rs1] < model.intreg[self.rs2]:
model.intreg[self.rd] = 1
else:
model.intreg[self.rd] = 0
@isa("sltu", 0x33, 3, 0x00)
class InstructionSLTU(InstructionRType):
def execute(self, state: State):
if state.intreg[self.rs1].unsigned() < state.intreg[self.rs2].unsigned():
state.intreg[self.rd] = 1
else:
state.intreg[self.rd] = 0
@isa("xor", 0x33, 4, 0x00)
class InstructionXOR(InstructionRType):
def execute(self, model: State):
model.intreg[self.rd] = model.intreg[self.rs1] ^ model.intreg[self.rs2]
@isa("srl", 0x33, 5, 0x00)
class InstructionSRL(InstructionRType):
def execute(self, model: State):
model.intreg[self.rd] = model.intreg[self.rs1] >> model.intreg[self.rs2]
@isa("sra", 0x33, 5, 0x20)
class InstructionSRA(InstructionRType):
def execute(self, model: State):
model.intreg[self.rd] = model.intreg[self.rs1] >> model.intreg[self.rs2]
@isa("or", 0x33, 6, 0x00)
class InstructionOR(InstructionRType):
def execute(self, model: State):
model.intreg[self.rd] = model.intreg[self.rs1] | model.intreg[self.rs2]
@isa("and", 0x33, 7, 0x00)
class InstructionAND(InstructionRType):
def execute(self, model: State):
model.intreg[self.rd] = model.intreg[self.rs1] & model.intreg[self.rs2]
@isa("fence", 0xF, 0, 0x00)
class InstructionFENCE(Instruction):
pass
@isa("fence.i", 0xF, 1, 0x00)
class InstructionFENCEI(Instruction):
pass
@isa("ecall", 0x73, 0)
class InstructionECALL(InstructionIType):
def execute(self, model: State):
pass
@isa("ebreak", 0x73, 0)
class InstructionEBREAK(Instruction):
def execute(self, model: State):
pass
@isa("csrrw", 0x73, 1)
class InstructionCSRRW(InstructionIType):
def execute(self, model: State):
pass
@isa("csrrs", 0x73, 2)
class InstructionCSRRS(InstructionIType):
def execute(self, model: State):
pass
@isa("csrrc", 0x73, 3)
class InstructionCSRRC(Instruction):
pass
@isa("csrrwi", 0x73, 5)
class InstructionCSRRWI(Instruction):
pass
@isa("csrrsi", 0x73, 6)
class InstructionCSRRSI(Instruction):
pass
@isa("csrrci", 0x73, 7)
class InstructionCSRRCI(Instruction):
pass
@isa("lwu", 0x3, 6, variant=RV64I)
class InstructionLWU(InstructionIType):
pass
@isa("ld", 0x3, 3, variant=RV64I)
class InstructionLD(InstructionIType):
pass
@isa("sd", 0x23, 3, variant=RV64I)
class InstructionSD(InstructionISType):
pass
@isa_pseudo()
class InstructionNOP(InstructionADDI):
def __init__(self):
super().__init__(0, 0, 0)
@isaC("c.addi", 1, funct3=0)
class InstructionCADDI(InstructionCIType):
def expand(self):
pass
def execute(self, model: State):
model.intreg[self.rd] = model.intreg[self.rd] + self.imm
@isaC("c.andi", 1, funct3=4)
class InstructionCANDI(InstructionCBType):
def expand(self):
pass
@isaC("c.swsp", 2, funct3=6)
class InstructionCSWSP(InstructionCSSType):
def expand(self):
pass
def decode(self, machinecode: int):
self.rs = (machinecode >> 2) & 0x1f
imm12to9 = (machinecode >> 9) & 0xf
imm8to7 = (machinecode >> 7) & 0x3
self.imm.set_from_bits((imm8to7 << 4) | imm12to9)
def execute(self, model: State):
pass
@isaC("c.li", 1, funct3=2)
class InstructionCLI(InstructionCIType):
def expand(self):
pass
def execute(self, model: State):
model.intreg[self.rd] = self.imm
@isaC("c.mv", 2, funct4=8)
class InstructionCMV(InstructionCRType):
def expand(self):
pass
def execute(self, model: State):
model.intreg[self.rd] = model.intreg[self.rs]
| 4,675 | 1,087 | 2,567 |
f51fd445fcb04f80f1d3a4a51fabc963c31f468c | 3,937 | py | Python | models/layers/spatial_attn.py | milesgray/ImageFunctions | 35e4423b94149b0ba291eafb0cd98260a70d5f31 | [
"Apache-2.0"
] | null | null | null | models/layers/spatial_attn.py | milesgray/ImageFunctions | 35e4423b94149b0ba291eafb0cd98260a70d5f31 | [
"Apache-2.0"
] | null | null | null | models/layers/spatial_attn.py | milesgray/ImageFunctions | 35e4423b94149b0ba291eafb0cd98260a70d5f31 | [
"Apache-2.0"
] | null | null | null | import torch
import torch.nn as nn
import torch.nn.functional as F
from torch import Tensor
from .learnable import Scale, Balance
from .softmax import SpatialSoftmax2d
from .spectral import SpectralConv2d
from ImageFunctions.utility.torch import get_valid_padding
from .registry import register
from . import create as create_layer
@register("spatial_attn")
@register("cat_pool_spatial_attn")
@register("soft_wave_spatial_attn") | 36.794393 | 94 | 0.557023 | import torch
import torch.nn as nn
import torch.nn.functional as F
from torch import Tensor
from .learnable import Scale, Balance
from .softmax import SpatialSoftmax2d
from .spectral import SpectralConv2d
from ImageFunctions.utility.torch import get_valid_padding
from .registry import register
from . import create as create_layer
@register("spatial_attn")
class SpatialAttention(nn.Module):
def __init__(self, kernel_size: int=7):
super().__init__()
assert kernel_size in (3, 7), 'kernel size must be 3 or 7'
padding = 3 if kernel_size == 7 else 1
self.attn = nn.Sequential(
nn.Conv2d(1, 1, kernel_size,
padding=padding,
bias=False),
nn.Sigmoid()
)
def forward(self, x: torch.Tensor):
max_out, _ = torch.max(x, dim=1, keepdim=True)
return self.attn(max_out)
@register("cat_pool_spatial_attn")
class CatPoolSpatialAttention(nn.Module):
def __init__(self, kernel_size: int=7):
super().__init__()
assert kernel_size in (3, 7), 'kernel size must be 3 or 7'
padding = 3 if kernel_size == 7 else 1
self.attn = nn.Sequential(
nn.Conv2d(2, 1, kernel_size,
padding=padding,
bias=False),
nn.Sigmoid()
)
def forward(self, x: torch.Tensor):
avg_out = torch.mean(x, dim=1, keepdim=True)
max_out, _ = torch.max(x, dim=1, keepdim=True)
x = torch.cat([avg_out, max_out], dim=1)
return self.attn(x)
@register("soft_wave_spatial_attn")
class SoftWaveSpatialAttention(nn.Module):
def __init__(self, f_in: int,
f_out: int=None,
kernel: int=3,
modes: int=12,
learn_weight: bool=True,
dropout: float=0.0):
super().__init__()
if f_out is None:
f_out = f_in
self.channels = f_out
self.resize = nn.Identity()
if f_in != f_out:
self.resize = nn.Sequential(*[self.resize, nn.Conv2d(f_in, f_out, 1, bias=False)])
self.spatial_conv = nn.Conv2d(f_out,
f_out,
spatial_k,
padding=get_valid_padding(spatial_k, 0),
bias=False)
self.wave_conv = SpectralConv2d(1, f_out, 1, modes, modes)
self.spatial_softmax = SpatialSoftmax2d(temp=1.0, requires_grad=learn_weight)
self.use_dropout = dropout > 0
if self.use_dropout:
self.dropout = nn.Dropout(p=dropout)
if self.learn_weight:
self.global_scale = Scale(1.0)
self.wave_scale = Scale(0.0)
self.pool_scale = Scale(0.0)
self.global_balance = Balance()
def forward(self, x: torch.Tensor):
x = self.resize(x)
spatial_y = self.spatial_conv(x)
spatial_attn = self.sigmoid(spatial_y)
if self.learn_weight:
spatial_attn = self.spatial_scale(spatial_attn)
spatial_attn = self.spatial_softmax(spatial_attn)
wave_attn = self.wave_conv(x)
if self.learn_weight:
wave_attn = self.wave_scale(wave_attn)
wave_attn = self.sigmoid(wave_attn)
if self.use_dropout:
spatial_attn = self.dropout(spatial_attn)
wave_attn = self.dropout(wave_attn)
spatial_attn = F.interpolate(spatial_attn,
size=wave_attn.shape[-2:],
mode=self.interpolation,
align_corners=True)
attn = self.global_balance(spatial_attn, wave_attn)
attn = self.global_scale(attn)
return attn | 3,249 | 54 | 198 |
f16eabb15db8c0d9c36d7fbebbb6e18c1ddd544c | 1,478 | py | Python | src/Modules/Utilities/coordinate_tools.py | bopopescu/PyHouse_1 | 6444ed0b4c38ab59b9e419e4d54d65d598e6a54e | [
"MIT"
] | 1 | 2016-09-21T19:30:21.000Z | 2016-09-21T19:30:21.000Z | src/Modules/Utilities/coordinate_tools.py | bopopescu/PyHouse_1 | 6444ed0b4c38ab59b9e419e4d54d65d598e6a54e | [
"MIT"
] | null | null | null | src/Modules/Utilities/coordinate_tools.py | bopopescu/PyHouse_1 | 6444ed0b4c38ab59b9e419e4d54d65d598e6a54e | [
"MIT"
] | 1 | 2020-07-23T11:13:36.000Z | 2020-07-23T11:13:36.000Z | """
-*- test-case-name: PyHouse/src/Modules/Utilities/coordinate_tools.py -*-
@name: PyHouse/src/Modules/Utilities/coordinate_tools.py
@author: D. Brian Kimmel
@contact: d.briankimmel@gmail.com
@copyright: 2016-2016 by D. Brian Kimmel
@date: Created on Jun 21, 2016
@licencse: MIT License
@summary: Handle X,Y,Z coordinates
"""
# Import system type stuff
import xml.etree.ElementTree as ET
import datetime
# Import PyMh files
from Modules.Core.data_objects import CoordinateData
class Coords(object):
"""
"""
@staticmethod
def _get_coords(p_coords):
""" get CordinateData() from JSON data returned from the browser
@param p_str: Json returns a list of X, Y and Z values.
It should look like >> [ 1, 2.2, 33.44 ] but it could be deformed by the user.
@return: a CoordinateData() object filled in.
"""
l_ret = CoordinateData()
if isinstance(p_coords, list):
l_list = p_coords
else:
l_list = p_coords.strip('\[\]')
l_list = l_list.split(',')
try:
l_ret.X_Easting = float(l_list[0])
l_ret.Y_Northing = float(l_list[1])
l_ret.Z_Height = float(l_list[2])
except Exception as e_err:
print('Error {}'.format(e_err))
l_ret.X_Easting = 0.0
l_ret.Y_Northing = 0.0
l_ret.Z_Height = 0.0
return l_ret
# ## END DBK
| 28.423077 | 102 | 0.600135 | """
-*- test-case-name: PyHouse/src/Modules/Utilities/coordinate_tools.py -*-
@name: PyHouse/src/Modules/Utilities/coordinate_tools.py
@author: D. Brian Kimmel
@contact: d.briankimmel@gmail.com
@copyright: 2016-2016 by D. Brian Kimmel
@date: Created on Jun 21, 2016
@licencse: MIT License
@summary: Handle X,Y,Z coordinates
"""
# Import system type stuff
import xml.etree.ElementTree as ET
import datetime
# Import PyMh files
from Modules.Core.data_objects import CoordinateData
class Coords(object):
"""
"""
@staticmethod
def _get_coords(p_coords):
""" get CordinateData() from JSON data returned from the browser
@param p_str: Json returns a list of X, Y and Z values.
It should look like >> [ 1, 2.2, 33.44 ] but it could be deformed by the user.
@return: a CoordinateData() object filled in.
"""
l_ret = CoordinateData()
if isinstance(p_coords, list):
l_list = p_coords
else:
l_list = p_coords.strip('\[\]')
l_list = l_list.split(',')
try:
l_ret.X_Easting = float(l_list[0])
l_ret.Y_Northing = float(l_list[1])
l_ret.Z_Height = float(l_list[2])
except Exception as e_err:
print('Error {}'.format(e_err))
l_ret.X_Easting = 0.0
l_ret.Y_Northing = 0.0
l_ret.Z_Height = 0.0
return l_ret
# ## END DBK
| 0 | 0 | 0 |
96c27f0cde89e9aacc4e7df0046aaad96c168cea | 91 | py | Python | analyser/apps.py | gorkemyontem/SWE-573-2020 | 6a9ca57d294066fcc0db640f45d38d7341754a68 | [
"MIT"
] | null | null | null | analyser/apps.py | gorkemyontem/SWE-573-2020 | 6a9ca57d294066fcc0db640f45d38d7341754a68 | [
"MIT"
] | 35 | 2020-11-02T17:06:35.000Z | 2021-03-10T07:56:03.000Z | analyser/apps.py | ripoul/workshop-i2-back | 64f1e4146edad1e8e079aefad92f1751266270cf | [
"MIT"
] | 1 | 2021-02-02T14:38:27.000Z | 2021-02-02T14:38:27.000Z | from django.apps import AppConfig
| 15.166667 | 33 | 0.758242 | from django.apps import AppConfig
class AnalyserConfig(AppConfig):
name = 'analyser'
| 0 | 33 | 23 |
b0903548d1a84b3ec125464556f089e2b14802e0 | 118 | py | Python | pyalgo/algocryption/compiled_functions/compiled_functions.py | gilad-dotan/pyalgo_pkg | 132ff3c032c3fc0ae910201611e5d2cde387eb74 | [
"MIT"
] | 1 | 2021-04-01T08:59:30.000Z | 2021-04-01T08:59:30.000Z | pyalgo/algocryption/compiled_functions/compiled_functions.py | gilad-dotan/pyalgo_pkg | 132ff3c032c3fc0ae910201611e5d2cde387eb74 | [
"MIT"
] | null | null | null | pyalgo/algocryption/compiled_functions/compiled_functions.py | gilad-dotan/pyalgo_pkg | 132ff3c032c3fc0ae910201611e5d2cde387eb74 | [
"MIT"
] | null | null | null | # the pre compiled functions would be written here
# only the algocryption module can access these complied functions
| 39.333333 | 66 | 0.822034 | # the pre compiled functions would be written here
# only the algocryption module can access these complied functions
| 0 | 0 | 0 |
77a8b60002777297117b55b355863c7bb7ef4848 | 872 | py | Python | api_server/enviroment/enviroment.py | RobertoRosa7/python | 449f3908a38814ec7ec3b3ce1051b8abe70069d2 | [
"MIT"
] | null | null | null | api_server/enviroment/enviroment.py | RobertoRosa7/python | 449f3908a38814ec7ec3b3ce1051b8abe70069d2 | [
"MIT"
] | 2 | 2020-07-19T15:36:35.000Z | 2022-02-02T20:30:16.000Z | api_server/enviroment/enviroment.py | RobertoRosa7/python | 449f3908a38814ec7ec3b3ce1051b8abe70069d2 | [
"MIT"
] | null | null | null | import os, sys
sys.path.append(os.path.abspath(os.getcwd()))
from pymongo import MongoClient
API = 'http://localhost:4200' # frontend api
PRIMEIROAPP_API_DEV = os.environ.get('PRIMEIROAPP_API_DEV')
PRIMEIROAPP_API_PROD = os.environ.get('PRIMEIROAPP_API_PROD')
conn = MongoClient(host=PRIMEIROAPP_API_DEV, port=27017)
# conn = MongoClient(host=PRIMEIROAPP_API_PROD, port=27017)
# client = MongoClient("mongodb+srv://beto:beto1234@cluster0.rt58f.mongodb.net/primeiroapp?retryWrites=true&w=majority")
# client = MongoClient("mongodb://beto:beto123@cluster0-shard-00-00.rt58f.mongodb.net:27017,cluster0-shard-00-01.rt58f.mongodb.net:27017,cluster0-shard-00-02.rt58f.mongodb.net:27017/myFirstDatabase?ssl=true&replicaSet=atlas-xu6lpq-shard-0&authSource=admin&retryWrites=true&w=majority")
# db = client.test
# print(client.list_database_names())
db = conn['primeiroapp'] | 43.6 | 285 | 0.792431 | import os, sys
sys.path.append(os.path.abspath(os.getcwd()))
from pymongo import MongoClient
API = 'http://localhost:4200' # frontend api
PRIMEIROAPP_API_DEV = os.environ.get('PRIMEIROAPP_API_DEV')
PRIMEIROAPP_API_PROD = os.environ.get('PRIMEIROAPP_API_PROD')
conn = MongoClient(host=PRIMEIROAPP_API_DEV, port=27017)
# conn = MongoClient(host=PRIMEIROAPP_API_PROD, port=27017)
# client = MongoClient("mongodb+srv://beto:beto1234@cluster0.rt58f.mongodb.net/primeiroapp?retryWrites=true&w=majority")
# client = MongoClient("mongodb://beto:beto123@cluster0-shard-00-00.rt58f.mongodb.net:27017,cluster0-shard-00-01.rt58f.mongodb.net:27017,cluster0-shard-00-02.rt58f.mongodb.net:27017/myFirstDatabase?ssl=true&replicaSet=atlas-xu6lpq-shard-0&authSource=admin&retryWrites=true&w=majority")
# db = client.test
# print(client.list_database_names())
db = conn['primeiroapp'] | 0 | 0 | 0 |
044806f797125af3269ae09a8e5594ea984b2271 | 1,298 | py | Python | analyzer/utils/nlp_factory.py | cut4cut/cul_trip | f56a0b1865c4b6b5cb79c073bb171a819b607c4e | [
"MIT"
] | null | null | null | analyzer/utils/nlp_factory.py | cut4cut/cul_trip | f56a0b1865c4b6b5cb79c073bb171a819b607c4e | [
"MIT"
] | null | null | null | analyzer/utils/nlp_factory.py | cut4cut/cul_trip | f56a0b1865c4b6b5cb79c073bb171a819b607c4e | [
"MIT"
] | null | null | null | import re
import nltk
import gensim
import logging
from pymystem3 import Mystem
from string import punctuation
from nltk.corpus import stopwords
log = logging.getLogger(__name__)
| 25.45098 | 82 | 0.660247 | import re
import nltk
import gensim
import logging
from pymystem3 import Mystem
from string import punctuation
from nltk.corpus import stopwords
log = logging.getLogger(__name__)
def clean_html(text: str) -> str:
regexp = re.compile('<.*?>|[a-zA-Z]+|&|;')
return re.sub(regexp, '', text)
def preprocess_text(text: str, mystem: Mystem, stopwords: stopwords) -> list:
text = clean_html(text)
tokens = mystem.lemmatize(text.lower())
return [token for token in tokens if token not in stopwords\
and token != " " \
and token.strip() not in punctuation \
and token not in [' Β«', 'Β» ']]
def create_corpus(texts: list) -> list:
corpus = []
prep_texts = []
mystem = Mystem()
try:
russian_stopwords = stopwords.words("russian")
except:
nltk.download('stopwords', download_dir='./analyzer/utils/cache')
russian_stopwords = stopwords.words("russian")
for id, text in texts:
prep_texts.append(preprocess_text(text, mystem, russian_stopwords))
log.info('Cleaned and lemmatized texts')
for index, text in enumerate(prep_texts):
corpus.append(gensim.models.doc2vec.TaggedDocument(text, texts[index][0]))
log.info('Created taggeddocument (corpus)')
return corpus
| 1,043 | 0 | 69 |
442cb0c494deec67f842f872720e741951f3f855 | 867 | py | Python | lib/googlecloudsdk/sql/tools/ssl_certs/__init__.py | bopopescu/google-cloud-sdk | b34e6a18f1e89673508166acce816111c3421e4b | [
"Apache-2.0"
] | null | null | null | lib/googlecloudsdk/sql/tools/ssl_certs/__init__.py | bopopescu/google-cloud-sdk | b34e6a18f1e89673508166acce816111c3421e4b | [
"Apache-2.0"
] | null | null | null | lib/googlecloudsdk/sql/tools/ssl_certs/__init__.py | bopopescu/google-cloud-sdk | b34e6a18f1e89673508166acce816111c3421e4b | [
"Apache-2.0"
] | 1 | 2020-07-24T20:04:47.000Z | 2020-07-24T20:04:47.000Z | # Copyright 2013 Google Inc. All Rights Reserved.
"""Provide commands for managing SSL certificates of Cloud SQL instances."""
from googlecloudsdk.calliope import base
class SslCerts(base.Group):
"""Provide commands for managing SSL certificates of Cloud SQL instances.
Provide commands for managing SSL certificates of Cloud SQL instances,
including creating, deleting, listing, and getting information about
certificates.
"""
@staticmethod
def Args(parser):
"""Args is called by calliope to gather arguments for this command.
Args:
parser: An argparse parser that you can use it to add arguments that go
on the command line after this command. Positional arguments are
allowed.
"""
parser.add_argument(
'--instance',
'-i',
required=True,
help='Cloud SQL instance ID.')
| 27.967742 | 77 | 0.701269 | # Copyright 2013 Google Inc. All Rights Reserved.
"""Provide commands for managing SSL certificates of Cloud SQL instances."""
from googlecloudsdk.calliope import base
class SslCerts(base.Group):
"""Provide commands for managing SSL certificates of Cloud SQL instances.
Provide commands for managing SSL certificates of Cloud SQL instances,
including creating, deleting, listing, and getting information about
certificates.
"""
@staticmethod
def Args(parser):
"""Args is called by calliope to gather arguments for this command.
Args:
parser: An argparse parser that you can use it to add arguments that go
on the command line after this command. Positional arguments are
allowed.
"""
parser.add_argument(
'--instance',
'-i',
required=True,
help='Cloud SQL instance ID.')
| 0 | 0 | 0 |
447cd0fd1144a985b45bfcd0b776be5aeb960987 | 6,257 | py | Python | pyglet-fixed/window/cocoa/pyglet_textview.py | briggsmatthew/Wallpaper-Downloader | d8a22e02f35a5436566d14c3bdc6dd65fcddd3eb | [
"MIT"
] | 1 | 2016-11-16T17:22:48.000Z | 2016-11-16T17:22:48.000Z | pyglet-fixed/window/cocoa/pyglet_textview.py | briggsmatthew/Wallpaper-Downloader | d8a22e02f35a5436566d14c3bdc6dd65fcddd3eb | [
"MIT"
] | 1 | 2018-08-27T22:31:16.000Z | 2018-08-27T22:31:16.000Z | pyglet-fixed/window/cocoa/pyglet_textview.py | briggsmatthew/Wallpaper-Downloader | d8a22e02f35a5436566d14c3bdc6dd65fcddd3eb | [
"MIT"
] | 1 | 2019-09-06T03:05:35.000Z | 2019-09-06T03:05:35.000Z | import unicodedata
from pyglet.window import key
from pyglet.libs.darwin.cocoapy import *
NSArray = ObjCClass('NSArray')
NSApplication = ObjCClass('NSApplication')
# This custom NSTextView subclass is used for capturing all of the
# on_text, on_text_motion, and on_text_motion_select events.
PygletTextView = ObjCClass('PygletTextView')
| 39.352201 | 94 | 0.704171 | import unicodedata
from pyglet.window import key
from pyglet.libs.darwin.cocoapy import *
NSArray = ObjCClass('NSArray')
NSApplication = ObjCClass('NSApplication')
# This custom NSTextView subclass is used for capturing all of the
# on_text, on_text_motion, and on_text_motion_select events.
class PygletTextView_Implementation(object):
PygletTextView = ObjCSubclass('NSTextView', 'PygletTextView')
@PygletTextView.method(b'@'+PyObjectEncoding)
def initWithCocoaWindow_(self, window):
self = ObjCInstance(send_super(self, 'init'))
if not self:
return None
self._window = window
# Interpret tab and return as raw characters
self.setFieldEditor_(False)
self.empty_string = CFSTR("")
return self
@PygletTextView.method('v')
def dealloc(self):
self.empty_string.release()
@PygletTextView.method('v@')
def keyDown_(self, nsevent):
array = NSArray.arrayWithObject_(nsevent)
self.interpretKeyEvents_(array)
@PygletTextView.method('v@')
def insertText_(self, text):
text = cfstring_to_string(text)
self.setString_(self.empty_string)
# Don't send control characters (tab, newline) as on_text events.
if unicodedata.category(text[0]) != 'Cc':
self._window.dispatch_event("on_text", text)
@PygletTextView.method('v@')
def insertNewline_(self, sender):
# Distinguish between carriage return (u'\r') and enter (u'\x03').
# Only the return key press gets sent as an on_text event.
event = NSApplication.sharedApplication().currentEvent()
chars = event.charactersIgnoringModifiers()
ch = chr(chars.characterAtIndex_(0))
if ch == '\r':
self._window.dispatch_event("on_text", '\r')
@PygletTextView.method('v@')
def moveUp_(self, sender):
self._window.dispatch_event("on_text_motion", key.MOTION_UP)
@PygletTextView.method('v@')
def moveDown_(self, sender):
self._window.dispatch_event("on_text_motion", key.MOTION_DOWN)
@PygletTextView.method('v@')
def moveLeft_(self, sender):
self._window.dispatch_event("on_text_motion", key.MOTION_LEFT)
@PygletTextView.method('v@')
def moveRight_(self, sender):
self._window.dispatch_event("on_text_motion", key.MOTION_RIGHT)
@PygletTextView.method('v@')
def moveWordLeft_(self, sender):
self._window.dispatch_event("on_text_motion", key.MOTION_PREVIOUS_WORD)
@PygletTextView.method('v@')
def moveWordRight_(self, sender):
self._window.dispatch_event("on_text_motion", key.MOTION_NEXT_WORD)
@PygletTextView.method('v@')
def moveToBeginningOfLine_(self, sender):
self._window.dispatch_event("on_text_motion", key.MOTION_BEGINNING_OF_LINE)
@PygletTextView.method('v@')
def moveToEndOfLine_(self, sender):
self._window.dispatch_event("on_text_motion", key.MOTION_END_OF_LINE)
@PygletTextView.method('v@')
def scrollPageUp_(self, sender):
self._window.dispatch_event("on_text_motion", key.MOTION_PREVIOUS_PAGE)
@PygletTextView.method('v@')
def scrollPageDown_(self, sender):
self._window.dispatch_event("on_text_motion", key.MOTION_NEXT_PAGE)
@PygletTextView.method('v@')
def scrollToBeginningOfDocument_(self, sender): # Mac OS X 10.6
self._window.dispatch_event("on_text_motion", key.MOTION_BEGINNING_OF_FILE)
@PygletTextView.method('v@')
def scrollToEndOfDocument_(self, sender): # Mac OS X 10.6
self._window.dispatch_event("on_text_motion", key.MOTION_END_OF_FILE)
@PygletTextView.method('v@')
def deleteBackward_(self, sender):
self._window.dispatch_event("on_text_motion", key.MOTION_BACKSPACE)
@PygletTextView.method('v@')
def deleteForward_(self, sender):
self._window.dispatch_event("on_text_motion", key.MOTION_DELETE)
@PygletTextView.method('v@')
def moveUpAndModifySelection_(self, sender):
self._window.dispatch_event("on_text_motion_select", key.MOTION_UP)
@PygletTextView.method('v@')
def moveDownAndModifySelection_(self, sender):
self._window.dispatch_event("on_text_motion_select", key.MOTION_DOWN)
@PygletTextView.method('v@')
def moveLeftAndModifySelection_(self, sender):
self._window.dispatch_event("on_text_motion_select", key.MOTION_LEFT)
@PygletTextView.method('v@')
def moveRightAndModifySelection_(self, sender):
self._window.dispatch_event("on_text_motion_select", key.MOTION_RIGHT)
@PygletTextView.method('v@')
def moveWordLeftAndModifySelection_(self, sender):
self._window.dispatch_event("on_text_motion_select", key.MOTION_PREVIOUS_WORD)
@PygletTextView.method('v@')
def moveWordRightAndModifySelection_(self, sender):
self._window.dispatch_event("on_text_motion_select", key.MOTION_NEXT_WORD)
@PygletTextView.method('v@')
def moveToBeginningOfLineAndModifySelection_(self, sender): # Mac OS X 10.6
self._window.dispatch_event("on_text_motion_select", key.MOTION_BEGINNING_OF_LINE)
@PygletTextView.method('v@')
def moveToEndOfLineAndModifySelection_(self, sender): # Mac OS X 10.6
self._window.dispatch_event("on_text_motion_select", key.MOTION_END_OF_LINE)
@PygletTextView.method('v@')
def pageUpAndModifySelection_(self, sender): # Mac OS X 10.6
self._window.dispatch_event("on_text_motion_select", key.MOTION_PREVIOUS_PAGE)
@PygletTextView.method('v@')
def pageDownAndModifySelection_(self, sender): # Mac OS X 10.6
self._window.dispatch_event("on_text_motion_select", key.MOTION_NEXT_PAGE)
@PygletTextView.method('v@')
def moveToBeginningOfDocumentAndModifySelection_(self, sender): # Mac OS X 10.6
self._window.dispatch_event("on_text_motion_select", key.MOTION_BEGINNING_OF_FILE)
@PygletTextView.method('v@')
def moveToEndOfDocumentAndModifySelection_(self, sender): # Mac OS X 10.6
self._window.dispatch_event("on_text_motion_select", key.MOTION_END_OF_FILE)
PygletTextView = ObjCClass('PygletTextView')
| 3,895 | 1,997 | 22 |
0f397ddb2cd34001830ecfdfe2903018b622dff3 | 893 | py | Python | server.py | cristoferz/oqem | cab5c8e9254331cb466d46d5749ca43466e8ca0e | [
"Apache-2.0"
] | null | null | null | server.py | cristoferz/oqem | cab5c8e9254331cb466d46d5749ca43466e8ca0e | [
"Apache-2.0"
] | null | null | null | server.py | cristoferz/oqem | cab5c8e9254331cb466d46d5749ca43466e8ca0e | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
from BaseHTTPServer import HTTPServer
from threading import Thread
from webserver.context import RequestHandler
if __name__ == "__main__":
from sys import argv
if len(argv) == 2:
run(port=int(argv[1]))
else:
run() | 33.074074 | 114 | 0.711086 | #!/usr/bin/env python
from BaseHTTPServer import HTTPServer
from threading import Thread
from webserver.context import RequestHandler
class ThreadedHTTPServer(HTTPServer):
def process_request(self, request, client_address):
thread = Thread(target=self.__new_request, args=(self.RequestHandlerClass, request, client_address, self))
thread.start()
def __new_request(self, handlerClass, request, address, server):
handlerClass(request, address, server)
self.shutdown_request(request)
def run(server_class=ThreadedHTTPServer, handler_class=RequestHandler, port=9846):
server_address = ('', port)
httpd = server_class(server_address, handler_class)
print 'Starting httpd...'
httpd.serve_forever()
if __name__ == "__main__":
from sys import argv
if len(argv) == 2:
run(port=int(argv[1]))
else:
run() | 502 | 16 | 106 |
f3ff14eca52156dbabb07ee7961cf2e47065293b | 18,466 | py | Python | Assignment 3/tak.py | xjyssg/AI-assignment | d79f37352edb11c8eabd02dd36677302229dfadf | [
"MIT"
] | null | null | null | Assignment 3/tak.py | xjyssg/AI-assignment | d79f37352edb11c8eabd02dd36677302229dfadf | [
"MIT"
] | null | null | null | Assignment 3/tak.py | xjyssg/AI-assignment | d79f37352edb11c8eabd02dd36677302229dfadf | [
"MIT"
] | 1 | 2018-10-07T20:27:57.000Z | 2018-10-07T20:27:57.000Z | from state import State
from mystack import Stack
from myqueue import Queue
"""
Define the number of stones can capstones for each board size.
"""
STONES = {
3: (10,0),
4: (15,0),
5: (21,1),
6: (30,1),
8: (50,2)
}
# Piece type constants. TYPES holds the one-character display symbol for
# each type, indexed by these constants ('-' flat, '|' standing, '*' cap).
FLAT_STONE = 0
STANDING_STONE = 1
CAP_STONE = 2
TYPES = ['-', '|', '*']
# The four orthogonal movement directions as (row_delta, col_delta) offsets.
UP = (-1, 0)
DOWN = (1, 0)
LEFT = (0, -1)
RIGHT = (0, 1)
DIR = [UP, DOWN, LEFT, RIGHT]  # all directions, for iteration
"""
Class representing the Tak state
"""
class TakState(State):
"""
Return a deep copy of this state.
"""
"""
Return the size of the board.
"""
"""
Return true if and only if the game is over.
"""
"""
Returns a pair (over, winner) where over is true iff the game is over and winner is equal to the winner
(0, 1 or -1 is the game is not over)
"""
"""
Return the number of board position contolled by each player.
"""
"""
Get the winning path if it exists. It retuns an empty path otherwise.
"""
"""
Check whether there is a horizontal winnning path for a given player.
"""
"""
Check whether there is a vertical winning path for a given player.
"""
"""
Check whether there is a path controlled by the given player connecting the
cells in S to the cells in T. Used to check winning paths.
"""
"""
Check whether cell (r, c) is controlled by the given player.
"""
"""
Return the index of the current player.
"""
"""
Get all the actions that the current player can perform.
"""
"""
Get all possible move actions from the current player.
"""
"""
Auxiliary function to generate move actions.
"""
"""
Get all place actions for the current player.
"""
"""
Applies a given action to this state. It assume that the actions is
valid. This must be checked with is_action_valid.
"""
"""
Return the scores of each players.
"""
"""
Get the winner of the game. Call only if the game is over.
"""
"""
Check whether postition (r, c) is empty.
"""
"""
Check whether the current player still has pieces (stones or capstones).
"""
"""
Get the top piece at position (r, c). Returns None if the stack is empty.
"""
"""
Checks whether it is possible to move k the pieces on top of the stack at (r_orig, c_orig)
to (r_dest, c_dest). Also checks whether the positions are adjacent.
"""
"""
Move the top k pieces of stack (r_orig, c_orig) to (r_dest, c_dest).
It assumes that there are enough pieces at origin and enough space at destination.
"""
"""
Return a string representation of the board.
"""
"""
Get a representation of this state that can be loaded in the GUI.
"""
##########################################################################
# YOU SHOULD NOT USE THESE FUNCTION, THEY ARE ONLY USED IN THE INTERFACE #
##########################################################################
"""
Add a piece of type piece_type for the given player at position (r, c).
This should not be used by your code, it is just a function used in the interface.
It does not change the current player nor checks whether the game is over.
"""
"""
Add a piece of type piece_type for the current player at position (r, c).
This should not be used by your code, it is just a function used in the interface.
It does not change the current player nor checks whether the game is over.
"""
"""
Replace the top of the stack at position (r, c) by a piece of type piece_type for the given player.
This should not be used by your code, it is just a function used in the interface.
It does not change the current player nor checks whether the game is over.
"""
"""
Replace the top of the stack at position (r, c) by a piece of type piece_type for the current player.
This should not be used by your code, it is just a function used in the interface.
It does not change the current player nor checks whether the game is over.
"""
"""
Get a representation of this state that can be loaded in the GUI.
"""
| 32.003466 | 176 | 0.59022 | from state import State
from mystack import Stack
from myqueue import Queue
"""
Define the number of stones can capstones for each board size.
"""
STONES = {
3: (10,0),
4: (15,0),
5: (21,1),
6: (30,1),
8: (50,2)
}
"""
Define constants representing flat stones, standing stones and cap stones.
"""
FLAT_STONE = 0
STANDING_STONE = 1
CAP_STONE = 2
TYPES = ['-', '|', '*']
"""
Define the four directions.
"""
UP = (-1, 0)
DOWN = (1, 0)
LEFT = (0, -1)
RIGHT = (0, 1)
DIR = [ UP, DOWN, LEFT, RIGHT ]
"""
Class representing the Tak state
"""
class TakState(State):
def __init__(self, size, max_height):
State.__init__(self)
self.size = size
self.max_height = max_height
self.stones = [ STONES[size][0] for _ in range(2) ]
self.capstones = [ STONES[size][1] for _ in range(2) ]
self.board = [ [ Stack() for _ in range(size) ] for _ in range(size) ]
self.history = set()
self.turn = 1
def get_data_tuple(self):
data = [ x for x in self.stones ]
data += [ x for x in self.capstones ]
for r in range(self.size):
for c in range(self.size):
data += [ r, c ]
for x in self.board[r][c]:
data += [ x[0], x[1] ]
return tuple(data)
def __eq__(self, other):
return self.get_data_tuple() == other.get_data_tuple()
def __hash__(self):
return hash(self.get_data_tuple())
"""
Return a deep copy of this state.
"""
    def copy(self):
        """Build and return a deep copy: piece counts, board stacks,
        bookkeeping flags and the repetition history."""
        cp = TakState(self.size, self.max_height)
        # Remaining pieces for both players.
        for i in range(2):
            cp.stones[i] = self.stones[i]
            cp.capstones[i] = self.capstones[i]
        # Re-add every stack entry in the Stack's iteration order
        # (assumed bottom-up so push order is preserved -- TODO confirm
        # against Stack.__iter__). The (type, owner) tuples are immutable,
        # so sharing them between copies is safe.
        for r in range(self.size):
            for c in range(self.size):
                for x in self.board[r][c]:
                    cp.board[r][c].add(x)
        cp.cur_player = self.get_cur_player()
        # winner/invalid_player/timeout_player come from the State base
        # class (not visible here); copied verbatim.
        cp.winner = self.winner
        cp.invalid_player = self.invalid_player
        cp.timeout_player = self.timeout_player
        # History is a fresh set with the same entries.
        cp.history = set()
        for x in self.history:
            cp.history.add(x)
        cp.turn = self.turn
        return cp
"""
Return the size of the board.
"""
def get_size(self):
return self.size
"""
Return true if and only if the game is over.
"""
def game_over_check(self):
over, winner = self.is_over()
return over
"""
Returns a pair (over, winner) where over is true iff the game is over and winner is equal to the winner
(0, 1 or -1 is the game is not over)
"""
def is_over(self):
# check whether someone ran out of stones
for i in range(2):
if self.stones[i] + self.capstones[i] == 0:
r, b = self.control_count()
if r > b: return True, 0
elif r < b: return True, 1
return True, self.cur_player
# check whether the board is full
full = True
for r in range(self.size):
for c in range(self.size):
if len(self.board[r][c]) == 0:
full = False
if full:
r, b = self.control_count()
if r > b: return True, 0
elif r < b: return True, 1
return True, self.cur_player
# check whether there is a path
for i in range(2):
if self.check_horizontal_path(i) != None:
return True, i
if self.check_vertical_path(i) != None:
return True, i
return False, -1
"""
Return the number of board position contolled by each player.
"""
def control_count(self):
count = [ 0, 0 ]
for r in range(self.size):
for c in range(self.size):
for i in range(2):
if self.is_controlled_by(r, c, i):
count[i] += 1
return tuple(count)
"""
Get the winning path if it exists. It retuns an empty path otherwise.
"""
def get_winning_path(self):
path = self.check_horizontal_path(self.winner)
if path != None: return path
path = self.check_vertical_path(self.winner)
if path != None: return path
return []
"""
Check whether there is a horizontal winnning path for a given player.
"""
def check_horizontal_path(self, player):
# initialize left positions that belong to player
L = [ ]
R = [ ]
for r in range(self.size):
if self.is_controlled_by(r, 0, player):
L.append( (r, 0) )
if self.is_controlled_by(r, self.size - 1, player):
R.append( (r, self.size - 1) )
# perform a BFS from the left to see if we can reach the right
return self.bfs(L, R, player)
"""
Check whether there is a vertical winning path for a given player.
"""
def check_vertical_path(self, player):
# initialize the top positions that belong to player
U = [ ]
D = [ ]
for c in range(self.size):
if self.is_controlled_by(0, c, player):
U.append( (0, c) )
if self.is_controlled_by(self.size - 1, c, player):
D.append( (self.size - 1, c) )
# perform a BFS from the top to see if we can reach the bottom
return self.bfs(U, D, player)
"""
Check whether there is a path controlled by the given player connecting the
cells in S to the cells in T. Used to check winning paths.
"""
def bfs(self, S, T, player):
# initialize BFS
parent = [ [ None for _ in range(self.size) ] for _ in range(self.size) ]
Q = Queue()
for s in S:
Q.add(s)
parent[s[0]][s[1]] = -1
# BFS loop
cnt = 0
while len(Q) > 0:
cnt += 1
r, c = Q.remove()
for d in DIR:
rr = r + d[0]
cc = c + d[1]
if 0 <= rr and rr < self.size and 0 <= cc and cc < self.size and parent[rr][cc] == None and self.is_controlled_by(rr, cc, player):
Q.add( (rr, cc) )
parent[rr][cc] = (r, c)
# check whether the other side was reached
for r, c in T:
if parent[r][c] != None:
# build the path
path = [ ]
cur = (r, c)
while cur != -1:
path.append(cur)
cur = parent[cur[0]][cur[1]]
return path
return None
"""
Check whether cell (r, c) is controlled by the given player.
"""
def is_controlled_by(self, r, c, player):
if len(self.board[r][c]) == 0:
# no piece
return False
piece_type, owner = self.board[r][c].top()
if owner != player:
# piece not owned by player
return False
if piece_type == STANDING_STONE:
# piece is standing stone
return False
return True
"""
Return the index of the current player.
"""
def get_cur_player(self):
return self.cur_player
"""
Get all the actions that the current player can perform.
"""
def get_current_player_actions(self):
actions = [ ]
# gather all place actions
for place_action in self.get_place_actions():
tmp = self.copy()
tmp.apply_action(place_action)
if not tmp.get_data_tuple() in self.history:
actions.append(place_action)
# gather all move actions
if self.turn >= 3:
for move_action in self.get_move_actions():
tmp = self.copy()
tmp.apply_action(move_action)
if not tmp.get_data_tuple() in self.history:
actions.append(move_action)
return actions
"""
Get all possible move actions from the current player.
"""
def get_move_actions(self):
move_actions = [ ]
for row in range(self.size):
for col in range(self.size):
if len(self.board[row][col]) > 0 and self.get_top_piece(row, col)[1] == self.cur_player:
# can only move if the stack belongs to the player
for d in DIR:
r = row + d[0]
c = col + d[1]
# check if position is in range
if 0 <= r and r < self.size and 0 <= c and c < self.size:
max_pieces = min(self.size, len(self.board[row][col]))
for k in range(1, max_pieces + 1):
if self.can_move_top_k(row, col, r, c, k):
delta = (r - row, c - col)
move = ('move', row, col, delta, [k])
move_actions.append(move)
state_tmp = self.copy()
state_tmp.apply_action(move)
self.gen_move_actions(r, c, delta, move, move_actions, state_tmp, 0)
return move_actions
"""
Auxiliary function to generate move actions.
"""
def gen_move_actions(self, row, col, delta, move, move_actions, state_tmp, depth):
r = row + delta[0]
c = col + delta[1]
if 0 <= r and r < self.size and 0 <= c and c < self.size:
max_pieces = move[4][-1]
for k in range(1, max_pieces):
if state_tmp.can_move_top_k(row, col, r, c, k):
nb_pieces = [x for x in move[4]]
nb_pieces.append(k)
new_move = (move[0], move[1], move[2], move[3], nb_pieces)
move_actions.append(new_move)
state_tmp = self.copy()
state_tmp.apply_action(new_move)
self.gen_move_actions(r, c, delta, new_move, move_actions, state_tmp, depth + 1)
"""
Get all place actions for the current player.
"""
def get_place_actions(self):
place_actions = [ ]
for row in range(self.size):
for col in range(self.size):
if len(self.board[row][col]) == 0:
# can only place something if the board is empty
if self.stones[self.cur_player] > 0:
place_actions.append( ('place', FLAT_STONE, row, col) )
if self.turn >= 3:
place_actions.append( ('place', STANDING_STONE, row, col) )
if self.capstones[self.cur_player] > 0:
if self.turn >= 3:
place_actions.append( ('place', CAP_STONE, row, col) )
return place_actions
"""
Applies a given action to this state. It assume that the actions is
valid. This must be checked with is_action_valid.
"""
def apply_action(self, action):
self.history.add(self.get_data_tuple())
action_id = action[0]
if action_id == 'place':
piece_type = action[1]
row = action[2]
col = action[3]
player = self.cur_player
if self.turn <= 2:
player = 1 - player
self.add_piece(row, col, piece_type, player)
if piece_type == CAP_STONE:
self.capstones[player] -= 1
else:
self.stones[player] -= 1
elif action_id == 'move':
row = action[1]
col = action[2]
delta = action[3]
assert abs(delta[0]) + abs(delta[1]) == 1, delta
nb_pieces = action[4]
for i in range(len(nb_pieces)):
r_dest = row + delta[0]
c_dest = col + delta[1]
self.move_top_k(row, col, r_dest, c_dest, nb_pieces[i])
row = r_dest
col = c_dest
# check whehter the game is over and set the winner if so
over, winner = self.is_over()
if over:
self.winner = winner
else:
self.cur_player = 1 - self.cur_player
self.turn += 1
"""
Return the scores of each players.
"""
def get_scores(self):
if self.winner == None:
return (0, 0)
elif self.winner == 0:
return (1, 0)
return (0, 1)
"""
Get the winner of the game. Call only if the game is over.
"""
def get_winner(self):
return self.winner
"""
Check whether postition (r, c) is empty.
"""
def is_empty(self, r, c):
return len(self.board[r][c]) == 0
"""
Check whether the current player still has pieces (stones or capstones).
"""
def cur_player_has_pieces(self):
return self.stones[self.cur_player] + self.capstones[self.cur_player] > 0
"""
Get the top piece at position (r, c). Returns None if the stack is empty.
"""
def get_top_piece(self, r, c):
if len(self.board[r][c]) == 0: return None
return self.board[r][c].top()
"""
Checks whether it is possible to move k the pieces on top of the stack at (r_orig, c_orig)
to (r_dest, c_dest). Also checks whether the positions are adjacent.
"""
    def can_move_top_k(self, r_orig, c_orig, r_dest, c_dest, k):
        """Return True when the top *k* pieces at (r_orig, c_orig) may be
        moved onto (r_dest, c_dest) under the stacking rules."""
        # Destination must be on the board.
        if not (0 <= r_dest and r_dest < self.size and 0 <= c_dest and c_dest < self.size): return False
        # Resulting stack may not exceed the height cap.
        if len(self.board[r_dest][c_dest]) + k > self.max_height: return False
        # Anything may land on an empty cell.
        # NOTE(review): adjacency is NOT checked on this path -- callers
        # appear to always pass adjacent cells; confirm before reuse.
        if len(self.board[r_dest][c_dest]) == 0: return True
        # Non-empty destination: cells must be orthogonally adjacent.
        delta_r = abs(r_orig - r_dest)
        delta_c = abs(c_orig - c_dest)
        if delta_r + delta_c != 1: return False
        piece_type, _ = self.board[r_dest][c_dest].top()
        # A capstone can never be covered.
        if piece_type == CAP_STONE: return False
        # Any stack may land on a flat stone.
        if piece_type != STANDING_STONE: return True
        # A standing stone can only be flattened by a single capstone.
        piece_type, _ = self.board[r_orig][c_orig].top()
        return k == 1 and piece_type == CAP_STONE
"""
Move the top k pieces of stack (r_orig, c_orig) to (r_dest, c_dest).
It assumes that there are enough pieces at origin and enough space at destination.
"""
def move_top_k(self, r_orig, c_orig, r_dest, c_dest, k):
assert 0 <= r_orig and r_orig < self.size and 0 <= c_orig and c_orig < self.size, 'move_top_k orig out of bounds r={0}, c={1}'.format(r_orig, c_orig)
assert 0 <= r_dest and r_dest < self.size and 0 <= c_dest and c_dest < self.size, 'move_top_k dest out of bounds r={0}, c={1}'.format(r_dest, c_dest)
assert 0 < k and k <= len(self.board[r_orig][c_orig]), 'move_top_k number of pieces out of bounds k={0} and board has {1} pieces'.format(k, len(self.board[r_orig][r_dest]))
tmp = Stack()
for i in range(k):
tmp.add(self.board[r_orig][c_orig].remove())
if len(self.board[r_dest][c_dest]) > 0:
piece_type, owner = self.board[r_dest][c_dest].top()
if piece_type == STANDING_STONE:
self.set_top_piece(r_dest, c_dest, FLAT_STONE, owner)
for i in range(k):
self.board[r_dest][c_dest].add(tmp.remove())
"""
Return a string representation of the board.
"""
def __str__(self):
# create the matrix representation
R = self.max_height * self.size + self.size + 1
C = 2 * self.size + self.size + 1
print(R, C)
M = [ [ ' ' for c in range(C) ] for r in range(R) ]
for r in range(R):
for c in range(C):
if r % (self.max_height + 1) == 0 or c % 3 == 0:
M[r][c] = '.'
r0 = self.max_height
c0 = 1
for r in range(self.size):
for c in range(self.size):
rm = r0 + r * (self.max_height + 1)
cm = c0 + c * 3
tmp = [ x for x in self.board[r][c] ]
for t, o in tmp:
M[rm][cm] = TYPES[t]
M[rm][cm + 1] = str(o)
rm -= 1
# convert matrix to string
s = ''
for r in range(R):
for c in range(C):
s += M[r][c]
s += '\n'
# add other info
s += '{0} flat stone\n{1} standing stone\n{2} cap stone\n'.format(TYPES[0], TYPES[1], TYPES[2])
s += 'player0 has {0} stones and {1} capstones\n'.format(self.stones[0], self.capstones[0])
s += 'player1 has {0} stones and {1} capstones\n'.format(self.stones[1], self.capstones[1])
s += 'current palyer: {0}\n'.format(self.cur_player)
return s
"""
Get a representation of this state that can be loaded in the GUI.
"""
def get_data_str(self):
s = ''
s += str(self.cur_player) + '\n'
s += '{0}\n{1}\n'.format(self.size, self.max_height)
s += '{0}\n{1}\n'.format(self.stones[0], self.stones[1])
s += '{0}\n{1}\n'.format(self.capstones[0], self.capstones[1])
types = ['-', '|', '*']
for r in range(self.size):
for c in range(self.size):
l = len(self.board[r][c])
if l != 0:
s += '{0} {1} {2}\n'.format(r, c, l)
data = [ ]
for x in self.board[r][c]:
data.append(x)
data.reverse()
for t, o in data:
s += '{0} {1}\n'.format(o, types[t])
return s
##########################################################################
# YOU SHOULD NOT USE THESE FUNCTION, THEY ARE ONLY USED IN THE INTERFACE #
##########################################################################
"""
Add a piece of type piece_type for the given player at position (r, c).
This should not be used by your code, it is just a function used in the interface.
It does not change the current player nor checks whether the game is over.
"""
def add_piece(self, r, c, piece_type, player):
if len(self.board[r][c]) > 0:
pt, owner = self.board[r][c].remove()
if pt == STANDING_STONE:
self.board[r][c].add( (FLAT_STONE, owner) )
else:
self.board[r][c].add( (pt, owner) )
self.board[r][c].add( (piece_type, player) )
"""
Add a piece of type piece_type for the current player at position (r, c).
This should not be used by your code, it is just a function used in the interface.
It does not change the current player nor checks whether the game is over.
"""
def add_piece_cur_player(self, r, c, piece_type):
self.add_piece(r, c, piece_type, self.cur_player)
"""
Replace the top of the stack at position (r, c) by a piece of type piece_type for the given player.
This should not be used by your code, it is just a function used in the interface.
It does not change the current player nor checks whether the game is over.
"""
def set_top_piece(self, r, c, piece_type, player):
if len(self.board[r][c]) > 0:
self.board[r][c].remove()
self.board[r][c].add( (piece_type, player) )
"""
Replace the top of the stack at position (r, c) by a piece of type piece_type for the current player.
This should not be used by your code, it is just a function used in the interface.
It does not change the current player nor checks whether the game is over.
"""
def set_top_piece_cur_player(self, r, c, piece_type):
self.set_top_piece(r, c, piece_type, self.cur_player)
"""
Get a representation of this state that can be loaded in the GUI.
"""
def get_inginious_str(self):
return '\n' + str(self) + '\n'
def read_state_from_file(fn):
    """Parse a TakState from a text file.

    Layout: size, max_height, cur_player, stones[0], stones[1],
    capstones[0], capstones[1]; then for each occupied cell a header
    line "r c k" followed by k lines "owner type", where type is '-'
    (flat), '|' (standing) or '*' (capstone).

    Fix over the original: the file handle is closed via a context
    manager instead of being leaked.
    """
    with open(fn, 'r') as f:
        # Keep only non-blank lines so stray blank lines are harmless.
        lines = [stripped for stripped in (line.strip() for line in f) if stripped]
    size = int(lines[0])
    max_height = int(lines[1])
    state = TakState(size, max_height)
    state.cur_player = int(lines[2])
    state.stones[0] = int(lines[3])
    state.stones[1] = int(lines[4])
    state.capstones[0] = int(lines[5])
    state.capstones[1] = int(lines[6])
    # Map piece codes back to the numeric constants; anything other than
    # '-' or '|' is treated as a capstone, matching the original logic.
    piece_codes = {'-': FLAT_STONE, '|': STANDING_STONE}
    i = 7
    while i < len(lines):
        header = lines[i].split(' ')
        r, c, k = int(header[0]), int(header[1]), int(header[2])
        i += 1
        for _ in range(k):
            entry = lines[i].split(' ')
            owner = int(entry[0])
            state.add_piece(r, c, piece_codes.get(entry[1], CAP_STONE), owner)
            i += 1
    return state
| 13,465 | 0 | 847 |
fc5060d2b2feab170585d068f44aadadbe851b21 | 2,968 | py | Python | scheduler/tests/jobrequester_test.py | Yixiao99/deep-learning-containers | 01f078adf5abfb92e802b326511981bdd4a8c85c | [
"Apache-2.0"
] | 383 | 2020-05-19T18:09:10.000Z | 2022-03-29T22:41:05.000Z | scheduler/tests/jobrequester_test.py | Yixiao99/deep-learning-containers | 01f078adf5abfb92e802b326511981bdd4a8c85c | [
"Apache-2.0"
] | 551 | 2020-05-27T17:25:50.000Z | 2022-03-31T18:00:35.000Z | scheduler/tests/jobrequester_test.py | ashahba/deep-learning-containers | 48c3948b3d11f4fe2aac6bb25e5d82230d777076 | [
"Apache-2.0"
] | 263 | 2020-05-19T18:17:12.000Z | 2022-03-29T22:41:10.000Z | import concurrent.futures
import logging
import os
import sys
import boto3
import log_return
from job_requester import JobRequester
LOGGER = logging.getLogger(__name__)
LOGGER.setLevel(logging.DEBUG)
LOGGER.addHandler(logging.StreamHandler(sys.stdout))
TEST_IMAGE = "763104351884.dkr.ecr.us-west-2.amazonaws.com/tensorflow-training:2.2.0-gpu-py37-cu101-ubuntu18.04"
SAMPLE_XML_MESSAGE = "<note><to>Sample</to><from>XML</from><heading>Report</heading><body>Hello World!</body></note>"
SAMPLE_CB_ARN = "arn:aws:codebuild:us-west-2:754106851545:build/DLCTestJobExecutor:894c9690-f6dc-4a15-b4b8-b9f2ddc51ea9"
def test_requester():
    """
    Tests the send_request and receive_logs functions of the Job Requester package.
    How tests are executed:
    - create one Job Requester object, and multiple threads. Perform send_request with the Job Requester object in
    each of these threads.
    - send messages to the SQS queue that the Job Requester object created, to imitate the response logs received back
    from the Job Executor.
    - In each of the threads, perform receive_logs to receive the log correspond to the send_request earlier.
    NOTE(review): this is an integration test -- it talks to real AWS
    resources (S3 via boto3, plus whatever JobRequester/log_return use)
    and needs valid credentials in the environment.
    """
    threads = 10
    request_object = JobRequester()
    identifiers_list = []
    input_list = []
    # Build one identical (image, build context, 3) request tuple per
    # worker thread. (The original comment claimed unique names, but the
    # tuples are all the same.)
    for _ in range(threads):
        input_list.append((TEST_IMAGE, "PR", 3))
    # sending requests concurrently, one per tuple
    with concurrent.futures.ThreadPoolExecutor(max_workers=threads) as executor:
        futures = [executor.submit(request_object.send_request, x, y, z) for (x, y, z) in input_list]
        print("Created tickets......")
        for future in futures:
            res = future.result()
            print(res)
            identifiers_list.append(res)
        print("\n")
    # create sample xml report files
    image_tag = TEST_IMAGE.split(":")[-1]
    report_path = os.path.join(os.getcwd(), f"{image_tag}.xml")
    with open(report_path, "w") as report:
        report.write(SAMPLE_XML_MESSAGE)
    # Env vars below are presumably read by log_return.update_pool -- it
    # is called right after each one is set.
    os.environ["CODEBUILD_BUILD_ARN"] = SAMPLE_CB_ARN
    for identifier in identifiers_list:
        os.environ["TICKET_KEY"] = f"folder/{identifier.ticket_name}"
        log_return.update_pool("completed", identifier.instance_type, 3, identifier.job_type, report_path)
    # receiving logs for every ticket and checking the report made it through
    with concurrent.futures.ThreadPoolExecutor(max_workers=threads) as executor:
        logs = [executor.submit(request_object.receive_logs, identifier) for identifier in identifiers_list]
        LOGGER.info("Receiving logs...")
        for log in logs:
            assert "XML_REPORT" in log.result(), f"XML Report not found as part of the returned log message."
    # clean up test artifacts (delete the tickets created above)
    S3 = boto3.client("s3")
    ticket_names = [item.ticket_name for item in identifiers_list]
    for name in ticket_names:
        S3.delete_object(Bucket=request_object.s3_ticket_bucket, Key=name)
    LOGGER.info("Tests passed.")
# Allow running this module directly as an ad-hoc integration test.
if __name__ == "__main__":
    test_requester()
| 35.759036 | 120 | 0.722372 | import concurrent.futures
import logging
import os
import sys
import boto3
import log_return
from job_requester import JobRequester
LOGGER = logging.getLogger(__name__)
LOGGER.setLevel(logging.DEBUG)
LOGGER.addHandler(logging.StreamHandler(sys.stdout))
TEST_IMAGE = "763104351884.dkr.ecr.us-west-2.amazonaws.com/tensorflow-training:2.2.0-gpu-py37-cu101-ubuntu18.04"
SAMPLE_XML_MESSAGE = "<note><to>Sample</to><from>XML</from><heading>Report</heading><body>Hello World!</body></note>"
SAMPLE_CB_ARN = "arn:aws:codebuild:us-west-2:754106851545:build/DLCTestJobExecutor:894c9690-f6dc-4a15-b4b8-b9f2ddc51ea9"
def test_requester():
"""
Tests the send_request and receive_logs functions of the Job Requester package.
How tests are executed:
- create one Job Requester object, and multiple threads. Perform send_request with the Job Requester object in
each of these threads.
- send messages to the SQS queue that the Job Requester object created, to imitate the response logs received back
from the Job Executor.
- In each of the threads, perform receive_logs to receive the log correspond to the send_request earlier.
"""
threads = 10
request_object = JobRequester()
identifiers_list = []
input_list = []
# creating unique image names and build_context strings
for _ in range(threads):
input_list.append((TEST_IMAGE, "PR", 3))
# sending requests
with concurrent.futures.ThreadPoolExecutor(max_workers=threads) as executor:
futures = [executor.submit(request_object.send_request, x, y, z) for (x, y, z) in input_list]
print("Created tickets......")
for future in futures:
res = future.result()
print(res)
identifiers_list.append(res)
print("\n")
# create sample xml report files
image_tag = TEST_IMAGE.split(":")[-1]
report_path = os.path.join(os.getcwd(), f"{image_tag}.xml")
with open(report_path, "w") as report:
report.write(SAMPLE_XML_MESSAGE)
os.environ["CODEBUILD_BUILD_ARN"] = SAMPLE_CB_ARN
for identifier in identifiers_list:
os.environ["TICKET_KEY"] = f"folder/{identifier.ticket_name}"
log_return.update_pool("completed", identifier.instance_type, 3, identifier.job_type, report_path)
# receiving logs
with concurrent.futures.ThreadPoolExecutor(max_workers=threads) as executor:
logs = [executor.submit(request_object.receive_logs, identifier) for identifier in identifiers_list]
LOGGER.info("Receiving logs...")
for log in logs:
assert "XML_REPORT" in log.result(), f"XML Report not found as part of the returned log message."
# clean up test artifacts
S3 = boto3.client("s3")
ticket_names = [item.ticket_name for item in identifiers_list]
for name in ticket_names:
S3.delete_object(Bucket=request_object.s3_ticket_bucket, Key=name)
LOGGER.info("Tests passed.")
if __name__ == "__main__":
test_requester()
| 0 | 0 | 0 |
b961eff9eb243efa64122c274f8dd1040ded904d | 347 | py | Python | src/pdn/views.py | ricardogsilva/nexus-geonode | b3c1d428fe0c246bcc3195ebcbd54554b726dbe2 | [
"BSD-2-Clause"
] | null | null | null | src/pdn/views.py | ricardogsilva/nexus-geonode | b3c1d428fe0c246bcc3195ebcbd54554b726dbe2 | [
"BSD-2-Clause"
] | null | null | null | src/pdn/views.py | ricardogsilva/nexus-geonode | b3c1d428fe0c246bcc3195ebcbd54554b726dbe2 | [
"BSD-2-Clause"
] | null | null | null | from django.shortcuts import render
from django.views.generic import ListView
from pdn.models import *
| 18.263158 | 41 | 0.746398 | from django.shortcuts import render
from django.views.generic import ListView
from pdn.models import *
class NewsPageView(ListView):
    """List page for News objects (template/context per Django ListView defaults)."""
    model = News
class ProjectsPageView(ListView):
    """List page for Project objects (template/context per Django ListView defaults)."""
    model = Project
class AlertsPageView(ListView):
    """List page for Alert objects, newest first by gdacs_fromdate."""
    model = Alert
    # Descending sort; gdacs_fromdate is presumably the GDACS alert start
    # date -- confirm against pdn.models.Alert.
    ordering = ['-gdacs_fromdate']
class ExpertsPageView(ListView):
    """List page for Expert objects (template/context per Django ListView defaults)."""
    model = Expert
| 0 | 150 | 92 |
0e533f4771e19bea8c54b52b140c1909cbaff6b9 | 1,846 | py | Python | src/database/database.py | grigorjevas/Discogs-price-prediction | f1173cba9ed3aa48b1db508bc66d9f768118ec23 | [
"MIT"
] | 5 | 2021-08-22T12:41:16.000Z | 2022-02-22T21:50:02.000Z | src/database/database.py | grigorjevas/Discogs-price-prediction | f1173cba9ed3aa48b1db508bc66d9f768118ec23 | [
"MIT"
] | null | null | null | src/database/database.py | grigorjevas/Discogs-price-prediction | f1173cba9ed3aa48b1db508bc66d9f768118ec23 | [
"MIT"
] | null | null | null | from dotenv import load_dotenv
import os
from psycopg2 import connect, OperationalError
load_dotenv()
| 29.774194 | 116 | 0.604009 | from dotenv import load_dotenv
import os
from psycopg2 import connect, OperationalError
load_dotenv()
class Database:
    """Thin wrapper around a PostgreSQL connection configured via DATABASE_URL.

    Available methods:
    * connect
    * execute_query
    * execute_query_and_fetch
    """

    def __init__(self):
        """Read the connection string from the environment (.env is loaded
        at module import time via python-dotenv)."""
        self.__database_url = os.environ["DATABASE_URL"]

    def connect(self):
        """Open and return a new psycopg2 connection.

        Logs the driver diagnostics and re-raises OperationalError when
        the connection cannot be established.
        """
        try:
            return connect(self.__database_url)
        except OperationalError as err:
            print("pg error: ", err.pgerror, "\n")
            print("pg code: ", err.pgcode, "\n")
            raise

    def execute_query(self, query: str) -> None:
        """Execute a statement that returns no rows, for example:
        INSERT, UPDATE, CREATE, ALTER, DROP, etc.

        Fix over the original: the connection is closed even when the
        statement raises (previously it leaked on error).

        :param query: SQL query
        """
        connection = self.connect()
        try:
            cursor = connection.cursor()
            cursor.execute(query)
            connection.commit()
        finally:
            connection.close()

    def execute_query_and_fetch(self, query: str) -> list:
        """Execute a query and return all result rows (e.g. SELECT).

        Fix over the original: the connection is closed even when the
        query raises (previously it leaked on error).

        :param query: SQL query
        :return: list of result tuples
        """
        connection = self.connect()
        try:
            cursor = connection.cursor()
            cursor.execute(query)
            return cursor.fetchall()
        finally:
            connection.close()
| 0 | 1,720 | 23 |
853c982d62eb798a237e5159657a7222b848e688 | 4,817 | py | Python | deploy/examples/migrationcontainer/migration.py | jakedt/dba-operator | 5f4abcc490b05e5490fdbb704b0e20dbf0821e36 | [
"Apache-2.0"
] | null | null | null | deploy/examples/migrationcontainer/migration.py | jakedt/dba-operator | 5f4abcc490b05e5490fdbb704b0e20dbf0821e36 | [
"Apache-2.0"
] | null | null | null | deploy/examples/migrationcontainer/migration.py | jakedt/dba-operator | 5f4abcc490b05e5490fdbb704b0e20dbf0821e36 | [
"Apache-2.0"
] | null | null | null | import os
import sys
import logging
import time
import argparse
import re
import pymysql.cursors
import pymysql.err
from prometheus_client import CollectorRegistry, Gauge, Counter, push_to_gateway
FORMAT = '%(asctime)s [%(process)d] [%(levelname)s] [%(name)s] %(message)s'
TABLE_DEF = """
CREATE TABLE `alembic_version` (
`version_num` varchar(255) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=UTF8MB4 COLLATE=utf8mb4_bin;
"""
PROM_LABEL_PREFIX = 'DBA_OP_LABEL_'
logger = logging.getLogger(__name__)
if __name__ == '__main__':
    logging.basicConfig(format=FORMAT, level=logging.DEBUG)
    # Fail fast when any required environment variable is absent.
    check_vars = [
        'DBA_OP_PROMETHEUS_PUSH_GATEWAY_ADDR',
        'DBA_OP_JOB_ID',
        'DBA_OP_CONNECTION_STRING',
    ]
    for env_var_name in check_vars:
        if not env_var_name in os.environ:
            logger.error('Must provide the environment variable %s', env_var_name)
            sys.exit(1)
    # Re-bind the module logger so records carry the job id as their name.
    logger = logging.getLogger(os.environ['DBA_OP_JOB_ID'])
    parser = argparse.ArgumentParser(
        description='Run a fake migration container.',
    )
    parser.add_argument(
        '--seconds',
        default=30,
        type=int,
        help='Number of seconds for which to run',
    )
    parser.add_argument(
        '--fail_after',
        default=sys.maxsize,
        type=int,
        help='Number of seconds after which to fail (default: succeed)',
    )
    parser.add_argument(
        '--write_version',
        required=True,
        type=str,
        help='Database version to set after completion',
    )
    args = parser.parse_args()
    # Parse the env to find labels that we need to add
    labels = {_process_label_key(k): v for k, v in os.environ.items()
              if k.startswith(PROM_LABEL_PREFIX)}
    run(
        os.environ['DBA_OP_CONNECTION_STRING'],
        os.environ['DBA_OP_PROMETHEUS_PUSH_GATEWAY_ADDR'],
        os.environ['DBA_OP_JOB_ID'],
        labels,
        args.write_version,
        args.seconds,
        args.fail_after,
    )
| 29.552147 | 82 | 0.653311 | import os
import sys
import logging
import time
import argparse
import re
import pymysql.cursors
import pymysql.err
from prometheus_client import CollectorRegistry, Gauge, Counter, push_to_gateway
FORMAT = '%(asctime)s [%(process)d] [%(levelname)s] [%(name)s] %(message)s'
TABLE_DEF = """
CREATE TABLE `alembic_version` (
`version_num` varchar(255) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=UTF8MB4 COLLATE=utf8mb4_bin;
"""
PROM_LABEL_PREFIX = 'DBA_OP_LABEL_'
logger = logging.getLogger(__name__)
def run(db_connection_string, push_gateway_addr, job_id, labels, write_version,
        run_seconds, fail_seconds):
    """Simulate a migration, pushing progress metrics once per second.

    Exits with status 1 (after recording the failure metric) once
    fail_seconds is reached; otherwise stamps write_version into the
    database after run_seconds and reports completion.
    """
    logger.debug('Starting migration')

    registry = CollectorRegistry()
    # One gauge for fractional progress plus three counters acting as
    # completed / failed / items-processed signals.
    completion_gauge = Gauge(
        'migration_completion_percent',
        'Estimate of the completion percentage of the job',
        registry=registry,
    )
    complete_counter = Counter(
        'migration_complete_total',
        'Binary value of whether or not the job is complete',
        registry=registry,
    )
    failed_counter = Counter(
        'migration_failed_total',
        'Binary value of whether or not the job has failed',
        registry=registry,
    )
    items_counter = Counter(
        'migration_items_completed_total',
        'Number of items this migration has completed',
        registry=registry,
    )

    def push_metrics():
        # Every push carries the job id plus the shared grouping labels.
        push_to_gateway(push_gateway_addr, job=job_id, registry=registry,
                        grouping_key=labels)

    for elapsed in range(run_seconds):
        if elapsed >= fail_seconds:
            # Simulated failure: record it, push, and bail out.
            failed_counter.inc()
            push_metrics()
            sys.exit(1)
        items_counter.inc()
        completion_gauge.set(float(elapsed) / run_seconds)
        push_metrics()
        logger.debug('%s/%s items completed', elapsed, run_seconds)
        time.sleep(1)

    # Write the completion to the database
    _write_database_version(db_connection_string, write_version)
    complete_counter.inc()
    completion_gauge.set(1.0)
    push_metrics()
def _parse_mysql_dsn(db_connection_string):
# DO NOT use this regex as authoritative for a MySQL DSN
matcher = re.match(
r'([^:]+):([^@]+)@tcp\(([^:]+):([0-9]+)\)\/([a-zA-Z0-9]+)',
db_connection_string,
)
assert matcher is not None
return {
"host": matcher.group(3),
"user": matcher.group(1),
"password": matcher.group(2),
"database": matcher.group(5),
"port": int(matcher.group(4)),
}
def _write_database_version(db_connection_string, version):
    """Record *version* in the alembic_version table, creating it if needed.

    Tries an UPDATE first; if that raises ProgrammingError (most likely
    because the table does not exist yet), creates the table and INSERTs
    the version instead.

    :param db_connection_string: MySQL DSN (user:pass@tcp(host:port)/db)
    :param version: alembic version string to store
    """
    connection_params = _parse_mysql_dsn(db_connection_string)
    db_conn = pymysql.connect(autocommit=True, **connection_params)
    try:
        try:
            with db_conn.cursor() as cursor:
                sql = "UPDATE alembic_version SET version_num = %s"
                # (version,) - args must be a sequence; the original passed a
                # bare string which only worked via pymysql's scalar fallback
                cursor.execute(sql, (version,))
        except pymysql.err.ProgrammingError:
            # Likely the table was missing
            with db_conn.cursor() as cursor:
                cursor.execute(TABLE_DEF)
                create = "INSERT INTO alembic_version (version_num) VALUES (%s)"
                cursor.execute(create, (version,))
    finally:
        # The original leaked the connection; always close it
        db_conn.close()
def _process_label_key(label_key):
    """Strip the PROM_LABEL_PREFIX from an env-var name and lower-case the rest."""
    remainder = label_key[len(PROM_LABEL_PREFIX):]
    return remainder.lower()
if __name__ == '__main__':
    logging.basicConfig(format=FORMAT, level=logging.DEBUG)
    # Required environment variables; exit early if any is missing
    check_vars = [
        'DBA_OP_PROMETHEUS_PUSH_GATEWAY_ADDR',
        'DBA_OP_JOB_ID',
        'DBA_OP_CONNECTION_STRING',
    ]
    for env_var_name in check_vars:
        if not env_var_name in os.environ:
            logger.error('Must provide the environment variable %s', env_var_name)
            sys.exit(1)
    # Rebind the module logger so log lines carry the job id as the name
    logger = logging.getLogger(os.environ['DBA_OP_JOB_ID'])
    parser = argparse.ArgumentParser(
        description='Run a fake migration container.',
    )
    parser.add_argument(
        '--seconds',
        default=30,
        type=int,
        help='Number of seconds for which to run',
    )
    parser.add_argument(
        '--fail_after',
        default=sys.maxsize,
        type=int,
        help='Number of seconds after which to fail (default: succeed)',
    )
    parser.add_argument(
        '--write_version',
        required=True,
        type=str,
        help='Database version to set after completion',
    )
    args = parser.parse_args()
    # Parse the env to find labels that we need to add
    # (DBA_OP_LABEL_FOO=bar becomes grouping-key label foo=bar)
    labels = {_process_label_key(k): v for k, v in os.environ.items()
              if k.startswith(PROM_LABEL_PREFIX)}
    run(
        os.environ['DBA_OP_CONNECTION_STRING'],
        os.environ['DBA_OP_PROMETHEUS_PUSH_GATEWAY_ADDR'],
        os.environ['DBA_OP_JOB_ID'],
        labels,
        args.write_version,
        args.seconds,
        args.fail_after,
    )
| 2,712 | 0 | 92 |
3ec6be15ac763ee168cf5cd5303e3d0bef0039b5 | 3,589 | py | Python | minecraft_monitor/whitelist.py | iwcharlton/minecraft-monitor | fd4c10213bfab8c45a1c2884ef2f2fffd77b2e5b | [
"MIT"
] | null | null | null | minecraft_monitor/whitelist.py | iwcharlton/minecraft-monitor | fd4c10213bfab8c45a1c2884ef2f2fffd77b2e5b | [
"MIT"
] | null | null | null | minecraft_monitor/whitelist.py | iwcharlton/minecraft-monitor | fd4c10213bfab8c45a1c2884ef2f2fffd77b2e5b | [
"MIT"
] | null | null | null | #!/usr/bin/env python
import json
import os
'''
The whitelist object is for reading the standard minecraft
whitelist.json, and also maintaining a blacklist.json.
'''
| 31.761062 | 89 | 0.648091 | #!/usr/bin/env python
import json
import os
'''
The whitelist object is for reading the standard minecraft
whitelist.json, and also maintaining a blacklist.json.
'''
class Whitelist():
    """
    In-memory view of the server's whitelist.json and a parallel,
    non-standard blacklist.json kept in the same directory.

    Both lists hold dicts of the form {'name': ..., 'uuid': ...}.  The
    disk_* attributes mirror what was last read from disk so that
    restart_needed() can report pending (un-applied) changes.
    """

    def __init__(self, location):
        # Directory containing whitelist.json / blacklist.json
        self.location = location
        self.disk_whitelist = []
        self.whitelist = None
        self.disk_blacklist = []
        self.blacklist = None

    def parse_whitelist(self):
        """(Re)load both JSON files from disk, if they exist.

        The in-memory lists are initialised from disk only the first
        time; later calls refresh the disk_* copies.
        """
        wl_path = os.path.join(self.location, 'whitelist.json')
        if os.path.exists(wl_path):
            print('parsing whitelist...')
            with open(wl_path) as f:
                self.disk_whitelist = json.load(f)
            if self.whitelist is None:
                self.whitelist = self.disk_whitelist
        bl_path = os.path.join(self.location, 'blacklist.json')
        if os.path.exists(bl_path):
            print('parsing blacklist...')
            with open(bl_path) as f:
                self.disk_blacklist = json.load(f)
            if self.blacklist is None:
                self.blacklist = self.disk_blacklist

    def restart_needed(self):
        """True when the in-memory lists differ from the on-disk copies."""
        return self.blacklist != self.disk_blacklist or self.whitelist != self.disk_whitelist

    def save_whitelist(self):
        """Write both lists to disk, then re-read to sync disk_* state."""
        wl_path = os.path.join(self.location, 'whitelist.json')
        with open(wl_path, 'w') as f:
            print('saving whitelist...')
            json.dump(self.whitelist, f)
        bl_path = os.path.join(self.location, 'blacklist.json')
        with open(bl_path, 'w') as f:
            print('saving blacklist...')
            json.dump(self.blacklist, f)
        self.parse_whitelist()

    def player_is_whitelisted(self, player):
        """True when *player* is on the in-memory whitelist."""
        return any(entry['name'] == player for entry in self.whitelist)

    def player_is_blacklisted(self, player):
        """True when *player* is on the in-memory blacklist."""
        return any(entry['name'] == player for entry in self.blacklist)

    def player_uuid(self, player):
        """Return the UUID recorded for *player*, or False when unknown."""
        for entry in self.whitelist:
            if entry['name'] == player:
                return entry['uuid']
        for entry in self.blacklist:
            if entry['name'] == player:
                return entry['uuid']
        return False

    def add_player(self, player, uuid):
        """Whitelist a brand-new player and save; False if already known."""
        if player is not None and uuid is not None:
            if self.player_is_whitelisted(player):
                # BUG FIX: these three messages were plain strings missing
                # the f prefix, so the literal '{player}' was printed
                print(f'ERROR: player {player} is already whitelisted')
                return False
            if self.player_is_blacklisted(player):
                print(f'ERROR: player {player} is already blacklisted')
                return False
            self.whitelist.append({ 'name': player, 'uuid': uuid })
            print(f'whitelisted player {player} with uuid {uuid}')
            self.save_whitelist()
            return True
        else:
            print('ERROR: player and uuid not provided for add_player')
            return False

    def whitelist_player(self, player):
        """Move *player* from the blacklist back to the whitelist."""
        if player is not None:
            moved = [wl for wl in self.blacklist if wl['name'] == player]
            remaining = [wl for wl in self.blacklist if wl['name'] != player]
            if len(remaining) < len(self.blacklist):
                self.blacklist = remaining
                self.whitelist.extend(moved)
                print(f'Whitelisted player {player}')
                self.save_whitelist()
                return True
            else:
                return False
        else:
            return False

    def blacklist_player(self, player):
        """Move *player* from the whitelist to the blacklist."""
        if player is not None:
            remaining = [wl for wl in self.whitelist if wl['name'] != player]
            moved = [wl for wl in self.whitelist if wl['name'] == player]
            if len(remaining) < len(self.whitelist):
                self.whitelist = remaining
                self.blacklist.extend(moved)
                print(f'Blacklisted player {player}')
                self.save_whitelist()
                return True
            else:
                return False
        else:
            return False
| 3,146 | -3 | 279 |
717590a396e6947a38a879746af39da4f3629398 | 776 | py | Python | linda/builder_advanced/urls.py | cbotsikas/LindaWorkbench | b2bfa091fb4ec80ac35b3f68edf46780c1e9ffea | [
"MIT"
] | null | null | null | linda/builder_advanced/urls.py | cbotsikas/LindaWorkbench | b2bfa091fb4ec80ac35b3f68edf46780c1e9ffea | [
"MIT"
] | null | null | null | linda/builder_advanced/urls.py | cbotsikas/LindaWorkbench | b2bfa091fb4ec80ac35b3f68edf46780c1e9ffea | [
"MIT"
] | null | null | null | __author__ = 'dimitris'
"""
URLS for the Advanced Builder
"""
from django.conf.urls import patterns, include, url
from builder_advanced import views
# URL routing table for the advanced builder (old Django `patterns()` style)
urlpatterns = patterns('',
                       # Basic pages
                       url(r'^$', views.index, name='advanced-builder-index'),
                       # API calls: each route captures the datatype name as <dt_name>
                       url(r'^api/active_classes/(?P<dt_name>[\w-]+)/$', views.active_classes),
                       url(r'^api/object_properties/(?P<dt_name>[\w-]+)/$', views.object_properties),
                       url(r'^api/active_class_properties/(?P<dt_name>[\w-]+)/$',
                       views.active_class_properties),
                       url(r'^api/get_property_type/(?P<dt_name>[\w-]+)/$', views.get_property_type),
                       )
| 33.73913 | 101 | 0.537371 | __author__ = 'dimitris'
"""
URLS for the Advanced Builder
"""
from django.conf.urls import patterns, include, url
from builder_advanced import views
# URL routing table for the advanced builder (old Django `patterns()` style)
urlpatterns = patterns('',
                       # Basic pages
                       url(r'^$', views.index, name='advanced-builder-index'),
                       # API calls: each route captures the datatype name as <dt_name>
                       url(r'^api/active_classes/(?P<dt_name>[\w-]+)/$', views.active_classes),
                       url(r'^api/object_properties/(?P<dt_name>[\w-]+)/$', views.object_properties),
                       url(r'^api/active_class_properties/(?P<dt_name>[\w-]+)/$',
                       views.active_class_properties),
                       url(r'^api/get_property_type/(?P<dt_name>[\w-]+)/$', views.get_property_type),
                       )
| 0 | 0 | 0 |
26c004179ee0e44bc1b547fb0fe2556ca35299be | 1,332 | py | Python | server/coffee_book/serializers.py | carter-harris/CoffeeBook | 79e20ac6abbd3c1d1f156d4b8dffb47b1d548df7 | [
"MIT"
] | null | null | null | server/coffee_book/serializers.py | carter-harris/CoffeeBook | 79e20ac6abbd3c1d1f156d4b8dffb47b1d548df7 | [
"MIT"
] | null | null | null | server/coffee_book/serializers.py | carter-harris/CoffeeBook | 79e20ac6abbd3c1d1f156d4b8dffb47b1d548df7 | [
"MIT"
] | null | null | null | from rest_framework import serializers
from django.contrib.auth.models import User
from coffee_book.models import User, Coffee, BrewMethod, Review, Region
| 39.176471 | 156 | 0.686937 | from rest_framework import serializers
from django.contrib.auth.models import User
from coffee_book.models import User, Coffee, BrewMethod, Review, Region
class UserSerializer(serializers.ModelSerializer):
    """Serializes user accounts, including the shop-profile fields."""
    class Meta:
        model = User
        fields = ('id', 'url', 'username', 'user_type', 'first_name', 'last_name', 'shop_name', 'location')
class BrewMethodSerializer(serializers.HyperlinkedModelSerializer):
    """Serializes brew methods together with links to the coffees using them."""
    class Meta:
        model = BrewMethod
        fields = ('id', 'url', 'name', 'coffees')
class RegionSerializer(serializers.HyperlinkedModelSerializer):
    """Serializes coffee-growing regions (name only)."""
    class Meta:
        model = Region
        fields = ('id', 'url', 'name')
class CoffeeSerializer(serializers.HyperlinkedModelSerializer):
    """Serializes Coffee records.

    `region` and `brew_method` are flattened to their related names and
    are read-only, so they cannot be set through this serializer.
    """
    # TODO (carried over from original): make `owner` read-only here and
    # on ReviewSerializer
    region = serializers.ReadOnlyField(source='region.name')
    brew_method = serializers.ReadOnlyField(source='brew_method.name')

    class Meta:
        model = Coffee
        # FIX: 'notes' was listed twice in the original fields tuple
        fields = ('id', 'url', 'name', 'notes', 'farm', 'altitude', 'process', 'description', 'region', 'brew_method', 'owner', 'image', 'reviews')
class ReviewSerializer(serializers.HyperlinkedModelSerializer):
    """Serializes reviews with the full nested coffee and owner objects."""
    # Nested (read-only by DRF default for nested serializers without
    # create/update overrides) representations instead of hyperlinks
    coffee = CoffeeSerializer()
    owner = UserSerializer()
    class Meta:
        model = Review
        fields = ('id', 'url', 'review', 'owner', 'coffee')
| 0 | 1,062 | 115 |
293dd3266bb0518f0b51ded116194d1d499836e0 | 691 | py | Python | wintria/lib/swap_logo.py | codelucas/wintria.com | 99c3f20d64e6ecf3d02cf0117233de349274a607 | [
"MIT"
] | 2 | 2017-10-04T20:53:09.000Z | 2021-11-12T10:02:32.000Z | wintria/lib/swap_logo.py | codelucas/wintria.com | 99c3f20d64e6ecf3d02cf0117233de349274a607 | [
"MIT"
] | null | null | null | wintria/lib/swap_logo.py | codelucas/wintria.com | 99c3f20d64e6ecf3d02cf0117233de349274a607 | [
"MIT"
] | null | null | null | import sys
import cStringIO
import urllib2
from img_utils import prepare_image
from PIL import Image
from wintria.wintria.settings import PROJECT_ROOT
thumbnail_size = 100, 100
dest_thumb_url = PROJECT_ROOT + 'wintria/wintria/logo_static/logo_thumbs/'
dest_gen_url = PROJECT_ROOT + 'wintria/wintria/logo_static/logobank/'
if __name__ == '__main__':
    # Usage: swap_logo.py <image-url> <domain>
    # Downloads the logo at <image-url>, stores the full-size copy in the
    # logo bank and a thumbnail in the thumbs directory, named <domain>.png
    new_url, domain = sys.argv[1], sys.argv[2]
    upload_gen = dest_gen_url+domain+'.png'
    upload_thumb = dest_thumb_url+domain+'.png'
    _file = cStringIO.StringIO(urllib2.urlopen(new_url, timeout=4).read())
    img = Image.open(_file)
    # BUG FIX: original called img.save(dest_gen_url), i.e. the bare
    # directory path; upload_gen (the computed file path) was never used
    img.save(upload_gen)
    new_img = prepare_image(img)
    new_img.save(upload_thumb)
| 24.678571 | 74 | 0.752533 | import sys
import cStringIO
import urllib2
from img_utils import prepare_image
from PIL import Image
from wintria.wintria.settings import PROJECT_ROOT
thumbnail_size = 100, 100
dest_thumb_url = PROJECT_ROOT + 'wintria/wintria/logo_static/logo_thumbs/'
dest_gen_url = PROJECT_ROOT + 'wintria/wintria/logo_static/logobank/'
if __name__ == '__main__':
    # Usage: swap_logo.py <image-url> <domain>
    # Downloads the logo at <image-url>, stores the full-size copy in the
    # logo bank and a thumbnail in the thumbs directory, named <domain>.png
    new_url, domain = sys.argv[1], sys.argv[2]
    upload_gen = dest_gen_url+domain+'.png'
    upload_thumb = dest_thumb_url+domain+'.png'
    _file = cStringIO.StringIO(urllib2.urlopen(new_url, timeout=4).read())
    img = Image.open(_file)
    # BUG FIX: original called img.save(dest_gen_url), i.e. the bare
    # directory path; upload_gen (the computed file path) was never used
    img.save(upload_gen)
    new_img = prepare_image(img)
    new_img.save(upload_thumb)
| 0 | 0 | 0 |
0a6332c3d5b912a3397508aa8754164b4481303c | 47,973 | py | Python | modules/s3db/po.py | Mkgdukoo/aidiq | 840b97651d79352878d5a777067a915985617378 | [
"MIT"
] | 1 | 2018-06-06T12:11:25.000Z | 2018-06-06T12:11:25.000Z | modules/s3db/po.py | Mkgdukoo/aidiq | 840b97651d79352878d5a777067a915985617378 | [
"MIT"
] | null | null | null | modules/s3db/po.py | Mkgdukoo/aidiq | 840b97651d79352878d5a777067a915985617378 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
""" Sahana Eden Population Outreach Models
@copyright: 2015-2021 (c) Sahana Software Foundation
@license: MIT
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
__all__ = ("OutreachAreaModel",
"OutreachHouseholdModel",
"OutreachReferralModel",
"po_rheader",
"po_organisation_onaccept",
"po_due_followups",
)
from ..s3 import *
from s3layouts import S3PopupLink
from gluon import IS_NOT_EMPTY, IS_INT_IN_RANGE
# =============================================================================
class OutreachAreaModel(S3Model):
""" Model representing a mesh area for outreach work """
names = ("po_area",
"po_area_id",
)
# -------------------------------------------------------------------------
@staticmethod
def defaults():
""" Safe defaults for names in case the module is disabled """
dummy = S3ReusableField("dummy_id", "integer",
readable = False,
writable = False,
)
return {"po_area_id": lambda **attr: dummy("area_id"),
}
# -------------------------------------------------------------------------
@classmethod
def area_onaccept(cls, form):
""" Onaccept actions for po_area """
try:
record_id = form.vars.id
except AttributeError:
return
cls.area_update_affiliations(record_id)
# -------------------------------------------------------------------------
@classmethod
def area_ondelete(cls, row):
""" Ondelete actions for po_area """
try:
record_id = row.id
except AttributeError:
return
cls.area_update_affiliations(record_id)
# -------------------------------------------------------------------------
@staticmethod
def area_update_affiliations(record_id):
"""
Update affiliations for an area
@param record: the area record
"""
ROLE = "Areas"
db = current.db
s3db = current.s3db
table = s3db.po_area
row = db(table.id == record_id).select(table.pe_id,
table.deleted,
table.deleted_fk,
table.organisation_id,
limitby=(0, 1),
).first()
if not row:
return
area_pe_id = row.pe_id
if not area_pe_id:
return
# Get the organisation_id
if row.deleted:
try:
fk = json.loads(row.deleted_fk)
except ValueError:
organisation_id = None
else:
organisation_id = fk.get("organisation_id")
else:
organisation_id = row.organisation_id
# Get the PE ids
organisation_pe_id = s3db.pr_get_pe_id("org_organisation",
organisation_id,
)
# Remove obsolete affiliations
rtable = s3db.pr_role
atable = s3db.pr_affiliation
query = (atable.pe_id == row.pe_id) & \
(atable.deleted != True) & \
(atable.role_id == rtable.id) & \
(rtable.role == ROLE) & \
(rtable.pe_id != organisation_pe_id)
rows = db(query).select(rtable.pe_id)
for row in rows:
s3db.pr_remove_affiliation(row.pe_id, area_pe_id, role=ROLE)
# Add current affiliation
from .pr import OU
s3db.pr_add_affiliation(organisation_pe_id, area_pe_id, role=ROLE, role_type=OU)
# =============================================================================
# =============================================================================
class OutreachReferralModel(S3Model):
    """ Model to track referrals of households to organisations """
    # Table names registered by this model; the table definitions live in
    # model(), which is not part of this excerpt
    names = ("po_referral_organisation",
             "po_organisation_area",
             "po_organisation_household",
             )
# =============================================================================
# =============================================================================
# =============================================================================
def po_organisation_onaccept(form):
    """
        Onaccept of org_organisation:
        1. Set the owned_by_group to PO_AGENCIES so that those users can
           see these agencies in the household referrals dropdown
           (original docstring said PO_ADMIN, but the code uses the
           PO_AGENCIES role)
        2. Create a po_referral_organisation record onaccept of
           an org_organisation to link it to this module.
        @param form: the form
    """
    try:
        organisation_id = form.vars["id"]
    except AttributeError:
        # No form.vars available => nothing we can do
        return
    db = current.db
    s3db = current.s3db
    otable = s3db.org_organisation
    record = db(otable.id == organisation_id).select(otable.id,
                                                     otable.owned_by_group,
                                                     limitby=(0, 1)
                                                     ).first()
    if record:
        gtable = db.auth_group
        role = db(gtable.uuid == "PO_AGENCIES").select(gtable.id,
                                                       limitby = (0, 1)
                                                       ).first()
        try:
            PO_AGENCIES = role.id
        except AttributeError:
            # No PO_AGENCIES role prepopped
            pass
        else:
            if record.owned_by_group != PO_AGENCIES:
                record.update_record(owned_by_group = PO_AGENCIES)
    # Link the organisation to this module (idempotent)
    rtable = s3db.po_referral_organisation
    query = (rtable.organisation_id == organisation_id) & \
            (rtable.deleted != True)
    exists = db(query).select(rtable.id, limitby=(0, 1)).first()
    if not exists:
        rtable.insert(organisation_id=organisation_id)
# =============================================================================
def po_due_followups():
    """ Return the number of household follow-ups that are due, i.e.
        scheduled on or before today (UTC) and not yet completed.
    """
    today = datetime.datetime.utcnow().date()
    due = (FS("followup_date") <= today) & (FS("completed") != True)
    return current.s3db.resource("po_household_followup",
                                 filter = due,
                                 ).count()
# END =========================================================================
| 42.007881 | 145 | 0.39147 | # -*- coding: utf-8 -*-
""" Sahana Eden Population Outreach Models
@copyright: 2015-2021 (c) Sahana Software Foundation
@license: MIT
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
__all__ = ("OutreachAreaModel",
"OutreachHouseholdModel",
"OutreachReferralModel",
"po_rheader",
"po_organisation_onaccept",
"po_due_followups",
)
from ..s3 import *
from s3layouts import S3PopupLink
from gluon import IS_NOT_EMPTY, IS_INT_IN_RANGE
# =============================================================================
class OutreachAreaModel(S3Model):
""" Model representing a mesh area for outreach work """
names = ("po_area",
"po_area_id",
)
def model(self):
T = current.T
auth = current.auth
super_link = self.super_link
#root_org = auth.root_org()
#ADMIN = current.session.s3.system_roles.ADMIN
#is_admin = auth.s3_has_role(ADMIN)
# ---------------------------------------------------------------------
# Area
#
tablename = "po_area"
self.define_table(tablename,
super_link("doc_id", "doc_entity"),
# This was included to allow Areas to be realm entities but this is currently not used
# Re-enable onaccept/ondelete & S3EntityRoleManager if this becomes required in future
#super_link("pe_id", "pr_pentity"),
Field("name",
requires = IS_NOT_EMPTY(),
),
self.gis_location_id(
widget = S3LocationSelector(points = False,
polygons = True,
feature_required = True,
),
),
# Included primarily to set realm
self.org_organisation_id(default = auth.user and auth.user.organisation_id,
#default = root_org,
#readable = is_admin,
#writable = is_admin,
),
Field("attempted_visits", "integer",
comment = DIV(_class="tooltip",
_title="%s|%s" % (T("Attempted Visits"),
T("Number of households in the area where nobody was at home at the time of visit"))),
default = 0,
label = T("Attempted Visits"),
requires = IS_EMPTY_OR(IS_INT_IN_RANGE(minimum=0)),
),
s3_comments(),
*s3_meta_fields())
# CRUD Strings
current.response.s3.crud_strings[tablename] = Storage(
label_create = T("Create Area"),
title_display = T("Area Details"),
title_list = T("Areas"),
title_update = T("Edit Area"),
label_list_button = T("List Areas"),
label_delete_button = T("Delete Area"),
msg_record_created = T("Area created"),
msg_record_modified = T("Area updated"),
msg_record_deleted = T("Area deleted"),
msg_list_empty = T("No Areas currently registered"),
)
# Reusable field
represent = S3Represent(lookup=tablename, show_link=True)
area_id = S3ReusableField("area_id", "reference %s" % tablename,
label = T("Area"),
represent = represent,
requires = IS_ONE_OF(current.db, "po_area.id",
represent,
),
sortby = "name",
comment = S3PopupLink(f = "area",
tooltip = T("Create a new area"),
),
)
# Components
self.add_components(tablename,
po_household = "area_id",
org_organisation = {"link": "po_organisation_area",
"joinby": "area_id",
"key": "organisation_id",
"actuate": "hide",
},
)
levels = current.gis.get_relevant_hierarchy_levels()
# Filters
filter_widgets = [S3TextFilter(["name"]),
S3LocationFilter("location_id", levels = levels),
]
# @todo: reports
# Table Configuration
self.configure(tablename,
deduplicate = S3Duplicate(ignore_deleted=True),
filter_widgets = filter_widgets,
#onaccept = self.area_onaccept,
#ondelete = self.area_ondelete,
realm_components = ("household",
),
summary = ({"common": True,
"name": "add",
"widgets": [{"method": "create"}],
},
{"name": "table",
"label": "Table",
"widgets": [{"method": "datatable"}]
},
{"name": "map",
"label": "Map",
"widgets": [{"method": "map",
"ajax_init": True}],
},
),
#super_entity = ("doc_entity", "pr_pentity"),
super_entity = "doc_entity",
update_realm = True,
)
# ---------------------------------------------------------------------
# Pass names back to global scope (s3.*)
#
return {"po_area_id": area_id,
}
# -------------------------------------------------------------------------
    @staticmethod
    def defaults():
        """
            Safe defaults for names in case the module is disabled:
            po_area_id becomes a hidden dummy field so that tables
            referencing it can still be defined.
        """
        dummy = S3ReusableField("dummy_id", "integer",
                                readable = False,
                                writable = False,
                                )
        return {"po_area_id": lambda **attr: dummy("area_id"),
                }
# -------------------------------------------------------------------------
    @classmethod
    def area_onaccept(cls, form):
        """ Onaccept actions for po_area: sync OU affiliations """
        try:
            record_id = form.vars.id
        except AttributeError:
            # No record ID available => skip
            return
        cls.area_update_affiliations(record_id)
# -------------------------------------------------------------------------
    @classmethod
    def area_ondelete(cls, row):
        """ Ondelete actions for po_area: clean up OU affiliations """
        try:
            record_id = row.id
        except AttributeError:
            # No record ID available => skip
            return
        cls.area_update_affiliations(record_id)
# -------------------------------------------------------------------------
    @staticmethod
    def area_update_affiliations(record_id):
        """
            Update OU affiliations for an area: the area's person entity
            becomes (only) an OU of its organisation's person entity,
            under the "Areas" role.
            @param record_id: the po_area record ID
        """
        ROLE = "Areas"
        db = current.db
        s3db = current.s3db
        table = s3db.po_area
        row = db(table.id == record_id).select(table.pe_id,
                                               table.deleted,
                                               table.deleted_fk,
                                               table.organisation_id,
                                               limitby=(0, 1),
                                               ).first()
        if not row:
            return
        area_pe_id = row.pe_id
        if not area_pe_id:
            # Area is not linked to a person entity => nothing to do
            return
        # Get the organisation_id
        if row.deleted:
            # For deleted rows the FK is only recoverable from deleted_fk JSON
            try:
                fk = json.loads(row.deleted_fk)
            except ValueError:
                organisation_id = None
            else:
                organisation_id = fk.get("organisation_id")
        else:
            organisation_id = row.organisation_id
        # Get the PE ids
        organisation_pe_id = s3db.pr_get_pe_id("org_organisation",
                                               organisation_id,
                                               )
        # Remove obsolete affiliations
        # ("Areas"-role links to any organisation other than the current one)
        rtable = s3db.pr_role
        atable = s3db.pr_affiliation
        query = (atable.pe_id == row.pe_id) & \
                (atable.deleted != True) & \
                (atable.role_id == rtable.id) & \
                (rtable.role == ROLE) & \
                (rtable.pe_id != organisation_pe_id)
        rows = db(query).select(rtable.pe_id)
        for row in rows:
            s3db.pr_remove_affiliation(row.pe_id, area_pe_id, role=ROLE)
        # Add current affiliation (organisational-unit type)
        from .pr import OU
        s3db.pr_add_affiliation(organisation_pe_id, area_pe_id, role=ROLE, role_type=OU)
# =============================================================================
class OutreachHouseholdModel(S3Model):
names = ("po_household",
"po_household_id",
"po_household_dwelling",
"po_age_group",
"po_household_member",
"po_household_followup",
"po_household_social",
"po_emotional_need",
"po_household_emotional_need",
"po_practical_need",
"po_household_practical_need",
)
def model(self):
T = current.T
db = current.db
define_table = self.define_table
super_link = self.super_link
configure = self.configure
s3 = current.response.s3
crud_strings = s3.crud_strings
person_id = self.pr_person_id
# ---------------------------------------------------------------------
# Household
#
tablename = "po_household"
define_table(tablename,
super_link("doc_id", "doc_entity"),
super_link("pe_id", "pr_pentity"),
self.po_area_id(),
# Controller (area prep) makes it inherit Lx from area
self.gis_location_id(
label = T("Address"),
widget = S3LocationSelector(show_address=True,
# Defaults:
#show_map=settings.get_gis_map_selector(),
#show_postcode=settings.get_gis_postcode_selector(),
prevent_duplicate_addresses = True,
),
),
s3_date("date_visited",
default = "now",
empty = False,
label = T("Date visited"),
),
Field("followup", "boolean",
default = False,
label = T("Follow up"),
represent = s3_yes_no_represent,
),
s3_comments(),
*s3_meta_fields())
# CRUD Strings
crud_strings[tablename] = Storage(
label_create = T("Create Household"),
title_display = T("Household Details"),
title_list = T("Households"),
title_update = T("Edit Household"),
label_list_button = T("List Households"),
label_delete_button = T("Delete Household"),
msg_record_created = T("Household created"),
msg_record_modified = T("Household updated"),
msg_record_deleted = T("Household deleted"),
msg_list_empty = T("No Households currently registered"),
)
# Reusable Field
represent = po_HouseholdRepresent()
household_id = S3ReusableField("household_id", "reference %s" % tablename,
label = T("Household"),
represent = represent,
requires = IS_ONE_OF(db, "po_household.id",
represent,
),
sortby = "name",
comment = S3PopupLink(f = "household",
tooltip = T("Create a new household"),
),
)
sticker_opts = {"W": T("White"),
"Y": T("Yellow"),
"R": T("Red"),
}
# Filter Widgets
filter_widgets = [S3TextFilter(("household_member.person_id$first_name",
"household_member.person_id$middle_name",
"household_member.person_id$last_name",
"location_id$addr_street",
),
label = T("Search"),
comment = T("Search by Address or Name of Household Member"),
),
S3OptionsFilter("area_id",
#hidden = True,
),
S3OptionsFilter("household_dwelling.sticker",
cols = 3,
options = sticker_opts,
),
S3OptionsFilter("emotional_need__link.emotional_need_id",
label = T("Emotional Needs"),
hidden = True,
),
S3OptionsFilter("practical_need__link.practical_need_id",
label = T("Practical Needs"),
hidden = True,
),
S3DateFilter("date_visited",
label = T("Date visited"),
hidden = True,
),
S3OptionsFilter("followup",
cols = 2,
hidden = True,
),
S3DateFilter("household_followup.followup_date",
label = T("Follow-up Date"),
hidden = True,
),
S3OptionsFilter("household_followup.completed",
cols = 2,
hidden = True,
),
S3OptionsFilter("organisation_household.organisation_id",
hidden = True,
),
]
# List fields
list_fields = ("area_id",
"location_id",
"date_visited",
"household_dwelling.sticker",
(T("Emotional Needs"), "emotional_need__link.emotional_need_id"),
(T("Practical Needs"), "practical_need__link.practical_need_id"),
"followup",
"household_followup.followup_date",
"household_followup.completed",
"organisation_household.organisation_id",
"comments",
)
# Reports
report_axes = ["area_id",
"followup",
"organisation_household.organisation_id",
"household_followup.completed",
"household_followup.evaluation",
]
reports = ((T("Number of Households Visited"), "count(id)"),
)
# Custom Form
crud_form = S3SQLCustomForm("area_id",
"location_id",
"date_visited",
"household_dwelling.sticker",
S3SQLInlineLink("emotional_need",
field = "emotional_need_id",
label = T("Emotional Needs"),
),
S3SQLInlineLink("practical_need",
field = "practical_need_id",
label = T("Practical Needs"),
),
"followup",
S3SQLInlineComponent("contact",
label = T("Contact Information"),
fields = ["priority",
(T("Type"), "contact_method"),
(T("Number"), "value"),
"comments",
],
orderby = "priority",
),
"household_social.language",
"household_social.community",
"household_dwelling.dwelling_type",
"household_dwelling.type_of_use",
"household_dwelling.repair_status",
"comments",
)
configure(tablename,
create_next = self.household_create_next,
crud_form = crud_form,
deduplicate = S3Duplicate(primary=("location_id",)),
filter_widgets = filter_widgets,
list_fields = list_fields,
onaccept = self.household_onaccept,
realm_components = ("pr_person",
"household_dwelling",
"household_social",
"household_followup",
"organisation_household",
),
report_options = {"rows": report_axes,
"cols": report_axes,
"fact": reports,
"defaults": {
"rows": "area_id",
"cols": "followup",
"fact": "count(id)",
}
},
super_entity = ("doc_entity", "pr_pentity"),
)
# Components
self.add_components(tablename,
pr_person = {"link": "po_household_member",
"joinby": "household_id",
"key": "person_id",
"actuate": "replace",
},
po_household_dwelling = {"joinby": "household_id",
"multiple": False,
},
po_household_social = {"joinby": "household_id",
"multiple": False,
},
po_household_followup = {"joinby": "household_id",
"multiple": False,
},
po_emotional_need = {"link": "po_household_emotional_need",
"joinby": "household_id",
"key": "emotional_need_id",
},
po_practical_need = {"link": "po_household_practical_need",
"joinby": "household_id",
"key": "practical_need_id",
},
po_organisation_household = "household_id",
)
# ---------------------------------------------------------------------
# Household Members
#
tablename = "po_household_member"
define_table(tablename,
household_id(),
person_id(),
s3_comments(),
*s3_meta_fields())
# ---------------------------------------------------------------------
# Household Member Age Groups (under 18,18-30,30-55,56-75,75+)
#
age_groups = ("<18", "18-30", "30-55", "56-75", "75+")
tablename = "po_age_group"
define_table(tablename,
person_id(),
Field("age_group",
label = T("Age Group"),
requires = IS_EMPTY_OR(IS_IN_SET(age_groups)),
),
*s3_meta_fields())
# ---------------------------------------------------------------------
# Dwelling
#
dwelling_type = {"U": T("Unit"),
"H": T("House"),
"A": T("Apartment"),
"S": T("Supervised House"),
"O": T("Other"),
}
type_of_use = {"S": T("Owner-occupied"),
"R": T("Renting"),
"B": T("Boarding"),
"O": T("Other"),
}
repair_status = {"W": T("waiting"),
"R": T("rebuild"),
"C": T("completed"),
"N": T("not required"),
"O": T("other"),
}
tablename = "po_household_dwelling"
define_table(tablename,
household_id(),
Field("dwelling_type",
label = T("Type of Dwelling"),
represent = S3Represent(options=dwelling_type),
requires = IS_EMPTY_OR(IS_IN_SET(dwelling_type)),
),
Field("type_of_use",
label = T("Type of Use"),
represent = S3Represent(options=type_of_use),
requires = IS_EMPTY_OR(IS_IN_SET(type_of_use)),
),
Field("repair_status",
label = T("Stage of Repair"),
represent = S3Represent(options=repair_status),
requires = IS_EMPTY_OR(IS_IN_SET(repair_status)),
),
Field("sticker",
label = T("Sticker"),
represent = S3Represent(options=sticker_opts),
requires = IS_EMPTY_OR(IS_IN_SET(sticker_opts)),
),
s3_comments(),
*s3_meta_fields())
# CRUD Strings
crud_strings[tablename] = Storage(
title_update = T("Edit Dwelling Data"),
)
# Table configuration
configure(tablename,
deduplicate = S3Duplicate(primary=("household_id",)),
)
# ---------------------------------------------------------------------
# Emotional Needs
#
tablename = "po_emotional_need"
define_table(tablename,
Field("name",
label = T("Name"),
requires = IS_NOT_EMPTY(),
),
s3_comments(),
*s3_meta_fields())
# CRUD Strings
crud_strings[tablename] = Storage(
label_create = T("Create Emotional Need"),
title_display = T("Emotional Need Details"),
title_list = T("Emotional Needs"),
title_update = T("Edit Emotional Need"),
label_list_button = T("List Emotional Needs"),
label_delete_button = T("Delete Emotional Need"),
msg_record_created = T("Emotional Need created"),
msg_record_modified = T("Emotional Need updated"),
msg_record_deleted = T("Emotional Need deleted"),
msg_list_empty = T("No Emotional Needs currently registered"),
)
configure(tablename,
deduplicate = S3Duplicate(),
)
# Reusable Field
represent = S3Represent(lookup=tablename)
emotional_need_id = S3ReusableField("emotional_need_id", "reference %s" % tablename,
label = T("Emotional Need"),
ondelete = "CASCADE",
represent = represent,
requires = IS_ONE_OF(db, "po_emotional_need.id",
represent,
),
sortby = "name",
comment = S3PopupLink(f = "emotional_need",
tooltip = T("Create a new emotional need"),
),
)
tablename = "po_household_emotional_need"
define_table(tablename,
household_id(),
emotional_need_id(),
*s3_meta_fields())
# ---------------------------------------------------------------------
# Practical Needs
#
tablename = "po_practical_need"
define_table(tablename,
Field("name",
label = T("Name"),
requires = IS_NOT_EMPTY(),
),
s3_comments(),
*s3_meta_fields())
# CRUD Strings
crud_strings[tablename] = Storage(
label_create = T("Create Practical Need"),
title_display = T("Practical Need Details"),
title_list = T("Practical Needs"),
title_update = T("Edit Practical Need"),
label_list_button = T("List Practical Needs"),
label_delete_button = T("Delete Practical Need"),
msg_record_created = T("Practical Need created"),
msg_record_modified = T("Practical Need updated"),
msg_record_deleted = T("Practical Need deleted"),
msg_list_empty = T("No Practical Needs currently registered"),
)
configure(tablename,
deduplicate = S3Duplicate(),
)
# Reusable Field
represent = S3Represent(lookup=tablename)
practical_need_id = S3ReusableField("practical_need_id", "reference %s" % tablename,
label = T("Practical Need"),
ondelete = "CASCADE",
represent = represent,
requires = IS_ONE_OF(db, "po_practical_need.id",
represent,
),
sortby = "name",
comment = S3PopupLink(f = "practical_need",
tooltip = T("Create a new practical need"),
),
)
tablename = "po_household_practical_need"
define_table(tablename,
household_id(),
practical_need_id(),
*s3_meta_fields())
# ---------------------------------------------------------------------
# Social Information
#
tablename = "po_household_social"
define_table(tablename,
household_id(),
s3_language(label = T("Main Language"),
select = None,
),
Field("community", "text",
label = T("Community Connections"),
),
s3_comments(),
*s3_meta_fields())
# CRUD Strings
crud_strings[tablename] = Storage(
title_update = T("Edit Social Information"),
)
# Table configuration
configure(tablename,
deduplicate = S3Duplicate(primary=("household_id",)),
)
# ---------------------------------------------------------------------
# Follow-up Details
#
evaluation = {"B": T("better"),
"S": T("same"),
"W": T("worse"),
}
twoweeks = current.request.utcnow + datetime.timedelta(days=14)
tablename = "po_household_followup"
define_table(tablename,
household_id(),
Field("followup_required",
label = T("Follow-up required"),
),
s3_date("followup_date",
label = T("Date for Follow-up"),
default = twoweeks,
past = 0,
),
Field("followup", "text",
label = T("Follow-up made"),
),
Field("completed", "boolean",
default = False,
label = "Follow-up completed",
represent = s3_yes_no_represent,
),
Field("evaluation",
label = T("Evaluation"),
represent = S3Represent(options=evaluation),
requires = IS_EMPTY_OR(IS_IN_SET(evaluation)),
),
s3_comments(),
*s3_meta_fields())
# CRUD Strings
crud_strings[tablename] = Storage(
title_update = T("Edit Follow-up Details"),
)
configure(tablename,
deduplicate = S3Duplicate(primary=("household_id",)),
deletable = False,
)
# ---------------------------------------------------------------------
# Pass names back to global scope (s3.*)
#
return {"po_household_id": household_id,
}
# -------------------------------------------------------------------------
@staticmethod
def defaults():
""" Safe defaults for names in case the module is disabled """
dummy = S3ReusableField("dummy_id", "integer",
readable = False,
writable = False)
return {"po_household_id": lambda **attr: dummy("household_id"),
}
# -------------------------------------------------------------------------
@staticmethod
def household_create_next(r):
""" Determine where to go next after creating a new household """
post_vars = r.post_vars
next_vars = S3Method._remove_filters(r.get_vars)
next_vars.pop("w", None)
follow_up = "followup" in post_vars and post_vars["followup"]
if r.function == "area":
if follow_up:
return URL(f="household",
args=["[id]", "person"],
vars=next_vars,
)
else:
return r.url(method="",
component="household",
vars=next_vars,
)
else:
if follow_up:
return r.url(id="[id]",
component="person",
method="",
vars=next_vars,
)
else:
return r.url(method="summary",
id="",
vars=next_vars,
)
# -------------------------------------------------------------------------
    @staticmethod
    def household_onaccept(form):
        """
            Onaccept-routine for households:
                - auto-create a followup component when household.followup
                  is set and no (undeleted) followup record exists yet

            @param form: the FORM
        """

        formvars = form.vars
        try:
            record_id = formvars.id
        except AttributeError:
            # No record ID available => nothing we can do
            return

        # Auto-create a followup component if household.followup is True
        s3db = current.s3db
        htable = s3db.po_household
        ftable = s3db.po_household_followup
        # Left join so we can detect a missing followup record in one query
        left = ftable.on((ftable.household_id == htable.id) & \
                         (ftable.deleted != True))
        row = current.db(htable.id == record_id).select(htable.id,
                                                        htable.followup,
                                                        htable.realm_entity,
                                                        ftable.id,
                                                        left=left,
                                                        limitby=(0, 1)).first()
        if row and row[htable.followup] and not row[ftable.id]:
            # Inherit the realm entity from the household
            ftable.insert(household_id=row[htable.id],
                          realm_entity=row[htable.realm_entity],
                          )
# =============================================================================
class OutreachReferralModel(S3Model):
    """
        Model to track referrals of households to organisations
        (referral agencies)
    """

    names = ("po_referral_organisation",
             "po_organisation_area",
             "po_organisation_household",
             )

    def model(self):
        """ Define the referral tables and their CRUD/dedup configuration """

        T = current.T

        define_table = self.define_table
        configure = self.configure
        s3 = current.response.s3
        crud_strings = s3.crud_strings

        organisation_id = self.org_organisation_id

        # Organisation Represent should link to po/organisation
        org_link = URL(c="po", f="organisation", args="[id]")
        org_represent = self.org_OrganisationRepresent(show_link=True,
                                                       linkto=org_link,
                                                       )

        # Organisation AddResourceLink should go to po/organisation
        ADD_ORGANISATION = T("Create Agency")
        tooltip = T("If you don't see the Agency in the list, you can add a new one by clicking link 'Create Agency'.")
        org_comment = S3PopupLink(c = "po",
                                  f = "organisation",
                                  label = ADD_ORGANISATION,
                                  title = ADD_ORGANISATION,
                                  tooltip = tooltip,
                                  )

        # ---------------------------------------------------------------------
        # Referral Agency (context link table), currently not visible
        #
        tablename = "po_referral_organisation"
        define_table(tablename,
                     organisation_id(represent = org_represent,
                                     comment = org_comment,
                                     ),
                     #s3_comments(),
                     *s3_meta_fields())

        configure(tablename,
                  deduplicate = S3Duplicate(primary = ("organisation_id",)),
                  )

        # ---------------------------------------------------------------------
        # Areas Served by a Referral Agency
        #
        tablename = "po_organisation_area"
        define_table(tablename,
                     # @todo: AddResourceLink should go to po/organisation
                     organisation_id(label = T("Agency"),
                                     represent = org_represent,
                                     comment = org_comment,
                                     ),
                     self.po_area_id(),
                     s3_comments(),
                     *s3_meta_fields())

        # CRUD Strings
        crud_strings[tablename] = Storage(
            label_create = T("Add Agency"),
            title_update = T("Edit Referral Agency"),
            label_list_button = T("List Agencies"),
            label_delete_button = T("Remove Agency"),
        )

        # ---------------------------------------------------------------------
        # Referral Household => Agency
        #
        tablename = "po_organisation_household"
        define_table(tablename,
                     # @todo: AddResourceLink should go to po/organisation
                     organisation_id(label = T("Referral Agency"),
                                     represent = org_represent,
                                     comment = org_comment,
                                     ),
                     self.po_household_id(),
                     s3_date(default = "now",
                             label =T("Date Referral Made"),
                             ),
                     s3_comments(),
                     *s3_meta_fields())

        # CRUD Strings
        crud_strings[tablename] = Storage(
            label_create = T("Add Referral"),
            title_update = T("Edit Referral Details"),
            label_delete_button = T("Delete Referral"),
        )

        # Table Configuration
        # NOTE(review): dedup key (household_id, organisation_id) means one
        # referral record per household/agency pair — confirm this is intended
        configure(tablename,
                  deduplicate = S3Duplicate(primary = ("household_id",
                                                       "organisation_id",
                                                       ),
                                            ),
                  orderby = "%s.date desc" % tablename,
                  list_fields = ("date",
                                 "organisation_id",
                                 "household_id",
                                 "comments",
                                 ),
                  )
# =============================================================================
class po_HouseholdRepresent(S3Represent):
    """ Represent a po_household record by its (street) address """

    def __init__(self, show_link=True):
        """
            Constructor

            @param show_link: whether to add a URL to representations
        """

        super(po_HouseholdRepresent, self).__init__(
                                        lookup = "po_household",
                                        show_link = show_link)

        # Address-only location representation, links suppressed
        self.location_represent = \
            current.s3db.gis_LocationRepresent(address_only=True,
                                               show_link=False,
                                               )

    # -------------------------------------------------------------------------
    def lookup_rows(self, key, values, fields=None):
        """
            Custom rows lookup

            @param key: the key Field
            @param values: the values
            @param fields: unused (retained for API compatibility)
        """

        table = self.table

        count = len(values)
        if count == 1:
            query = (key == values[0])
        else:
            query = key.belongs(values)
        rows = current.db(query).select(table.id,
                                        table.location_id,
                                        limitby = (0, count),
                                        )
        self.queries += 1

        # Bulk-represent locations
        # (pre-warms the location representation cache in a single pass)
        location_id = str(table.location_id)
        location_ids = [row[location_id] for row in rows]
        if location_ids:
            self.location_represent.bulk(location_ids, show_link=False)

        return rows

    # -------------------------------------------------------------------------
    def represent_row(self, row):
        """
            Represent a row

            @param row: the Row
        """

        # Represent household as its address
        return self.location_represent(row.location_id)
# =============================================================================
def po_rheader(r, tabs=None):
    """
        Resource header for the Outreach module

        @param r: the S3Request
        @param tabs: optional list of (title, component) tuples overriding
                     the default tabs
    """

    if r.representation != "html":
        # RHeaders only used in interactive views
        return None

    record = r.record
    if not record:
        return None

    T = current.T
    tablename = r.tablename
    rheader_fields = []

    if tablename == "po_area":
        # @todo: hide "Referral Agencies" per deployment setting
        if not tabs:
            tabs = [(T("Basic Details"), ""),
                    (T("Households"), "household"),
                    (T("Referral Agencies"), "organisation"),
                    (T("Documents"), "document"),
                    ]
        rheader_fields = [["name"],
                          ]

    elif tablename == "po_household":
        if not tabs:
            tabs = [(T("Basic Details"), "")]
            if record.followup:
                # Follow-up tabs only when follow-up is required
                tabs.extend([#(T("Contact Information"), "contact"),
                             #(T("Social Information"), "household_social"),
                             #(T("Dwelling"), "household_dwelling"),
                             (T("Members"), "person"),
                             (T("Follow-up Details"), "household_followup"),
                             (T("Referrals"), "organisation_household"),
                             ])
        rheader_fields = [["area_id"],
                          ["location_id"],
                          ]

    elif tablename == "org_organisation":
        # @todo: hide "Areas Served" per deployment setting
        if not tabs:
            tabs = [(T("Basic Details"), ""),
                    (T("Areas Served"), "area"),
                    (T("Referrals"), "organisation_household"),
                    ]
        rheader_fields = [["name"],
                          ]

    return S3ResourceHeader(rheader_fields, tabs)(r)
# =============================================================================
def po_organisation_onaccept(form):
    """
        1. Set the owned_by_group to the PO_AGENCIES role so that its
           members can see these agencies in the household referrals
           dropdown
        2. Create a po_referral_organisation record onaccept of
           an org_organisation to link it to this module.

        @param form: the form
    """

    try:
        organisation_id = form.vars["id"]
    except AttributeError:
        # No record ID => nothing we can do
        return

    db = current.db
    s3db = current.s3db

    otable = s3db.org_organisation
    record = db(otable.id == organisation_id).select(otable.id,
                                                     otable.owned_by_group,
                                                     limitby=(0, 1)
                                                     ).first()
    if record:
        gtable = db.auth_group
        role = db(gtable.uuid == "PO_AGENCIES").select(gtable.id,
                                                       limitby = (0, 1)
                                                       ).first()
        try:
            PO_AGENCIES = role.id
        except AttributeError:
            # No PO_AGENCIES role prepopped
            pass
        else:
            # Transfer ownership to the PO_AGENCIES role if necessary
            if record.owned_by_group != PO_AGENCIES:
                record.update_record(owned_by_group = PO_AGENCIES)

    # Link the organisation to this module (idempotent)
    rtable = s3db.po_referral_organisation
    query = (rtable.organisation_id == organisation_id) & \
            (rtable.deleted != True)
    exists = db(query).select(rtable.id, limitby=(0, 1)).first()
    if not exists:
        rtable.insert(organisation_id=organisation_id)
# =============================================================================
def po_due_followups():
    """ Number of due follow-ups """

    today = datetime.datetime.utcnow().date()
    # Due = scheduled on/before today and not yet completed
    overdue = (FS("followup_date") <= today) & \
              (FS("completed") != True)
    return current.s3db.resource("po_household_followup",
                                 filter=overdue).count()
# END =========================================================================
| 34,963 | 5,271 | 120 |
7a5e56c55559adf12d890e6b7072abf9a91e3aee | 768 | py | Python | backend/routers/cv/cv.py | izdwuut/ceevee | bf3803bea88fa90101f8055fcc8168f572338e2d | [
"MIT"
] | 1 | 2021-05-12T19:56:49.000Z | 2021-05-12T19:56:49.000Z | backend/routers/cv/cv.py | izdwuut/ceevee | bf3803bea88fa90101f8055fcc8168f572338e2d | [
"MIT"
] | 1 | 2020-10-31T19:59:55.000Z | 2020-10-31T20:05:57.000Z | backend/routers/cv/cv.py | resumik/resumik | bf3803bea88fa90101f8055fcc8168f572338e2d | [
"MIT"
] | 1 | 2021-03-10T14:22:12.000Z | 2021-03-10T14:22:12.000Z | from fastapi import APIRouter
from pydantic import UUID4
from models.cv.cv import CVModel, CV_Pydantic
from models.cv.details import DetailsModel
cv_router = APIRouter()
@cv_router.get("/{cv_id}")
@cv_router.get("/")
@cv_router.post("/{user_id}")
| 24 | 65 | 0.730469 | from fastapi import APIRouter
from pydantic import UUID4
from models.cv.cv import CVModel, CV_Pydantic
from models.cv.details import DetailsModel
cv_router = APIRouter()
@cv_router.get("/{cv_id}")
async def get_cv(cv_id: UUID4):
    """Return a single CV, serialized, looked up by its UUID."""
    record = await CVModel.get(id=cv_id)
    serialized = await CV_Pydantic.from_tortoise_orm(record)
    return serialized
@cv_router.get("/")
async def get_cvs(user_id: UUID4):
    """Return all CVs belonging to the given user, serialized."""
    records = await CVModel.filter(user_id=user_id)
    return [await CV_Pydantic.from_tortoise_orm(record)
            for record in records]
@cv_router.post("/{user_id}")
async def add_cv(user_id: UUID4):
    """Create an empty details record plus a fresh CV for the user."""
    new_details = await DetailsModel.create()
    new_cv = await CVModel().create(details=new_details, user_id=user_id)
    return await CV_Pydantic.from_tortoise_orm(new_cv)
| 445 | 0 | 66 |
608579813c7b89e6b26401e5c791c78ea017e805 | 1,403 | py | Python | src/dataFetchers/voltageDailyFetcher.py | dheerajgupta0001/wrldc_mis_monthly_files_load | a3a6bc950c54a3444248e1eb257814951a999b1d | [
"MIT"
] | null | null | null | src/dataFetchers/voltageDailyFetcher.py | dheerajgupta0001/wrldc_mis_monthly_files_load | a3a6bc950c54a3444248e1eb257814951a999b1d | [
"MIT"
] | null | null | null | src/dataFetchers/voltageDailyFetcher.py | dheerajgupta0001/wrldc_mis_monthly_files_load | a3a6bc950c54a3444248e1eb257814951a999b1d | [
"MIT"
] | null | null | null | from typing import Dict
import pandas as pd
import datetime as dt
from src.typeDefs.freqVoltConfig import IFreqVoltConfig
from src.typeDefs.voltRecord import IVoltDataRecord
from typing import List
| 33.404762 | 127 | 0.622238 | from typing import Dict
import pandas as pd
import datetime as dt
from src.typeDefs.freqVoltConfig import IFreqVoltConfig
from src.typeDefs.voltRecord import IVoltDataRecord
from typing import List
def getDailyVoltData(freqVoltConfigs: List[IFreqVoltConfig], targetFilePath: str) -> List[IVoltDataRecord]:
    """Extract 400kV/765kV daily voltage records from the target excel file.

    Sheet names for each voltage level come from the config entries whose
    data_type is 'volt_400' / 'volt_765'; a level with no configured sheet
    is skipped. All data values are returned as strings.
    """
    # Map data_type -> sheet name (a later matching entry overrides an
    # earlier one, mirroring the original scan)
    sheet_names = {}
    for conf in freqVoltConfigs:
        if conf["data_type"] in ("volt_400", "volt_765"):
            sheet_names[conf["data_type"]] = conf["sheet"]

    voltRecords: List[IVoltDataRecord] = []
    for volt_level, sheet in ((400, sheet_names.get("volt_400", "")),
                              (765, sheet_names.get("volt_765", ""))):
        if sheet == '':
            continue
        # Two header rows: entity name / metric name
        df = pd.read_excel(targetFilePath, sheet_name=sheet, header=[0, 1])
        # Wide -> long: one row per (time, entity, metric)
        df = pd.melt(df, id_vars=[df.columns[0]])
        df = df.rename(columns={df.columns[0]: 'data_time',
                                'variable_0': 'entity_name',
                                'variable_1': 'metric_name',
                                'value': 'data_val'})
        df['volt_level'] = volt_level
        voltRecords.extend(df.to_dict('records'))

    # Data values are stored as strings
    for rec in voltRecords:
        rec['data_val'] = str(rec['data_val'])
    return voltRecords
| 1,181 | 0 | 23 |
4197f99e4c9e1a4eb00ae71ec0a2e57c570f6414 | 3,690 | py | Python | ezodf2/whitespaces.py | iwschris/ezodf2 | 061c4aa3f26e9157ad46155d8ce92db7187b0574 | [
"MIT"
] | 4 | 2015-03-15T22:32:35.000Z | 2019-12-23T12:13:13.000Z | ezodf2/whitespaces.py | iwschris/ezodf2 | 061c4aa3f26e9157ad46155d8ce92db7187b0574 | [
"MIT"
] | 3 | 2017-08-17T09:36:42.000Z | 2021-12-13T19:43:28.000Z | ezodf2/whitespaces.py | iwschris/ezodf2 | 061c4aa3f26e9157ad46155d8ce92db7187b0574 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
#coding:utf-8
# Purpose: whitespace processing
# Created: 06.01.2011
# Copyright (C) 2011, Manfred Moitzi
# License: MIT
from __future__ import unicode_literals, print_function, division
__author__ = "mozman <mozman@gmx.at>"
from .compatibility import tostr
from .xmlns import register_class, CN
from .base import GenericWrapper
@register_class
@register_class
@register_class
@register_class
WhitespaceEncoder = _WhitespaceEncoder()
| 25.102041 | 66 | 0.56206 | #!/usr/bin/env python
#coding:utf-8
# Purpose: whitespace processing
# Created: 06.01.2011
# Copyright (C) 2011, Manfred Moitzi
# License: MIT
from __future__ import unicode_literals, print_function, division
__author__ = "mozman <mozman@gmx.at>"
from .compatibility import tostr
from .xmlns import register_class, CN
from .base import GenericWrapper
@register_class
class Tabulator(GenericWrapper):
    """ Wrapper for the ODF <text:tab> element (one TAB character) """
    TAG = CN('text:tab')

    def __str__(self):
        return self.plaintext()

    @property
    def textlen(self):
        # A tab counts as one character of plaintext
        return 1

    def plaintext(self):
        return '\t'
@register_class
class LineBreak(Tabulator):
    """ Wrapper for <text:line-break>; textlen (1) inherited from Tabulator """
    TAG = CN('text:line-break')

    def plaintext(self):
        return '\n'
@register_class
class Spaces(Tabulator):
    """ Wrapper for <text:s>, a run of `count` consecutive spaces """
    TAG = CN('text:s')

    def __init__(self, count=1, xmlnode=None):
        super(Spaces, self).__init__(xmlnode)
        if xmlnode is None:
            # New element: store the requested count
            self.count = count

    @property
    def count(self):
        # The 'text:c' attribute is omitted for a single space
        count = self.get_attr(CN('text:c'))
        return int(count) if count is not None else 1

    @count.setter
    def count(self, value):
        # Only counts > 1 are stored as 'text:c'
        # NOTE(review): an existing attribute is never cleared, so lowering
        # a previously-set count to 1 has no effect — confirm intended
        if int(value) > 1:
            self.set_attr(CN('text:c'), tostr(value))

    @property
    def textlen(self):
        return self.count

    def plaintext(self):
        return ' ' * self.count
@register_class
class SoftPageBreak(Tabulator):
    """ Wrapper for <text:soft-page-break>: no plaintext representation """
    TAG = CN('text:soft-page-break')

    @property
    def textlen(self):
        # Soft page breaks do not contribute to the plaintext length
        return 0

    def plaintext(self):
        return ''
class _WhitespaceEncoder(object):
    """Convert plaintext whitespace to/from ODF whitespace elements.

    encode() splits a plaintext string into plain substrings and
    Tabulator/LineBreak/Spaces wrapper objects; decode() joins such a
    sequence back into a single plaintext string.
    """
    def __init__(self):
        # Fix: these used to be class attributes, i.e. shared by every
        # instance. encode() resets them on entry, so the module singleton
        # behaved correctly, but per-instance state is safer.
        self.result = []         # finished output items (str or wrappers)
        self.stack = []          # characters of the current plain-text run
        self.space_counter = 0   # length of the current trailing space run

    def encode(self, plaintext):
        """Split `plaintext` into plain strings and whitespace objects."""
        self.result = []
        self.stack = []
        self.space_counter = 0
        for char in plaintext:
            if char == '\n':
                self.add_brk()
            elif char == '\t':
                self.add_tab()
            elif char == ' ':
                self.add_spc()
            else:
                self.add_char(char)
        # Flush the tail: a run of >1 spaces becomes a Spaces element
        if self.space_counter > 1:
            self.append_space()
        else:
            self.append_stack()
        return self.result

    @staticmethod
    def decode(taglist):
        """Join strings/whitespace objects back into one plaintext string."""
        return "".join( (tostr(tag) for tag in taglist) )

    def append_stack(self):
        # Flush the pending plain-text run (if any) into the result list
        if not self.stack:
            return
        txt = ''.join(self.stack)
        self.stack = []
        self.result.append(txt)

    def append_space(self):
        # A run of N>1 spaces is encoded as one literal space (kept on the
        # stack) followed by a Spaces(N-1) element
        spaces = self.space_counter - 1
        # remove last spaces from stack
        self.stack = self.stack[: -spaces]
        self.append_stack()
        self.result.append(Spaces(spaces))
        self.space_counter = 0

    def add_brk(self):
        # Flush pending text, then emit a LineBreak element
        if self.space_counter > 1:
            self.append_space()
        else:
            self.append_stack()
            self.space_counter = 0
        self.result.append(LineBreak())

    def add_tab(self):
        # Flush pending text, then emit a Tabulator element
        if self.space_counter > 1:
            self.append_space()
        else:
            self.append_stack()
            self.space_counter = 0
        self.result.append(Tabulator())

    def add_spc(self):
        # Spaces go onto the stack and are counted; the counter decides
        # later whether they collapse into a Spaces element
        self.add_char(' ')
        self.space_counter += 1

    def add_char(self, char):
        if char != ' ':
            # Non-space character ends the current run of spaces
            if self.space_counter > 1:
                self.append_space()
            else:
                self.space_counter = 0
        self.stack.append(char)
# Module-level singleton used by the convenience functions below
WhitespaceEncoder = _WhitespaceEncoder()

def encode_whitespaces(plaintext):
    """ Split `plaintext` into strings and whitespace wrapper objects """
    return WhitespaceEncoder.encode(plaintext)

def decode_whitespaces(taglist):
    """ Join strings/whitespace objects back into one plaintext string """
    return WhitespaceEncoder.decode(taglist)
| 2,177 | 867 | 162 |
7ec208231c80a6789a34bfeabdc98108c3394d56 | 963 | py | Python | outlier.py | td236/Fisher-s-linear-discriminant | 21a4c2a1f270f9ad579626caf6270b8ce0cd9d79 | [
"MIT"
] | null | null | null | outlier.py | td236/Fisher-s-linear-discriminant | 21a4c2a1f270f9ad579626caf6270b8ce0cd9d79 | [
"MIT"
] | null | null | null | outlier.py | td236/Fisher-s-linear-discriminant | 21a4c2a1f270f9ad579626caf6270b8ce0cd9d79 | [
"MIT"
] | null | null | null | import pandas as pd
df = pd.read_csv('data_banknote_authentication.txt')
outliers(1.5, df)
outliers(2.5, df)
print()
authentic = df[df['class'] == 0] # class 0
fake = df[df['class'] == 1] # class 1
outliers(1.5, authentic)
outliers(2.0, authentic)
outliers(2.5, authentic)
print()
outliers(1.5, fake)
outliers(2.0, fake)
outliers(2.5, fake)
| 24.692308 | 56 | 0.575286 | import pandas as pd
df = pd.read_csv('data_banknote_authentication.txt')
def outliers(factor, df):
    """Print, for each feature column, the number of IQR-based outliers.

    Stops at the 'class' column (assumed to be the last, label column).
    A value is an outlier if it lies outside [Q1 - factor*IQR, Q3 + factor*IQR].
    """
    print("factor: ", factor)
    for col_name in df:
        if col_name == "class":
            break
        series = df[col_name]
        q1 = series.quantile(0.25)
        q3 = series.quantile(0.75)
        iqr = q3 - q1
        print('Colunm', col_name, "- ", end=" ")
        low_bound = q1 - factor * iqr    # Standard value is 1.5
        high_bound = q3 + factor * iqr
        n_out = ((series < low_bound) |
                 (series > high_bound)).sum()
        if col_name == "entropy":
            # extra space keeps the columns visually aligned
            print(" ", end="")
        print("# outliers: ", n_out)
# Whole dataset at two IQR factors
for factor in (1.5, 2.5):
    outliers(factor, df)
print()

# Split by class label and repeat at three factors
authentic = df[df['class'] == 0]  # class 0
fake = df[df['class'] == 1]  # class 1

for factor in (1.5, 2.0, 2.5):
    outliers(factor, authentic)
print()
for factor in (1.5, 2.0, 2.5):
    outliers(factor, fake)
| 591 | 0 | 23 |
8c9a395b6211fec8cef609fff0203df6b43bd9ff | 1,291 | py | Python | uai-censor-sdk/api/create_async_video_job.py | ucloud/uai-saas-sdk | 96935088daf924c247d6b86eab725e29a9d169f3 | [
"Apache-2.0"
] | 4 | 2020-02-25T18:25:51.000Z | 2021-03-16T08:42:30.000Z | uai-censor-sdk/api/create_async_video_job.py | ucloud/uai-saas-sdk | 96935088daf924c247d6b86eab725e29a9d169f3 | [
"Apache-2.0"
] | null | null | null | uai-censor-sdk/api/create_async_video_job.py | ucloud/uai-saas-sdk | 96935088daf924c247d6b86eab725e29a9d169f3 | [
"Apache-2.0"
] | 2 | 2019-04-22T09:36:46.000Z | 2019-06-25T06:17:53.000Z | from api.base_datastream_api import UAICensorBaseDatastreamApi
from api.utils import gen_async_video_censor_url
| 41.645161 | 106 | 0.629744 | from api.base_datastream_api import UAICensorBaseDatastreamApi
from api.utils import gen_async_video_censor_url
class UAICensorCreateAsyncVideoJobApi(UAICensorBaseDatastreamApi):
    """Datastream API request that submits an asynchronous video censor job."""
    REQUEST_URL = gen_async_video_censor_url()

    def __init__(self, signature, public_key, resource_id, timestamp,
                 scenes, url, interval, callback=''):
        super(UAICensorCreateAsyncVideoJobApi, self).__init__(
            self.REQUEST_URL, signature, public_key, resource_id, timestamp)
        # Job-specific command parameters
        self.cmd_params.update({'Scenes': scenes,
                                'Url': url,
                                'Interval': interval,
                                'Callback': callback})

    def _check_args(self, header, params):
        """Reject requests with no scenes, an empty URL, or a zero interval."""
        if not super(UAICensorCreateAsyncVideoJobApi, self)._check_args(header, params):
            return False
        if len(params['Scenes']) == 0 or params['Url'] == "" or params['Interval'] == 0:
            return False
        return True

    def call_api(self):
        """Validate/prepare the request, then POST it and return the result."""
        super(UAICensorCreateAsyncVideoJobApi, self).call_api()
        return super(UAICensorCreateAsyncVideoJobApi, self)._send_post_request()
| 982 | 174 | 23 |
67060ad8973e2b1b3ea5ebbdaf76a6e358959b84 | 2,481 | py | Python | demo-004.py | XuekuanWang/sparse-001 | b4d597d084dc2c876afe2c8ab63f7d25d1900fca | [
"Apache-2.0"
] | null | null | null | demo-004.py | XuekuanWang/sparse-001 | b4d597d084dc2c876afe2c8ab63f7d25d1900fca | [
"Apache-2.0"
] | null | null | null | demo-004.py | XuekuanWang/sparse-001 | b4d597d084dc2c876afe2c8ab63f7d25d1900fca | [
"Apache-2.0"
] | null | null | null | import math
import numpy.linalg
import numpy as np
def shrink(X, tau):
"""
Apply the shrinkage operator the the elements of X.
Returns V such that V[i,j] = max(abs(X[i,j]) - tau,0).
"""
V = np.copy(X).reshape(X.size)
for i in range(V.size):
V[i] = math.copysign(max(abs(V[i]) - tau, 0), V[i])
if V[i] == -0:
V[i] = 0
return V.reshape(X.shape)
def frobeniusNorm(X):
"""
Evaluate the Frobenius norm of X
Returns sqrt(sum_i sum_j X[i,j] ^ 2)
"""
accum = 0
V = np.reshape(X,X.size)
for i in range(V.size):
accum += abs(V[i] ** 2)
return math.sqrt(accum)
def L1Norm(X):
"""
Evaluate the L1 norm of X
Returns the max over the sum of each column of X
"""
return max(np.sum(X,axis=0))
def converged(Y,W,X,E):
"""
A simple test of convergence based on accuracy of matrix reconstruction
from sparse and low rank parts
"""
error = frobeniusNorm(Y - np.dot(W,X) - E) / frobeniusNorm(Y)
print("error =", error)
return error <= 5*10e-5
def run(X_list,Y_list):
"""
"""
Y = Y_list[0]
X = X_list[0]
L = np.zeros(Y.shape)
W = np.zeros([3,3])
E = np.zeros(X.shape)
mu = (Y.shape[0] * Y.shape[1]) / (4.0 * L1Norm(Y))
lamb = max(Y.shape) ** -0.5
print(mu)
i = 1
while not converged(Y,W,X,E):
Y = Y_list[i]
X = X_list[i]
tmp = Y - E + L*(mu**-1)
# print(tmp)
W = np.dot(np.dot(tmp,np.transpose(X)),np.linalg.inv(np.dot(X,np.transpose(X))))
W = -W
#print(np.dot(Y,np.linalg.det(X)))
#print(W)
E = shrink(Y-np.dot(W,X) + (mu**-1) * L, (mu)*lamb)
#mu = max(mu * 0.98,mu*0.1)
#print(mu)
L = L - 1 * (Y - np.dot(W,X) - E)
#print(mu)
i = (i+1) % X_list.__len__()
return W,E
if __name__ == '__main__':
x_list = []
y_list = []
W = np.random.randint(0,10,size=[3,3]) / 255.0
for i in range(10):
x = np.random.randint(0, 255, size=[3, 1000])/255.0
x_list.append(x)
E = GaussieNoisy(x,0.1)
y = np.dot(W,x) + E
y_list.append(y)
W_res, E_res = run(x_list,y_list)
print(W)
print(W_res)
print(E_res)
| 20.848739 | 88 | 0.526401 | import math
import numpy.linalg
import numpy as np
def shrink(X, tau):
"""
Apply the shrinkage operator the the elements of X.
Returns V such that V[i,j] = max(abs(X[i,j]) - tau,0).
"""
V = np.copy(X).reshape(X.size)
for i in range(V.size):
V[i] = math.copysign(max(abs(V[i]) - tau, 0), V[i])
if V[i] == -0:
V[i] = 0
return V.reshape(X.shape)
def frobeniusNorm(X):
    """
    Evaluate the Frobenius norm of X

    Returns sqrt(sum_i sum_j X[i,j] ^ 2)
    """
    # Vectorized replacement for the original O(n) Python loop:
    # sum of |x|^2 over all elements, then sqrt. abs() keeps this
    # correct for complex input, matching the original abs(V[i] ** 2).
    return math.sqrt(np.sum(np.abs(X) ** 2))
def L1Norm(X):
    """
    Evaluate the L1 norm of X

    Returns the max over the sum of each column of X
    """
    column_sums = np.sum(X, axis=0)
    return column_sums.max()
def converged(Y, W, X, E):
    """
    A simple test of convergence based on accuracy of matrix reconstruction
    from sparse and low rank parts
    """
    residual = Y - np.dot(W, X) - E
    error = frobeniusNorm(residual) / frobeniusNorm(Y)
    print("error =", error)
    return error <= 5*10e-5
def run(X_list,Y_list):
    """
    Alternately estimate the mixing matrix W and the sparse error E such
    that Y ~ W.X + E, cycling through the supplied batches until the
    relative reconstruction error in converged() drops below threshold.

    Returns (W, E) from the final iteration.
    """
    Y = Y_list[0]
    X = X_list[0]
    L = np.zeros(Y.shape)          # Lagrange multiplier estimate
    W = np.zeros([3,3])            # mixing matrix (assumed 3x3)
    E = np.zeros(X.shape)          # sparse error term
    # Penalty parameter and sparsity weight (RPCA-style heuristics)
    mu = (Y.shape[0] * Y.shape[1]) / (4.0 * L1Norm(Y))
    lamb = max(Y.shape) ** -0.5
    print(mu)
    i = 1
    while not converged(Y,W,X,E):
        Y = Y_list[i]
        X = X_list[i]
        tmp = Y - E + L*(mu**-1)
        # print(tmp)
        # Least-squares update of W: tmp.X' (X.X')^-1
        W = np.dot(np.dot(tmp,np.transpose(X)),np.linalg.inv(np.dot(X,np.transpose(X))))
        # NOTE(review): W is negated after the least-squares step — looks
        # deliberate, but confirm against the intended model convention
        W = -W
        #print(np.dot(Y,np.linalg.det(X)))
        #print(W)
        # Soft-threshold the residual to update the sparse part
        E = shrink(Y-np.dot(W,X) + (mu**-1) * L, (mu)*lamb)
        #mu = max(mu * 0.98,mu*0.1)
        #print(mu)
        # Gradient step on the multiplier estimate
        L = L - 1 * (Y - np.dot(W,X) - E)
        #print(mu)
        # Cycle through the batches
        i = (i+1) % X_list.__len__()
    return W,E
def GaussieNoisy(image, sigma):
    """Add zero-mean Gaussian noise with standard deviation `sigma`
    to a 2-D image and return the noisy copy."""
    rows, cols = image.shape
    noise = np.random.normal(0, sigma, (rows, cols))
    return image + noise
if __name__ == '__main__':
    # Demo: generate 10 random batches with y = W.x + E, then try to
    # recover the mixing matrix W (and sparse part E) with run()
    x_list = []
    y_list = []
    W = np.random.randint(0,10,size=[3,3]) / 255.0
    for i in range(10):
        x = np.random.randint(0, 255, size=[3, 1000])/255.0
        x_list.append(x)
        # NOTE(review): GaussieNoisy returns x + noise, so E here is not
        # pure noise — confirm the intended data model
        E = GaussieNoisy(x,0.1)
        y = np.dot(W,x) + E
        y_list.append(y)
    W_res, E_res = run(x_list,y_list)
    # Compare the true W against the recovered estimate
    print(W)
    print(W_res)
    print(E_res)
afd2de431b1d5fc7842a17d05064220c3e9fbaad | 24 | py | Python | starkit/fitkit/optimizers/__init__.py | dchu808/starkit | 1940683ef231cee54be2c703d4a7611a3991d8b7 | [
"BSD-3-Clause"
] | 12 | 2018-05-15T14:59:27.000Z | 2022-01-11T16:44:43.000Z | starkit/fitkit/optimizers/__init__.py | dchu808/starkit | 1940683ef231cee54be2c703d4a7611a3991d8b7 | [
"BSD-3-Clause"
] | 27 | 2018-03-13T10:45:38.000Z | 2020-08-03T20:47:31.000Z | starkit/fitkit/optimizers/__init__.py | dchu808/starkit | 1940683ef231cee54be2c703d4a7611a3991d8b7 | [
"BSD-3-Clause"
] | 17 | 2018-03-13T10:06:53.000Z | 2019-06-27T02:02:10.000Z | __author__ = 'wkerzend'
| 12 | 23 | 0.75 | __author__ = 'wkerzend'
| 0 | 0 | 0 |
cd584731c47ab0375b79f468f280e5a2e5064fbd | 20,131 | py | Python | tests/test_cli_cache_connector.py | NERSC/pytokio | 22244718cf82567c50620cbe0e635dfc990de36b | [
"BSD-3-Clause-LBNL"
] | 22 | 2017-11-14T01:30:48.000Z | 2022-01-01T21:51:00.000Z | tests/test_cli_cache_connector.py | glennklockwood/pytokio | 22244718cf82567c50620cbe0e635dfc990de36b | [
"BSD-3-Clause-LBNL"
] | 39 | 2017-12-20T01:42:19.000Z | 2020-05-28T21:17:26.000Z | tests/test_cli_cache_connector.py | glennklockwood/pytokio | 22244718cf82567c50620cbe0e635dfc990de36b | [
"BSD-3-Clause-LBNL"
] | 5 | 2018-02-06T19:39:19.000Z | 2019-07-10T01:20:26.000Z | #!/usr/bin/env python
"""
Test each connector's standalone CLI cache tool
"""
import os
import json
import sqlite3
try:
import StringIO as io
except ImportError:
import io
import datetime
import pandas
import nose
try:
import elasticsearch.exceptions
_HAVE_ELASTICSEARCH = True
except ImportError:
_HAVE_ELASTICSEARCH = False
try:
from requests.exceptions import Timeout, ConnectionError, HTTPError
_HAVE_REQUESTS = True
except ImportError:
_HAVE_REQUESTS = False
import tokiotest
import tokio.cli.cache_collectdes
import tokio.cli.cache_darshan
import tokio.cli.cache_esnet_snmp
import tokio.cli.cache_isdct
import tokio.cli.cache_lfsstatus
import tokio.cli.cache_lmtdb
import tokio.cli.cache_mmperfmon
import tokio.cli.cache_nersc_globuslogs
import tokio.cli.cache_nersc_jobsdb
import tokio.cli.cache_slurm
import tokio.cli.cache_topology
@nose.tools.with_setup(tokiotest.create_tempfile, tokiotest.delete_tempfile)
def verify_sqlite(output_str):
    """
    Verify a cache connector's SQLite output.

    Finds the cache file path announced on stdout ("Caching to <path>"),
    then checks that the SQLite database contains at least one table and
    that every table contains at least one row.

    Args:
        output_str (str): stdout of the cache connector CLI
    """
    ### Try to find the caching file name from the application's stdout
    output_file = None
    for line in output_str.splitlines():
        if line.startswith('Caching to'):
            output_file = line.strip().split(None, 3)[-1]
            break

    if output_file is None:
        print("Could not find cache file name in output:")
        print(output_str)
    assert output_file is not None
    print("Using output_file [%s]" % output_file)
    assert os.path.isfile(output_file)

    tmpdb = sqlite3.connect(output_file)
    cursor = tmpdb.cursor()
    ## Count number of tables
    cursor.execute("SELECT name FROM sqlite_master WHERE type = 'table'")
    tables = cursor.fetchall()
    print("Found %d tables in %s" % (len(tables), output_file))
    assert len(tables) > 0
    for table in [x[0] for x in tables]:
        cursor.execute('SELECT count(*) FROM %s' % table)
        rows = cursor.fetchall()
        num_rows = rows[0][0]
        print("Found %d rows in %s" % (num_rows, table))
        # Bug fix: a COUNT(*) query always returns exactly one row, so the
        # old `assert len(rows) > 0` could never fail even for an empty
        # table; assert on the actual row count instead.
        assert num_rows > 0
    tmpdb.close()
def verify_json_zero_ok(json_str):
"""Ensure that json is loadable
Args:
json_str (str): string containing json text
"""
data = json.loads(json_str)
assert data is not None
def verify_json(json_str):
"""Ensure that json is loadable and contains something
Args:
json_str (str): string containing json text
"""
data = json.loads(json_str)
assert len(data) > 0
def verify_csv(csv_str):
"""
Ensure that csv is loadable by Pandas
"""
data = pandas.read_csv(io.StringIO(csv_str))
assert len(data) > 0
def verify_sacct(csv_str):
"""
Ensure that native format is vaguely valid (treat it as a |-separated csv)
"""
data = pandas.read_csv(io.StringIO(csv_str), sep="|")
assert len(data) > 0
def run_connector(binary, argv):
"""Default cache_connector run function
Args:
binary (module): tokio.cli module that contains a main() function
argv (list of str): list of CLI arguments to pass to connector
Returns:
Stdout of cache connector script as a string
"""
return tokiotest.run_bin(binary, argv)
def run_elasticsearch(binary, argv):
"""Run function that traps connection errors from ElasticSearch
Args:
binary (module): tokio.cli module that contains a main() function
argv (list of str): list of CLI arguments to pass to connector
Returns:
Stdout of cache connector script as a string
"""
if not _HAVE_ELASTICSEARCH:
raise nose.SkipTest("elasticsearch module not available")
try:
return tokiotest.run_bin(binary, argv)
except elasticsearch.exceptions.ConnectionError as error:
raise nose.SkipTest(error)
def run_requests(binary, argv):
"""Run function that traps connection errors from REST
Args:
binary (module): tokio.cli module that contains a main() function
argv (list of str): list of CLI arguments to pass to connector
Returns:
Stdout of cache connector script as a string
"""
if not _HAVE_REQUESTS:
raise nose.SkipTest("requests module not available")
try:
return tokiotest.run_bin(binary, argv)
except (ConnectionError, Timeout, HTTPError) as error:
raise nose.SkipTest(error)
@nose.tools.raises(ValueError)
@nose.tools.raises(SystemExit)
CACHE_CONNECTOR_CONFIGS = [
{
'name': 'cli.cache_mmperfmon',
'description': 'cli.cache_mmperfmon, gzipped text input',
'binary': tokio.cli.cache_mmperfmon,
'args': [tokiotest.SAMPLE_MMPERFMON_USAGE_INPUT],
'validators': [verify_json,],
},
{
'name': 'cli.cache_mmperfmon',
'description': 'cli.cache_mmperfmon --csv --metric, gzipped text input',
'binary': tokio.cli.cache_mmperfmon,
'args': ['--csv', '--metric', tokiotest.SAMPLE_MMPERFMON_METRICS[0], tokiotest.SAMPLE_MMPERFMON_USAGE_INPUT],
'validators': [verify_csv,],
},
{
'name': 'cli.cache_mmperfmon',
'description': 'cli.cache_mmperfmon --csv --host, gzipped text input',
'binary': tokio.cli.cache_mmperfmon,
'args': ['--csv', '--host', tokiotest.SAMPLE_MMPERFMON_HOSTS[0], tokiotest.SAMPLE_MMPERFMON_USAGE_INPUT],
'validators': [verify_csv,],
},
{
'name': 'cli.cache_mmperfmon',
'description': 'cli.cache_mmperfmon --csv without --host/--metric',
'binary': tokio.cli.cache_mmperfmon,
'runfunction': run_raises_systemexit,
'args': ['--csv', tokiotest.SAMPLE_MMPERFMON_USAGE_INPUT],
},
{
'name': 'cli.cache_mmperfmon',
'description': 'cli.cache_mmperfmon, tarfile input',
'binary': tokio.cli.cache_mmperfmon,
'args': [tokiotest.SAMPLE_MMPERFMON_TGZ_INPUT],
'validators': [verify_json,],
},
{
'name': 'cli.cache_mmperfmon',
'description': 'cli.cache_mmperfmon, multiple inputs',
'binary': tokio.cli.cache_mmperfmon,
'args': [tokiotest.SAMPLE_MMPERFMON_USAGE_INPUT, tokiotest.SAMPLE_MMPERFMON_USAGE_INPUT],
'validators': [verify_json,],
},
{
'name': 'cli.cache_nersc_globuslogs',
'description': 'cli.cache_nersc_globuslogs, cached input',
'binary': tokio.cli.cache_nersc_globuslogs,
'args': ['--input', tokiotest.SAMPLE_GLOBUSLOGS,
tokiotest.SAMPLE_TIMESTAMP_START_NOW,
tokiotest.SAMPLE_TIMESTAMP_END_NOW],
'validators': [verify_json,],
},
{
'name': 'cli.cache_nersc_globuslogs',
'description': 'cli.cache_nersc_globuslogs, remote connection',
'binary': tokio.cli.cache_nersc_globuslogs,
'args': ['--input', tokiotest.SAMPLE_GLOBUSLOGS,
tokiotest.SAMPLE_TIMESTAMP_START_NOW,
tokiotest.SAMPLE_TIMESTAMP_END_NOW],
'runfunction': run_elasticsearch,
'validators': [verify_json_zero_ok,],
},
{
'name': 'cli.cache_nersc_globuslogs',
'description': 'cli.cache_nersc_globuslogs --csv',
'binary': tokio.cli.cache_nersc_globuslogs,
'args': ['--input', tokiotest.SAMPLE_GLOBUSLOGS,
'--csv',
tokiotest.SAMPLE_TIMESTAMP_START_NOW,
tokiotest.SAMPLE_TIMESTAMP_END_NOW],
'validators': [verify_csv,],
},
{
'name': 'cli.cache_isdct',
'binary': tokio.cli.cache_isdct,
'args': ['--json', tokiotest.SAMPLE_NERSCISDCT_FILE],
'validators': [verify_json,],
},
{
'name': 'cli.cache_isdct',
'binary': tokio.cli.cache_isdct,
'args': ['--csv', tokiotest.SAMPLE_NERSCISDCT_FILE],
'validators': [verify_csv,],
},
{
'name': 'cli.cache_collectdes',
'description': 'cli.cache_collectdes, cached input',
'binary': tokio.cli.cache_collectdes,
'args': ['--input', tokiotest.SAMPLE_COLLECTDES_FILE,
tokiotest.SAMPLE_COLLECTDES_START,
tokiotest.SAMPLE_COLLECTDES_END],
'validators': [verify_json,],
},
{
'name': 'cli.cache_collectdes',
'description': 'cli.cache_collectdes, remote connection',
'binary': tokio.cli.cache_collectdes,
'args': [tokiotest.SAMPLE_TIMESTAMP_START_NOW,
tokiotest.SAMPLE_TIMESTAMP_END_NOW],
'runfunction': run_elasticsearch,
'validators': [verify_json_zero_ok,],
},
{
'name': 'cli.cache_darshan',
'binary': tokio.cli.cache_darshan,
'args': ['--base', tokiotest.SAMPLE_DARSHAN_LOG],
'validators': [verify_json,],
},
{
'name': 'cli.cache_darshan',
'binary': tokio.cli.cache_darshan,
'args': ['--perf', tokiotest.SAMPLE_DARSHAN_LOG],
'validators': [verify_json,],
},
{
'name': 'cli.cache_darshan',
'binary': tokio.cli.cache_darshan,
'args': ['--total', tokiotest.SAMPLE_DARSHAN_LOG],
'validators': [verify_json,],
},
{
'name': 'cli.cache_darshan',
'binary': tokio.cli.cache_darshan,
'args': ['--base', '--perf', tokiotest.SAMPLE_DARSHAN_LOG],
'validators': [verify_json,],
},
{
'name': 'cli.cache_darshan',
'binary': tokio.cli.cache_darshan,
'args': ['--base', '--total', tokiotest.SAMPLE_DARSHAN_LOG],
'validators': [verify_json,],
},
{
'name': 'cli.cache_darshan',
'binary': tokio.cli.cache_darshan,
'args': ['--perf', '--total', tokiotest.SAMPLE_DARSHAN_LOG],
'validators': [verify_json,],
},
{
'name': 'cli.cache_darshan',
'binary': tokio.cli.cache_darshan,
'args': ['--base', '--perf', '--total', tokiotest.SAMPLE_DARSHAN_LOG],
'validators': [verify_json,],
},
{
'name': 'cli.cache_esnet_snmp',
'description': 'cli.cache_esnet_snmp default args',
'binary': tokio.cli.cache_esnet_snmp,
'args': ["--timeout", "5", "nersc"],
'runfunction': run_requests,
'validators': [verify_json,],
},
{
'name': 'cli.cache_esnet_snmp',
'description': 'cli.cache_esnet_snmp --json, remote connection',
'binary': tokio.cli.cache_esnet_snmp,
'args': ["--timeout", "5", "--json", "nersc"],
'runfunction': run_requests,
'validators': [verify_json,],
},
{
'name': 'cli.cache_esnet_snmp',
'description': 'cli.cache_esnet_snmp --csv, remote connection',
'binary': tokio.cli.cache_esnet_snmp,
'args': ["--timeout", "5", "--csv", "nersc"],
'runfunction': run_requests,
'validators': [verify_csv,],
},
{
'name': 'cli.cache_esnet_snmp',
'description': 'cli.cache_esnet_snmp --json, cached input',
'binary': tokio.cli.cache_esnet_snmp,
'args': ["--input", tokiotest.SAMPLE_ESNET_SNMP_FILE, "--json", "nersc"],
'validators': [verify_json,],
},
{
'name': 'cli.cache_esnet_snmp',
'description': 'cli.cache_esnet_snmp --csv, cached input',
'binary': tokio.cli.cache_esnet_snmp,
'args': ["--input", tokiotest.SAMPLE_ESNET_SNMP_FILE, "--csv", "nersc"],
'validators': [verify_csv,],
},
{
'name': 'cli.cache_esnet_snmp',
'description': 'cli.cache_esnet_snmp, explicit endpoint:interface',
'binary': tokio.cli.cache_esnet_snmp,
'args': ["--input", tokiotest.SAMPLE_ESNET_SNMP_FILE, "blah0:interf0"],
'validators': [verify_json,],
},
{
'name': 'cli.cache_esnet_snmp',
'description': 'cli.cache_esnet_snmp, invalid endpoint:interface',
'binary': tokio.cli.cache_esnet_snmp,
'args': ["--input", tokiotest.SAMPLE_ESNET_SNMP_FILE, "blah"],
'runfunction': run_raises_valueerror,
},
{
'name': 'cli.cache_esnet_snmp',
'description': 'cli.cache_esnet_snmp, invalid --start format',
'binary': tokio.cli.cache_esnet_snmp,
'args': ["--input", tokiotest.SAMPLE_ESNET_SNMP_FILE, "--start", "invalid", "nersc"],
'runfunction': run_raises_valueerror,
},
{
'name': 'cli.cache_esnet_snmp',
'description': 'cli.cache_esnet_snmp, --end without --start',
'binary': tokio.cli.cache_esnet_snmp,
'args': ["--input", tokiotest.SAMPLE_ESNET_SNMP_FILE, "--end", "2019-01-01T00:00:00", "nersc"],
'runfunction': run_raises_systemexit,
},
{
'name': 'cli.cache_esnet_snmp',
'description': 'cli.cache_esnet_snmp, --start > --end',
'binary': tokio.cli.cache_esnet_snmp,
'args': ["--input", tokiotest.SAMPLE_ESNET_SNMP_FILE, "--start", "2019-01-02T00:00:00", "--end", "2019-01-01T00:00:00", "nersc"],
'runfunction': run_raises_valueerror,
},
{
'name': 'cli.cache_slurm',
'binary': tokio.cli.cache_slurm,
'args': ['--json', tokiotest.SAMPLE_SLURM_CACHE_FILE],
'validators': [verify_json,],
},
{
'name': 'cli.cache_slurm',
'binary': tokio.cli.cache_slurm,
'args': ['--csv', tokiotest.SAMPLE_SLURM_CACHE_FILE],
'validators': [verify_csv,],
},
{
'name': 'cli.cache_slurm',
'binary': tokio.cli.cache_slurm,
'args': ['--native', tokiotest.SAMPLE_SLURM_CACHE_FILE],
'validators': [verify_sacct,],
},
{
'name': 'cli.cache_topology',
'description': 'cli.cache_topology',
'binary': tokio.cli.cache_topology,
'args': [
'--nodemap-cache', tokiotest.SAMPLE_XTDB2PROC_FILE,
'--jobinfo-cache', tokiotest.SAMPLE_SLURM_CACHE_FILE,
],
'validators': [verify_json,],
},
{
'description': 'cli.cache_lfsstatus --fullness, no cache',
'binary': tokio.cli.cache_lfsstatus,
'args': [
'--fullness',
'--',
tokiotest.SAMPLE_DARSHAN_SONEXION_ID,
tokiotest.SAMPLE_DARSHAN_START_TIME.replace(' ', 'T'),
],
'validators': [verify_json,],
},
{
'description': 'cli.cache_lfsstatus --fullness, explicit cache',
'binary': tokio.cli.cache_lfsstatus,
'args': [
'--fullness',
tokiotest.SAMPLE_OSTFULLNESS_FILE,
tokiotest.SAMPLE_DARSHAN_SONEXION_ID,
tokiotest.SAMPLE_DARSHAN_START_TIME.replace(' ', 'T'),
],
'validators': [verify_json,],
},
{
'description': 'cli.cache_lfsstatus --failure, no cache',
'binary': tokio.cli.cache_lfsstatus,
'args': [
'--failure',
'--',
tokiotest.SAMPLE_DARSHAN_SONEXION_ID,
tokiotest.SAMPLE_DARSHAN_START_TIME.replace(' ', 'T'),
],
'validators': [verify_json,],
},
{
'description': 'cli.cache_lfsstatus --failure, explicit cache',
'binary': tokio.cli.cache_lfsstatus,
'args': [
'--failure',
tokiotest.SAMPLE_OSTMAP_FILE,
tokiotest.SAMPLE_DARSHAN_SONEXION_ID,
tokiotest.SAMPLE_DARSHAN_START_TIME.replace(' ', 'T'),
],
'validators': [verify_json,],
},
{
'description': 'cli.cache_nersc_jobsdb',
'binary': tokio.cli.cache_nersc_jobsdb,
'args': [
'-i', tokiotest.SAMPLE_NERSCJOBSDB_FILE,
datetime.datetime.fromtimestamp(
tokiotest.SAMPLE_NERSCJOBSDB_START).strftime("%Y-%m-%dT%H:%M:%S"),
datetime.datetime.fromtimestamp(
tokiotest.SAMPLE_NERSCJOBSDB_END).strftime("%Y-%m-%dT%H:%M:%S"),
'edison',
],
'validators': [verify_sqlite,],
'to_file': [True],
'validate_contents': False,
},
{
'description': 'cli.cache_lmtdb',
'binary': tokio.cli.cache_lmtdb,
'args': [
'-i', tokiotest.SAMPLE_LMTDB_FILE,
datetime.datetime.fromtimestamp(
tokiotest.SAMPLE_LMTDB_START).strftime("%Y-%m-%dT%H:%M:%S"),
datetime.datetime.fromtimestamp(
tokiotest.SAMPLE_LMTDB_END).strftime("%Y-%m-%dT%H:%M:%S"),
],
'validators': [verify_sqlite,],
'to_file': [True],
'validate_contents': False,
},
{
'description': 'cli.cache_lmtdb --limit',
'binary': tokio.cli.cache_lmtdb,
'args': [
'--limit', '2',
'-i', tokiotest.SAMPLE_LMTDB_FILE,
datetime.datetime.fromtimestamp(
tokiotest.SAMPLE_LMTDB_START).strftime("%Y-%m-%dT%H:%M:%S"),
datetime.datetime.fromtimestamp(
tokiotest.SAMPLE_LMTDB_END).strftime("%Y-%m-%dT%H:%M:%S"),
],
'validators': [verify_sqlite,],
'to_file': [True],
'validate_contents': False,
},
]
@nose.tools.with_setup(tokiotest.create_tempfile, tokiotest.delete_tempfile)
def run_cache_connector(config, to_file=False):
"""
Test a connector cache (cache_*.py) CLI interface
"""
if config['binary'] == tokio.cli.cache_darshan:
tokiotest.check_darshan()
runfunction = config.get('runfunction', run_connector)
if to_file:
argv = ['-o', tokiotest.TEMP_FILE.name] + config['args']
print("Caching to %s" % tokiotest.TEMP_FILE.name)
print("Executing: %s" % ' '.join(argv))
output_str = runfunction(config['binary'], argv)
# (validate_contents == True) means the associated validator function
# expects the contents of the output file rather than the name of the
# output file
if config.get('validate_contents', True):
output_str = tokiotest.TEMP_FILE.read()
else:
argv = config['args']
print("Caching to stdout")
print("Executing: %s" % ' '.join(argv))
output_str = runfunction(config['binary'], argv)
for validator in config.get('validators', []):
if isinstance(output_str, bytes):
validator(output_str.decode())
else:
validator(output_str)
def craft_description(config, suffix):
"""
Take a cache_*.py command invocation and craft a clever test description
"""
if 'description' in config:
result = "%s %s" % (config['description'], suffix)
elif 'name' in config:
result = "%s %s %s" % (
config['name'],
' '.join(config['args'][0:-1]),
suffix)
else:
result = "%s %s %s" % (
config['binary'],
' '.join(config['args'][0:-1]),
suffix)
return result
@tokiotest.needs_darshan
def test():
"""
Test all connector cache scripts
"""
for config in CACHE_CONNECTOR_CONFIGS:
func = run_cache_connector
for to_file in config.get('to_file', [True, False]):
if to_file:
func.description = craft_description(config, '(to file)')
else:
func.description = craft_description(config, '(to stdout)')
yield func, config, to_file
| 35.693262 | 144 | 0.584422 | #!/usr/bin/env python
"""
Test each connector's standalone CLI cache tool
"""
import os
import json
import sqlite3
try:
import StringIO as io
except ImportError:
import io
import datetime
import pandas
import nose
try:
import elasticsearch.exceptions
_HAVE_ELASTICSEARCH = True
except ImportError:
_HAVE_ELASTICSEARCH = False
try:
from requests.exceptions import Timeout, ConnectionError, HTTPError
_HAVE_REQUESTS = True
except ImportError:
_HAVE_REQUESTS = False
import tokiotest
import tokio.cli.cache_collectdes
import tokio.cli.cache_darshan
import tokio.cli.cache_esnet_snmp
import tokio.cli.cache_isdct
import tokio.cli.cache_lfsstatus
import tokio.cli.cache_lmtdb
import tokio.cli.cache_mmperfmon
import tokio.cli.cache_nersc_globuslogs
import tokio.cli.cache_nersc_jobsdb
import tokio.cli.cache_slurm
import tokio.cli.cache_topology
@nose.tools.with_setup(tokiotest.create_tempfile, tokiotest.delete_tempfile)
def verify_sqlite(output_str):
    """
    Ensure that the database contains at least one table, and that table
    contains at least one row.

    Args:
        output_str (str): stdout of a cache tool; must contain a line of
            the form "Caching to <path>" naming the SQLite file to check.

    Raises:
        AssertionError: if no cache file is named in the output, the file
            does not exist, the database contains no tables, or any table
            is empty.
    """
    ### Try to find the caching file name from the application's stdout
    output_file = None
    for line in output_str.splitlines():
        if line.startswith('Caching to'):
            output_file = line.strip().split(None, 3)[-1]
            break
    if output_file is None:
        print("Could not find cache file name in output:")
        print(output_str)
    assert output_file is not None
    print("Using output_file [%s]" % output_file)
    assert os.path.isfile(output_file)
    tmpdb = sqlite3.connect(output_file)
    cursor = tmpdb.cursor()
    ## Count number of tables
    cursor.execute("SELECT name FROM sqlite_master WHERE type = 'table'")
    tables = cursor.fetchall()
    print("Found %d tables in %s" % (len(tables), output_file))
    assert len(tables) > 0
    for table in [x[0] for x in tables]:
        cursor.execute('SELECT count(*) FROM %s' % table)
        rows = cursor.fetchall()
        num_rows = rows[0][0]
        print("Found %d rows in %s" % (num_rows, table))
        # Bug fix: the previous check (len(rows) > 0) was vacuous because a
        # count(*) query always returns exactly one row.  Assert on the
        # actual row count, matching the docstring's contract.
        assert num_rows > 0
def verify_json_zero_ok(json_str):
    """Assert that *json_str* parses as JSON; an empty result is acceptable.

    Args:
        json_str (str): string containing json text
    """
    parsed = json.loads(json_str)
    assert parsed is not None
def verify_json(json_str):
    """Assert that *json_str* parses as JSON and holds at least one element.

    Args:
        json_str (str): string containing json text
    """
    parsed = json.loads(json_str)
    assert len(parsed) > 0
def verify_csv(csv_str):
    """
    Assert that *csv_str* is parseable by Pandas and has at least one row.
    """
    frame = pandas.read_csv(io.StringIO(csv_str))
    assert len(frame) > 0
def verify_sacct(csv_str):
    """
    Assert that native sacct output is vaguely valid by parsing it as a
    pipe-separated CSV with at least one data row.
    """
    frame = pandas.read_csv(io.StringIO(csv_str), sep="|")
    assert len(frame) > 0
def run_connector(binary, argv):
    """Default cache_connector run function.

    Runs the tool directly with no exception trapping; any error raised by
    the connector propagates to the caller.

    Args:
        binary (module): tokio.cli module that contains a main() function
        argv (list of str): list of CLI arguments to pass to connector
    Returns:
        Stdout of cache connector script as a string
    """
    return tokiotest.run_bin(binary, argv)
def run_elasticsearch(binary, argv):
    """Run function that traps connection errors from ElasticSearch.

    Args:
        binary (module): tokio.cli module that contains a main() function
        argv (list of str): list of CLI arguments to pass to connector
    Returns:
        Stdout of cache connector script as a string
    Raises:
        nose.SkipTest: when the elasticsearch package is unavailable or the
            remote server cannot be reached.
    """
    if not _HAVE_ELASTICSEARCH:
        raise nose.SkipTest("elasticsearch module not available")
    try:
        output = tokiotest.run_bin(binary, argv)
    except elasticsearch.exceptions.ConnectionError as error:
        raise nose.SkipTest(error)
    return output
def run_requests(binary, argv):
    """Run function that traps connection errors from REST endpoints.

    Args:
        binary (module): tokio.cli module that contains a main() function
        argv (list of str): list of CLI arguments to pass to connector
    Returns:
        Stdout of cache connector script as a string
    Raises:
        nose.SkipTest: when the requests package is unavailable or the
            remote endpoint cannot be reached.
    """
    if not _HAVE_REQUESTS:
        raise nose.SkipTest("requests module not available")
    try:
        output = tokiotest.run_bin(binary, argv)
    except (ConnectionError, Timeout, HTTPError) as error:
        raise nose.SkipTest(error)
    return output
@nose.tools.raises(ValueError)
def run_raises_valueerror(binary, argv):
    """Run a connector, expecting it to raise ValueError (enforced by nose)."""
    return tokiotest.run_bin(binary, argv)
@nose.tools.raises(SystemExit)
def run_raises_systemexit(binary, argv):
    """Run a connector, expecting it to raise SystemExit (enforced by nose)."""
    return tokiotest.run_bin(binary, argv)
CACHE_CONNECTOR_CONFIGS = [
{
'name': 'cli.cache_mmperfmon',
'description': 'cli.cache_mmperfmon, gzipped text input',
'binary': tokio.cli.cache_mmperfmon,
'args': [tokiotest.SAMPLE_MMPERFMON_USAGE_INPUT],
'validators': [verify_json,],
},
{
'name': 'cli.cache_mmperfmon',
'description': 'cli.cache_mmperfmon --csv --metric, gzipped text input',
'binary': tokio.cli.cache_mmperfmon,
'args': ['--csv', '--metric', tokiotest.SAMPLE_MMPERFMON_METRICS[0], tokiotest.SAMPLE_MMPERFMON_USAGE_INPUT],
'validators': [verify_csv,],
},
{
'name': 'cli.cache_mmperfmon',
'description': 'cli.cache_mmperfmon --csv --host, gzipped text input',
'binary': tokio.cli.cache_mmperfmon,
'args': ['--csv', '--host', tokiotest.SAMPLE_MMPERFMON_HOSTS[0], tokiotest.SAMPLE_MMPERFMON_USAGE_INPUT],
'validators': [verify_csv,],
},
{
'name': 'cli.cache_mmperfmon',
'description': 'cli.cache_mmperfmon --csv without --host/--metric',
'binary': tokio.cli.cache_mmperfmon,
'runfunction': run_raises_systemexit,
'args': ['--csv', tokiotest.SAMPLE_MMPERFMON_USAGE_INPUT],
},
{
'name': 'cli.cache_mmperfmon',
'description': 'cli.cache_mmperfmon, tarfile input',
'binary': tokio.cli.cache_mmperfmon,
'args': [tokiotest.SAMPLE_MMPERFMON_TGZ_INPUT],
'validators': [verify_json,],
},
{
'name': 'cli.cache_mmperfmon',
'description': 'cli.cache_mmperfmon, multiple inputs',
'binary': tokio.cli.cache_mmperfmon,
'args': [tokiotest.SAMPLE_MMPERFMON_USAGE_INPUT, tokiotest.SAMPLE_MMPERFMON_USAGE_INPUT],
'validators': [verify_json,],
},
{
'name': 'cli.cache_nersc_globuslogs',
'description': 'cli.cache_nersc_globuslogs, cached input',
'binary': tokio.cli.cache_nersc_globuslogs,
'args': ['--input', tokiotest.SAMPLE_GLOBUSLOGS,
tokiotest.SAMPLE_TIMESTAMP_START_NOW,
tokiotest.SAMPLE_TIMESTAMP_END_NOW],
'validators': [verify_json,],
},
{
'name': 'cli.cache_nersc_globuslogs',
'description': 'cli.cache_nersc_globuslogs, remote connection',
'binary': tokio.cli.cache_nersc_globuslogs,
'args': ['--input', tokiotest.SAMPLE_GLOBUSLOGS,
tokiotest.SAMPLE_TIMESTAMP_START_NOW,
tokiotest.SAMPLE_TIMESTAMP_END_NOW],
'runfunction': run_elasticsearch,
'validators': [verify_json_zero_ok,],
},
{
'name': 'cli.cache_nersc_globuslogs',
'description': 'cli.cache_nersc_globuslogs --csv',
'binary': tokio.cli.cache_nersc_globuslogs,
'args': ['--input', tokiotest.SAMPLE_GLOBUSLOGS,
'--csv',
tokiotest.SAMPLE_TIMESTAMP_START_NOW,
tokiotest.SAMPLE_TIMESTAMP_END_NOW],
'validators': [verify_csv,],
},
{
'name': 'cli.cache_isdct',
'binary': tokio.cli.cache_isdct,
'args': ['--json', tokiotest.SAMPLE_NERSCISDCT_FILE],
'validators': [verify_json,],
},
{
'name': 'cli.cache_isdct',
'binary': tokio.cli.cache_isdct,
'args': ['--csv', tokiotest.SAMPLE_NERSCISDCT_FILE],
'validators': [verify_csv,],
},
{
'name': 'cli.cache_collectdes',
'description': 'cli.cache_collectdes, cached input',
'binary': tokio.cli.cache_collectdes,
'args': ['--input', tokiotest.SAMPLE_COLLECTDES_FILE,
tokiotest.SAMPLE_COLLECTDES_START,
tokiotest.SAMPLE_COLLECTDES_END],
'validators': [verify_json,],
},
{
'name': 'cli.cache_collectdes',
'description': 'cli.cache_collectdes, remote connection',
'binary': tokio.cli.cache_collectdes,
'args': [tokiotest.SAMPLE_TIMESTAMP_START_NOW,
tokiotest.SAMPLE_TIMESTAMP_END_NOW],
'runfunction': run_elasticsearch,
'validators': [verify_json_zero_ok,],
},
{
'name': 'cli.cache_darshan',
'binary': tokio.cli.cache_darshan,
'args': ['--base', tokiotest.SAMPLE_DARSHAN_LOG],
'validators': [verify_json,],
},
{
'name': 'cli.cache_darshan',
'binary': tokio.cli.cache_darshan,
'args': ['--perf', tokiotest.SAMPLE_DARSHAN_LOG],
'validators': [verify_json,],
},
{
'name': 'cli.cache_darshan',
'binary': tokio.cli.cache_darshan,
'args': ['--total', tokiotest.SAMPLE_DARSHAN_LOG],
'validators': [verify_json,],
},
{
'name': 'cli.cache_darshan',
'binary': tokio.cli.cache_darshan,
'args': ['--base', '--perf', tokiotest.SAMPLE_DARSHAN_LOG],
'validators': [verify_json,],
},
{
'name': 'cli.cache_darshan',
'binary': tokio.cli.cache_darshan,
'args': ['--base', '--total', tokiotest.SAMPLE_DARSHAN_LOG],
'validators': [verify_json,],
},
{
'name': 'cli.cache_darshan',
'binary': tokio.cli.cache_darshan,
'args': ['--perf', '--total', tokiotest.SAMPLE_DARSHAN_LOG],
'validators': [verify_json,],
},
{
'name': 'cli.cache_darshan',
'binary': tokio.cli.cache_darshan,
'args': ['--base', '--perf', '--total', tokiotest.SAMPLE_DARSHAN_LOG],
'validators': [verify_json,],
},
{
'name': 'cli.cache_esnet_snmp',
'description': 'cli.cache_esnet_snmp default args',
'binary': tokio.cli.cache_esnet_snmp,
'args': ["--timeout", "5", "nersc"],
'runfunction': run_requests,
'validators': [verify_json,],
},
{
'name': 'cli.cache_esnet_snmp',
'description': 'cli.cache_esnet_snmp --json, remote connection',
'binary': tokio.cli.cache_esnet_snmp,
'args': ["--timeout", "5", "--json", "nersc"],
'runfunction': run_requests,
'validators': [verify_json,],
},
{
'name': 'cli.cache_esnet_snmp',
'description': 'cli.cache_esnet_snmp --csv, remote connection',
'binary': tokio.cli.cache_esnet_snmp,
'args': ["--timeout", "5", "--csv", "nersc"],
'runfunction': run_requests,
'validators': [verify_csv,],
},
{
'name': 'cli.cache_esnet_snmp',
'description': 'cli.cache_esnet_snmp --json, cached input',
'binary': tokio.cli.cache_esnet_snmp,
'args': ["--input", tokiotest.SAMPLE_ESNET_SNMP_FILE, "--json", "nersc"],
'validators': [verify_json,],
},
{
'name': 'cli.cache_esnet_snmp',
'description': 'cli.cache_esnet_snmp --csv, cached input',
'binary': tokio.cli.cache_esnet_snmp,
'args': ["--input", tokiotest.SAMPLE_ESNET_SNMP_FILE, "--csv", "nersc"],
'validators': [verify_csv,],
},
{
'name': 'cli.cache_esnet_snmp',
'description': 'cli.cache_esnet_snmp, explicit endpoint:interface',
'binary': tokio.cli.cache_esnet_snmp,
'args': ["--input", tokiotest.SAMPLE_ESNET_SNMP_FILE, "blah0:interf0"],
'validators': [verify_json,],
},
{
'name': 'cli.cache_esnet_snmp',
'description': 'cli.cache_esnet_snmp, invalid endpoint:interface',
'binary': tokio.cli.cache_esnet_snmp,
'args': ["--input", tokiotest.SAMPLE_ESNET_SNMP_FILE, "blah"],
'runfunction': run_raises_valueerror,
},
{
'name': 'cli.cache_esnet_snmp',
'description': 'cli.cache_esnet_snmp, invalid --start format',
'binary': tokio.cli.cache_esnet_snmp,
'args': ["--input", tokiotest.SAMPLE_ESNET_SNMP_FILE, "--start", "invalid", "nersc"],
'runfunction': run_raises_valueerror,
},
{
'name': 'cli.cache_esnet_snmp',
'description': 'cli.cache_esnet_snmp, --end without --start',
'binary': tokio.cli.cache_esnet_snmp,
'args': ["--input", tokiotest.SAMPLE_ESNET_SNMP_FILE, "--end", "2019-01-01T00:00:00", "nersc"],
'runfunction': run_raises_systemexit,
},
{
'name': 'cli.cache_esnet_snmp',
'description': 'cli.cache_esnet_snmp, --start > --end',
'binary': tokio.cli.cache_esnet_snmp,
'args': ["--input", tokiotest.SAMPLE_ESNET_SNMP_FILE, "--start", "2019-01-02T00:00:00", "--end", "2019-01-01T00:00:00", "nersc"],
'runfunction': run_raises_valueerror,
},
{
'name': 'cli.cache_slurm',
'binary': tokio.cli.cache_slurm,
'args': ['--json', tokiotest.SAMPLE_SLURM_CACHE_FILE],
'validators': [verify_json,],
},
{
'name': 'cli.cache_slurm',
'binary': tokio.cli.cache_slurm,
'args': ['--csv', tokiotest.SAMPLE_SLURM_CACHE_FILE],
'validators': [verify_csv,],
},
{
'name': 'cli.cache_slurm',
'binary': tokio.cli.cache_slurm,
'args': ['--native', tokiotest.SAMPLE_SLURM_CACHE_FILE],
'validators': [verify_sacct,],
},
{
'name': 'cli.cache_topology',
'description': 'cli.cache_topology',
'binary': tokio.cli.cache_topology,
'args': [
'--nodemap-cache', tokiotest.SAMPLE_XTDB2PROC_FILE,
'--jobinfo-cache', tokiotest.SAMPLE_SLURM_CACHE_FILE,
],
'validators': [verify_json,],
},
{
'description': 'cli.cache_lfsstatus --fullness, no cache',
'binary': tokio.cli.cache_lfsstatus,
'args': [
'--fullness',
'--',
tokiotest.SAMPLE_DARSHAN_SONEXION_ID,
tokiotest.SAMPLE_DARSHAN_START_TIME.replace(' ', 'T'),
],
'validators': [verify_json,],
},
{
'description': 'cli.cache_lfsstatus --fullness, explicit cache',
'binary': tokio.cli.cache_lfsstatus,
'args': [
'--fullness',
tokiotest.SAMPLE_OSTFULLNESS_FILE,
tokiotest.SAMPLE_DARSHAN_SONEXION_ID,
tokiotest.SAMPLE_DARSHAN_START_TIME.replace(' ', 'T'),
],
'validators': [verify_json,],
},
{
'description': 'cli.cache_lfsstatus --failure, no cache',
'binary': tokio.cli.cache_lfsstatus,
'args': [
'--failure',
'--',
tokiotest.SAMPLE_DARSHAN_SONEXION_ID,
tokiotest.SAMPLE_DARSHAN_START_TIME.replace(' ', 'T'),
],
'validators': [verify_json,],
},
{
'description': 'cli.cache_lfsstatus --failure, explicit cache',
'binary': tokio.cli.cache_lfsstatus,
'args': [
'--failure',
tokiotest.SAMPLE_OSTMAP_FILE,
tokiotest.SAMPLE_DARSHAN_SONEXION_ID,
tokiotest.SAMPLE_DARSHAN_START_TIME.replace(' ', 'T'),
],
'validators': [verify_json,],
},
{
'description': 'cli.cache_nersc_jobsdb',
'binary': tokio.cli.cache_nersc_jobsdb,
'args': [
'-i', tokiotest.SAMPLE_NERSCJOBSDB_FILE,
datetime.datetime.fromtimestamp(
tokiotest.SAMPLE_NERSCJOBSDB_START).strftime("%Y-%m-%dT%H:%M:%S"),
datetime.datetime.fromtimestamp(
tokiotest.SAMPLE_NERSCJOBSDB_END).strftime("%Y-%m-%dT%H:%M:%S"),
'edison',
],
'validators': [verify_sqlite,],
'to_file': [True],
'validate_contents': False,
},
{
'description': 'cli.cache_lmtdb',
'binary': tokio.cli.cache_lmtdb,
'args': [
'-i', tokiotest.SAMPLE_LMTDB_FILE,
datetime.datetime.fromtimestamp(
tokiotest.SAMPLE_LMTDB_START).strftime("%Y-%m-%dT%H:%M:%S"),
datetime.datetime.fromtimestamp(
tokiotest.SAMPLE_LMTDB_END).strftime("%Y-%m-%dT%H:%M:%S"),
],
'validators': [verify_sqlite,],
'to_file': [True],
'validate_contents': False,
},
{
'description': 'cli.cache_lmtdb --limit',
'binary': tokio.cli.cache_lmtdb,
'args': [
'--limit', '2',
'-i', tokiotest.SAMPLE_LMTDB_FILE,
datetime.datetime.fromtimestamp(
tokiotest.SAMPLE_LMTDB_START).strftime("%Y-%m-%dT%H:%M:%S"),
datetime.datetime.fromtimestamp(
tokiotest.SAMPLE_LMTDB_END).strftime("%Y-%m-%dT%H:%M:%S"),
],
'validators': [verify_sqlite,],
'to_file': [True],
'validate_contents': False,
},
]
@nose.tools.with_setup(tokiotest.create_tempfile, tokiotest.delete_tempfile)
def run_cache_connector(config, to_file=False):
    """
    Test a connector cache (cache_*.py) CLI interface

    Args:
        config (dict): one entry of CACHE_CONNECTOR_CONFIGS describing the
            binary to run, its argv, an optional run function, and the
            validators to apply to the output
        to_file (bool): when True, cache into tokiotest's temp file via -o;
            otherwise capture the tool's stdout
    """
    if config['binary'] == tokio.cli.cache_darshan:
        tokiotest.check_darshan()
    # Fall back to the plain runner unless the config traps special errors
    runfunction = config.get('runfunction', run_connector)
    if to_file:
        argv = ['-o', tokiotest.TEMP_FILE.name] + config['args']
        print("Caching to %s" % tokiotest.TEMP_FILE.name)
        print("Executing: %s" % ' '.join(argv))
        output_str = runfunction(config['binary'], argv)
        # (validate_contents == True) means the associated validator function
        # expects the contents of the output file rather than the name of the
        # output file
        if config.get('validate_contents', True):
            output_str = tokiotest.TEMP_FILE.read()
    else:
        argv = config['args']
        print("Caching to stdout")
        print("Executing: %s" % ' '.join(argv))
        output_str = runfunction(config['binary'], argv)
    # Validators expect text; decode if the tool produced bytes
    for validator in config.get('validators', []):
        if isinstance(output_str, bytes):
            validator(output_str.decode())
        else:
            validator(output_str)
def craft_description(config, suffix):
    """
    Build a human-readable test description from a cache_*.py invocation.

    Prefers an explicit 'description', then falls back to 'name' or the
    binary itself followed by all arguments except the last one.
    """
    if 'description' in config:
        return "%s %s" % (config['description'], suffix)
    label = config['name'] if 'name' in config else config['binary']
    return "%s %s %s" % (label, ' '.join(config['args'][0:-1]), suffix)
@tokiotest.needs_darshan
def test():
    """
    Test all connector cache scripts

    Nose-style generator test: yields one (function, config, to_file) case
    per config/destination combination.
    """
    for config in CACHE_CONNECTOR_CONFIGS:
        func = run_cache_connector
        # By default exercise both the -o file path and the stdout path
        for to_file in config.get('to_file', [True, False]):
            if to_file:
                func.description = craft_description(config, '(to file)')
            else:
                func.description = craft_description(config, '(to stdout)')
            yield func, config, to_file
| 124 | 0 | 44 |
92436b9b7c47ccddc497c1620da5fc07684e0c9f | 2,133 | py | Python | test.py | LaudateCorpus1/accimage | 15ec9d4a95060ee54ab80fa3a7ae57a9ca7763ff | [
"BSD-2-Clause"
] | 317 | 2017-01-21T07:04:53.000Z | 2022-03-18T16:10:11.000Z | test.py | LaudateCorpus1/accimage | 15ec9d4a95060ee54ab80fa3a7ae57a9ca7763ff | [
"BSD-2-Clause"
] | 30 | 2017-03-15T11:35:29.000Z | 2021-09-18T13:21:05.000Z | test.py | LaudateCorpus1/accimage | 15ec9d4a95060ee54ab80fa3a7ae57a9ca7763ff | [
"BSD-2-Clause"
] | 42 | 2017-04-25T14:42:42.000Z | 2022-03-02T01:27:10.000Z | import accimage
import numpy as np
import imageio
import os
ACCIMAGE_SAVE = os.environ.get('ACCIMAGE_SAVE', '')
if len(ACCIMAGE_SAVE) and ACCIMAGE_SAVE.lower() not in {'0', 'false', 'no'}:
SAVE_IMAGES = True
else:
SAVE_IMAGES = False
def image_to_np(image):
"""
Returns:
np.ndarray: Image converted to array with shape (width, height, channels)
"""
image_np = np.empty([image.channels, image.height, image.width], dtype=np.uint8)
image.copyto(image_np)
image_np = np.transpose(image_np, (1, 2, 0))
return image_np
| 27 | 84 | 0.687764 | import accimage
import numpy as np
import imageio
import os
ACCIMAGE_SAVE = os.environ.get('ACCIMAGE_SAVE', '')
if len(ACCIMAGE_SAVE) and ACCIMAGE_SAVE.lower() not in {'0', 'false', 'no'}:
SAVE_IMAGES = True
else:
SAVE_IMAGES = False
def image_to_np(image):
    """Convert an accimage.Image into a uint8 numpy array.

    Returns:
        np.ndarray: array with shape (height, width, channels)
    """
    planar = np.empty((image.channels, image.height, image.width), dtype=np.uint8)
    image.copyto(planar)
    return planar.transpose(1, 2, 0)
def save_image(path, image):
    """Write an accimage.Image to *path* (via imageio) for visual inspection."""
    imageio.imwrite(path, image_to_np(image))
def test_reading_image():
    """Loading a JPEG from disk yields the expected dimensions."""
    image = accimage.Image("chicago.jpg")
    if SAVE_IMAGES:
        save_image('test_reading_image.jpg', image)
    assert image.width == 1920
    assert image.height == 931
def test_reading_image_from_memory():
    """Decoding from an in-memory buffer matches decoding from disk."""
    from_file = accimage.Image("chicago.jpg")
    bytes = open("chicago.jpg", "rb").read()  # NOTE(review): shadows the builtin `bytes`
    from_bytes = accimage.Image(bytes)
    if SAVE_IMAGES:
        save_image('test_reading_image_from_memory.jpg', from_bytes)
    assert from_bytes.width == 1920
    assert from_bytes.height == 931
    # Pixel data must be identical regardless of how the image was loaded
    np.testing.assert_array_equal(image_to_np(from_file), image_to_np(from_bytes))
def test_resizing():
    """resize(size=(200, 200)) produces a 200x200 image."""
    image = accimage.Image("chicago.jpg")
    image.resize(size=(200, 200))
    if SAVE_IMAGES:
        save_image('test_resizing.jpg', image)
    assert image.width == 200
    assert image.height == 200
def test_cropping():
    """crop() keeps only the requested region, here 100x100 pixels.

    The box is presumably (left, upper, right, lower) as in PIL — confirm
    against the accimage API.
    """
    image = accimage.Image("chicago.jpg")
    image.crop(box=(50, 50, 150, 150))
    if SAVE_IMAGES:
        save_image('test_cropping.jpg', image)
    assert image.width == 100
    assert image.height == 100
def test_flipping():
    """Horizontal flip mirrors pixels along the width axis without resizing."""
    image = accimage.Image("chicago.jpg")
    original_image_np = image_to_np(image)
    # presumably mirrors PIL's Image.FLIP_LEFT_RIGHT constant (0) — confirm
    # against the accimage transpose API.
    FLIP_LEFT_RIGHT = 0
    image.transpose(FLIP_LEFT_RIGHT)
    if SAVE_IMAGES:
        save_image('test_flipping.jpg', image)
    new_image_np = image_to_np(image)
    assert image.width == 1920
    assert image.height == 931
    # Reversing the flipped array along the width axis must reproduce the
    # original pixels exactly.
    np.testing.assert_array_equal(new_image_np[:, ::-1, :], original_image_np)
| 1,431 | 0 | 138 |
f0b4ba278a3ddabe3ebfa7f3478caa2f80cc3931 | 524 | py | Python | seqsearch/databases/nr.py | xapple/seqsearch | cf3a7691285d245829bc9c6d18354e01e631fc12 | [
"MIT"
] | null | null | null | seqsearch/databases/nr.py | xapple/seqsearch | cf3a7691285d245829bc9c6d18354e01e631fc12 | [
"MIT"
] | null | null | null | seqsearch/databases/nr.py | xapple/seqsearch | cf3a7691285d245829bc9c6d18354e01e631fc12 | [
"MIT"
] | 1 | 2015-01-21T14:38:46.000Z | 2015-01-21T14:38:46.000Z | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Written by Lucas Sinclair.
MIT Licensed.
Contact at www.sinclair.bio
"""
# Built-in modules #
# Internal modules #
# First party modules #
###############################################################################
class NonRedundant(object):
    """
    The NR database from NCBI.

    NR contains non-redundant sequences from GenBank translations
    (i.e. GenPept) together with sequences from other databanks
    (Refseq, PDB, SwissProt, PIR and PRF).
    """
    # NOTE(review): placeholder — no download or query logic is visible in
    # this chunk; behavior presumably lives elsewhere in the package.
| 20.96 | 79 | 0.574427 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Written by Lucas Sinclair.
MIT Licensed.
Contact at www.sinclair.bio
"""
# Built-in modules #
# Internal modules #
# First party modules #
###############################################################################
class NonRedundant(object):
    """
    The NR database from NCBI.

    NR contains non-redundant sequences from GenBank translations
    (i.e. GenPept) together with sequences from other databanks
    (Refseq, PDB, SwissProt, PIR and PRF).
    """
    # NOTE(review): placeholder — no download or query logic is visible in
    # this chunk; behavior presumably lives elsewhere in the package.
| 0 | 0 | 0 |
5fa7fff36d6cb6d8b2047e3f41b22a2469bc10e3 | 3,121 | py | Python | ops-implementations/sagemaker-service/swagger_server/test_mocked/test_info_controller.py | IBM/open-prediction-service-hub | 8b7db98f46a81b731d0dddfde8e3fb6f91ebc71a | [
"Apache-2.0"
] | 1 | 2021-09-14T18:40:33.000Z | 2021-09-14T18:40:33.000Z | ops-implementations/sagemaker-service/swagger_server/test_mocked/test_info_controller.py | IBM/open-prediction-service-hub | 8b7db98f46a81b731d0dddfde8e3fb6f91ebc71a | [
"Apache-2.0"
] | 7 | 2021-04-23T13:41:39.000Z | 2021-08-12T09:33:10.000Z | ops-implementations/sagemaker-service/swagger_server/test_mocked/test_info_controller.py | IBM/open-prediction-service-hub | 8b7db98f46a81b731d0dddfde8e3fb6f91ebc71a | [
"Apache-2.0"
] | 5 | 2020-12-10T14:27:23.000Z | 2022-03-29T08:44:22.000Z | #!/usr/bin/env python3
#
# Copyright 2020 IBM
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.IBM Confidential
#
# coding: utf-8
from __future__ import absolute_import
from unittest import mock
from swagger_server.models.capabilities import Capabilities # noqa: E501
from swagger_server.models.capability import Capability # noqa: E501
from swagger_server.models.server_info import ServerInfo # noqa: E501
from swagger_server.test import BaseTestCase
from swagger_server.controllers.info_controller import get_capabilities, get_info
class TestStatusController(BaseTestCase):
    """StatusController integration test stubs.

    These tests compare ``str(response)`` against hard-coded model reprs, so
    they are brittle to any change in the generated models' ``__str__``.
    ``boto3.client`` is patched throughout, so no AWS call is ever made.
    """
    # GET CAPABILITIES
    def test_get_capabilities(self):
        """Test case for get_capabilities
        Get Server Capabilities
        """
        # Expected repr lists the three capabilities the service advertises.
        expected = "{'capabilities': ['" + Capability.INFO + "', '" + Capability.DISCOVER + "', '" + Capability.RUN + "']}"
        response = get_capabilities()
        assert isinstance(response, Capabilities)
        assert str(response) == expected, 'response is not matching expected response'
    # GET STATUS
    @mock.patch("swagger_server.controllers.info_controller.boto3.client")
    def test_get_info(self, mock_boto_client):
        """Test case for get_info
        Get Server Status
        """
        # Happy path: boto3 client creation succeeds, status is 'ok'.
        expected = "{'error': None,\n" +\
                   " 'info': {'description': 'Open Prediction Service for Amazon Sagemaker based '\n" + \
                   "          'on OPSv2 API'},\n" + \
                   " 'status': 'ok'}"
        response = get_info()
        assert isinstance(response, ServerInfo)
        assert str(response) == expected, 'response is not matching expected response'
        mock_boto_client.assert_called_once_with('sagemaker')
    @mock.patch("swagger_server.controllers.info_controller.boto3.client")
    def test_get_info_error(self, mock_boto_client):
        """Test case for get_info
        Get Server Status
        """
        # Failure path: client creation raises, controller reports 'error'
        # with the exception class name in the 'error' field.
        mock_boto_client.side_effect = KeyError('foo')
        expected = '{\'error\': "<class \'KeyError\'>",\n' + \
                   ' \'info\': {\'description\': \'Open Prediction Service for Amazon Sagemaker based \'\n' + \
                   '           \'on OPSv2 API\'},\n' + \
                   ' \'status\': \'error\'}'
        response = get_info()
        assert isinstance(response, ServerInfo)
        assert str(response) == expected, 'response is not matching expected response'
        mock_boto_client.assert_called_once_with('sagemaker')
# Allow running this test module directly with the stdlib unittest runner.
if __name__ == '__main__':
    import unittest
    unittest.main()
| 35.873563 | 123 | 0.659084 | #!/usr/bin/env python3
#
# Copyright 2020 IBM
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.IBM Confidential
#
# coding: utf-8
from __future__ import absolute_import
from unittest import mock
from swagger_server.models.capabilities import Capabilities # noqa: E501
from swagger_server.models.capability import Capability # noqa: E501
from swagger_server.models.server_info import ServerInfo # noqa: E501
from swagger_server.test import BaseTestCase
from swagger_server.controllers.info_controller import get_capabilities, get_info
class TestStatusController(BaseTestCase):
    """StatusController integration test stubs.

    These tests compare ``str(response)`` against hard-coded model reprs, so
    they are brittle to any change in the generated models' ``__str__``.
    ``boto3.client`` is patched throughout, so no AWS call is ever made.
    """
    # GET CAPABILITIES
    def test_get_capabilities(self):
        """Test case for get_capabilities
        Get Server Capabilities
        """
        # Expected repr lists the three capabilities the service advertises.
        expected = "{'capabilities': ['" + Capability.INFO + "', '" + Capability.DISCOVER + "', '" + Capability.RUN + "']}"
        response = get_capabilities()
        assert isinstance(response, Capabilities)
        assert str(response) == expected, 'response is not matching expected response'
    # GET STATUS
    @mock.patch("swagger_server.controllers.info_controller.boto3.client")
    def test_get_info(self, mock_boto_client):
        """Test case for get_info
        Get Server Status
        """
        # Happy path: boto3 client creation succeeds, status is 'ok'.
        expected = "{'error': None,\n" +\
                   " 'info': {'description': 'Open Prediction Service for Amazon Sagemaker based '\n" + \
                   "          'on OPSv2 API'},\n" + \
                   " 'status': 'ok'}"
        response = get_info()
        assert isinstance(response, ServerInfo)
        assert str(response) == expected, 'response is not matching expected response'
        mock_boto_client.assert_called_once_with('sagemaker')
    @mock.patch("swagger_server.controllers.info_controller.boto3.client")
    def test_get_info_error(self, mock_boto_client):
        """Test case for get_info
        Get Server Status
        """
        # Failure path: client creation raises, controller reports 'error'
        # with the exception class name in the 'error' field.
        mock_boto_client.side_effect = KeyError('foo')
        expected = '{\'error\': "<class \'KeyError\'>",\n' + \
                   ' \'info\': {\'description\': \'Open Prediction Service for Amazon Sagemaker based \'\n' + \
                   '           \'on OPSv2 API\'},\n' + \
                   ' \'status\': \'error\'}'
        response = get_info()
        assert isinstance(response, ServerInfo)
        assert str(response) == expected, 'response is not matching expected response'
        mock_boto_client.assert_called_once_with('sagemaker')
# Allow running this test module directly with the stdlib unittest runner.
if __name__ == '__main__':
    import unittest
    unittest.main()
| 0 | 0 | 0 |
e4580e4272a104808d6bd411f21508ce0e61c3c9 | 111 | py | Python | DjangoEcommerceApp/apps.py | abdullah1107/DjangoEcommarceAmazonClone | 93b7d00e74e8e5ce202d0b7cbb70e7974ee47dba | [
"MIT"
] | null | null | null | DjangoEcommerceApp/apps.py | abdullah1107/DjangoEcommarceAmazonClone | 93b7d00e74e8e5ce202d0b7cbb70e7974ee47dba | [
"MIT"
] | null | null | null | DjangoEcommerceApp/apps.py | abdullah1107/DjangoEcommarceAmazonClone | 93b7d00e74e8e5ce202d0b7cbb70e7974ee47dba | [
"MIT"
] | null | null | null | from django.apps import AppConfig
| 18.5 | 42 | 0.801802 | from django.apps import AppConfig
class DjangoecommerceappConfig(AppConfig):
    """Django application configuration for the DjangoEcommerceApp package."""
    name = 'DjangoEcommerceApp'
| 0 | 53 | 23 |
0dbf868fbe306d0bc0e56fc30248f9f7e2a22e93 | 4,694 | py | Python | Competitive Programming/System Design/Operations on Tree.py | shreejitverma/GeeksforGeeks | d7bcb166369fffa9a031a258e925b6aff8d44e6c | [
"MIT"
] | 2 | 2022-02-18T05:14:28.000Z | 2022-03-08T07:00:08.000Z | Competitive Programming/System Design/Operations on Tree.py | shivaniverma1/Competitive-Programming-1 | d7bcb166369fffa9a031a258e925b6aff8d44e6c | [
"MIT"
] | 6 | 2022-01-13T04:31:04.000Z | 2022-03-12T01:06:16.000Z | Competitive Programming/System Design/Operations on Tree.py | shivaniverma1/Competitive-Programming-1 | d7bcb166369fffa9a031a258e925b6aff8d44e6c | [
"MIT"
] | 2 | 2022-02-14T19:53:53.000Z | 2022-02-18T05:14:30.000Z | '''https://leetcode.com/problems/operations-on-tree/
1993. Operations on Tree
Medium
152
34
Add to List
Share
You are given a tree with n nodes numbered from 0 to n - 1 in the form of a parent array parent where parent[i] is the parent of the ith node. The root of the tree is node 0, so parent[0] = -1 since it has no parent. You want to design a data structure that allows users to lock, unlock, and upgrade nodes in the tree.
The data structure should support the following functions:
Lock: Locks the given node for the given user and prevents other users from locking the same node. You may only lock a node using this function if the node is unlocked.
Unlock: Unlocks the given node for the given user. You may only unlock a node using this function if it is currently locked by the same user.
Upgrade: Locks the given node for the given user and unlocks all of its descendants regardless of who locked it. You may only upgrade a node if all 3 conditions are true:
The node is unlocked,
It has at least one locked descendant (by any user), and
It does not have any locked ancestors.
Implement the LockingTree class:
LockingTree(int[] parent) initializes the data structure with the parent array.
lock(int num, int user) returns true if it is possible for the user with id user to lock the node num, or false otherwise. If it is possible, the node num will become locked by the user with id user.
unlock(int num, int user) returns true if it is possible for the user with id user to unlock the node num, or false otherwise. If it is possible, the node num will become unlocked.
upgrade(int num, int user) returns true if it is possible for the user with id user to upgrade the node num, or false otherwise. If it is possible, the node num will be upgraded.
Example 1:
Input
["LockingTree", "lock", "unlock", "unlock", "lock", "upgrade", "lock"]
[[[-1, 0, 0, 1, 1, 2, 2]], [2, 2], [2, 3], [2, 2], [4, 5], [0, 1], [0, 1]]
Output
[null, true, false, true, true, true, false]
Explanation
LockingTree lockingTree = new LockingTree([-1, 0, 0, 1, 1, 2, 2]);
lockingTree.lock(2, 2); // return true because node 2 is unlocked.
// Node 2 will now be locked by user 2.
lockingTree.unlock(2, 3); // return false because user 3 cannot unlock a node locked by user 2.
lockingTree.unlock(2, 2); // return true because node 2 was previously locked by user 2.
// Node 2 will now be unlocked.
lockingTree.lock(4, 5); // return true because node 4 is unlocked.
// Node 4 will now be locked by user 5.
lockingTree.upgrade(0, 1); // return true because node 0 is unlocked and has at least one locked descendant (node 4).
// Node 0 will now be locked by user 1 and node 4 will now be unlocked.
lockingTree.lock(0, 1); // return false because node 0 is already locked.
Constraints:
n == parent.length
2 <= n <= 2000
0 <= parent[i] <= n - 1 for i != 0
parent[0] == -1
0 <= num <= n - 1
1 <= user <= 104
parent represents a valid tree.
At most 2000 calls in total will be made to lock, unlock, and upgrade.'''
# Time: ctor: O(n)
# lock: O(1)
# unlock: O(1)
# upgrade: O(n)
# Space: O(n)
| 36.387597 | 318 | 0.619088 | '''https://leetcode.com/problems/operations-on-tree/
1993. Operations on Tree
Medium
152
34
Add to List
Share
You are given a tree with n nodes numbered from 0 to n - 1 in the form of a parent array parent where parent[i] is the parent of the ith node. The root of the tree is node 0, so parent[0] = -1 since it has no parent. You want to design a data structure that allows users to lock, unlock, and upgrade nodes in the tree.
The data structure should support the following functions:
Lock: Locks the given node for the given user and prevents other users from locking the same node. You may only lock a node using this function if the node is unlocked.
Unlock: Unlocks the given node for the given user. You may only unlock a node using this function if it is currently locked by the same user.
Upgrade: Locks the given node for the given user and unlocks all of its descendants regardless of who locked it. You may only upgrade a node if all 3 conditions are true:
The node is unlocked,
It has at least one locked descendant (by any user), and
It does not have any locked ancestors.
Implement the LockingTree class:
LockingTree(int[] parent) initializes the data structure with the parent array.
lock(int num, int user) returns true if it is possible for the user with id user to lock the node num, or false otherwise. If it is possible, the node num will become locked by the user with id user.
unlock(int num, int user) returns true if it is possible for the user with id user to unlock the node num, or false otherwise. If it is possible, the node num will become unlocked.
upgrade(int num, int user) returns true if it is possible for the user with id user to upgrade the node num, or false otherwise. If it is possible, the node num will be upgraded.
Example 1:
Input
["LockingTree", "lock", "unlock", "unlock", "lock", "upgrade", "lock"]
[[[-1, 0, 0, 1, 1, 2, 2]], [2, 2], [2, 3], [2, 2], [4, 5], [0, 1], [0, 1]]
Output
[null, true, false, true, true, true, false]
Explanation
LockingTree lockingTree = new LockingTree([-1, 0, 0, 1, 1, 2, 2]);
lockingTree.lock(2, 2); // return true because node 2 is unlocked.
// Node 2 will now be locked by user 2.
lockingTree.unlock(2, 3); // return false because user 3 cannot unlock a node locked by user 2.
lockingTree.unlock(2, 2); // return true because node 2 was previously locked by user 2.
// Node 2 will now be unlocked.
lockingTree.lock(4, 5); // return true because node 4 is unlocked.
// Node 4 will now be locked by user 5.
lockingTree.upgrade(0, 1); // return true because node 0 is unlocked and has at least one locked descendant (node 4).
// Node 0 will now be locked by user 1 and node 4 will now be unlocked.
lockingTree.lock(0, 1); // return false because node 0 is already locked.
Constraints:
n == parent.length
2 <= n <= 2000
0 <= parent[i] <= n - 1 for i != 0
parent[0] == -1
0 <= num <= n - 1
1 <= user <= 104
parent represents a valid tree.
At most 2000 calls in total will be made to lock, unlock, and upgrade.'''
# Time: ctor: O(n)
# lock: O(1)
# unlock: O(1)
# upgrade: O(n)
# Space: O(n)
class LockingTree(object):
    """A rooted tree supporting per-user lock, unlock and upgrade operations.

    Each node may be locked by at most one user at a time.  ``upgrade``
    locks a node for a user after unlocking every locked descendant,
    provided the node and all of its ancestors are unlocked and at least
    one descendant was locked.
    """

    def __init__(self, parent):
        """Build child lists from the parent array (parent[0] == -1 is the root).

        :type parent: List[int]
        """
        self.__parent = parent
        self.__children = [[] for _ in parent]
        for child, par in enumerate(parent):
            if par != -1:
                self.__children[par].append(child)
        # node id -> id of the user currently holding the lock
        self.__locked = {}

    def lock(self, num, user):
        """Lock *num* for *user*; fails if the node is already locked.

        :type num: int
        :type user: int
        :rtype: bool
        """
        if num not in self.__locked:
            self.__locked[num] = user
            return True
        return False

    def unlock(self, num, user):
        """Unlock *num*; only the user holding the lock may release it.

        :type num: int
        :type user: int
        :rtype: bool
        """
        if self.__locked.get(num) == user:
            del self.__locked[num]
            return True
        return False

    def upgrade(self, num, user):
        """Lock *num* for *user* and unlock all its locked descendants.

        Succeeds only if *num* and its ancestors are unlocked and at least
        one descendant is currently locked.

        :type num: int
        :type user: int
        :rtype: bool
        """
        # Walk up to the root: num itself and every ancestor must be unlocked.
        node = num
        while node != -1:
            if node in self.__locked:
                return False
            node = self.__parent[node]
        # Iterative DFS over the subtree, unlocking every locked descendant
        # and remembering whether any lock was actually released.
        unlocked_any = False
        stack = [num]
        while stack:
            cur = stack.pop()
            if self.__locked.pop(cur, None) is not None:
                unlocked_any = True
            stack.extend(self.__children[cur])
        if unlocked_any:
            self.__locked[num] = user
        return unlocked_any
| 0 | 1,438 | 23 |
5d5d2b839a17ba0b56b9d88bd34ce723f3e7d142 | 2,589 | py | Python | vsts/vsts/build/v4_0/models/build_controller.py | kenkuo/azure-devops-python-api | 9e920bd25e938fa89ff7f60153e5b9e113ca839d | [
"MIT"
] | null | null | null | vsts/vsts/build/v4_0/models/build_controller.py | kenkuo/azure-devops-python-api | 9e920bd25e938fa89ff7f60153e5b9e113ca839d | [
"MIT"
] | null | null | null | vsts/vsts/build/v4_0/models/build_controller.py | kenkuo/azure-devops-python-api | 9e920bd25e938fa89ff7f60153e5b9e113ca839d | [
"MIT"
] | null | null | null | # --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
# Generated file, DO NOT EDIT
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------------------------
from .xaml_build_controller_reference import XamlBuildControllerReference
class BuildController(XamlBuildControllerReference):
    """BuildController.
    :param id: Id of the resource
    :type id: int
    :param name: Name of the linked resource (definition name, controller name, etc.)
    :type name: str
    :param url: Full http link to the resource
    :type url: str
    :param _links:
    :type _links: :class:`ReferenceLinks <build.v4_0.models.ReferenceLinks>`
    :param created_date: The date the controller was created.
    :type created_date: datetime
    :param description: The description of the controller.
    :type description: str
    :param enabled: Indicates whether the controller is enabled.
    :type enabled: bool
    :param status: The status of the controller.
    :type status: object
    :param updated_date: The date the controller was last updated.
    :type updated_date: datetime
    :param uri: The controller's URI.
    :type uri: str
    """

    # Maps Python attribute names to wire-format keys and types used by the
    # client's (de)serialization machinery.
    _attribute_map = {
        'id': {'key': 'id', 'type': 'int'},
        'name': {'key': 'name', 'type': 'str'},
        'url': {'key': 'url', 'type': 'str'},
        '_links': {'key': '_links', 'type': 'ReferenceLinks'},
        'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
        'description': {'key': 'description', 'type': 'str'},
        'enabled': {'key': 'enabled', 'type': 'bool'},
        'status': {'key': 'status', 'type': 'object'},
        'updated_date': {'key': 'updatedDate', 'type': 'iso-8601'},
        'uri': {'key': 'uri', 'type': 'str'}
    }

    def __init__(self, id=None, name=None, url=None, _links=None, created_date=None, description=None, enabled=None, status=None, updated_date=None, uri=None):
        """Initialize the controller; id/name/url are delegated to the base reference class.

        Restores the constructor missing from this copy of the class: the
        _attribute_map above references ten attributes that were otherwise
        never set.
        """
        super(BuildController, self).__init__(id=id, name=name, url=url)
        self._links = _links
        self.created_date = created_date
        self.description = description
        self.enabled = enabled
        self.status = status
        self.updated_date = updated_date
        self.uri = uri
| 43.881356 | 159 | 0.579761 | # --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
# Generated file, DO NOT EDIT
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------------------------
from .xaml_build_controller_reference import XamlBuildControllerReference
class BuildController(XamlBuildControllerReference):
    """BuildController.
    :param id: Id of the resource
    :type id: int
    :param name: Name of the linked resource (definition name, controller name, etc.)
    :type name: str
    :param url: Full http link to the resource
    :type url: str
    :param _links:
    :type _links: :class:`ReferenceLinks <build.v4_0.models.ReferenceLinks>`
    :param created_date: The date the controller was created.
    :type created_date: datetime
    :param description: The description of the controller.
    :type description: str
    :param enabled: Indicates whether the controller is enabled.
    :type enabled: bool
    :param status: The status of the controller.
    :type status: object
    :param updated_date: The date the controller was last updated.
    :type updated_date: datetime
    :param uri: The controller's URI.
    :type uri: str
    """
    # Maps Python attribute names to wire-format keys and types used by the
    # client's (de)serialization machinery.
    _attribute_map = {
        'id': {'key': 'id', 'type': 'int'},
        'name': {'key': 'name', 'type': 'str'},
        'url': {'key': 'url', 'type': 'str'},
        '_links': {'key': '_links', 'type': 'ReferenceLinks'},
        'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
        'description': {'key': 'description', 'type': 'str'},
        'enabled': {'key': 'enabled', 'type': 'bool'},
        'status': {'key': 'status', 'type': 'object'},
        'updated_date': {'key': 'updatedDate', 'type': 'iso-8601'},
        'uri': {'key': 'uri', 'type': 'str'}
    }
    def __init__(self, id=None, name=None, url=None, _links=None, created_date=None, description=None, enabled=None, status=None, updated_date=None, uri=None):
        """Initialize the controller; id/name/url are delegated to the base reference class."""
        super(BuildController, self).__init__(id=id, name=name, url=url)
        self._links = _links
        self.created_date = created_date
        self.description = description
        self.enabled = enabled
        self.status = status
        self.updated_date = updated_date
        self.uri = uri
| 440 | 0 | 27 |
d8de67bb1a6b4309fef7f7de4735e4ee01fb2e3e | 3,020 | py | Python | configs/_xm/cascade_x101_fpn.py | 991166chun/TeaDisease | 3cf6499617c01b3a22babcbf65e8241c9cac3c06 | [
"Apache-2.0"
] | null | null | null | configs/_xm/cascade_x101_fpn.py | 991166chun/TeaDisease | 3cf6499617c01b3a22babcbf65e8241c9cac3c06 | [
"Apache-2.0"
] | null | null | null | configs/_xm/cascade_x101_fpn.py | 991166chun/TeaDisease | 3cf6499617c01b3a22babcbf65e8241c9cac3c06 | [
"Apache-2.0"
] | null | null | null | _base_ = [
'../_base_/models/cascade_rcnn_r50_fpn.py',
'./my_voc.py',
'./my_runtime.py'
]
model = dict(
pretrained='open-mmlab://resnext101_32x4d',
backbone=dict(
type='ResNeXt',
depth=101,
num_stages=4,
out_indices=(0, 1, 2, 3),
frozen_stages=1,
norm_cfg=dict(
type='BN',
requires_grad=True),
norm_eval=True,
style='pytorch',
groups=32,
base_width=4),
roi_head=dict(
bbox_head=[
dict(
type='Shared2FCBBoxHead',
in_channels=256,
fc_out_channels=1024,
roi_feat_size=7,
num_classes=13,
bbox_coder=dict(
type='DeltaXYWHBBoxCoder',
target_means=[0., 0., 0., 0.],
target_stds=[0.1, 0.1, 0.2, 0.2]),
reg_class_agnostic=True,
loss_cls=dict(
type='CrossEntropyLoss',
use_sigmoid=False,
loss_weight=1.0),
loss_bbox=dict(type='SmoothL1Loss', beta=1.0,
loss_weight=1.0)),
dict(
type='Shared2FCBBoxHead',
in_channels=256,
fc_out_channels=1024,
roi_feat_size=7,
num_classes=13,
bbox_coder=dict(
type='DeltaXYWHBBoxCoder',
target_means=[0., 0., 0., 0.],
target_stds=[0.05, 0.05, 0.1, 0.1]),
reg_class_agnostic=True,
loss_cls=dict(
type='CrossEntropyLoss',
use_sigmoid=False,
loss_weight=1.0),
loss_bbox=dict(type='SmoothL1Loss', beta=1.0,
loss_weight=1.0)),
dict(
type='Shared2FCBBoxHead',
in_channels=256,
fc_out_channels=1024,
roi_feat_size=7,
num_classes=13,
bbox_coder=dict(
type='DeltaXYWHBBoxCoder',
target_means=[0., 0., 0., 0.],
target_stds=[0.033, 0.033, 0.067, 0.067]),
reg_class_agnostic=True,
loss_cls=dict(
type='CrossEntropyLoss',
use_sigmoid=False,
loss_weight=1.0),
loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0))
]))
# optimizer
optimizer = dict(type='SGD', lr=0.01, momentum=0.9, weight_decay=0.0001)
optimizer_config = dict(grad_clip=dict(max_norm=35))
# learning policy
# actual epoch =
lr_config = dict(policy='step',
warmup='linear',
warmup_iters=500,
warmup_ratio=0.001,
step=[5, 8, 10], gamma=0.2)
# runtime settings 0.01 0.002 0.0004 0.00008
total_epochs = 12 # actual epoch = 12 * 2 = 24
| 33.932584 | 79 | 0.469868 | _base_ = [
'../_base_/models/cascade_rcnn_r50_fpn.py',
'./my_voc.py',
'./my_runtime.py'
]
model = dict(
pretrained='open-mmlab://resnext101_32x4d',
backbone=dict(
type='ResNeXt',
depth=101,
num_stages=4,
out_indices=(0, 1, 2, 3),
frozen_stages=1,
norm_cfg=dict(
type='BN',
requires_grad=True),
norm_eval=True,
style='pytorch',
groups=32,
base_width=4),
roi_head=dict(
bbox_head=[
dict(
type='Shared2FCBBoxHead',
in_channels=256,
fc_out_channels=1024,
roi_feat_size=7,
num_classes=13,
bbox_coder=dict(
type='DeltaXYWHBBoxCoder',
target_means=[0., 0., 0., 0.],
target_stds=[0.1, 0.1, 0.2, 0.2]),
reg_class_agnostic=True,
loss_cls=dict(
type='CrossEntropyLoss',
use_sigmoid=False,
loss_weight=1.0),
loss_bbox=dict(type='SmoothL1Loss', beta=1.0,
loss_weight=1.0)),
dict(
type='Shared2FCBBoxHead',
in_channels=256,
fc_out_channels=1024,
roi_feat_size=7,
num_classes=13,
bbox_coder=dict(
type='DeltaXYWHBBoxCoder',
target_means=[0., 0., 0., 0.],
target_stds=[0.05, 0.05, 0.1, 0.1]),
reg_class_agnostic=True,
loss_cls=dict(
type='CrossEntropyLoss',
use_sigmoid=False,
loss_weight=1.0),
loss_bbox=dict(type='SmoothL1Loss', beta=1.0,
loss_weight=1.0)),
dict(
type='Shared2FCBBoxHead',
in_channels=256,
fc_out_channels=1024,
roi_feat_size=7,
num_classes=13,
bbox_coder=dict(
type='DeltaXYWHBBoxCoder',
target_means=[0., 0., 0., 0.],
target_stds=[0.033, 0.033, 0.067, 0.067]),
reg_class_agnostic=True,
loss_cls=dict(
type='CrossEntropyLoss',
use_sigmoid=False,
loss_weight=1.0),
loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0))
]))
# optimizer
optimizer = dict(type='SGD', lr=0.01, momentum=0.9, weight_decay=0.0001)
optimizer_config = dict(grad_clip=dict(max_norm=35))
# learning policy
# actual epoch =
lr_config = dict(policy='step',
warmup='linear',
warmup_iters=500,
warmup_ratio=0.001,
step=[5, 8, 10], gamma=0.2)
# runtime settings 0.01 0.002 0.0004 0.00008
total_epochs = 12 # actual epoch = 12 * 2 = 24
| 0 | 0 | 0 |
a348e4213f1690758954dd2e169e3648444ed0d6 | 909 | py | Python | setup.py | NamorNiradnug/SimpleParser | 4b127902808007015b8c9b322b348215977e246f | [
"MIT"
] | 1 | 2022-03-17T01:59:13.000Z | 2022-03-17T01:59:13.000Z | setup.py | NamorNiradnug/SimpleParser | 4b127902808007015b8c9b322b348215977e246f | [
"MIT"
] | null | null | null | setup.py | NamorNiradnug/SimpleParser | 4b127902808007015b8c9b322b348215977e246f | [
"MIT"
] | null | null | null | import setuptools
with open("README.md") as readme:
long_description = readme.read()
setuptools.setup(
name="simpleparser",
version="0.1.3",
author="NamorNiradnug",
author_email="roma57linux@gmail.com",
packages=["simpleparser"],
tests_require=["test.py"],
description="Simple library with simple parser which parses simple expressions.",
long_description=long_description,
long_description_content_type="text/markdown",
license="MIT",
url="https://github.com/NamorNiradnug/SimpleParser",
project_urls={
"Bug Tracker": "https://github.com/NamorNiradnug/SimpleParser/issues",
"Source": "https://github.com/NamorNiradnug/SimpleParser",
},
classifiers=[
"Programming Language :: Python :: 3",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
],
python_requires=">=3.7"
)
| 31.344828 | 85 | 0.672167 | import setuptools
with open("README.md") as readme:
long_description = readme.read()
setuptools.setup(
name="simpleparser",
version="0.1.3",
author="NamorNiradnug",
author_email="roma57linux@gmail.com",
packages=["simpleparser"],
tests_require=["test.py"],
description="Simple library with simple parser which parses simple expressions.",
long_description=long_description,
long_description_content_type="text/markdown",
license="MIT",
url="https://github.com/NamorNiradnug/SimpleParser",
project_urls={
"Bug Tracker": "https://github.com/NamorNiradnug/SimpleParser/issues",
"Source": "https://github.com/NamorNiradnug/SimpleParser",
},
classifiers=[
"Programming Language :: Python :: 3",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
],
python_requires=">=3.7"
)
| 0 | 0 | 0 |
d782cce0615c6d363effe44e194c6f8699d3af5e | 1,099 | py | Python | apps/odoo/lib/odoo-10.0.post20170615-py2.7.egg/odoo/addons/stock_account/models/stock_config_settings.py | gtfarng/Odoo_migrade | 9cc28fae4c379e407645248a29d22139925eafe7 | [
"Apache-2.0"
] | 1 | 2019-12-19T01:53:13.000Z | 2019-12-19T01:53:13.000Z | apps/odoo/lib/odoo-10.0.post20170615-py2.7.egg/odoo/addons/stock_account/models/stock_config_settings.py | gtfarng/Odoo_migrade | 9cc28fae4c379e407645248a29d22139925eafe7 | [
"Apache-2.0"
] | null | null | null | apps/odoo/lib/odoo-10.0.post20170615-py2.7.egg/odoo/addons/stock_account/models/stock_config_settings.py | gtfarng/Odoo_migrade | 9cc28fae4c379e407645248a29d22139925eafe7 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import api, fields, models
| 45.791667 | 130 | 0.720655 | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import api, fields, models
class StockConfigSettings(models.TransientModel):
_inherit = 'stock.config.settings'
group_stock_inventory_valuation = fields.Selection([
(0, "Periodic inventory valuation (recommended)"),
(1, 'Perpetual inventory valuation (stock move generates accounting entries)')],
"Inventory Valuation", implied_group='stock_account.group_inventory_valuation',
help="""Allows to configure inventory valuations on products and product categories.""")
module_stock_landed_costs = fields.Selection([
(0, 'No landed costs'),
(1, 'Include landed costs in product costing computation')], "Landed Costs",
help="""Install the module that allows to affect landed costs on pickings, and split them onto the different products.""")
@api.onchange('module_stock_landed_costs')
def onchange_landed_costs(self):
if self.module_stock_landed_costs:
self.group_stock_inventory_valuation = 1
| 107 | 831 | 23 |
55cf2b629e08d27bc037771f3beff948390d1f01 | 3,609 | py | Python | app/__init__.py | samanyougarg/sample-rest-api | 0d394a289810bc3ced3420076b7cb0f2de7d5997 | [
"MIT"
] | 3 | 2019-07-31T14:10:29.000Z | 2020-04-18T14:36:42.000Z | app/__init__.py | samanyougarg/sample-rest-api | 0d394a289810bc3ced3420076b7cb0f2de7d5997 | [
"MIT"
] | null | null | null | app/__init__.py | samanyougarg/sample-rest-api | 0d394a289810bc3ced3420076b7cb0f2de7d5997 | [
"MIT"
] | 2 | 2019-07-28T12:06:13.000Z | 2022-02-16T06:02:13.000Z | import os
from flask import Flask
from flask_mail import Mail
from flask_sqlalchemy import SQLAlchemy
from flask_login import LoginManager
from flask_assets import Environment
from flask_wtf import CsrfProtect
from flask_compress import Compress
from flask_rq import RQ
from flask_oauthlib.provider import OAuth2Provider
from flask_wtf import CsrfProtect
from flask_restful import Api
from config import config
from .assets import app_css, app_js, vendor_css, vendor_js
basedir = os.path.abspath(os.path.dirname(__file__))
mail = Mail()
db = SQLAlchemy()
csrf = CsrfProtect()
compress = Compress()
csrf = CsrfProtect()
oauth = OAuth2Provider()
# Set up Flask-Login
login_manager = LoginManager()
login_manager.session_protection = 'strong'
login_manager.login_view = 'account.login'
| 27.976744 | 80 | 0.689111 | import os
from flask import Flask
from flask_mail import Mail
from flask_sqlalchemy import SQLAlchemy
from flask_login import LoginManager
from flask_assets import Environment
from flask_wtf import CsrfProtect
from flask_compress import Compress
from flask_rq import RQ
from flask_oauthlib.provider import OAuth2Provider
from flask_wtf import CsrfProtect
from flask_restful import Api
from config import config
from .assets import app_css, app_js, vendor_css, vendor_js
# Absolute path of the package directory (used by the asset pipeline below).
basedir = os.path.abspath(os.path.dirname(__file__))

# Flask extension singletons; each is bound to the app inside create_app().
mail = Mail()
db = SQLAlchemy()
csrf = CsrfProtect()  # was instantiated twice; a single instance suffices
compress = Compress()
oauth = OAuth2Provider()

# Set up Flask-Login
login_manager = LoginManager()
login_manager.session_protection = 'strong'
login_manager.login_view = 'account.login'
def create_app(config_name):
    """Application factory: build and fully configure the Flask app.

    Args:
        config_name: Key into the module-level ``config`` mapping selecting
            which configuration class to load (e.g. ``'development'``).

    Returns:
        The configured :class:`flask.Flask` instance with all extensions,
        blueprints, API routes and Swagger docs registered.
    """
    app = Flask(__name__)
    app.config.from_object(config[config_name])
    # Not using the SQLAlchemy event system, hence disabling it.
    app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
    config[config_name].init_app(app)
    # Set up extensions (bind the module-level singletons to this app).
    mail.init_app(app)
    db.init_app(app)
    login_manager.init_app(app)
    csrf.init_app(app)  # was called twice before; once is enough
    compress.init_app(app)
    oauth.init_app(app)
    RQ(app)
    Api(app)  # instantiated for its side effects on ``app``; handle unused
    # Register Jinja template functions
    from .utils import register_template_utils
    register_template_utils(app)
    # Set up asset pipeline
    assets_env = Environment(app)
    dirs = ['assets/styles', 'assets/scripts']
    for path in dirs:
        assets_env.append_path(os.path.join(basedir, path))
    assets_env.url_expire = True
    assets_env.register('app_css', app_css)
    assets_env.register('app_js', app_js)
    assets_env.register('vendor_css', vendor_css)
    assets_env.register('vendor_js', vendor_js)
    # Configure SSL if platform supports it
    if not app.debug and not app.testing and not app.config['SSL_DISABLE']:
        from flask_sslify import SSLify
        SSLify(app)
    # Create app blueprints
    from .main import main as main_blueprint
    app.register_blueprint(main_blueprint)
    from .account import account as account_blueprint
    app.register_blueprint(account_blueprint, url_prefix='/account')
    from .api.auth import auth as auth_blueprint
    app.register_blueprint(auth_blueprint, url_prefix='/auth')
    from .admin import admin as admin_blueprint
    app.register_blueprint(admin_blueprint, url_prefix='/admin')
    from app.api.docs import docs as docs_blueprint
    app.register_blueprint(docs_blueprint)
    # Swagger / OpenAPI documentation setup.
    from flasgger import APISpec, Swagger
    spec = APISpec(
        title='REST API',
        version='1.0.0',
        plugins=[
            'apispec.ext.flask',
            'apispec.ext.marshmallow',
        ],
    )
    app.config['SWAGGER'] = {
        'title': 'REST API',
        'uiversion': 3
    }
    Swagger(  # instantiated for its side effects on ``app``; handle unused
        app,
        template={
            'swagger': '3.0',
            'info':
            {
                'title': 'REST API',
                'version': '1.0'
            }
        }
    )
    # REST resource routes for the Building API (v1).
    from app.api.v1.building import Building, BuildingList
    building_view = Building.as_view('Building')
    app.add_url_rule('/v1/buildings/<int:building_id>', view_func=building_view)
    building_list_view = BuildingList.as_view('BuildingList')
    app.add_url_rule('/v1/buildings', view_func=building_list_view)
    # apispec needs a request context to introspect the registered views.
    with app.test_request_context():
        spec.add_path(view=building_view)
        spec.add_path(view=building_list_view)
    return app
| 2,798 | 0 | 23 |
2011c7c7ff23822acfc226cef105dbe2a74083c7 | 3,596 | py | Python | data_structures/heaps/array_based_binary_heap.py | vinta/fuck-coding-interviews | 915ff55963430e81134a35f65f511e5684c52f11 | [
"MIT"
] | 590 | 2020-06-17T08:26:47.000Z | 2022-03-30T18:47:32.000Z | data_structures/heaps/array_based_binary_heap.py | parvathirajan/fuck-coding-interviews | 915ff55963430e81134a35f65f511e5684c52f11 | [
"MIT"
] | 12 | 2020-07-14T09:24:32.000Z | 2020-11-02T03:43:47.000Z | data_structures/heaps/array_based_binary_heap.py | parvathirajan/fuck-coding-interviews | 915ff55963430e81134a35f65f511e5684c52f11 | [
"MIT"
] | 75 | 2020-07-29T06:50:13.000Z | 2022-03-13T16:14:57.000Z | # coding: utf-8
"""
Binary Heap
https://en.wikipedia.org/wiki/Binary_heap
https://en.wikipedia.org/wiki/Heap_(data_structure)
A binary heap is a special binary tree which satisfies following properties:
- The tree is complete.
- The parent's value is less than or equal to children's values.
- The root's value would be the minimum of the tree.
A binary heap is typically represented as a compact array since it's a complete binary search tree:
- array[0] is the root node.
- array[floor((i - 1) / 2)] is the parent node of array[i].
- array[(i * 2) + 1] is the left child node of array[i].
- array[(i * 2) + 2] is the right child node of array[i].
Applications:
- K-way merges (merging k sorted arrays into a single sorted array).
- Priority queues.
"""
# This is a min heap.
# O(log n)
# O(log n)
# O(1)
| 32.990826 | 100 | 0.625973 | # coding: utf-8
"""
Binary Heap
https://en.wikipedia.org/wiki/Binary_heap
https://en.wikipedia.org/wiki/Heap_(data_structure)
A binary heap is a special binary tree which satisfies following properties:
- The tree is complete.
- The parent's value is less than or equal to children's values.
- The root's value would be the minimum of the tree.
A binary heap is typically represented as a compact array since it's a complete binary search tree:
- array[0] is the root node.
- array[floor((i - 1) / 2)] is the parent node of array[i].
- array[(i * 2) + 1] is the left child node of array[i].
- array[(i * 2) + 2] is the right child node of array[i].
Applications:
- K-way merges (merging k sorted arrays into a single sorted array).
- Priority queues.
"""
# This is a min heap.
class ArrayBasedBinaryHeap:
    """A min-heap backed by a compact Python list.

    Invariant: for every index i > 0,
    ``self._array[(i - 1) // 2] <= self._array[i]``,
    so ``self._array[0]`` is always the minimum element.
    """

    def __init__(self):
        self._array = []

    def __len__(self):
        """Return the number of stored items."""
        return len(self._array)

    def __iter__(self):
        """Yield the stored values in ascending order.

        Non-destructive: this sorts a copy; the heap is not consumed.
        """
        for value in sorted(self._array):
            yield value

    def _parent(self, index):
        """Return the index of the parent of ``index``."""
        return (index - 1) // 2

    def _left(self, index):
        """Return the index of the left child of ``index``."""
        return (index * 2) + 1

    def _right(self, index):
        """Return the index of the right child of ``index``."""
        return (index * 2) + 2

    def _swap(self, index_a, index_b):
        """Swap the items stored at the two indexes."""
        self._array[index_a], self._array[index_b] = self._array[index_b], self._array[index_a]

    def _up_heap(self, index):
        """Bubble the item at ``index`` up until the heap invariant holds."""
        # Compare the current item with its parent;
        # if they're not in the correct order, swap them.
        while index >= 1:
            parent_index = self._parent(index)
            if self._array[parent_index] > self._array[index]:
                self._swap(parent_index, index)
            else:
                return
            index = parent_index

    # O(log n)
    def push(self, value):
        """Add ``value`` to the heap."""
        # Add the element to the bottom level at the leftmost open space.
        self._array.append(value)
        self._up_heap(len(self._array) - 1)

    def _min_child_index(self, index):
        """Return the index of the smaller child of ``index``, or None for a leaf."""
        left_index = self._left(index)
        right_index = self._right(index)
        left = self._array[left_index] if left_index < len(self._array) else None
        right = self._array[right_index] if right_index < len(self._array) else None
        # Determine the smaller child.
        if (left is not None) and (right is not None):  # Both children exist.
            return left_index if left < right else right_index
        elif left is not None:  # Only the left child exists.
            return left_index
        elif right is not None:  # Only the right child exists.
            return right_index
        return None  # No child: ``index`` is a leaf.

    def _down_heap(self, index):
        """Sift the item at ``index`` down until the heap invariant holds."""
        # Compare the current item with its smaller child;
        # if they're not in the correct order, swap with its smaller child.
        min_child_index = self._min_child_index(index)
        # Explicit None check: the previous truthiness test (`if min_child_index:`)
        # would silently skip a child at index 0 and is fragile for index values.
        if min_child_index is not None:
            current = self._array[index]
            min_child = self._array[min_child_index]
            if current > min_child:
                self._swap(index, min_child_index)
                self._down_heap(min_child_index)

    # O(log n)
    def pop_min(self):
        """Remove and return the minimum value.

        Raises:
            ValueError: If the heap is empty.
        """
        if not self._array:
            raise ValueError('heap is empty')
        # Replace the root with the last element on the last level, and drop the old root.
        self._swap(0, len(self._array) - 1)
        popped = self._array.pop()
        self._down_heap(0)
        return popped

    # O(1)
    def peek_min(self):
        """Return (without removing) the minimum value.

        Raises:
            ValueError: If the heap is empty.
        """
        try:
            return self._array[0]
        except IndexError:
            raise ValueError('heap is empty')
| 2,395 | 6 | 369 |
6946d71191a319831b0c63a3b76fba6b80049742 | 2,277 | py | Python | django_test_tools/generators/crud_generator.py | jayvdb/django-test-tools | a832cc6acf8e45c8d6b0cd5e3c424b95595c1855 | [
"MIT"
] | 9 | 2017-04-29T20:21:07.000Z | 2021-11-16T07:00:01.000Z | django_test_tools/generators/crud_generator.py | jayvdb/django-test-tools | a832cc6acf8e45c8d6b0cd5e3c424b95595c1855 | [
"MIT"
] | 211 | 2017-11-21T00:23:03.000Z | 2022-03-28T02:06:25.000Z | django_test_tools/generators/crud_generator.py | jayvdb/django-test-tools | a832cc6acf8e45c8d6b0cd5e3c424b95595c1855 | [
"MIT"
] | 4 | 2017-11-21T18:19:53.000Z | 2021-05-24T06:34:16.000Z | from django.template.loader import render_to_string
from jinja2 import Environment, PackageLoader, select_autoescape
from ..templatetags.dtt_filters import to_snake_case
| 34.5 | 73 | 0.667984 | from django.template.loader import render_to_string
from jinja2 import Environment, PackageLoader, select_autoescape
from ..templatetags.dtt_filters import to_snake_case
class UrlGenerator(object):
    """Renders Django URL-configuration modules for a given model name."""

    def __init__(self, model_name):
        self.model_name = model_name
        self.template = 'django_test_tools/urls.py.j2'

    def print_urls(self, filename):
        """Write a ``url()``-style URL configuration to ``filename``."""
        self._print(filename, 'urls')

    def print_paths(self, filename):
        """Write a ``path()``-style URL configuration to ``filename``."""
        self._print(filename, 'paths')

    def _print(self, filename, what_to_print):
        """Render the template with the requested flag set and save it."""
        context = {
            'model_name': self.model_name,
            'print_{}'.format(what_to_print): True,
        }
        output = render_to_string(self.template, context)
        with open(filename, 'w', encoding='utf-8') as destination:
            destination.write(output)
class SerializerTestGenerator(object):
    """Generates serializer test modules from the bundled Jinja2 template."""
    def __init__(self):
        self.env = Environment(
            loader=PackageLoader('django_test_tools', 'templates'),
            autoescape=select_autoescape(['html', 'j2'])
        )
        # Expose the to_snake_case template filter inside the Jinja2 env.
        self.env.filters['to_snake_case'] = to_snake_case
        self.template_name = 'django_test_tools/test_serializers.py.j2'
        self.template = self.env.get_template(self.template_name)
    def print(self, serializer_info, filename):
        """Render the test template with ``serializer_info`` and write the
        result to ``filename`` (UTF-8)."""
        rendered = self.template.render(serializer_info)
        with open(filename, 'w', encoding='utf-8') as file:
            file.write(rendered)
class GenericTemplateWriter(object):
    """Renders an arbitrary django_test_tools Jinja2 template to a file."""

    def __init__(self, template_name, **kwargs):
        # Optional Jinja2 whitespace-control flags, both off by default.
        self.env = Environment(
            loader=PackageLoader('django_test_tools', 'templates'),
            autoescape=select_autoescape(['html', ],),
            trim_blocks=kwargs.get('trim_blocks', False),
            lstrip_blocks=kwargs.get('lstrip_blocks', False),
        )
        self.env.filters['to_snake_case'] = to_snake_case
        self.template_name = 'django_test_tools/{}'.format(template_name)
        self.template = self.env.get_template(self.template_name)

    def write(self, template_data, filename):
        """Render the template with ``template_data`` and save it to
        ``filename`` (UTF-8)."""
        output = self.template.render(template_data)
        with open(filename, 'w', encoding='utf-8') as destination:
            destination.write(output)
| 1,780 | 38 | 285 |
6d0ba2881c02c55558b9a88c0cdc90ed083b7e54 | 3,640 | py | Python | Easy_Image/pixabay.py | xjdeng/Easy_Facial_Recognition | f06c5013e0930c4d703845d7714f53871cef83e5 | [
"MIT"
] | 2 | 2018-05-24T17:04:47.000Z | 2022-01-27T20:52:27.000Z | Easy_Image/pixabay.py | xjdeng/Easy_Facial_Recognition | f06c5013e0930c4d703845d7714f53871cef83e5 | [
"MIT"
] | 1 | 2020-03-29T16:42:10.000Z | 2020-03-29T16:42:10.000Z | Easy_Image/pixabay.py | xjdeng/Easy_Facial_Recognition | f06c5013e0930c4d703845d7714f53871cef83e5 | [
"MIT"
] | null | null | null | from path import Path as path
import pixabay as python_pixabay
import tempfile
from sklearn.externals import joblib
import os
import time
import warnings
import json
import requests
import numpy as np
import re
cache_filename = "pixabay_cache.pkl"
api_key = None
cache_update_interval = 3600
cache_expiry = 24*3600
cache = {}
# Load the previously persisted query cache from the temp directory;
# fall back to a fresh cache if the file is missing or holds unexpected data.
try:
    cache = joblib.load(cache_path())
    if isinstance(cache, dict) == False:
        cache = create_cache()
except IOError:
    cache = create_cache()
# Warn at import time if no API key is configured.
update_api_key()
def download_google_file(google_file, folder = "./"):
    """Download the high-resolution Pixabay images listed in a Google
    image-search export file.

    Do a Google image search limited to pixabay.com and get the download file
    using these instructions:
    https://github.com/fastai/course-v3/blob/master/nbs/dl1/lesson2-download.ipynb
    Then use this script to grab the higher res photos.

    :param google_file: path to a text file with one CDN URL per line.
    :param folder: destination directory for the downloads.
    """
    f = open(google_file,'r')
    urls = f.read().split("\n")
    f.close()
    # Rewrite each CDN thumbnail URL to its larger variant, then download.
    [download(cdn_to_larger(url), folder) for url in urls]
| 28.661417 | 88 | 0.646978 | from path import Path as path
import pixabay as python_pixabay
import tempfile
from sklearn.externals import joblib
import os
import time
import warnings
import json
import requests
import numpy as np
import re
cache_filename = "pixabay_cache.pkl"
api_key = None
cache_update_interval = 3600
cache_expiry = 24*3600
cache = {}
def cache_path():
    """Return the path of the pickled query cache inside the OS temp dir."""
    return tempfile.gettempdir() + "/" + cache_filename
def create_cache():
    """Create, persist, and return a fresh empty cache dict.

    The 'last_saved' timestamp is used elsewhere to throttle disk writes.
    """
    ccache = {}
    ccache['last_saved'] = time.time()
    joblib.dump(ccache, cache_path())
    return ccache
# Load the previously persisted query cache from the temp directory;
# fall back to a fresh cache if the file is missing or holds unexpected data.
try:
    cache = joblib.load(cache_path())
    if isinstance(cache, dict) == False:
        cache = create_cache()
except IOError:
    cache = create_cache()
def update_api_key():
    """Populate the module-level ``api_key`` from the environment.

    Reads the ``PIXABAY_API_KEY`` environment variable if no key has been
    set yet; emits a warning when the variable is missing.
    """
    global api_key
    try:
        if api_key is None:
            api_key = os.environ['PIXABAY_API_KEY']
    except KeyError:
        # Join the wrapped lines with explicit spaces; the previous
        # ``.replace("\n", "")`` fused words across line breaks
        # (e.g. "at" + "https://" -> "athttps://").
        message = ("Pixabay API key not found. Please get your key at "
                   "https://pixabay.com/api/docs/ and either set it in your "
                   "OS's PIXABAY_API_KEY environment variable or set it by "
                   "calling the set_key() function.")
        warnings.warn(message)
update_api_key()
def cdn_to_larger(url):
    """Rewrite a Pixabay CDN thumbnail URL to its 960x720 variant.

    Replaces a double-underscore size suffix (e.g. ``__340``) with
    ``_960_720``; URLs without such a suffix are returned unchanged.
    """
    return re.sub("__[0-9]+", "_960_720", url)
def download(url, folder = "./"):
    """Best-effort download of ``url`` into ``folder``.

    The local file name is the last path component of the URL. Any failure
    (bad URL, HTTP error, I/O error) is silently ignored, preserving the
    original best-effort behavior.
    """
    try:
        filename = str(path(url).name)
        res = requests.get(url)
        res.raise_for_status()
        # Context manager guarantees the handle is closed; the previous
        # version never closed the file and leaked the descriptor.
        with open(folder + "/" + filename, 'wb') as f:
            for chunk in res.iter_content(100000):
                f.write(chunk)
    except Exception:
        pass
def download_google_file(google_file, folder = "./"):
    """Download the high-resolution Pixabay images listed in a Google
    image-search export file.

    Do a Google image search limited to pixabay.com and export the URL list
    using these instructions:
    https://github.com/fastai/course-v3/blob/master/nbs/dl1/lesson2-download.ipynb
    Then use this function to grab the higher-resolution photos.
    """
    handle = open(google_file, 'r')
    url_list = handle.read().split("\n")
    handle.close()
    for image_url in url_list:
        download(cdn_to_larger(image_url), folder)
def download_query(myquery, destination = "./", imgtype = "largeImageURL"):
    """Download every image referenced by ``myquery`` into ``destination``.

    ``imgtype`` selects which URL field of each hit to download.
    """
    for image_url in images_from_query(myquery, imgtype):
        download(image_url, destination)
def images_from_query(myquery, imgtype = "largeImageURL"):
    """Extract image URLs from one query response dict or a list of them.

    A single response yields its hits' URLs in order; a list of responses
    yields the deduplicated union of all their URLs.
    """
    if isinstance(myquery, list):
        collected = []
        for response in myquery:
            collected += images_from_query(response, imgtype)
        return list(set(collected))
    return [hit[imgtype] for hit in myquery['hits']]
def query(*args, **kwargs):
    """Run a Pixabay image search, serving repeated queries from the cache.

    All arguments are forwarded to ``python_pixabay.Image.search``; the
    cache key is the positional args plus the JSON-serialized kwargs.
    """
    try:
        # Cache entries are (results, timestamp) tuples; return the results.
        return cache[(args, json.dumps(kwargs))][0]
    except KeyError:
        update_api_key()
        pix = python_pixabay.Image(api_key)
        results = pix.search(*args, **kwargs)
        update_cache((args, json.dumps(kwargs)), results)
        return results
def query_all_pages(*args, **kwargs):
    """Page through a query's results and return the per-page response dicts.

    The number of accessible hits is capped here at 500.
    """
    results = []
    initial = query(*args, **kwargs)
    perpage = len(initial['hits'])  # page size observed on the first page
    totalHits = min(500,initial['totalHits'])
    pages1 = int(np.floor(totalHits/perpage))
    for p in range(0, pages1):
        results.append(query(*args, **kwargs, page = p + 1))
    # NOTE(review): when totalHits divides evenly by perpage this fetches one
    # page past the end, duplicating/overshooting — see the TODO below.
    results.append(query(*args, **kwargs, page = pages1 + 1)) #TODO: fix redundant pages
    return results
def set_key(key):
    """Set the module-level Pixabay API key used for subsequent queries.

    Alternative to configuring the ``PIXABAY_API_KEY`` environment variable.
    """
    global api_key
    api_key = key
def update_cache(key, value):
    """Store ``value`` in the module-level query cache under ``key``.

    Also evicts entries older than ``cache_expiry`` and periodically
    persists the cache to disk (at most every ``cache_update_interval``
    seconds).
    """
    global cache
    cache[key] = (value, time.time())
    # Iterate over a snapshot of the keys: deleting from a dict while
    # iterating its live key view raises RuntimeError on Python 3.
    for k in list(cache.keys()):
        tmp = cache[k]
        # The 'last_saved' entry is a float, not a tuple, so it is skipped.
        if isinstance(tmp, tuple):
            if time.time() - tmp[1] > cache_expiry:
                del cache[k]
    if time.time() - cache['last_saved'] > cache_update_interval:
        joblib.dump(cache, cache_path())
        cache['last_saved'] = time.time()
83e0e604f70f525d82c49328db5f84d43b7bd2f5 | 614 | py | Python | pyskeleton/pyfile.py | wang502/pyskeleton | 3044d61dd9e67f6443017ecfb7c3541f5f41b8d1 | [
"MIT"
] | null | null | null | pyskeleton/pyfile.py | wang502/pyskeleton | 3044d61dd9e67f6443017ecfb7c3541f5f41b8d1 | [
"MIT"
] | 1 | 2016-06-06T05:38:36.000Z | 2016-06-06T05:38:36.000Z | pyskeleton/pyfile.py | wang502/pydeptree | 3044d61dd9e67f6443017ecfb7c3541f5f41b8d1 | [
"MIT"
] | null | null | null | """
pydeptree
file.py
-- contains functions to parses .py file
-- extract function names, class names
-- find what other functions a given function rely on
@By Seth (Xiaohui) Wang
@email: sethwang199418@gmail.com
"""
'''
class pyfile:
def __init__(self, file_dir):
self.file_dir = file_dir
# find what other funcs the given func depends on
def find_depend_funcs(self, func_name, file_funcs, result):
moduleNames = []
with open(self.file_dir) as fp:
for line in fp:
if line.match("from .* import"):
#def find_func_class(self, file_funcs):
'''
| 23.615385 | 63 | 0.656352 | """
pydeptree
file.py
-- contains functions to parses .py file
-- extract function names, class names
-- find what other functions a given function rely on
@By Seth (Xiaohui) Wang
@email: sethwang199418@gmail.com
"""
'''
class pyfile:
def __init__(self, file_dir):
self.file_dir = file_dir
# find what other funcs the given func depends on
def find_depend_funcs(self, func_name, file_funcs, result):
moduleNames = []
with open(self.file_dir) as fp:
for line in fp:
if line.match("from .* import"):
#def find_func_class(self, file_funcs):
'''
| 0 | 0 | 0 |
95d56f805ed3455cd5bff3927e213970eb6b5bc7 | 11,659 | py | Python | bot/reviewbot/tools/testing/testcases.py | reviewboard/ReviewBot | 6c529706229da647cc8cdef27db75cebc0abf216 | [
"MIT"
] | 91 | 2015-04-30T21:00:40.000Z | 2022-03-30T07:19:03.000Z | bot/reviewbot/tools/testing/testcases.py | reviewboard/ReviewBot | 6c529706229da647cc8cdef27db75cebc0abf216 | [
"MIT"
] | 11 | 2015-01-08T13:48:21.000Z | 2018-07-03T13:18:35.000Z | bot/reviewbot/tools/testing/testcases.py | reviewboard/ReviewBot | 6c529706229da647cc8cdef27db75cebc0abf216 | [
"MIT"
] | 23 | 2015-04-03T17:17:00.000Z | 2022-03-07T08:14:27.000Z | """Base test case support for tools.
Version Added:
3.0
"""
from __future__ import unicode_literals
import os
import tempfile
from copy import deepcopy
from functools import wraps
from unittest import SkipTest
import kgb
import six
from reviewbot.config import config
from reviewbot.repositories import GitRepository
from reviewbot.testing import TestCase
from reviewbot.utils.process import execute
class ToolTestCaseMetaclass(type):
"""Metaclass for tool tests.
This is required for all subclasses of :py:class:`BaseToolTestCase`.
This will split any test methods that are marked as a simulation and/or
integration test into individual tests, set up by the subclass's
:py:meth:`~BaseToolTestCase.setup_simulation_test` or
:py:meth:`~BaseToolTestCase.setup_integration_test` method.
Version Added:
3.0
"""
def __new__(meta, name, bases, d):
"""Construct a new class.
Args:
name (str):
The name of the class.
bases (tuple of str):
The parent classes/mixins.
d (dict):
The class dictionary.
Returns:
type:
The new class.
"""
tool_class = d.get('tool_class')
assert tool_class, '%s must set base_tool_class' % name
if tool_class.exe_dependencies:
assert d.get('tool_exe_config_key'), \
'%s must set tool_exe_config_key' % name
assert d.get('tool_exe_path'), '%s must set tool_exe_path' % name
for func_name, func in six.iteritems(d.copy()):
if callable(func):
added = False
if hasattr(func, 'integration_setup_kwargs'):
new_name = meta.tag_func_name(func_name, 'integration')
d[new_name] = meta.make_integration_test_func(func,
new_name)
added = True
if hasattr(func, 'simulation_setup_kwargs'):
new_name = meta.tag_func_name(func_name, 'simulation')
d[new_name] = meta.make_simulation_test_func(func,
new_name)
added = True
if added:
del d[func_name]
return super(ToolTestCaseMetaclass, meta).__new__(meta, name, bases, d)
@classmethod
def tag_func_name(meta, func_name, tag):
"""Return a function name tagged with an identifier.
This will convert a ``test_*` function name into a
:samp:`test_{tag}_*`.
Args:
func_name (str):
The original name of the function.
tag (unicode):
The tag to add.
Returns:
str:
The resulting function name.
"""
assert func_name.startswith('test_')
return str('test_%s_%s' % (tag, func_name[5:]))
@classmethod
def make_integration_test_func(meta, func, func_name):
"""Return a new function for an integration test.
The function will wrap the original function from the class, and
set up the state for an integration test.
Args:
func (callable):
The function to wrap.
func_name (str):
The name of the function.
Returns:
callable:
The new integration test function.
"""
@wraps(func)
_wrapper.__name__ = func_name
_wrapper.__doc__ = '%s [integration test]' % _wrapper.__doc__
return _wrapper
@classmethod
def make_simulation_test_func(meta, func, func_name):
"""Return a new function for a simulation test.
The function will wrap the original function from the class, and
set up the state for a simulation test.
Args:
func (callable):
The function to wrap.
func_name (str):
The name of the function.
Returns:
callable:
The new simulation test function.
"""
@wraps(func)
_wrapper.__name__ = func_name
_wrapper.__doc__ = '%s [simulation test]' % _wrapper.__doc__
return _wrapper
class BaseToolTestCase(kgb.SpyAgency, TestCase):
"""Base class for Tool test cases.
Version Added:
3.0
"""
#: The tool class to test.
#:
#: This is required.
#:
#: Type:
#: type
tool_class = None
#: The key in the configuration identifying the executable of the tool.
#:
#: This is required.
#:
#: Type:
#: unicode
tool_exe_config_key = None
#: The path to the executable for running the tool.
#:
#: This will generally be a fake path for simulated tool runs, but a
#: real one for integration tests. It can be set on the class or during
#: test/test suite setup.
#:
#: Type:
#: unicode
tool_exe_path = None
def run_get_can_handle_file(self, filename, file_contents=b'',
tool_settings={}):
"""Run get_can_handle_file with the given file and settings.
This will create the review objects, set up a repository (if needed
by the tool), apply any configuration, and run
:py:meth:`~reviewbot.tools.base.BaseTool.get_can_handle_file`.
Args:
filename (unicode):
The filename of the file being reviewed.
file_contents (bytes, optional):
File content to review.
tool_settings (dict, optional):
The settings to pass to the tool constructor.
Returns:
bool:
``True`` if the file can be handled. ``False`` if it cannot.
"""
review = self.create_review()
review_file = self.create_review_file(
review,
source_file=filename,
dest_file=filename,
diff_data=self.create_diff_data(chunks=[{
'change': 'insert',
'lines': file_contents.splitlines(),
'new_linenum': 1,
}]),
patched_content=file_contents)
tool = self.tool_class(settings=tool_settings)
return tool.get_can_handle_file(review_file)
def run_tool_execute(self, filename, file_contents, checkout_dir=None,
tool_settings={}, other_files={}):
"""Run execute with the given file and settings.
This will create the review objects, set up a repository (if needed
by the tool), apply any configuration, and run
:py:meth:`~reviewbot.tools.base.BaseTool.execute`.
Args:
filename (unicode):
The filename of the file being reviewed.
file_contents (bytes):
File content to review.
checkout_dir (unicode, optional):
An explicit directory to use as the checkout directory, for
tools that require full-repository checkouts.
tool_settings (dict, optional):
The settings to pass to the tool constructor.
other_files (dict, optional):
Other files to write to the tree. Each will result in a new
file added to the review.
The dictionary is a map of file paths (relative to the
checkout directory) to byte strings.
Returns:
tuple:
A 2-tuple containing:
1. The review (:py:class:`reviewbot.processing.review.Review)`
2. The file entry corresponding to ``filename``
(:py:class:`reviewbot.processing.review.File`)
If ``other_files`` is specified, the second tuple item will
instead be a dictionary of keys from ``other_files`` (along with
``filename``) to :py:class:`reviewbot.processing.review.File`
instances.
"""
if self.tool_class.working_directory_required:
repository = GitRepository(name='MyRepo',
clone_path='git://example.com/repo')
self.spy_on(repository.sync, call_original=False)
@self.spy_for(repository.checkout)
else:
repository = None
review = self.create_review()
review_file = self.create_review_file(
review,
source_file=filename,
dest_file=filename,
diff_data=self.create_diff_data(chunks=[{
'change': 'insert',
'lines': file_contents.splitlines(),
'new_linenum': 1,
}]),
patched_content=file_contents)
review_files = {}
if other_files:
review_files[filename] = review_file
for other_filename, other_contents in six.iteritems(other_files):
review_files[other_filename] = self.create_review_file(
review,
source_file=other_filename,
dest_file=other_filename,
diff_data=self.create_diff_data(chunks=[{
'change': 'insert',
'lines': other_contents.splitlines(),
'new_linenum': 1,
}]),
patched_content=other_contents)
worker_config = deepcopy(self.config)
worker_config.setdefault('exe_paths', {}).update({
self.tool_exe_config_key: self.tool_exe_path,
})
with self.override_config(worker_config):
tool = self.tool_class(settings=tool_settings)
tool.execute(review,
repository=repository)
if other_files:
return review, review_files
return review, review_file
def setup_integration_test(self, **kwargs):
"""Set up an integration test.
Args:
**kwargs (dict):
Keyword arguments passed to
:py:func:`~reviewbot.tools.testing.testcases.integration_test`.
"""
pass
def setup_simulation_test(self, **kwargs):
"""Set up a simulation test.
Args:
**kwargs (dict):
Keyword arguments passed to
:py:func:`~reviewbot.tools.testing.testcases.simulation_test`.
"""
pass
| 31.425876 | 79 | 0.564371 | """Base test case support for tools.
Version Added:
3.0
"""
from __future__ import unicode_literals
import os
import tempfile
from copy import deepcopy
from functools import wraps
from unittest import SkipTest
import kgb
import six
from reviewbot.config import config
from reviewbot.repositories import GitRepository
from reviewbot.testing import TestCase
from reviewbot.utils.process import execute
class ToolTestCaseMetaclass(type):
"""Metaclass for tool tests.
This is required for all subclasses of :py:class:`BaseToolTestCase`.
This will split any test methods that are marked as a simulation and/or
integration test into individual tests, set up by the subclass's
:py:meth:`~BaseToolTestCase.setup_simulation_test` or
:py:meth:`~BaseToolTestCase.setup_integration_test` method.
Version Added:
3.0
"""
def __new__(meta, name, bases, d):
"""Construct a new class.
Args:
name (str):
The name of the class.
bases (tuple of str):
The parent classes/mixins.
d (dict):
The class dictionary.
Returns:
type:
The new class.
"""
tool_class = d.get('tool_class')
assert tool_class, '%s must set base_tool_class' % name
if tool_class.exe_dependencies:
assert d.get('tool_exe_config_key'), \
'%s must set tool_exe_config_key' % name
assert d.get('tool_exe_path'), '%s must set tool_exe_path' % name
for func_name, func in six.iteritems(d.copy()):
if callable(func):
added = False
if hasattr(func, 'integration_setup_kwargs'):
new_name = meta.tag_func_name(func_name, 'integration')
d[new_name] = meta.make_integration_test_func(func,
new_name)
added = True
if hasattr(func, 'simulation_setup_kwargs'):
new_name = meta.tag_func_name(func_name, 'simulation')
d[new_name] = meta.make_simulation_test_func(func,
new_name)
added = True
if added:
del d[func_name]
return super(ToolTestCaseMetaclass, meta).__new__(meta, name, bases, d)
@classmethod
def tag_func_name(meta, func_name, tag):
"""Return a function name tagged with an identifier.
This will convert a ``test_*` function name into a
:samp:`test_{tag}_*`.
Args:
func_name (str):
The original name of the function.
tag (unicode):
The tag to add.
Returns:
str:
The resulting function name.
"""
assert func_name.startswith('test_')
return str('test_%s_%s' % (tag, func_name[5:]))
@classmethod
def make_integration_test_func(meta, func, func_name):
"""Return a new function for an integration test.
The function will wrap the original function from the class, and
set up the state for an integration test.
Args:
func (callable):
The function to wrap.
func_name (str):
The name of the function.
Returns:
callable:
The new integration test function.
"""
@wraps(func)
def _wrapper(self, *args, **kwargs):
old_path = os.environ['PATH']
old_tool_exe_path = self.tool_exe_path
try:
os.environ['PATH'] = self._old_path
if not self.tool_class().check_dependencies():
raise SkipTest('%s dependencies not available'
% self.tool_class.name)
if self.tool_exe_config_key:
self.tool_exe_path = \
config['exe_paths'][self.tool_exe_config_key]
self.spy_on(execute)
self.setup_integration_test(**func.integration_setup_kwargs)
return func(self, *args, **kwargs)
finally:
os.environ['PATH'] = old_path
self.tool_exe_path = old_tool_exe_path
_wrapper.__name__ = func_name
_wrapper.__doc__ = '%s [integration test]' % _wrapper.__doc__
return _wrapper
@classmethod
def make_simulation_test_func(meta, func, func_name):
"""Return a new function for a simulation test.
The function will wrap the original function from the class, and
set up the state for a simulation test.
Args:
func (callable):
The function to wrap.
func_name (str):
The name of the function.
Returns:
callable:
The new simulation test function.
"""
@wraps(func)
def _wrapper(self, *args, **kwargs):
print('setup!')
self.setup_simulation_test(**func.simulation_setup_kwargs)
return func(self, *args, **kwargs)
_wrapper.__name__ = func_name
_wrapper.__doc__ = '%s [simulation test]' % _wrapper.__doc__
return _wrapper
class BaseToolTestCase(kgb.SpyAgency, TestCase):
"""Base class for Tool test cases.
Version Added:
3.0
"""
#: The tool class to test.
#:
#: This is required.
#:
#: Type:
#: type
tool_class = None
#: The key in the configuration identifying the executable of the tool.
#:
#: This is required.
#:
#: Type:
#: unicode
tool_exe_config_key = None
#: The path to the executable for running the tool.
#:
#: This will generally be a fake path for simulated tool runs, but a
#: real one for integration tests. It can be set on the class or during
#: test/test suite setup.
#:
#: Type:
#: unicode
tool_exe_path = None
def run_get_can_handle_file(self, filename, file_contents=b'',
tool_settings={}):
"""Run get_can_handle_file with the given file and settings.
This will create the review objects, set up a repository (if needed
by the tool), apply any configuration, and run
:py:meth:`~reviewbot.tools.base.BaseTool.get_can_handle_file`.
Args:
filename (unicode):
The filename of the file being reviewed.
file_contents (bytes, optional):
File content to review.
tool_settings (dict, optional):
The settings to pass to the tool constructor.
Returns:
bool:
``True`` if the file can be handled. ``False`` if it cannot.
"""
review = self.create_review()
review_file = self.create_review_file(
review,
source_file=filename,
dest_file=filename,
diff_data=self.create_diff_data(chunks=[{
'change': 'insert',
'lines': file_contents.splitlines(),
'new_linenum': 1,
}]),
patched_content=file_contents)
tool = self.tool_class(settings=tool_settings)
return tool.get_can_handle_file(review_file)
def run_tool_execute(self, filename, file_contents, checkout_dir=None,
                     tool_settings=None, other_files=None):
    """Run execute with the given file and settings.

    This will create the review objects, set up a repository (if needed
    by the tool), apply any configuration, and run
    :py:meth:`~reviewbot.tools.base.BaseTool.execute`.

    Args:
        filename (unicode):
            The filename of the file being reviewed.

        file_contents (bytes):
            File content to review.

        checkout_dir (unicode, optional):
            An explicit directory to use as the checkout directory, for
            tools that require full-repository checkouts.

        tool_settings (dict, optional):
            The settings to pass to the tool constructor.

        other_files (dict, optional):
            Other files to write to the tree. Each will result in a new
            file added to the review.

            The dictionary is a map of file paths (relative to the
            checkout directory) to byte strings.

    Returns:
        tuple:
        A 2-tuple containing:

        1. The review (:py:class:`reviewbot.processing.review.Review)`
        2. The file entry corresponding to ``filename``
           (:py:class:`reviewbot.processing.review.File`)

        If ``other_files`` is specified, the second tuple item will
        instead be a dictionary of keys from ``other_files`` (along with
        ``filename``) to :py:class:`reviewbot.processing.review.File`
        instances.
    """
    # Avoid mutable default arguments: the previous `tool_settings={}`
    # and `other_files={}` defaults shared a single dict object across
    # every call to this method (flake8-bugbear B006).
    if tool_settings is None:
        tool_settings = {}

    if other_files is None:
        other_files = {}

    if self.tool_class.working_directory_required:
        # Fake out a repository checkout so tools that need a working
        # directory can run without any network or real clone.
        repository = GitRepository(name='MyRepo',
                                   clone_path='git://example.com/repo')
        self.spy_on(repository.sync, call_original=False)

        @self.spy_for(repository.checkout)
        def _checkout(_self, *args, **kwargs):
            return checkout_dir or tempfile.mkdtemp()
    else:
        repository = None

    review = self.create_review()
    review_file = self.create_review_file(
        review,
        source_file=filename,
        dest_file=filename,
        diff_data=self.create_diff_data(chunks=[{
            'change': 'insert',
            'lines': file_contents.splitlines(),
            'new_linenum': 1,
        }]),
        patched_content=file_contents)

    review_files = {}

    if other_files:
        review_files[filename] = review_file

        for other_filename, other_contents in six.iteritems(other_files):
            review_files[other_filename] = self.create_review_file(
                review,
                source_file=other_filename,
                dest_file=other_filename,
                diff_data=self.create_diff_data(chunks=[{
                    'change': 'insert',
                    'lines': other_contents.splitlines(),
                    'new_linenum': 1,
                }]),
                patched_content=other_contents)

    # Ensure the tool under test resolves to the configured executable
    # path when looked up through the worker configuration.
    worker_config = deepcopy(self.config)
    worker_config.setdefault('exe_paths', {}).update({
        self.tool_exe_config_key: self.tool_exe_path,
    })

    with self.override_config(worker_config):
        tool = self.tool_class(settings=tool_settings)
        tool.execute(review,
                     repository=repository)

    if other_files:
        return review, review_files

    return review, review_file
def setup_integration_test(self, **kwargs):
    """Prepare state for an integration test.

    This default implementation does nothing; subclasses may override
    it to perform per-test setup.

    Args:
        **kwargs (dict):
            Keyword arguments passed to
            :py:func:`~reviewbot.tools.testing.testcases.integration_test`.
    """
def setup_simulation_test(self, **kwargs):
    """Prepare state for a simulation test.

    This default implementation does nothing; subclasses may override
    it to perform per-test setup.

    Args:
        **kwargs (dict):
            Keyword arguments passed to
            :py:func:`~reviewbot.tools.testing.testcases.simulation_test`.
    """
| 1,053 | 0 | 94 |
48263036a4ae9dffc08e7ebd8049a796b56e6de0 | 1,054 | py | Python | code/yakindu/src-gen/cart/cart_statemachine_interfaces.py | samuelexferri/tvsw | ce61649546b23e86268143142a2d45375b06fcfe | [
"MIT"
] | null | null | null | code/yakindu/src-gen/cart/cart_statemachine_interfaces.py | samuelexferri/tvsw | ce61649546b23e86268143142a2d45375b06fcfe | [
"MIT"
] | 9 | 2021-03-31T20:05:26.000Z | 2021-10-04T12:27:50.000Z | code/yakindu/src-gen/cart/cart_statemachine_interfaces.py | samuelexferri/tvsw | ce61649546b23e86268143142a2d45375b06fcfe | [
"MIT"
] | null | null | null | """Interfaces defined in the state chart model.
The interfaces defined in the state chart model are represented
as separate classes.
"""
# Scope class for the state chart's interface; generated code
# (NOTE(review): this appears to be a truncated duplicate — the full
# definition with event fields and raise_*/clear_events methods exists
# elsewhere in the file; confirm which copy is authoritative).
class SCI_Interface:
    """Implementation of scope sci_interface.
    """
| 18.172414 | 63 | 0.710626 | """Interfaces defined in the state chart model.
The interfaces defined in the state chart model are represented
as separate classes.
"""
class SCI_Interface:
    """Implementation of scope sci_interface.

    Holds the incoming event flags, the interface variables, and the
    operation callback for the state chart's default interface scope.
    """

    # Names of all incoming events handled by this scope.  Flags start
    # as None (never raised), become True when raised, and are set to
    # False whenever the event pool is cleared.
    _EVENT_NAMES = ('reset', 'exit', 'order', 'selectgeneric',
                    'selectcommercial', 'confirm', 'back')

    def __init__(self):
        # Initialize every event flag to None (unraised).
        for event_name in self._EVENT_NAMES:
            setattr(self, event_name, None)

        # Interface variables and the operation callback object.
        self.numofproductsincart = None
        self.totalproducts = None
        self.operationCallback = None

    def raise_reset(self):
        """Raise the ``reset`` event."""
        self.reset = True

    def raise_exit(self):
        """Raise the ``exit`` event."""
        self.exit = True

    def raise_order(self):
        """Raise the ``order`` event."""
        self.order = True

    def raise_selectgeneric(self):
        """Raise the ``selectgeneric`` event."""
        self.selectgeneric = True

    def raise_selectcommercial(self):
        """Raise the ``selectcommercial`` event."""
        self.selectcommercial = True

    def raise_confirm(self):
        """Raise the ``confirm`` event."""
        self.confirm = True

    def raise_back(self):
        """Raise the ``back`` event."""
        self.back = True

    def clear_events(self):
        """Set all incoming event flags back to False."""
        for event_name in self._EVENT_NAMES:
            setattr(self, event_name, False)
| 604 | 0 | 232 |