sentence1 stringlengths 52 3.87M | sentence2 stringlengths 1 47.2k | label stringclasses 1 value |
|---|---|---|
def names_to_abbreviations(reporters):
"""Build a dict mapping names to their variations
Something like:
{
"Atlantic Reporter": ['A.', 'A.2d'],
}
Note that the abbreviations are sorted by start date.
"""
names = {}
for reporter_key, data_list in reporters.items():
for data in data_list:
abbrevs = data['editions'].keys()
# Sort abbreviations by start date of the edition
sort_func = lambda x: str(data['editions'][x]['start']) + x
abbrevs = sorted(abbrevs, key=sort_func)
names[data['name']] = abbrevs
sorted_names = OrderedDict(sorted(names.items(), key=lambda t: t[0]))
return sorted_names | Build a dict mapping names to their variations
Something like:
{
"Atlantic Reporter": ['A.', 'A.2d'],
}
Note that the abbreviations are sorted by start date. | entailment |
def check_rank(player, platform="steam"):
"""Gets the Rocket League stats and name and dp of a UserID
Args:
player (str): The UserID of the player we want to rank check
platform (str): The platform to check for, can be 'steam', 'ps', or 'xbox'
Returns:
success (bool): Whether the rank check was successful
package (tuple): If successful, the retrieved stats, in order (stats, name, dp)
"""
# Get player ID and name Rocket League Tracker Network
webpage = requests.get(
"https://rocketleague.tracker.network/profile/{}/{}".format(platform, player)
).text
try:
# Get player ID
playerid_index = webpage.index("/live?ids=") + len("/live?ids=")
playerid_end_index = webpage.index("""">""", playerid_index)
playerid = webpage[playerid_index:playerid_end_index]
# Get player name
name_index = webpage.index("Stats Profile : ") + len("Stats Profile : ")
name_end_index = webpage.index("""\n""", name_index)
name = webpage[name_index:name_end_index]
except (ValueError, IndexError):
return False, ()
# Get player stats from Rocket League Tracker Network
livedata = json.loads(
requests.post(
"https://rocketleague.tracker.network/live/data",
json={"playerIds": [playerid]}
).text
)
stats = []
try:
for statpack in livedata['players'][0]['Stats']:
field = statpack['Value']['Label']
value = str(statpack['Value']['DisplayValue'])
if statpack['Value']['Percentile']:
percentile = str(statpack['Value']['Percentile'])
else:
percentile = None
stats.append((field, value, percentile))
except (IndexError, KeyError):
return False, ()
dp = "https://rocketleague.media.zestyio.com/rocket-league-logos-vr-white.f1cb27a519bdb5b6ed34049a5b86e317.png"
platform_display = platform
if platform == "steam":
platform_display = "Steam"
elif platform == "ps":
platform_display = "PlayStation"
elif platform == "xbox":
platform_display = "Xbox"
return True, (stats, name, platform_display, dp) | Gets the Rocket League stats and name and dp of a UserID
Args:
player (str): The UserID of the player we want to rank check
platform (str): The platform to check for, can be 'steam', 'ps', or 'xbox'
Returns:
success (bool): Whether the rank check was successful
package (tuple): If successful, the retrieved stats, in order (stats, name, dp) | entailment |
def send_message(channel_id, message):
"""
Send a message to a channel
Args:
channel_id (str): The id of the channel to send the message to
message (str): The message to send to the channel
"""
channel = client.get_channel(channel_id)
if channel is None:
logger.info("{} is not a channel".format(channel_id))
return
# Check that it's enabled in the server
data = datatools.get_data()
if not data["discord"]["servers"][channel.server.id][modulename]["activated"]:
logger.info("This module has been disabled in {} ({})".format(channel.server.name, channel.server.id))
try:
runcoro(client.send_message(channel, message))
except Exception as e:
logger.exception(e) | Send a message to a channel
Args:
channel_id (str): The id of the channel to send the message to
message (str): The message to send to the channel | entailment |
def runcoro(async_function):
"""
Runs an asynchronous function without needing to use await - useful for lambda
Args:
async_function (Coroutine): The asynchronous function to run
"""
future = _asyncio.run_coroutine_threadsafe(async_function, client.loop)
result = future.result()
return result | Runs an asynchronous function without needing to use await - useful for lambda
Args:
async_function (Coroutine): The asynchronous function to run | entailment |
async def on_message(message):
"""The on_message event handler for this module
Args:
message (discord.Message): Input message
"""
# Simplify message info
server = message.server
author = message.author
channel = message.channel
content = message.content
data = datatools.get_data()
if not data["discord"]["servers"][server.id][_data.modulename]["activated"]:
return
# Only reply to server messages and don't reply to myself
if server is not None and author != channel.server.me:
# Do a flip check
flipchecked = api_flipcheck.flipcheck(content)
if flipchecked:
await client.send_typing(channel)
await client.send_message(channel, flipchecked) | The on_message event handler for this module
Args:
message (discord.Message): Input message | entailment |
def update_keys(self):
"""Updates the Google API key with the text value"""
from ...main import add_api_key
add_api_key("reddit_api_user_agent", self.reddit_api_user_agent.get())
add_api_key("reddit_api_client_id", self.reddit_api_client_id.get())
add_api_key("reddit_api_client_secret", self.reddit_api_client_secret.get()) | Updates the Google API key with the text value | entailment |
async def activate_module(channel, module_name, activate):
"""
Changes a modules activated/deactivated state for a server
Args:
channel: The channel to send the message to
module_name: The name of the module to change state for
activate: The activated/deactivated state of the module
"""
data = datatools.get_data()
server_id = channel.server.id
_dir = os.path.realpath(os.path.join(os.getcwd(), os.path.dirname(__file__)))
_dir_modules = "{}/../".format(_dir)
if not os.path.isfile("{}/{}/_data.py".format(_dir_modules, module_name)):
await client.send_typing(channel)
embed = ui_embed.error(channel, "Error", "No module found named '{}'".format(module_name))
await embed.send()
return
try:
import_name = ".discord_modis.modules.{}.{}".format(module_name, "_data")
module_data = importlib.import_module(import_name, "modis")
# Don't try and deactivate this module (not that it would do anything)
if module_data.modulename == _data.modulename:
await client.send_typing(channel)
embed = ui_embed.error(channel, "Error", "I'm sorry, Dave. I'm afraid I can't do that.")
await embed.send()
return
# This /should/ never happen if everything goes well
if module_data.modulename not in data["discord"]["servers"][server_id]:
await client.send_typing(channel)
embed = ui_embed.error(channel, "Error",
"No data found for module '{}'".format(module_data.modulename))
await embed.send()
return
# Modify the module
if "activated" in data["discord"]["servers"][server_id][module_data.modulename]:
data["discord"]["servers"][server_id][module_data.modulename]["activated"] = activate
# Write the data
datatools.write_data(data)
await client.send_typing(channel)
embed = ui_embed.modify_module(channel, module_data.modulename, activate)
await embed.send()
return
else:
await client.send_typing(channel)
embed = ui_embed.error(channel, "Error", "Can't deactivate module '{}'".format(module_data.modulename))
await embed.send()
return
except Exception as e:
logger.error("Could not modify module {}".format(module_name))
logger.exception(e) | Changes a modules activated/deactivated state for a server
Args:
channel: The channel to send the message to
module_name: The name of the module to change state for
activate: The activated/deactivated state of the module | entailment |
async def warn_user(channel, user):
"""
Gives a user a warning, and bans them if they are over the maximum warnings
Args:
channel: The channel to send the warning message in
user: The user to give the warning to
"""
data = datatools.get_data()
server_id = channel.server.id
if "warnings_max" not in data["discord"]["servers"][server_id][_data.modulename]:
data["discord"]["servers"][server_id][_data.modulename]["warnings_max"] = 3
if "warnings" not in data["discord"]["servers"][server_id][_data.modulename]:
data["discord"]["servers"][server_id][_data.modulename]["warnings"] = {}
if user.id in data["discord"]["servers"][server_id][_data.modulename]["warnings"]:
data["discord"]["servers"][server_id][_data.modulename]["warnings"][user.id] += 1
else:
data["discord"]["servers"][server_id][_data.modulename]["warnings"][user.id] = 1
datatools.write_data(data)
warnings = data["discord"]["servers"][server_id][_data.modulename]["warnings"][user.id]
max_warnings = data["discord"]["servers"][server_id][_data.modulename]["warnings_max"]
await client.send_typing(channel)
embed = ui_embed.user_warning(channel, user, warnings, max_warnings)
await embed.send()
if warnings >= max_warnings:
await ban_user(channel, user) | Gives a user a warning, and bans them if they are over the maximum warnings
Args:
channel: The channel to send the warning message in
user: The user to give the warning to | entailment |
async def ban_user(channel, user):
"""
Bans a user from a server
Args:
channel: The channel to send the warning message in
user: The user to give the warning to
"""
data = datatools.get_data()
server_id = channel.server.id
try:
await client.ban(user)
except discord.errors.Forbidden:
await client.send_typing(channel)
embed = ui_embed.error(channel, "Ban Error", "I do not have the permissions to ban that person.")
await embed.send()
return
# Set the user's warnings to 0
if "warnings" in data["discord"]["servers"][server_id][_data.modulename]:
if user.id in data["discord"]["servers"][server_id][_data.modulename]["warnings"]:
data["discord"]["servers"][server_id][_data.modulename]["warnings"][user.id] = 0
datatools.write_data(data)
await client.send_typing(channel)
embed = ui_embed.user_ban(channel, user)
await embed.send()
try:
response = "You have been banned from the server '{}' " \
"contact the owners to resolve this issue.".format(channel.server.name)
await client.send_message(user, response)
except Exception as e:
logger.exception(e) | Bans a user from a server
Args:
channel: The channel to send the warning message in
user: The user to give the warning to | entailment |
def get_help_datapacks(module_name, server_prefix):
"""
Get the help datapacks for a module
Args:
module_name (str): The module to get help data for
server_prefix (str): The command prefix for this server
Returns:
datapacks (list): The help datapacks for the module
"""
_dir = os.path.realpath(
os.path.join(os.getcwd(), os.path.dirname(__file__)))
module_dir = "{}/../{}".format(_dir, module_name, "_help.json")
if os.path.isdir(module_dir):
module_help_path = "{}/{}".format(module_dir, "_help.json")
if os.path.isfile(module_help_path):
return helptools.get_help_datapacks(module_help_path, server_prefix)
else:
return [("Help", "{} does not have a help.json file".format(module_name), False)]
else:
return [("Help", "No module found called {}".format(module_name), False)] | Get the help datapacks for a module
Args:
module_name (str): The module to get help data for
server_prefix (str): The command prefix for this server
Returns:
datapacks (list): The help datapacks for the module | entailment |
def get_help_commands(server_prefix):
"""
Get the help commands for all modules
Args:
server_prefix: The server command prefix
Returns:
datapacks (list): A list of datapacks for the help commands for all the modules
"""
datapacks = []
_dir = os.path.realpath(
os.path.join(os.getcwd(), os.path.dirname(__file__)))
for module_name in os.listdir("{}/../".format(_dir)):
if not module_name.startswith("_") and not module_name.startswith("!"):
help_command = "`{}help {}`".format(server_prefix, module_name)
datapacks.append((module_name, help_command, True))
return datapacks | Get the help commands for all modules
Args:
server_prefix: The server command prefix
Returns:
datapacks (list): A list of datapacks for the help commands for all the modules | entailment |
async def on_message(message):
"""The on_message event handler for this module
Args:
message (discord.Message): Input message
"""
# Simplify message info
server = message.server
author = message.author
channel = message.channel
content = message.content
data = datatools.get_data()
# Only reply to server messages and don't reply to myself
if server is not None and author != channel.server.me:
# Commands section
prefix = data["discord"]["servers"][server.id]["prefix"]
if content.startswith(prefix):
# Parse message
package = content.split(" ")
command = package[0][len(prefix):]
args = package[1:]
arg = ' '.join(args)
# Commands
if command == 'help':
if args:
# Parse message
datapacks = api_help.get_help_datapacks(arg, prefix)
# Create embed UI
if datapacks:
await client.send_typing(channel)
embed = ui_embed.success(channel, arg, datapacks)
try:
await embed.send()
except discord.errors.HTTPException:
embed = ui_embed.http_exception(channel, arg)
await embed.send()
else:
# Parse message
datapacks = api_help.get_help_commands(prefix)
# Create embed UI
if datapacks:
await client.send_typing(channel)
embed = ui_embed.success(channel, arg, datapacks)
try:
await embed.send()
except discord.errors.HTTPException:
embed = ui_embed.http_exception(channel, arg)
await embed.send() | The on_message event handler for this module
Args:
message (discord.Message): Input message | entailment |
async def on_message(message):
"""The on_message event handler for this module
Args:
message (discord.Message): Input message
"""
# Simplify message info
server = message.server
author = message.author
channel = message.channel
content = message.content
data = datatools.get_data()
# Only reply to server messages and don't reply to myself
if server is not None and author != channel.server.me:
prefix = data["discord"]["servers"][server.id]["prefix"]
# Check for mentions reply to mentions
if channel.server.me in message.mentions:
await client.send_typing(channel)
response = "The current server prefix is `{0}`. Type `{0}help` for help.".format(prefix)
await client.send_message(channel, response)
# Commands section
if content.startswith(prefix):
# Parse message
package = content.split(" ")
command = package[0][len(prefix):]
args = package[1:]
arg = ' '.join(args)
# Commands
if command not in ["prefix", "activate", "deactivate", "warnmax", "warn", "ban"]:
return
is_admin = author == server.owner
for role in message.author.roles:
if role.permissions.administrator:
is_admin = True
if not is_admin:
await client.send_typing(channel)
reason = "You must have a role that has the permission 'Administrator'"
embed = ui_embed.error(channel, "Insufficient Permissions", reason)
await embed.send()
return
if command == "prefix" and args:
new_prefix = arg.replace(" ", "").strip()
data["discord"]["servers"][server.id]["prefix"] = new_prefix
# Write the data
datatools.write_data(data)
await client.send_typing(channel)
embed = ui_embed.modify_prefix(channel, new_prefix)
await embed.send()
if command == "warnmax" and args:
try:
warn_max = int(arg)
if warn_max > 0:
data["discord"]["servers"][server.id][_data.modulename]["warnings_max"] = warn_max
datatools.write_data(data)
await client.send_typing(channel)
embed = ui_embed.warning_max_changed(channel, warn_max)
await embed.send()
else:
reason = "Maximum warnings must be greater than 0"
embed = ui_embed.error(channel, "Error", reason)
await embed.send()
except (ValueError, TypeError):
reason = "Warning maximum must be a number"
embed = ui_embed.error(channel, "Error", reason)
await embed.send()
except Exception as e:
logger.exception(e)
if command == "warn" and args:
for user in message.mentions:
await api_manager.warn_user(channel, user)
if command == "ban" and args:
for user in message.mentions:
await api_manager.ban_user(channel, user)
if command == "activate" and args:
await api_manager.activate_module(channel, arg, True)
elif command == "deactivate" and args:
await api_manager.activate_module(channel, arg, False) | The on_message event handler for this module
Args:
message (discord.Message): Input message | entailment |
def topic_update(channel, topic_channel):
"""
Creates an embed UI for the topic update
Args:
channel (discord.Channel): The Discord channel to bind the embed to
topic_channel: The new topic channel
Returns:
embed: The created embed
"""
if topic_channel is not None:
try:
channel_message = "Topic channel is now `{}`.".format(topic_channel.name)
except Exception as e:
logger.exception(e)
channel_message = "Topic channel has been updated."
else:
channel_message = "Topic channel has been cleared."
# Create embed UI object
gui = ui_embed.UI(
channel,
"Topic channel updated",
channel_message,
modulename=modulename,
colour=modulecolor_info
)
return gui | Creates an embed UI for the topic update
Args:
channel (discord.Channel): The Discord channel to bind the embed to
topic_channel: The new topic channel
Returns:
embed: The created embed | entailment |
def error_message(channel, err_title, err_message):
"""
Creates an embed UI for the topic update
Args:
channel (discord.Channel): The Discord channel to bind the embed to
err_title: The title for the error
err_message: The message for the error
Returns:
embed: The created embed
"""
# Create embed UI object
gui = ui_embed.UI(
channel,
err_title,
err_message,
modulename=modulename,
colour=modulecolor_error
)
return gui | Creates an embed UI for the topic update
Args:
channel (discord.Channel): The Discord channel to bind the embed to
err_title: The title for the error
err_message: The message for the error
Returns:
embed: The created embed | entailment |
def clear_cache_root():
"""Clears everything in the song cache"""
logger.debug("Clearing root cache")
if os.path.isdir(_root_songcache_dir):
for filename in os.listdir(_root_songcache_dir):
file_path = os.path.join(_root_songcache_dir, filename)
try:
if os.path.isfile(file_path):
os.unlink(file_path)
elif os.path.isdir(file_path):
shutil.rmtree(file_path)
except PermissionError:
pass
except Exception as e:
logger.exception(e)
logger.debug("Root cache cleared") | Clears everything in the song cache | entailment |
async def play(self, author, text_channel, query, index=None, stop_current=False, shuffle=False):
"""
The play command
Args:
author (discord.Member): The member that called the command
text_channel (discord.Channel): The channel where the command was called
query (str): The argument that was passed with the command
index (str): Whether to play next or at the end of the queue
stop_current (bool): Whether to stop the currently playing song
shuffle (bool): Whether to shuffle the queue after starting
"""
if self.state == 'off':
self.state = 'starting'
self.prev_queue = []
await self.set_topic("")
# Init the music player
await self.msetup(text_channel)
# Queue the song
await self.enqueue(query, index, stop_current, shuffle)
# Connect to voice
await self.vsetup(author)
# Mark as 'ready' if everything is ok
self.state = 'ready' if self.mready and self.vready else 'off'
else:
# Queue the song
await self.enqueue(query, index, stop_current, shuffle)
if self.state == 'ready':
if self.streamer is None:
await self.vplay() | The play command
Args:
author (discord.Member): The member that called the command
text_channel (discord.Channel): The channel where the command was called
query (str): The argument that was passed with the command
index (str): Whether to play next or at the end of the queue
stop_current (bool): Whether to stop the currently playing song
shuffle (bool): Whether to shuffle the queue after starting | entailment |
async def destroy(self):
"""Destroy the whole gui and music player"""
self.logger.debug("destroy command")
self.state = 'destroyed'
await self.set_topic("")
self.nowplayinglog.debug("---")
self.nowplayingauthorlog.debug("---")
self.nowplayingsourcelog.debug("---")
self.timelog.debug(_timebar.make_timebar())
self.prev_time = "---"
self.statuslog.debug("Destroying")
self.mready = False
self.vready = False
self.pause_time = None
self.loop_type = 'off'
if self.vclient:
try:
await self.vclient.disconnect()
except Exception as e:
logger.error(e)
pass
if self.streamer:
try:
self.streamer.stop()
except:
pass
self.vclient = None
self.vchannel = None
self.streamer = None
self.current_duration = 0
self.current_download_elapsed = 0
self.is_live = False
self.queue = []
self.prev_queue = []
if self.embed:
await self.embed.delete()
self.embed = None
self.clear_cache() | Destroy the whole gui and music player | entailment |
async def toggle(self):
"""Toggles between pause and resume command"""
self.logger.debug("toggle command")
if not self.state == 'ready':
return
if self.streamer is None:
return
try:
if self.streamer.is_playing():
await self.pause()
else:
await self.resume()
except Exception as e:
logger.error(e)
pass | Toggles between pause and resume command | entailment |
async def pause(self):
"""Pauses playback if playing"""
self.logger.debug("pause command")
if not self.state == 'ready':
return
if self.streamer is None:
return
try:
if self.streamer.is_playing():
self.streamer.pause()
self.pause_time = self.vclient.loop.time()
self.statuslog.info("Paused")
except Exception as e:
logger.error(e)
pass | Pauses playback if playing | entailment |
async def resume(self):
"""Resumes playback if paused"""
self.logger.debug("resume command")
if not self.state == 'ready':
return
if self.streamer is None:
return
try:
if not self.streamer.is_playing():
play_state = "Streaming" if self.is_live else "Playing"
self.statuslog.info(play_state)
self.streamer.resume()
if self.pause_time is not None:
self.vclient_starttime += (self.vclient.loop.time() - self.pause_time)
self.pause_time = None
except Exception as e:
logger.error(e)
pass | Resumes playback if paused | entailment |
async def skip(self, query="1"):
"""The skip command
Args:
query (str): The number of items to skip
"""
if not self.state == 'ready':
logger.debug("Trying to skip from wrong state '{}'".format(self.state))
return
if query == "":
query = "1"
elif query == "all":
query = str(len(self.queue) + 1)
try:
num = int(query)
except TypeError:
self.statuslog.error("Skip argument must be a number")
except ValueError:
self.statuslog.error("Skip argument must be a number")
else:
self.statuslog.info("Skipping")
for i in range(num - 1):
if len(self.queue) > 0:
self.prev_queue.append(self.queue.pop(0))
try:
self.streamer.stop()
except Exception as e:
logger.exception(e) | The skip command
Args:
query (str): The number of items to skip | entailment |
async def remove(self, index=""):
"""
The remove command
Args:
index (str): The index to remove, can be either a number, or a range in the for '##-##'
"""
if not self.state == 'ready':
logger.debug("Trying to remove from wrong state '{}'".format(self.state))
return
if index == "":
self.statuslog.error("Must provide index to remove")
return
elif index == "all":
self.queue = []
self.update_queue()
self.statuslog.info("Removed all songs")
return
indexes = index.split("-")
self.logger.debug("Removing {}".format(indexes))
try:
if len(indexes) == 0:
self.statuslog.error("Remove must specify an index or range")
return
elif len(indexes) == 1:
num_lower = int(indexes[0]) - 1
num_upper = num_lower + 1
elif len(indexes) == 2:
num_lower = int(indexes[0]) - 1
num_upper = int(indexes[1])
else:
self.statuslog.error("Cannot have more than 2 indexes for remove range")
return
except TypeError:
self.statuslog.error("Remove index must be a number")
return
except ValueError:
self.statuslog.error("Remove index must be a number")
return
if num_lower < 0 or num_lower >= len(self.queue) or num_upper > len(self.queue):
if len(self.queue) == 0:
self.statuslog.warning("No songs in queue")
elif len(self.queue) == 1:
self.statuslog.error("Remove index must be 1 (only 1 song in queue)")
else:
self.statuslog.error("Remove index must be between 1 and {}".format(len(self.queue)))
return
if num_upper <= num_lower:
self.statuslog.error("Second index in range must be greater than first")
return
lower_songname = self.queue[num_lower][1]
for num in range(0, num_upper - num_lower):
self.logger.debug("Removed {}".format(self.queue[num_lower][1]))
self.queue.pop(num_lower)
if len(indexes) == 1:
self.statuslog.info("Removed {}".format(lower_songname))
else:
self.statuslog.info("Removed songs {}-{}".format(num_lower + 1, num_upper))
self.update_queue() | The remove command
Args:
index (str): The index to remove, can be either a number, or a range in the for '##-##' | entailment |
async def rewind(self, query="1"):
"""
The rewind command
Args:
query (str): The number of items to skip
"""
if not self.state == 'ready':
logger.debug("Trying to rewind from wrong state '{}'".format(self.state))
return
if query == "":
query = "1"
try:
num = int(query)
except TypeError:
self.statuslog.error("Rewind argument must be a number")
except ValueError:
self.statuslog.error("Rewind argument must be a number")
else:
if len(self.prev_queue) == 0:
self.statuslog.error("No songs to rewind")
return
if num < 0:
self.statuslog.error("Rewind must be postitive or 0")
return
elif num > len(self.prev_queue):
self.statuslog.warning("Rewinding to start")
else:
self.statuslog.info("Rewinding")
for i in range(num + 1):
if len(self.prev_queue) > 0:
self.queue.insert(0, self.prev_queue.pop())
try:
self.streamer.stop()
except Exception as e:
logger.exception(e) | The rewind command
Args:
query (str): The number of items to skip | entailment |
async def shuffle(self):
"""The shuffle command"""
self.logger.debug("shuffle command")
if not self.state == 'ready':
return
self.statuslog.debug("Shuffling")
random.shuffle(self.queue)
self.update_queue()
self.statuslog.debug("Shuffled") | The shuffle command | entailment |
async def set_loop(self, loop_value):
"""Updates the loop value, can be 'off', 'on', or 'shuffle'"""
if loop_value not in ['on', 'off', 'shuffle']:
self.statuslog.error("Loop value must be `off`, `on`, or `shuffle`")
return
self.loop_type = loop_value
if self.loop_type == 'on':
self.statuslog.info("Looping on")
elif self.loop_type == 'off':
self.statuslog.info("Looping off")
elif self.loop_type == 'shuffle':
self.statuslog.info("Looping on and shuffling") | Updates the loop value, can be 'off', 'on', or 'shuffle | entailment |
async def setvolume(self, value):
"""The volume command
Args:
value (str): The value to set the volume to
"""
self.logger.debug("volume command")
if self.state != 'ready':
return
logger.debug("Volume command received")
if value == '+':
if self.volume < 100:
self.statuslog.debug("Volume up")
self.volume = (10 * (self.volume // 10)) + 10
self.volumelog.info(str(self.volume))
try:
self.streamer.volume = self.volume / 100
except AttributeError:
pass
else:
self.statuslog.warning("Already at maximum volume")
elif value == '-':
if self.volume > 0:
self.statuslog.debug("Volume down")
self.volume = (10 * ((self.volume + 9) // 10)) - 10
self.volumelog.info(str(self.volume))
try:
self.streamer.volume = self.volume / 100
except AttributeError:
pass
else:
self.statuslog.warning("Already at minimum volume")
else:
try:
value = int(value)
except ValueError:
self.statuslog.error("Volume argument must be +, -, or a %")
else:
if 0 <= value <= 200:
self.statuslog.debug("Setting volume")
self.volume = value
self.volumelog.info(str(self.volume))
try:
self.streamer.volume = self.volume / 100
except AttributeError:
pass
else:
self.statuslog.error("Volume must be between 0 and 200")
self.write_volume() | The volume command
Args:
value (str): The value to set the volume to | entailment |
def write_volume(self):
"""Writes the current volume to the data.json"""
# Update the volume
data = datatools.get_data()
data["discord"]["servers"][self.server_id][_data.modulename]["volume"] = self.volume
datatools.write_data(data) | Writes the current volume to the data.json | entailment |
async def movehere(self, channel):
"""
Moves the embed message to a new channel; can also be used to move the musicplayer to the front
Args:
channel (discord.Channel): The channel to move to
"""
self.logger.debug("movehere command")
# Delete the old message
await self.embed.delete()
# Set the channel to this channel
self.embed.channel = channel
# Send a new embed to the channel
await self.embed.send()
# Re-add the reactions
await self.add_reactions()
self.statuslog.info("Moved to front") | Moves the embed message to a new channel; can also be used to move the musicplayer to the front
Args:
channel (discord.Channel): The channel to move to | entailment |
async def set_topic_channel(self, channel):
"""Set the topic channel for this server"""
data = datatools.get_data()
data["discord"]["servers"][self.server_id][_data.modulename]["topic_id"] = channel.id
datatools.write_data(data)
self.topicchannel = channel
await self.set_topic(self.topic)
await client.send_typing(channel)
embed = ui_embed.topic_update(channel, self.topicchannel)
await embed.send() | Set the topic channel for this server | entailment |
async def clear_topic_channel(self, channel):
"""Set the topic channel for this server"""
try:
if self.topicchannel:
await client.edit_channel(self.topicchannel, topic="")
except Exception as e:
logger.exception(e)
self.topicchannel = None
logger.debug("Clearing topic channel")
data = datatools.get_data()
data["discord"]["servers"][self.server_id][_data.modulename]["topic_id"] = ""
datatools.write_data(data)
await client.send_typing(channel)
embed = ui_embed.topic_update(channel, self.topicchannel)
await embed.send() | Set the topic channel for this server | entailment |
async def vsetup(self, author):
"""Creates the voice client
Args:
author (discord.Member): The user that the voice ui will seek
"""
if self.vready:
logger.warning("Attempt to init voice when already initialised")
return
if self.state != 'starting':
logger.error("Attempt to init from wrong state ('{}'), must be 'starting'.".format(self.state))
return
self.logger.debug("Setting up voice")
# Create voice client
self.vchannel = author.voice.voice_channel
if self.vchannel:
self.statuslog.info("Connecting to voice")
try:
self.vclient = await client.join_voice_channel(self.vchannel)
except discord.ClientException as e:
logger.exception(e)
self.statuslog.warning("I'm already connected to a voice channel.")
return
except discord.opus.OpusNotLoaded as e:
logger.exception(e)
logger.error("Could not load Opus. This is an error with your FFmpeg setup.")
self.statuslog.error("Could not load Opus.")
return
except discord.DiscordException as e:
logger.exception(e)
self.statuslog.error("I couldn't connect to the voice channel. Check my permissions.")
return
except Exception as e:
self.statuslog.error("Internal error connecting to voice, disconnecting.")
logger.error("Error connecting to voice {}".format(e))
return
else:
self.statuslog.error("You're not connected to a voice channel.")
return
self.vready = True | Creates the voice client
Args:
author (discord.Member): The user that the voice ui will seek | entailment |
async def msetup(self, text_channel):
"""Creates the gui
Args:
text_channel (discord.Channel): The channel for the embed ui to run in
"""
if self.mready:
logger.warning("Attempt to init music when already initialised")
return
if self.state != 'starting':
logger.error("Attempt to init from wrong state ('{}'), must be 'starting'.".format(self.state))
return
self.logger.debug("Setting up gui")
# Create gui
self.mchannel = text_channel
self.new_embed_ui()
await self.embed.send()
await self.embed.usend()
await self.add_reactions()
self.mready = True | Creates the gui
Args:
text_channel (discord.Channel): The channel for the embed ui to run in | entailment |
def new_embed_ui(self):
"""Create the embed UI object and save it to self"""
self.logger.debug("Creating new embed ui object")
# Initial queue display
queue_display = []
for i in range(self.queue_display):
queue_display.append("{}. ---\n".format(str(i + 1)))
# Initial datapacks
datapacks = [
("Now playing", "---", False),
("Author", "---", True),
("Source", "---", True),
("Time", "```http\n" + _timebar.make_timebar() + "\n```", False),
("Queue", "```md\n{}\n```".format(''.join(queue_display)), False),
("Songs left in queue", "---", True),
("Volume", "{}%".format(self.volume), True),
("Status", "```---```", False)
]
# Create embed UI object
self.embed = ui_embed_tools.UI(
self.mchannel,
"",
"",
modulename=_data.modulename,
colour=_data.modulecolor,
datapacks=datapacks
)
# Add handlers to update gui
noformatter = logging.Formatter("{message}", style="{")
timeformatter = logging.Formatter("```http\n{message}\n```", style="{")
mdformatter = logging.Formatter("```md\n{message}\n```", style="{")
statusformatter = logging.Formatter("```__{levelname}__\n{message}\n```", style="{")
volumeformatter = logging.Formatter("{message}%", style="{")
nowplayinghandler = EmbedLogHandler(self, self.embed, 0)
nowplayinghandler.setFormatter(noformatter)
nowplayingauthorhandler = EmbedLogHandler(self, self.embed, 1)
nowplayingauthorhandler.setFormatter(noformatter)
nowplayingsourcehandler = EmbedLogHandler(self, self.embed, 2)
nowplayingsourcehandler.setFormatter(noformatter)
timehandler = EmbedLogHandler(self, self.embed, 3)
timehandler.setFormatter(timeformatter)
queuehandler = EmbedLogHandler(self, self.embed, 4)
queuehandler.setFormatter(mdformatter)
queuelenhandler = EmbedLogHandler(self, self.embed, 5)
queuelenhandler.setFormatter(noformatter)
volumehandler = EmbedLogHandler(self, self.embed, 6)
volumehandler.setFormatter(volumeformatter)
statushandler = EmbedLogHandler(self, self.embed, 7)
statushandler.setFormatter(statusformatter)
self.nowplayinglog.addHandler(nowplayinghandler)
self.nowplayingauthorlog.addHandler(nowplayingauthorhandler)
self.nowplayingsourcelog.addHandler(nowplayingsourcehandler)
self.timelog.addHandler(timehandler)
self.queuelog.addHandler(queuehandler)
self.queuelenlog.addHandler(queuelenhandler)
self.volumelog.addHandler(volumehandler)
self.statuslog.addHandler(statushandler) | Create the embed UI object and save it to self | entailment |
async def add_reactions(self):
"""Adds the reactions buttons to the current message"""
self.statuslog.info("Loading buttons")
for e in ("⏯", "⏮", "⏹", "⏭", "🔀", "🔉", "🔊"):
try:
if self.embed is not None:
await client.add_reaction(self.embed.sent_embed, e)
except discord.DiscordException as e:
logger.exception(e)
self.statuslog.error("I couldn't add the buttons. Check my permissions.")
except Exception as e:
logger.exception(e) | Adds the reactions buttons to the current message | entailment |
async def enqueue(self, query, queue_index=None, stop_current=False, shuffle=False):
"""
Queues songs based on either a YouTube search or a link
Args:
query (str): Either a search term or a link
queue_index (str): The queue index to enqueue at (None for end)
stop_current (bool): Whether to stop the current song after the songs are queued
shuffle (bool): Whether to shuffle the added songs
"""
if query is None or query == "":
return
self.statuslog.info("Parsing {}".format(query))
self.logger.debug("Enqueueing from query")
indexnum = None
if queue_index is not None:
try:
indexnum = int(queue_index) - 1
except TypeError:
self.statuslog.error("Play index argument must be a number")
return
except ValueError:
self.statuslog.error("Play index argument must be a number")
return
if not self.vready:
self.parse_query(query, indexnum, stop_current, shuffle)
else:
parse_thread = threading.Thread(
target=self.parse_query,
args=[query, indexnum, stop_current, shuffle])
# Run threads
parse_thread.start() | Queues songs based on either a YouTube search or a link
Args:
query (str): Either a search term or a link
queue_index (str): The queue index to enqueue at (None for end)
stop_current (bool): Whether to stop the current song after the songs are queued
shuffle (bool): Whether to shuffle the added songs | entailment |
def parse_query(self, query, index, stop_current, shuffle):
"""
Parses a query and adds it to the queue
Args:
query (str): Either a search term or a link
index (int): The index to enqueue at (None for end)
stop_current (bool): Whether to stop the current song after the songs are queued
shuffle (bool): Whether to shuffle the added songs
"""
if index is not None and len(self.queue) > 0:
if index < 0 or index >= len(self.queue):
if len(self.queue) == 1:
self.statuslog.error("Play index must be 1 (1 song in queue)")
return
else:
self.statuslog.error("Play index must be between 1 and {}".format(len(self.queue)))
return
try:
yt_videos = api_music.parse_query(query, self.statuslog)
if shuffle:
random.shuffle(yt_videos)
if len(yt_videos) == 0:
self.statuslog.error("No results for: {}".format(query))
return
if index is None:
self.queue = self.queue + yt_videos
else:
if len(self.queue) > 0:
self.queue = self.queue[:index] + yt_videos + self.queue[index:]
else:
self.queue = yt_videos
self.update_queue()
if stop_current:
if self.streamer:
self.streamer.stop()
except Exception as e:
logger.exception(e) | Parses a query and adds it to the queue
Args:
query (str): Either a search term or a link
index (int): The index to enqueue at (None for end)
stop_current (bool): Whether to stop the current song after the songs are queued
shuffle (bool): Whether to shuffle the added songs | entailment |
def update_queue(self):
"""Updates the queue in the music player """
self.logger.debug("Updating queue display")
queue_display = []
for i in range(self.queue_display):
try:
if len(self.queue[i][1]) > 40:
songname = self.queue[i][1][:37] + "..."
else:
songname = self.queue[i][1]
except IndexError:
songname = "---"
queue_display.append("{}. {}\n".format(str(i + 1), songname))
self.queuelog.debug(''.join(queue_display))
self.queuelenlog.debug(str(len(self.queue))) | Updates the queue in the music player | entailment |
async def set_topic(self, topic):
"""Sets the topic for the topic channel"""
self.topic = topic
try:
if self.topicchannel:
await client.edit_channel(self.topicchannel, topic=topic)
except Exception as e:
logger.exception(e) | Sets the topic for the topic channel | entailment |
def clear_cache(self):
"""Removes all files from the songcache dir"""
self.logger.debug("Clearing cache")
if os.path.isdir(self.songcache_dir):
for filename in os.listdir(self.songcache_dir):
file_path = os.path.join(self.songcache_dir, filename)
try:
if os.path.isfile(file_path):
os.unlink(file_path)
except PermissionError:
pass
except Exception as e:
logger.exception(e)
self.logger.debug("Cache cleared") | Removes all files from the songcache dir | entailment |
def move_next_cache(self):
"""Moves files in the 'next' cache dir to the root"""
if not os.path.isdir(self.songcache_next_dir):
return
logger.debug("Moving next cache")
files = os.listdir(self.songcache_next_dir)
for f in files:
try:
os.rename("{}/{}".format(self.songcache_next_dir, f), "{}/{}".format(self.songcache_dir, f))
except PermissionError:
pass
except Exception as e:
logger.exception(e)
logger.debug("Next cache moved") | Moves files in the 'next' cache dir to the root | entailment |
def ytdl_progress_hook(self, d):
"""Called when youtube-dl updates progress"""
if d['status'] == 'downloading':
self.play_empty()
if "elapsed" in d:
if d["elapsed"] > self.current_download_elapsed + 4:
self.current_download_elapsed = d["elapsed"]
current_download = 0
current_download_total = 0
current_download_eta = 0
if "total_bytes" in d and d["total_bytes"] > 0:
current_download_total = d["total_bytes"]
elif "total_bytes_estimate" in d and d["total_bytes_estimate"] > 0:
current_download_total = d["total_bytes_estimate"]
if "downloaded_bytes" in d and d["downloaded_bytes"] > 0:
current_download = d["downloaded_bytes"]
if "eta" in d and d["eta"] > 0:
current_download_eta = d["eta"]
if current_download_total > 0:
percent = round(100 * (current_download / current_download_total))
if percent > 100:
percent = 100
elif percent < 0:
percent = 0
seconds = str(round(current_download_eta)) if current_download_eta > 0 else ""
eta = " ({} {} remaining)".format(seconds, "seconds" if seconds != 1 else "second")
downloading = "Downloading song: {}%{}".format(percent, eta)
if self.prev_time != downloading:
self.timelog.debug(downloading)
self.prev_time = downloading
if d['status'] == 'error':
self.statuslog.error("Error downloading song")
elif d['status'] == 'finished':
self.statuslog.info("Downloaded song")
downloading = "Downloading song: {}%".format(100)
if self.prev_time != downloading:
self.timelog.debug(downloading)
self.prev_time = downloading
if "elapsed" in d:
download_time = "{} {}".format(d["elapsed"] if d["elapsed"] > 0 else "<1",
"seconds" if d["elapsed"] != 1 else "second")
self.logger.debug("Downloaded song in {}".format(download_time))
# Create an FFmpeg player
future = asyncio.run_coroutine_threadsafe(self.create_ffmpeg_player(d['filename']), client.loop)
try:
future.result()
except Exception as e:
logger.exception(e)
return | Called when youtube-dl updates progress | entailment |
def play_empty(self):
"""Play blank audio to let Discord know we're still here"""
if self.vclient:
if self.streamer:
self.streamer.volume = 0
self.vclient.play_audio("\n".encode(), encode=False) | Play blank audio to let Discord know we're still here | entailment |
def download_next_song(self, song):
"""Downloads the next song and starts playing it"""
dl_ydl_opts = dict(ydl_opts)
dl_ydl_opts["progress_hooks"] = [self.ytdl_progress_hook]
dl_ydl_opts["outtmpl"] = self.output_format
# Move the songs from the next cache to the current cache
self.move_next_cache()
self.state = 'ready'
self.play_empty()
# Download the file and create the stream
with youtube_dl.YoutubeDL(dl_ydl_opts) as ydl:
try:
ydl.download([song])
except DownloadStreamException:
# This is a livestream, use the appropriate player
future = asyncio.run_coroutine_threadsafe(self.create_stream_player(song, dl_ydl_opts), client.loop)
try:
future.result()
except Exception as e:
logger.exception(e)
self.vafter_ts()
return
except PermissionError:
# File is still in use, it'll get cleared next time
pass
except youtube_dl.utils.DownloadError as e:
self.logger.exception(e)
self.statuslog.error(e)
self.vafter_ts()
return
except Exception as e:
self.logger.exception(e)
self.vafter_ts()
return | Downloads the next song and starts playing it | entailment |
def download_next_song_cache(self):
"""Downloads the next song in the queue to the cache"""
if len(self.queue) == 0:
return
cache_ydl_opts = dict(ydl_opts)
cache_ydl_opts["outtmpl"] = self.output_format_next
with youtube_dl.YoutubeDL(cache_ydl_opts) as ydl:
try:
url = self.queue[0][0]
ydl.download([url])
except:
pass | Downloads the next song in the queue to the cache | entailment |
async def create_ffmpeg_player(self, filepath):
"""Creates a streamer that plays from a file"""
self.current_download_elapsed = 0
self.streamer = self.vclient.create_ffmpeg_player(filepath, after=self.vafter_ts)
self.state = "ready"
await self.setup_streamer()
try:
# Read from the info json
info_filename = "{}.info.json".format(filepath)
with open(info_filename, 'r') as file:
info = json.load(file)
self.nowplayinglog.debug(info["title"])
self.is_live = False
if "duration" in info and info["duration"] is not None:
self.current_duration = info["duration"]
else:
self.current_duration = 0
if "uploader" in info:
self.nowplayingauthorlog.info(info["uploader"])
else:
self.nowplayingauthorlog.info("Unknown")
self.nowplayingsourcelog.info(api_music.parse_source(info))
play_state = "Streaming" if self.is_live else "Playing"
await self.set_topic("{} {}".format(play_state, info["title"]))
self.statuslog.debug(play_state)
except Exception as e:
logger.exception(e) | Creates a streamer that plays from a file | entailment |
async def create_stream_player(self, url, opts=ydl_opts):
"""Creates a streamer that plays from a URL"""
self.current_download_elapsed = 0
self.streamer = await self.vclient.create_ytdl_player(url, ytdl_options=opts, after=self.vafter_ts)
self.state = "ready"
await self.setup_streamer()
self.nowplayinglog.debug(self.streamer.title)
self.nowplayingauthorlog.debug(self.streamer.uploader if self.streamer.uploader is not None else "Unknown")
self.current_duration = 0
self.is_live = True
info = self.streamer.yt.extract_info(url, download=False)
self.nowplayingsourcelog.info(api_music.parse_source(info))
play_state = "Streaming" if self.is_live else "Playing"
await self.set_topic("{} {}".format(play_state, self.streamer.title))
self.statuslog.debug(play_state) | Creates a streamer that plays from a URL | entailment |
async def setup_streamer(self):
"""Sets up basic defaults for the streamer"""
self.streamer.volume = self.volume / 100
self.streamer.start()
self.pause_time = None
self.vclient_starttime = self.vclient.loop.time()
# Cache next song
self.logger.debug("Caching next song")
dl_thread = threading.Thread(target=self.download_next_song_cache)
dl_thread.start() | Sets up basic defaults for the streamer | entailment |
def vafter_ts(self):
"""Function that is called after a song finishes playing"""
logger.debug("Song finishing")
future = asyncio.run_coroutine_threadsafe(self.vafter(), client.loop)
try:
future.result()
except Exception as e:
logger.exception(e) | Function that is called after a song finishes playing | entailment |
async def vafter(self):
"""Function that is called after a song finishes playing"""
self.logger.debug("Finished playing a song")
if self.state != 'ready':
self.logger.debug("Returning because player is in state {}".format(self.state))
return
self.pause_time = None
if self.vclient_task:
loop = asyncio.get_event_loop()
loop.call_soon(self.vclient_task.cancel)
self.vclient_task = None
try:
if self.streamer is None:
await self.stop()
return
if self.streamer.error is None:
await self.vplay()
else:
self.statuslog.error(self.streamer.error)
await self.destroy()
except Exception as e:
logger.exception(e)
try:
await self.destroy()
except Exception as e:
logger.exception(e) | Function that is called after a song finishes playing | entailment |
async def on_message(message):
"""The on_message event handler for this module
Args:
message (discord.Message): Input message
"""
# Simplify message info
server = message.server
author = message.author
channel = message.channel
content = message.content
data = datatools.get_data()
if not data["discord"]["servers"][server.id][_data.modulename]["activated"]:
return
# Only reply to server messages and don't reply to myself
if server is not None and author != channel.server.me:
# Retrieve replies from server data
normal_replies = data["discord"]["servers"][server.id][_data.modulename]["normal"]
tts_replies = data["discord"]["servers"][server.id][_data.modulename]["tts"]
# Check normal replies
for r in normal_replies.keys():
if r in content.lower().replace(' ', ''):
await client.send_typing(channel)
await client.send_message(channel, normal_replies[r])
# Check tts replies
for r in tts_replies.keys():
if r in content.lower().replace(' ', ''):
await client.send_typing(channel)
await client.send_message(channel, tts_replies[r]) | The on_message event handler for this module
Args:
message (discord.Message): Input message | entailment |
def build_yt_api():
"""Build the YouTube API for future use"""
data = datatools.get_data()
if "google_api_key" not in data["discord"]["keys"]:
logger.warning("No API key found with name 'google_api_key'")
logger.info("Please add your Google API key with name 'google_api_key' "
"in data.json to use YouTube features of the music module")
return False
logger.debug("Building YouTube discovery API")
ytdevkey = data["discord"]["keys"]["google_api_key"]
try:
global ytdiscoveryapi
ytdiscoveryapi = googleapiclient.discovery.build("youtube", "v3", developerKey=ytdevkey)
logger.debug("YouTube API build successful")
return True
except Exception as e:
logger.exception(e)
logger.warning("HTTP error connecting to YouTube API, YouTube won't be available")
return False | Build the YouTube API for future use | entailment |
def build_sc_api():
"""Build the SoundCloud API for future use"""
data = datatools.get_data()
if "soundcloud_client_id" not in data["discord"]["keys"]:
logger.warning("No API key found with name 'soundcloud_client_id'")
logger.info("Please add your SoundCloud client id with name 'soundcloud_client_id' "
"in data.json to use Soundcloud features of the music module")
return False
try:
global scclient
scclient = soundcloud.Client(client_id=data["discord"]["keys"]["soundcloud_client_id"])
logger.debug("SoundCloud build successful")
return True
except Exception as e:
logger.exception(e)
return False | Build the SoundCloud API for future use | entailment |
def build_spotify_api():
"""Build the Spotify API for future use"""
data = datatools.get_data()
if "spotify_client_id" not in data["discord"]["keys"]:
logger.warning("No API key found with name 'spotify_client_id'")
logger.info("Please add your Spotify client id with name 'spotify_client_id' "
"in data.json to use Spotify features of the music module")
return False
if "spotify_client_secret" not in data["discord"]["keys"]:
logger.warning("No API key found with name 'spotify_client_secret'")
logger.info("Please add your Spotify client secret with name 'spotify_client_secret' "
"in data.json to use Spotify features of the music module")
return False
try:
global spclient
client_credentials_manager = SpotifyClientCredentials(
data["discord"]["keys"]["spotify_client_id"],
data["discord"]["keys"]["spotify_client_secret"])
spclient = spotipy.Spotify(client_credentials_manager=client_credentials_manager)
logger.debug("Spotify build successful")
return True
except Exception as e:
logger.exception(e)
return False | Build the Spotify API for future use | entailment |
def parse_query(query, ilogger):
"""
Gets either a list of videos from a query, parsing links and search queries
and playlists
Args:
query (str): The YouTube search query
ilogger (logging.logger): The logger to log API calls to
Returns:
queue (list): The items obtained from the YouTube search
"""
# Try parsing this as a link
p = urlparse(query)
if p and p.scheme and p.netloc:
if "youtube" in p.netloc and p.query and ytdiscoveryapi is not None:
query_parts = p.query.split('&')
yturl_parts = {}
for q in query_parts:
s = q.split('=')
if len(s) < 2:
continue
q_name = s[0]
q_val = '='.join(s[1:])
# Add to the query
if q_name not in yturl_parts:
yturl_parts[q_name] = q_val
if "list" in yturl_parts:
ilogger.info("Queued YouTube playlist from link")
return get_queue_from_playlist(yturl_parts["list"])
elif "v" in yturl_parts:
ilogger.info("Queued YouTube video from link")
return [["https://www.youtube.com/watch?v={}".format(yturl_parts["v"]), query]]
elif "soundcloud" in p.netloc:
if scclient is None:
ilogger.error("Could not queue from SoundCloud API, using link")
return [[query, query]]
try:
result = scclient.get('/resolve', url=query)
track_list = []
if isinstance(result, ResourceList):
for r in result.data:
tracks = get_sc_tracks(r)
if tracks is not None:
for t in tracks:
track_list.append(t)
elif isinstance(result, Resource):
tracks = get_sc_tracks(result)
if tracks is not None:
for t in tracks:
track_list.append(t)
if track_list is not None and len(track_list) > 0:
ilogger.info("Queued SoundCloud songs from link")
return track_list
else:
ilogger.error("Could not queue from SoundCloud API")
return [[query, query]]
except Exception as e:
logger.exception(e)
ilogger.error("Could not queue from SoundCloud API, using link")
return [[query, query]]
else:
ilogger.debug("Using url: {}".format(query))
return [[query, query]]
args = query.split(' ')
if len(args) == 0:
ilogger.error("No query given")
return []
if args[0].lower() in ["sp", "spotify"] and spclient is not None:
if spclient is None:
ilogger.error("Host does not support Spotify")
return []
try:
if len(args) > 2 and args[1] in ['album', 'artist', 'song', 'track', 'playlist']:
query_type = args[1].lower()
query_search = ' '.join(args[2:])
else:
query_type = 'track'
query_search = ' '.join(args[1:])
query_type = query_type.replace('song', 'track')
ilogger.info("Queueing Spotify {}: {}".format(query_type, query_search))
spotify_tracks = search_sp_tracks(query_type, query_search)
if spotify_tracks is None or len(spotify_tracks) == 0:
ilogger.error("Could not queue Spotify {}: {}".format(query_type, query_search))
return []
ilogger.info("Queued Spotify {}: {}".format(query_type, query_search))
return spotify_tracks
except Exception as e:
logger.exception(e)
ilogger.error("Error queueing from Spotify")
return []
elif args[0].lower() in ["sc", "soundcloud"]:
if scclient is None:
ilogger.error("Host does not support SoundCloud")
return []
try:
requests = ['song', 'songs', 'track', 'tracks', 'user', 'playlist', 'tagged', 'genre']
if len(args) > 2 and args[1] in requests:
query_type = args[1].lower()
query_search = ' '.join(args[2:])
else:
query_type = 'track'
query_search = ' '.join(args[1:])
query_type = query_type.replace('song', 'track')
ilogger.info("Queueing SoundCloud {}: {}".format(query_type, query_search))
soundcloud_tracks = search_sc_tracks(query_type, query_search)
ilogger.info("Queued SoundCloud {}: {}".format(query_type, query_search))
return soundcloud_tracks
except Exception as e:
logger.exception(e)
ilogger.error("Could not queue from SoundCloud")
return []
elif args[0].lower() in ["yt", "youtube"] and ytdiscoveryapi is not None:
if ytdiscoveryapi is None:
ilogger.error("Host does not support YouTube")
return []
try:
query_search = ' '.join(args[1:])
ilogger.info("Queued Youtube search: {}".format(query_search))
return get_ytvideos(query_search, ilogger)
except Exception as e:
logger.exception(e)
ilogger.error("Could not queue YouTube search")
return []
if ytdiscoveryapi is not None:
ilogger.info("Queued YouTube search: {}".format(query))
return get_ytvideos(query, ilogger)
else:
ilogger.error("Host does not support YouTube".format(query))
return [] | Gets either a list of videos from a query, parsing links and search queries
and playlists
Args:
query (str): The YouTube search query
ilogger (logging.logger): The logger to log API calls to
Returns:
queue (list): The items obtained from the YouTube search | entailment |
def get_ytvideos(query, ilogger):
"""
Gets either a list of videos from a playlist or a single video, using the
first result of a YouTube search
Args:
query (str): The YouTube search query
ilogger (logging.logger): The logger to log API calls to
Returns:
queue (list): The items obtained from the YouTube search
"""
queue = []
# Search YouTube
search_result = ytdiscoveryapi.search().list(
q=query,
part="id,snippet",
maxResults=1,
type="video,playlist"
).execute()
if not search_result["items"]:
return []
# Get video/playlist title
title = search_result["items"][0]["snippet"]["title"]
ilogger.info("Queueing {}".format(title))
# Queue video if video
if search_result["items"][0]["id"]["kind"] == "youtube#video":
# Get ID of video
videoid = search_result["items"][0]["id"]["videoId"]
# Append video to queue
queue.append(["https://www.youtube.com/watch?v={}".format(videoid), title])
# Queue playlist if playlist
elif search_result["items"][0]["id"]["kind"] == "youtube#playlist":
queue = get_queue_from_playlist(search_result["items"][0]["id"]["playlistId"])
return queue | Gets either a list of videos from a playlist or a single video, using the
first result of a YouTube search
Args:
query (str): The YouTube search query
ilogger (logging.logger): The logger to log API calls to
Returns:
queue (list): The items obtained from the YouTube search | entailment |
def duration_to_string(duration):
"""
Converts a duration to a string
Args:
duration (int): The duration in seconds to convert
Returns s (str): The duration as a string
"""
m, s = divmod(duration, 60)
h, m = divmod(m, 60)
return "%d:%02d:%02d" % (h, m, s) | Converts a duration to a string
Args:
duration (int): The duration in seconds to convert
Returns s (str): The duration as a string | entailment |
def parse_source(info):
"""
Parses the source info from an info dict generated by youtube-dl
Args:
info (dict): The info dict to parse
Returns:
source (str): The source of this song
"""
if "extractor_key" in info:
source = info["extractor_key"]
lower_source = source.lower()
for key in SOURCE_TO_NAME:
lower_key = key.lower()
if lower_source == lower_key:
source = SOURCE_TO_NAME[lower_key]
if source != "Generic":
return source
if "url" in info and info["url"] is not None:
p = urlparse(info["url"])
if p and p.netloc:
return p.netloc
return "Unknown" | Parses the source info from an info dict generated by youtube-dl
Args:
info (dict): The info dict to parse
Returns:
source (str): The source of this song | entailment |
def flipcheck(content):
"""Checks a string for anger and soothes said anger
Args:
content (str): The message to be flipchecked
Returns:
putitback (str): The righted table or text
"""
# Prevent tampering with flip
punct = """!"#$%&'*+,-./:;<=>?@[\]^_`{|}~ ━─"""
tamperdict = str.maketrans('', '', punct)
tamperproof = content.translate(tamperdict)
# Unflip
if "(╯°□°)╯︵" in tamperproof:
# For tables
if "┻┻" in tamperproof:
# Calculate table length
length = 0
for letter in content:
if letter == "━":
length += 1.36
elif letter == "─":
length += 1
elif letter == "-":
length += 0.50
# Construct table
putitback = "┬"
for i in range(int(length)):
putitback += "─"
putitback += "┬ ノ( ゜-゜ノ)"
return putitback
# For text
else:
# Create dictionary for flipping text
flipdict = str.maketrans(
'abcdefghijklmnopqrstuvwxyzɐqɔpǝɟbɥıظʞןɯuodbɹsʇnʌʍxʎz😅🙃😞😟😠😡☹🙁😱😨😰😦😧😢😓😥😭',
'ɐqɔpǝɟbɥıظʞןɯuodbɹsʇnʌʍxʎzabcdefghijklmnopqrstuvwxyz😄🙂🙂🙂🙂🙂🙂😀😀🙂😄🙂🙂😄😄😄😁'
)
# Construct flipped text
flipstart = content.index('︵')
flipped = content[flipstart+1:]
flipped = str.lower(flipped).translate(flipdict)
putitback = ''.join(list(reversed(list(flipped))))
putitback += "ノ( ゜-゜ノ)"
return putitback
else:
return False | Checks a string for anger and soothes said anger
Args:
content (str): The message to be flipchecked
Returns:
putitback (str): The righted table or text | entailment |
def get_botcust2():
"""Gets a botcust2, used to identify a speaker with Mitsuku
Returns:
botcust2 (str): The botcust2 identifier
"""
logger.debug("Getting new botcust2")
# Set up http request packages
params = {
'botid': 'f6a012073e345a08',
'amp;skin': 'chat'
}
headers = {
'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8',
'Accept-Encoding': 'gzip, deflate, sdch, br',
'Accept-Language': 'en-US,en;q=0.8',
'Connection': 'keep-alive',
'DNT': '1',
'Host': 'kakko.pandorabots.com',
'Upgrade-Insecure-Requests': '1',
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) '
'AppleWebKit/537.36 (KHTML, like Gecko) '
'Chrome/58.0.3029.110 Safari/537.36'
}
# Get response from http POST request to url
logger.debug("Sending POST request")
response = requests.post(
url,
params=params,
headers=headers
)
logger.debug("POST response {}".format(response))
# Try to extract Mitsuku response from POST response
try:
result = response.headers['set-cookie'][9:25]
logger.debug("Getting botcust2 successful")
except IndexError:
result = False
logger.critical("Getting botcust2 from html failed")
return result | Gets a botcust2, used to identify a speaker with Mitsuku
Returns:
botcust2 (str): The botcust2 identifier | entailment |
def query(botcust2, message):
"""Sends a message to Mitsuku and retrieves the reply
Args:
botcust2 (str): The botcust2 identifier
message (str): The message to send to Mitsuku
Returns:
reply (str): The message Mitsuku sent back
"""
logger.debug("Getting Mitsuku reply")
# Set up http request packages
params = {
'botid': 'f6a012073e345a08',
'amp;skin': 'chat'
}
headers = {
'Accept-Encoding': 'gzip, deflate, br',
'Accept-Language': 'en-US,en;q=0.8',
'Cache-Control': 'max-age=0',
'Connection': 'keep-alive',
'Content-Length': str(len(message) + 34),
'Content-Type': 'application/x-www-form-urlencoded',
'Cookie': 'botcust2=' + botcust2,
'DNT': '1',
'Host': 'kakko.pandorabots.com',
'Origin': 'https://kakko.pandorabots.com',
'Referer': 'https://kakko.pandorabots.com/pandora/talk?botid=f6a012073e345a08&skin=chat',
'Upgrade-Insecure-Requests': '1',
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) '
'AppleWebKit/537.36 (KHTML, like Gecko) '
'Chrome/58.0.3029.110 Safari/537.36'
}
data = {
'botcust2': botcust2,
'message': message
}
# Get response from http POST request to url
logger.debug("Sending POST request")
response = requests.post(
url,
params=params,
headers=headers,
data=data
)
logger.debug("POST response {}".format(response))
# Parse response
parsed = lxml.html.parse(io.StringIO(response.text)).getroot()
try:
result = parsed[1][2][0][2].tail[1:]
logger.debug("Getting botcust2 successful")
except IndexError:
result = False
logger.critical("Getting botcust2 from html failed")
return result | Sends a message to Mitsuku and retrieves the reply
Args:
botcust2 (str): The botcust2 identifier
message (str): The message to send to Mitsuku
Returns:
reply (str): The message Mitsuku sent back | entailment |
async def on_message(message):
"""The on_message event handler for this module
Args:
message (discord.Message): Input message
"""
# Simplify message info
server = message.server
author = message.author
channel = message.channel
content = message.content
data = datatools.get_data()
if not data["discord"]["servers"][server.id][_data.modulename]["activated"]:
return
# Only reply to server messages and don't reply to myself
if server is not None and author != channel.server.me:
# Commands section
prefix = data["discord"]["servers"][server.id]["prefix"]
if content.startswith(prefix):
# Parse message
package = content.split(" ")
command = package[0][len(prefix):]
args = package[1:]
arg = ' '.join(args)
# Lock on to server if not yet locked
if server.id not in _data.cache or _data.cache[server.id].state == 'destroyed':
_data.cache[server.id] = _musicplayer.MusicPlayer(server.id)
# Remove message
if command in ['play', 'playnext', 'playnow', 'playshuffle', 'insert',
'pause', 'resume', 'skip', 'remove',
'rewind', 'restart', 'shuffle', 'volume',
'stop', 'destroy', 'front', 'movehere',
'settopic', 'cleartopic', 'notopic', 'loop']:
try:
await client.delete_message(message)
except discord.errors.NotFound:
logger.warning("Could not delete music player command message - NotFound")
except discord.errors.Forbidden:
logger.warning("Could not delete music player command message - Forbidden")
# Commands
if command == 'play':
await _data.cache[server.id].play(author, channel, arg)
if command == 'playnext':
await _data.cache[server.id].play(author, channel, arg, index=1)
if command == 'playnow':
await _data.cache[server.id].play(author, channel, arg, index=1, stop_current=True)
if command == 'playshuffle':
await _data.cache[server.id].play(author, channel, arg, shuffle=True)
if command == 'insert':
if len(args) >= 2:
index = args[0]
query = ' '.join(args[1:])
await _data.cache[server.id].play(author, channel, query, index=index)
else:
await _data.cache[server.id].play(author, channel, arg)
elif command == 'pause':
await _data.cache[server.id].pause()
elif command == 'resume':
await _data.cache[server.id].resume()
elif command == 'skip':
await _data.cache[server.id].skip(query=arg)
elif command == 'remove':
await _data.cache[server.id].remove(index=arg)
elif command == 'rewind':
await _data.cache[server.id].rewind(query=arg)
elif command == 'restart':
await _data.cache[server.id].rewind(query="0")
elif command == 'shuffle':
await _data.cache[server.id].shuffle()
elif command == 'loop':
await _data.cache[server.id].set_loop(arg)
elif command == 'stop':
await _data.cache[server.id].stop(log_stop=True)
elif command == 'volume':
await _data.cache[server.id].setvolume(arg)
elif command == 'settopic':
await _data.cache[server.id].set_topic_channel(channel)
elif command == 'cleartopic' or command == 'notopic':
await _data.cache[server.id].clear_topic_channel(channel)
elif command == 'nowplaying':
await _data.cache[server.id].nowplaying_info(channel)
elif command == 'destroy':
await _data.cache[server.id].destroy()
elif command == 'front' or command == 'movehere':
await _data.cache[server.id].movehere(channel) | The on_message event handler for this module
Args:
message (discord.Message): Input message | entailment |
def start(token, client_id, loop, on_ready_handler=None):
"""Start the Discord client and log Modis into Discord."""
import discord
import asyncio
# Create client
logger.debug("Creating Discord client")
asyncio.set_event_loop(loop)
client = discord.Client()
from . import _client
_client.client = client
from .. import datatools
if datatools.has_data():
data = datatools.get_data()
else:
# Create a blank data file
data = {"discord": {}}
# Save default server info to data
if "servers" not in data["discord"]:
data["discord"]["servers"] = {}
# Save default key info to data
if "keys" not in data["discord"]:
data["discord"]["keys"] = {}
# Save logger info to data
if "log_level" not in data:
data["log_level"] = "DEBUG"
data["discord"]["token"] = token
data["discord"]["client_id"] = client_id
datatools.write_data(data)
# Import event handlers
logger.debug("Importing event handlers")
event_handlers = _get_event_handlers()
# Create event handler combiner
logger.debug("Compiling event handlers")
def create_event_handler(event_handler_type):
async def func(*args, **kwargs):
for module_event_handler in event_handlers[event_handler_type]:
# Check for errors in the module event
try:
module_event_handler_func = getattr(module_event_handler,
event_handler_type)
await module_event_handler_func(*args, **kwargs)
except Exception as module_exception:
logger.error("An error occured in '{}'".format(module_event_handler))
logger.exception(module_exception)
if on_ready_handler is not None and event_handler_type == "on_ready":
await on_ready_handler()
func.__name__ = event_handler_type
return func
# Register event handlers
logger.debug("Registering event handlers into client")
for event_handler in event_handlers.keys():
client.event(create_event_handler(event_handler))
# Run the client loop
logger.info("Connecting to Discord")
try:
client.loop.run_until_complete(client.login(token))
except Exception as e:
logger.exception(e)
logger.critical("Could not connect to Discord")
else:
logger.debug("Running the bot")
try:
client.loop.run_until_complete(client.connect())
except KeyboardInterrupt:
client.loop.run_until_complete(client.logout())
pending = asyncio.Task.all_tasks(loop=client.loop)
gathered = asyncio.gather(*pending, loop=client.loop)
try:
gathered.cancel()
client.loop.run_until_complete(gathered)
# we want to retrieve any exceptions to make sure that
# they don't nag us about it being un-retrieved.
gathered.exception()
except Exception as e:
logger.exception(e)
except Exception as e:
logger.exception(e)
pending = asyncio.Task.all_tasks(loop=client.loop)
gathered = asyncio.gather(*pending, loop=client.loop)
gathered.exception()
finally:
try:
client.loop.run_until_complete(client.logout())
except Exception as e:
logger.exception(e)
logger.critical("Bot stopped\n")
client.loop.close() | Start the Discord client and log Modis into Discord. | entailment |
def _get_event_handlers():
"""
Gets dictionary of event handlers and the modules that define them
Returns:
event_handlers (dict): Contains "all", "on_ready", "on_message", "on_reaction_add", "on_error"
"""
import os
import importlib
event_handlers = {
"on_ready": [],
"on_resume": [],
"on_error": [],
"on_message": [],
"on_socket_raw_receive": [],
"on_socket_raw_send": [],
"on_message_delete": [],
"on_message_edit": [],
"on_reaction_add": [],
"on_reaction_remove": [],
"on_reaction_clear": [],
"on_channel_delete": [],
"on_channel_create": [],
"on_channel_update": [],
"on_member_join": [],
"on_member_remove": [],
"on_member_update": [],
"on_server_join": [],
"on_server_remove": [],
"on_server_update": [],
"on_server_role_create": [],
"on_server_role_delete": [],
"on_server_role_update": [],
"on_server_emojis_update": [],
"on_server_available": [],
"on_server_unavailable": [],
"on_voice_state_update": [],
"on_member_ban": [],
"on_member_unban": [],
"on_typing": [],
"on_group_join": [],
"on_group_remove": []
}
# Iterate through module folders
database_dir = "{}/modules".format(
os.path.dirname(os.path.realpath(__file__)))
for module_name in os.listdir(database_dir):
module_dir = "{}/{}".format(database_dir, module_name)
# Iterate through files in module
if os.path.isdir(module_dir) and not module_name.startswith("_"):
# Add all defined event handlers in module files
module_event_handlers = os.listdir(module_dir)
for event_handler in event_handlers.keys():
if "{}.py".format(event_handler) in module_event_handlers:
import_name = ".discord_modis.modules.{}.{}".format(
module_name, event_handler)
logger.debug("Found event handler {}".format(import_name[23:]))
try:
event_handlers[event_handler].append(
importlib.import_module(import_name, "modis"))
except Exception as e:
# Log errors in modules
logger.exception(e)
return event_handlers | Gets dictionary of event handlers and the modules that define them
Returns:
event_handlers (dict): Contains "all", "on_ready", "on_message", "on_reaction_add", "on_error" | entailment |
def add_api_key(key, value):
"""
Adds a key to the bot's data
Args:
key: The name of the key to add
value: The value for the key
"""
if key is None or key == "":
logger.error("Key cannot be empty")
if value is None or value == "":
logger.error("Value cannot be empty")
from .. import datatools
data = datatools.get_data()
if "keys" not in data["discord"]:
data["discord"]["keys"] = {}
is_key_new = False
if key not in data["discord"]["keys"]:
is_key_new = True
elif data["discord"]["keys"][key] == value:
logger.info("API key '{}' already has value '{}'".format(key, value))
return
data["discord"]["keys"][key] = value
datatools.write_data(data)
key_text = "added" if is_key_new else "updated"
logger.info("API key '{}' {} with value '{}'".format(key, key_text, value)) | Adds a key to the bot's data
Args:
key: The name of the key to add
value: The value for the key | entailment |
def success(channel, title, datapacks):
"""
Creates an embed UI containing the help message
Args:
channel (discord.Channel): The Discord channel to bind the embed to
title (str): The title of the embed
datapacks (list): The hex value
Returns:
ui (ui_embed.UI): The embed UI object
"""
# Create embed UI object
gui = ui_embed.UI(
channel,
title,
"",
modulename=modulename,
datapacks=datapacks
)
return gui | Creates an embed UI containing the help message
Args:
channel (discord.Channel): The Discord channel to bind the embed to
title (str): The title of the embed
datapacks (list): The hex value
Returns:
ui (ui_embed.UI): The embed UI object | entailment |
def http_exception(channel, title):
"""
Creates an embed UI containing the 'too long' error message
Args:
channel (discord.Channel): The Discord channel to bind the embed to
title (str): The title of the embed
Returns:
ui (ui_embed.UI): The embed UI object
"""
# Create embed UI object
gui = ui_embed.UI(
channel,
"Too much help",
"{} is too helpful! Try trimming some of the help messages.".format(title),
modulename=modulename
)
return gui | Creates an embed UI containing the 'too long' error message
Args:
channel (discord.Channel): The Discord channel to bind the embed to
title (str): The title of the embed
Returns:
ui (ui_embed.UI): The embed UI object | entailment |
def tarbell_configure(command, args):
"""
Tarbell configuration routine.
"""
puts("Configuring Tarbell. Press ctrl-c to bail out!")
# Check if there's settings configured
settings = Settings()
path = settings.path
prompt = True
if len(args):
prompt = False
config = _get_or_create_config(path)
if prompt or "drive" in args:
config.update(_setup_google_spreadsheets(config, path, prompt))
if prompt or "s3" in args:
config.update(_setup_s3(config, path, prompt))
if prompt or "path" in args:
config.update(_setup_tarbell_project_path(config, path, prompt))
if prompt or "templates" in args:
if "project_templates" in config:
override_templates = raw_input("\nFound Base Template config. Would you like to override them? [Default: No, 'none' to skip]")
if override_templates and override_templates != "No" and override_templates != "no" and override_templates != "N" and override_templates != "n":
config.update(_setup_default_templates(config, path, prompt))
else:
puts("\nPreserving Base Template config...")
else:
config.update(_setup_default_templates(config, path, prompt))
settings.config = config
with open(path, 'w') as f:
puts("\nWriting {0}".format(colored.green(path)))
settings.save()
if all:
puts("\n- Done configuring Tarbell. Type `{0}` for help.\n"
.format(colored.green("tarbell")))
return settings | Tarbell configuration routine. | entailment |
def _get_or_create_config(path, prompt=True):
"""
Get or create a Tarbell configuration directory.
"""
dirname = os.path.dirname(path)
filename = os.path.basename(path)
try:
os.makedirs(dirname)
except OSError:
pass
try:
with open(path, 'r+') as f:
if os.path.isfile(path):
puts("{0} already exists, backing up".format(colored.green(path)))
_backup(dirname, filename)
return yaml.load(f)
except IOError:
return {} | Get or create a Tarbell configuration directory. | entailment |
def _setup_google_spreadsheets(settings, path, prompt=True):
"""
Set up a Google spreadsheet.
"""
ret = {}
if prompt:
use = raw_input("\nWould you like to use Google spreadsheets [Y/n]? ")
if use.lower() != "y" and use != "":
return settings
dirname = os.path.dirname(path)
path = os.path.join(dirname, "client_secrets.json")
write_secrets = True
if os.path.isfile(path):
write_secrets_input = raw_input("client_secrets.json already exists. Would you like to overwrite it? [y/N] ")
if not write_secrets_input.lower().startswith('y'):
write_secrets = False
if write_secrets:
puts(("\nLogin in to Google and go to {0} to create an app and generate a "
"\nclient_secrets authentication file. You should create credentials for an `installed app`. See "
"\n{1} for more information."
.format(colored.red("https://console.developers.google.com/project"),
colored.red("http://tarbell.readthedocs.org/en/{0}/install.html#configure-google-spreadsheet-access-optional".format(LONG_VERSION))
)
))
secrets_path = raw_input(("\nWhere is your client secrets file? "
"[~/Downloads/client_secrets.json] "
))
if secrets_path == "":
secrets_path = os.path.join("~", "Downloads/client_secrets.json")
secrets_path = os.path.expanduser(secrets_path)
puts("\nCopying {0} to {1}\n"
.format(colored.green(secrets_path),
colored.green(dirname))
)
_backup(dirname, "client_secrets.json")
try:
shutil.copy(secrets_path, os.path.join(dirname, 'client_secrets.json'))
except shutil.Error as e:
show_error(str(e))
# Now, try and obtain the API for the first time
get_api = raw_input("Would you like to authenticate your client_secrets.json? [Y/n] ")
if get_api == '' or get_api.lower().startswith('y'):
get_drive_api_from_client_secrets(path, reset_creds=True)
default_account = settings.get("google_account", "")
account = raw_input(("What Google account(s) should have access to new spreadsheets? "
"(e.g. somebody@gmail.com, leave blank to specify for each new "
"project, separate multiple addresses with commas) [{0}] "
.format(default_account)
))
if default_account != "" and account == "":
account = default_account
if account != "":
ret = { "google_account" : account }
puts("\n- Done configuring Google spreadsheets.")
return ret | Set up a Google spreadsheet. | entailment |
def _setup_s3(settings, path, prompt=True):
"""
Prompt user to set up Amazon S3.
"""
ret = {'default_s3_buckets': {}, 's3_credentials': settings.get('s3_credentials', {})}
if prompt:
use = raw_input("\nWould you like to set up Amazon S3? [Y/n] ")
if use.lower() != "y" and use != "":
puts("\n- Not configuring Amazon S3.")
return ret
existing_access_key = settings.get('default_s3_access_key_id', None) or \
os.environ.get('AWS_ACCESS_KEY_ID', None)
existing_secret_key = settings.get('default_s3_secret_access_key', None) or \
os.environ.get('AWS_SECRET_ACCESS_KEY', None)
access_key_prompt = "\nPlease enter your default Amazon Access Key ID:"
if existing_access_key:
access_key_prompt += ' [%s] ' % existing_access_key
else:
access_key_prompt += ' (leave blank to skip) '
default_aws_access_key_id = raw_input(access_key_prompt)
if default_aws_access_key_id == '' and existing_access_key:
default_aws_access_key_id = existing_access_key
if default_aws_access_key_id:
secret_key_prompt = "\nPlease enter your default Amazon Secret Access Key:"
if existing_secret_key:
secret_key_prompt += ' [%s] ' % existing_secret_key
else:
secret_key_prompt += ' (leave blank to skip) '
default_aws_secret_access_key = raw_input(secret_key_prompt)
if default_aws_secret_access_key == '' and existing_secret_key:
default_aws_secret_access_key = existing_secret_key
ret.update({
'default_s3_access_key_id': default_aws_access_key_id,
'default_s3_secret_access_key': default_aws_secret_access_key,
})
# If we're all set with AWS creds, we can setup our default
# staging and production buckets
if default_aws_access_key_id and default_aws_secret_access_key:
existing_staging_bucket = None
existing_production_bucket = None
if settings.get('default_s3_buckets'):
existing_staging_bucket = settings['default_s3_buckets'].get('staging', None)
existing_production_bucket = settings['default_s3_buckets'].get('production', None)
staging_prompt = "\nWhat is your default staging bucket?"
if existing_staging_bucket:
staging_prompt += ' [%s] ' % existing_staging_bucket
else:
staging_prompt += ' (e.g. apps.beta.myorg.com, leave blank to skip) '
staging = raw_input(staging_prompt)
if staging == '' and existing_staging_bucket:
staging = existing_staging_bucket
if staging != "":
ret['default_s3_buckets'].update({
'staging': staging,
})
production_prompt = "\nWhat is your default production bucket?"
if existing_production_bucket:
production_prompt += ' [%s] ' % existing_production_bucket
else:
production_prompt += ' (e.g. apps.myorg.com, leave blank to skip) '
production = raw_input(production_prompt)
if production == '' and existing_production_bucket:
production = existing_production_bucket
if production != "":
ret['default_s3_buckets'].update({
'production': production,
})
more_prompt = "\nWould you like to add additional buckets and credentials? [y/N] "
while raw_input(more_prompt).lower() == 'y':
## Ask for a uri
additional_s3_bucket = raw_input(
"\nPlease specify an additional bucket (e.g. "
"additional.bucket.myorg.com/, leave blank to skip adding bucket) ")
if additional_s3_bucket == "":
continue
## Ask for an access key, if it differs from the default
additional_access_key_prompt = "\nPlease specify an AWS Access Key ID for this bucket:"
if default_aws_access_key_id:
additional_access_key_prompt += ' [%s] ' % default_aws_access_key_id
else:
additional_access_key_prompt += ' (leave blank to skip adding bucket) '
additional_aws_access_key_id = raw_input(additional_access_key_prompt)
if additional_aws_access_key_id == "" and default_aws_access_key_id:
additional_aws_access_key_id = default_aws_access_key_id
elif additional_aws_access_key_id == "":
continue
# Ask for a secret key, if it differs from default
additional_secret_key_prompt = "\nPlease specify an AWS Secret Access Key for this bucket:"
if default_aws_secret_access_key:
additional_secret_key_prompt += ' [%s] ' % default_aws_secret_access_key
else:
additional_secret_key_prompt += ' (leave blank to skip adding bucket) '
additional_aws_secret_access_key = raw_input(
additional_secret_key_prompt)
if additional_aws_secret_access_key == "" and default_aws_secret_access_key:
additional_aws_secret_access_key = default_aws_secret_access_key
elif additional_aws_secret_access_key == "":
continue
ret['s3_credentials'][additional_s3_bucket] = {
'access_key_id': additional_aws_access_key_id,
'secret_access_key': additional_aws_secret_access_key,
}
puts("\n- Done configuring Amazon S3.")
return ret | Prompt user to set up Amazon S3. | entailment |
def _setup_tarbell_project_path(settings, path, prompt=True):
"""
Prompt user to set up project path.
"""
default_path = os.path.expanduser(os.path.join("~", "tarbell"))
projects_path = raw_input("\nWhat is your Tarbell projects path? [Default: {0}, 'none' to skip] ".format(default_path))
if projects_path == "":
projects_path = default_path
if projects_path.lower() == 'none':
puts("\n- Not creating projects directory.")
return {}
if os.path.isdir(projects_path):
puts("\nDirectory exists!")
else:
puts("\nDirectory does not exist.")
make = raw_input("\nWould you like to create it? [Y/n] ")
if make.lower() == "y" or not make:
os.makedirs(projects_path)
puts("\nProjects path is {0}".format(projects_path))
puts("\n- Done setting up projects path.")
return {"projects_path": projects_path} | Prompt user to set up project path. | entailment |
def _setup_default_templates(settings, path, prompt=True):
"""
Add some (hardcoded) default templates.
"""
project_templates = [{
"name": "Basic Bootstrap 3 template",
"url": "https://github.com/tarbell-project/tarbell-template",
}, {
"name": "Searchable map template",
"url": "https://github.com/tarbell-project/tarbell-map-template",
}, {
"name": "Tarbell template walkthrough",
"url": "https://github.com/tarbell-project/tarbell-tutorial-template",
}]
for project in project_templates:
puts("+ Adding {0} ({1})".format(project["name"], project["url"]))
puts("\n- Done configuring project templates.")
return {"project_templates": project_templates} | Add some (hardcoded) default templates. | entailment |
def _backup(path, filename):
"""
Backup a file.
"""
target = os.path.join(path, filename)
if os.path.isfile(target):
dt = datetime.now()
new_filename = ".{0}.{1}.{2}".format(
filename, dt.isoformat(), "backup"
)
destination = os.path.join(path, new_filename)
puts("- Backing up {0} to {1}".format(
colored.cyan(target),
colored.cyan(destination)
))
shutil.copy(target, destination) | Backup a file. | entailment |
def slughifi(value, overwrite_char_map={}):
"""
High Fidelity slugify - slughifi.py, v 0.1
Examples :
>>> text = 'C\'est déjà l\'été.'
>>> slughifi(text)
'cest-deja-lete'
>>> slughifi(text, overwrite_char_map={u'\': '-',})
'c-est-deja-l-ete'
>>> slughifi(text, do_slugify=False)
"C'est deja l'ete."
# Normal slugify removes accented characters
>>> slugify(text)
'cest-dj-lt'
"""
# unicodification
if type(value) != text_type:
value = value.decode('utf-8', 'ignore')
# overwrite chararcter mapping
char_map.update(overwrite_char_map)
# try to replace chars
value = re.sub('[^a-zA-Z0-9\\s\\-]{1}', replace_char, value)
value = slugify(value)
return value.encode('ascii', 'ignore').decode('ascii') | High Fidelity slugify - slughifi.py, v 0.1
Examples :
>>> text = 'C\'est déjà l\'été.'
>>> slughifi(text)
'cest-deja-lete'
>>> slughifi(text, overwrite_char_map={u'\': '-',})
'c-est-deja-l-ete'
>>> slughifi(text, do_slugify=False)
"C'est deja l'ete."
# Normal slugify removes accented characters
>>> slugify(text)
'cest-dj-lt' | entailment |
def puts(s='', newline=True, stream=STDOUT):
"""
Wrap puts to avoid getting called twice by Werkzeug reloader.
"""
if not is_werkzeug_process():
try:
return _puts(s, newline, stream)
except UnicodeEncodeError:
return _puts(s.encode(sys.stdout.encoding), newline, stream) | Wrap puts to avoid getting called twice by Werkzeug reloader. | entailment |
def list_get(l, idx, default=None):
"""
Get from a list with an optional default value.
"""
try:
if l[idx]:
return l[idx]
else:
return default
except IndexError:
return default | Get from a list with an optional default value. | entailment |
def split_sentences(s, pad=0):
"""
Split sentences for formatting.
"""
sentences = []
for index, sentence in enumerate(s.split('. ')):
padding = ''
if index > 0:
padding = ' ' * (pad + 1)
if sentence.endswith('.'):
sentence = sentence[:-1]
sentences.append('%s %s.' % (padding, sentence.strip()))
return "\n".join(sentences) | Split sentences for formatting. | entailment |
def ensure_directory(path):
"""
Ensure directory exists for a given file path.
"""
dirname = os.path.dirname(path)
if not os.path.exists(dirname):
os.makedirs(dirname) | Ensure directory exists for a given file path. | entailment |
def show_error(msg):
"""
Displays error message.
"""
sys.stdout.flush()
sys.stderr.write("\n{0!s}: {1}".format(colored.red("Error"), msg + '\n')) | Displays error message. | entailment |
def add_stream(self, policy):
"""
Add a stream to the SRTP session, applying the given `policy`
to the stream.
:param policy: :class:`Policy`
"""
_srtp_assert(lib.srtp_add_stream(self._srtp[0], policy._policy)) | Add a stream to the SRTP session, applying the given `policy`
to the stream.
:param policy: :class:`Policy` | entailment |
def remove_stream(self, ssrc):
"""
Remove the stream with the given `ssrc` from the SRTP session.
:param ssrc: :class:`int`
"""
_srtp_assert(lib.srtp_remove_stream(self._srtp[0], htonl(ssrc))) | Remove the stream with the given `ssrc` from the SRTP session.
:param ssrc: :class:`int` | entailment |
def process_xlsx(content):
"""
Turn Excel file contents into Tarbell worksheet data
"""
data = {}
workbook = xlrd.open_workbook(file_contents=content)
worksheets = [w for w in workbook.sheet_names() if not w.startswith('_')]
for worksheet_name in worksheets:
if worksheet_name.startswith('_'):
continue
worksheet = workbook.sheet_by_name(worksheet_name)
merged_cells = worksheet.merged_cells
if len(merged_cells):
raise MergedCellError(worksheet.name, merged_cells)
worksheet.name = slughifi(worksheet.name)
headers = make_headers(worksheet)
worksheet_data = make_worksheet_data(headers, worksheet)
data[worksheet.name] = worksheet_data
return data | Turn Excel file contents into Tarbell worksheet data | entailment |
def copy_global_values(data):
"""
Copy values worksheet into global namespace.
"""
for k, v in data['values'].items():
if not data.get(k):
data[k] = v
else:
puts("There is both a worksheet and a "
"value named '{0}'. The worksheet data "
"will be preserved.".format(k))
data.pop("values", None)
return data | Copy values worksheet into global namespace. | entailment |
def make_headers(worksheet):
"""
Make headers from worksheet
"""
headers = {}
cell_idx = 0
while cell_idx < worksheet.ncols:
cell_type = worksheet.cell_type(0, cell_idx)
if cell_type == 1:
header = slughifi(worksheet.cell_value(0, cell_idx))
if not header.startswith("_"):
headers[cell_idx] = header
cell_idx += 1
return headers | Make headers from worksheet | entailment |
def make_worksheet_data(headers, worksheet):
"""
Make data from worksheet
"""
data = []
row_idx = 1
while row_idx < worksheet.nrows:
cell_idx = 0
row_dict = {}
while cell_idx < worksheet.ncols:
cell_type = worksheet.cell_type(row_idx, cell_idx)
if cell_type in VALID_CELL_TYPES:
cell_value = worksheet.cell_value(row_idx, cell_idx)
try:
if cell_type == 2 and cell_value.is_integer():
cell_value = int(cell_value)
row_dict[headers[cell_idx]] = cell_value
except KeyError:
try:
column = ascii_uppercase[cell_idx]
except IndexError:
column = cell_idx
puts("There is no header for cell with value '{0}' in column '{1}' of '{2}'" .format(
cell_value, column, worksheet.name
))
cell_idx += 1
data.append(row_dict)
row_idx += 1
# Magic key handling
if 'key' in headers.values():
keyed_data = {}
for row in data:
if 'key' in row.keys():
key = slughifi(row['key'])
if keyed_data.get(key):
puts("There is already a key named '{0}' with value "
"'{1}' in '{2}'. It is being overwritten with "
"value '{3}'.".format(key,
keyed_data.get(key),
worksheet.name,
row))
# Magic values worksheet
if worksheet.name == "values":
value = row.get('value')
if value not in ("", None):
keyed_data[key] = value
else:
keyed_data[key] = row
data = keyed_data
return data | Make data from worksheet | entailment |
def never_cache_preview(self, response):
"""
Ensure preview is never cached
"""
response.cache_control.max_age = 0
response.cache_control.no_cache = True
response.cache_control.must_revalidate = True
response.cache_control.no_store = True
return response | Ensure preview is never cached | entailment |
def call_hook(self, hook, *args, **kwargs):
"""
Calls each registered hook
"""
for function in self.hooks[hook]:
function.__call__(*args, **kwargs) | Calls each registered hook | entailment |
def _get_base(self, path):
"""
Get project blueprint
"""
base = None
# Slightly ugly DRY violation for backwards compatibility with old
# "_base" convention
if os.path.isdir(os.path.join(path, "_blueprint")):
base_dir = os.path.join(path, "_blueprint/")
# Get the blueprint template and register it as a blueprint
if os.path.exists(os.path.join(base_dir, "blueprint.py")):
filename, pathname, description = imp.find_module('blueprint', [base_dir])
base = imp.load_module('blueprint', filename, pathname, description)
self.blueprint_name = "_blueprint"
else:
puts("No _blueprint/blueprint.py file found")
elif os.path.isdir(os.path.join(path, "_base")):
puts("Using old '_base' convention")
base_dir = os.path.join(path, "_base/")
if os.path.exists(os.path.join(base_dir, "base.py")):
filename, pathname, description = imp.find_module('base', [base_dir])
base = imp.load_module('base', filename, pathname, description)
self.blueprint_name = "_base"
else:
puts("No _base/base.py file found")
if base:
base.base_dir = base_dir
if hasattr(base, 'blueprint') and isinstance(base.blueprint, Blueprint):
self.app.register_blueprint(base.blueprint, site=self)
return base | Get project blueprint | entailment |
def load_project(self, path):
"""
Load a Tarbell project
"""
base = self._get_base(path)
filename, pathname, description = imp.find_module('tarbell_config', [path])
project = imp.load_module('project', filename, pathname, description)
try:
self.key = project.SPREADSHEET_KEY
self.client = get_drive_api()
except AttributeError:
self.key = None
self.client = None
try:
project.CREATE_JSON
except AttributeError:
project.CREATE_JSON = False
try:
project.S3_BUCKETS
except AttributeError:
project.S3_BUCKETS = {}
project.EXCLUDES = list(set(EXCLUDES + getattr(project, 'EXCLUDES', []) + getattr(base, 'EXCLUDES', [])))
# merge project template types with defaults
project.TEMPLATE_TYPES = set(getattr(project, 'TEMPLATE_TYPES', [])) | set(TEMPLATE_TYPES)
try:
project.DEFAULT_CONTEXT
except AttributeError:
project.DEFAULT_CONTEXT = {}
project.DEFAULT_CONTEXT.update({
"PROJECT_PATH": self.path,
"ROOT_URL": "127.0.0.1:5000",
"SPREADSHEET_KEY": self.key,
"BUCKETS": project.S3_BUCKETS,
"SITE": self,
})
# Set up template loaders
template_dirs = [path]
if base:
template_dirs.append(base.base_dir)
error_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'error_templates')
template_dirs.append(error_path)
self.app.jinja_loader = TarbellFileSystemLoader(template_dirs)
# load the project blueprint, if it exists
if hasattr(project, 'blueprint') and isinstance(project.blueprint, Blueprint):
self.app.register_blueprint(project.blueprint, site=self)
return project, base | Load a Tarbell project | entailment |
def _resolve_path(self, path):
"""
Resolve static file paths
"""
filepath = None
mimetype = None
for root, dirs, files in self.filter_files(self.path):
# Does it exist in error path?
error_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'error_templates', path)
try:
with open(error_path):
mimetype, encoding = mimetypes.guess_type(error_path)
filepath = error_path
except IOError:
pass
# Does it exist in Tarbell blueprint?
if self.base:
basepath = os.path.join(root, self.blueprint_name, path)
try:
with open(basepath):
mimetype, encoding = mimetypes.guess_type(basepath)
filepath = basepath
except IOError:
pass
# Does it exist under regular path?
fullpath = os.path.join(root, path)
try:
with open(fullpath):
mimetype, encoding = mimetypes.guess_type(fullpath)
filepath = fullpath
except IOError:
pass
return filepath, mimetype | Resolve static file paths | entailment |
def data_json(self, extra_context=None, publish=False):
"""
Serve site context as JSON. Useful for debugging.
"""
if not self.project.CREATE_JSON:
# nothing to see here, but the right mimetype
return jsonify()
if not self.data:
# this sets site.data by spreadsheet or gdoc
self.get_context(publish)
return jsonify(self.data) | Serve site context as JSON. Useful for debugging. | entailment |
def preview(self, path=None, extra_context=None, publish=False):
"""
Serve up a project path
"""
try:
self.call_hook("preview", self)
if path is None:
path = 'index.html'
# Detect files
filepath, mimetype = self._resolve_path(path)
# Serve dynamic
if filepath and mimetype and mimetype in self.project.TEMPLATE_TYPES:
context = self.get_context(publish)
context.update({
"PATH": path,
"PREVIEW_SERVER": not publish,
"TIMESTAMP": int(time.time()),
})
if extra_context:
context.update(extra_context)
rendered = render_template(path, **context)
return Response(rendered, mimetype=mimetype)
# Serve static
if filepath:
dir, filename = os.path.split(filepath)
return send_from_directory(dir, filename)
except Exception as e:
ex_type, ex, tb = sys.exc_info()
try:
# Find template with name of error
cls = e.__class__
ex_type, ex, tb = sys.exc_info()
context = self.project.DEFAULT_CONTEXT
context.update({
'PATH': path,
'traceback': traceback.format_exception(ex_type, ex, tb),
'e': e,
})
if extra_context:
context.update(extra_context)
try:
error_path = '_{0}.{1}.html'.format(cls.__module__, cls.__name__)
rendered = render_template(error_path, **context)
except TemplateNotFound:
# Find template without underscore prefix, @TODO remove in v1.1
error_path = '{0}.{1}.html'.format(cls.__module__, cls.__name__)
rendered = render_template(error_path, **context)
return Response(rendered, mimetype="text/html")
except TemplateNotFound:
# Otherwise raise old error
reraise(ex_type, ex, tb)
# Last ditch effort -- see if path has "index.html" underneath it
if not path.endswith("index.html"):
if not path.endswith("/"):
path = "{0}/".format(path)
path = "{0}{1}".format(path, "index.html")
return self.preview(path)
# It's a 404
if path.endswith('/index.html'):
path = path[:-11]
rendered = render_template("404.html", PATH=path)
return Response(rendered, status=404) | Serve up a project path | entailment |
def get_context(self, publish=False):
"""
Use optional CONTEXT_SOURCE_FILE setting to determine data source.
Return the parsed data.
Can be an http|https url or local file. Supports csv and excel files.
"""
context = self.project.DEFAULT_CONTEXT
try:
file = self.project.CONTEXT_SOURCE_FILE
# CSV
if re.search(r'(csv|CSV)$', file):
context.update(self.get_context_from_csv())
# Excel
if re.search(r'(xlsx|XLSX|xls|XLS)$', file):
context.update(self.get_context_from_xlsx())
except AttributeError:
context.update(self.get_context_from_gdoc())
return context | Use optional CONTEXT_SOURCE_FILE setting to determine data source.
Return the parsed data.
Can be an http|https url or local file. Supports csv and excel files. | entailment |
def get_context_from_xlsx(self):
"""
Get context from an Excel file
"""
if re.search('^(http|https)://', self.project.CONTEXT_SOURCE_FILE):
resp = requests.get(self.project.CONTEXT_SOURCE_FILE)
content = resp.content
else:
try:
with open(self.project.CONTEXT_SOURCE_FILE) as xlsxfile:
content = xlsxfile.read()
except IOError:
filepath = "%s/%s" % (
os.path.abspath(self.path),
self.project.CONTEXT_SOURCE_FILE)
with open(filepath) as xlsxfile:
content = xlsxfile.read()
data = process_xlsx(content)
if 'values' in data:
data = copy_global_values(data)
return data | Get context from an Excel file | entailment |
def get_context_from_csv(self):
"""
Open CONTEXT_SOURCE_FILE, parse and return a context
"""
if re.search('^(http|https)://', self.project.CONTEXT_SOURCE_FILE):
data = requests.get(self.project.CONTEXT_SOURCE_FILE)
reader = csv.reader(
data.iter_lines(), delimiter=',', quotechar='"')
ret = {rows[0]: rows[1] for rows in reader}
else:
try:
with open(self.project.CONTEXT_SOURCE_FILE) as csvfile:
reader = csv.reader(csvfile, delimiter=',', quotechar='"')
ret = {rows[0]: rows[1] for rows in reader}
except IOError:
file = "%s/%s" % (
os.path.abspath(self.path),
self.project.CONTEXT_SOURCE_FILE)
with open(file) as csvfile:
reader = csv.reader(csvfile, delimiter=',', quotechar='"')
ret = {rows[0]: rows[1] for rows in reader}
ret.update({
"CONTEXT_SOURCE_FILE": self.project.CONTEXT_SOURCE_FILE,
})
return ret | Open CONTEXT_SOURCE_FILE, parse and return a context | entailment |
def get_context_from_gdoc(self):
"""
Wrap getting context from Google sheets in a simple caching mechanism.
"""
try:
start = int(time.time())
if not self.data or start > self.expires:
self.data = self._get_context_from_gdoc(self.project.SPREADSHEET_KEY)
end = int(time.time())
ttl = getattr(self.project, 'SPREADSHEET_CACHE_TTL',
SPREADSHEET_CACHE_TTL)
self.expires = end + ttl
return self.data
except AttributeError:
return {} | Wrap getting context from Google sheets in a simple caching mechanism. | entailment |
def _get_context_from_gdoc(self, key):
"""
Create a Jinja2 context from a Google spreadsheet.
"""
try:
content = self.export_xlsx(key)
data = process_xlsx(content)
if 'values' in data:
data = copy_global_values(data)
return data
except BadStatusLine:
# Stale connection, reset API and data
puts("Connection reset, reloading drive API")
self.client = get_drive_api()
return self._get_context_from_gdoc(key) | Create a Jinja2 context from a Google spreadsheet. | entailment |
def export_xlsx(self, key):
"""
Download xlsx version of spreadsheet.
"""
spreadsheet_file = self.client.files().get(fileId=key).execute()
links = spreadsheet_file.get('exportLinks')
downloadurl = links.get('application/vnd.openxmlformats-officedocument.spreadsheetml.sheet')
resp, content = self.client._http.request(downloadurl)
return content | Download xlsx version of spreadsheet. | entailment |
def generate_static_site(self, output_root=None, extra_context=None):
"""
Bake out static site
"""
self.app.config['BUILD_PATH'] = output_root
# use this hook for registering URLs to freeze
self.call_hook("generate", self, output_root, extra_context)
if output_root is not None:
# realpath or this gets generated relative to the tarbell package
self.app.config['FREEZER_DESTINATION'] = os.path.realpath(output_root)
self.freezer.freeze() | Bake out static site | entailment |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.