code
string
signature
string
docstring
string
loss_without_docstring
float64
loss_with_docstring
float64
factor
float64
async def shuffle(self):
    """Shuffle the current queue in place and refresh the queue display."""
    self.logger.debug("shuffle command")
    # Only act once the player is fully initialised
    if self.state != 'ready':
        return
    self.statuslog.debug("Shuffling")
    random.shuffle(self.queue)
    self.update_queue()
    self.statuslog.debug("Shuffled")
5.953297
5.141847
1.157813
async def set_loop(self, loop_value):
    """Update the loop mode; valid values are 'off', 'on', or 'shuffle'."""
    # Map each valid mode to its status message; membership doubles as validation
    mode_messages = {
        'on': "Looping on",
        'off': "Looping off",
        'shuffle': "Looping on and shuffling",
    }
    if loop_value not in mode_messages:
        self.statuslog.error("Loop value must be `off`, `on`, or `shuffle`")
        return
    self.loop_type = loop_value
    self.statuslog.info(mode_messages[loop_value])
2.465473
2.161606
1.140575
async def setvolume(self, value):
    """The volume command

    Args:
        value (str): The value to set the volume to; "+" and "-" step by 10,
            otherwise a percentage between 0 and 200 is expected
    """
    self.logger.debug("volume command")
    # Ignore the command unless the player is fully initialised
    if self.state != 'ready':
        return
    # NOTE(review): uses the module-level 'logger' here but 'self.logger' above — confirm intended
    logger.debug("Volume command received")
    if value == '+':
        if self.volume < 100:
            self.statuslog.debug("Volume up")
            # Round down to the nearest multiple of 10, then step up by 10
            self.volume = (10 * (self.volume // 10)) + 10
            self.volumelog.info(str(self.volume))
            try:
                self.streamer.volume = self.volume / 100
            except AttributeError:
                # No active streamer yet; the stored volume applies on next play
                pass
        else:
            self.statuslog.warning("Already at maximum volume")
    elif value == '-':
        if self.volume > 0:
            self.statuslog.debug("Volume down")
            # Round up to the nearest multiple of 10, then step down by 10
            self.volume = (10 * ((self.volume + 9) // 10)) - 10
            self.volumelog.info(str(self.volume))
            try:
                self.streamer.volume = self.volume / 100
            except AttributeError:
                pass
        else:
            self.statuslog.warning("Already at minimum volume")
    else:
        try:
            value = int(value)
        except ValueError:
            self.statuslog.error("Volume argument must be +, -, or a %")
        else:
            if 0 <= value <= 200:
                self.statuslog.debug("Setting volume")
                self.volume = value
                self.volumelog.info(str(self.volume))
                try:
                    self.streamer.volume = self.volume / 100
                except AttributeError:
                    pass
            else:
                self.statuslog.error("Volume must be between 0 and 200")
    # Persist the new volume to data.json
    self.write_volume()
2.041068
2.006222
1.017369
def write_volume(self):
    """Persist the current volume for this server to data.json."""
    # Read-modify-write the shared data store
    data = datatools.get_data()
    module_entry = data["discord"]["servers"][self.server_id][_data.modulename]
    module_entry["volume"] = self.volume
    datatools.write_data(data)
9.501169
8.200633
1.15859
async def movehere(self, channel):
    """Moves the embed message to a new channel; can also be used to move the musicplayer to the front

    Args:
        channel (discord.Channel): The channel to move to
    """
    self.logger.debug("movehere command")
    # Delete the old message
    await self.embed.delete()
    # Set the channel to this channel
    self.embed.channel = channel
    # Send a new embed to the channel
    await self.embed.send()
    # Re-add the reactions
    await self.add_reactions()
    self.statuslog.info("Moved to front")
5.887609
4.962394
1.186445
async def set_topic_channel(self, channel):
    """Set the topic channel for this server

    Args:
        channel (discord.Channel): The channel to use as the topic channel
    """
    # Persist the chosen channel id so it survives restarts
    data = datatools.get_data()
    data["discord"]["servers"][self.server_id][_data.modulename]["topic_id"] = channel.id
    datatools.write_data(data)
    self.topicchannel = channel
    # Push the current topic to the newly selected channel
    await self.set_topic(self.topic)
    # Confirm the change with an embed in the invoking channel
    await client.send_typing(channel)
    embed = ui_embed.topic_update(channel, self.topicchannel)
    await embed.send()
6.204651
5.732705
1.082325
async def clear_topic_channel(self, channel):
    """Clear the topic channel for this server

    Args:
        channel (discord.Channel): The channel to send the confirmation embed to
    """
    try:
        # Blank out the topic on the old channel before forgetting it
        if self.topicchannel:
            await client.edit_channel(self.topicchannel, topic="")
    except Exception as e:
        logger.exception(e)
    self.topicchannel = None
    logger.debug("Clearing topic channel")
    # Persist the cleared topic channel
    data = datatools.get_data()
    data["discord"]["servers"][self.server_id][_data.modulename]["topic_id"] = ""
    datatools.write_data(data)
    # Confirm the change with an embed
    await client.send_typing(channel)
    embed = ui_embed.topic_update(channel, self.topicchannel)
    await embed.send()
5.420691
5.132444
1.056162
async def vsetup(self, author):
    """Creates the voice client

    Args:
        author (discord.Member): The user that the voice ui will seek
    """
    # Guard against double initialisation and wrong lifecycle state
    if self.vready:
        logger.warning("Attempt to init voice when already initialised")
        return
    if self.state != 'starting':
        logger.error("Attempt to init from wrong state ('{}'), must be 'starting'.".format(self.state))
        return
    self.logger.debug("Setting up voice")
    # Create voice client
    self.vchannel = author.voice.voice_channel
    if self.vchannel:
        self.statuslog.info("Connecting to voice")
        try:
            self.vclient = await client.join_voice_channel(self.vchannel)
        except discord.ClientException as e:
            logger.exception(e)
            self.statuslog.warning("I'm already connected to a voice channel.")
            return
        except discord.opus.OpusNotLoaded as e:
            logger.exception(e)
            logger.error("Could not load Opus. This is an error with your FFmpeg setup.")
            self.statuslog.error("Could not load Opus.")
            return
        except discord.DiscordException as e:
            logger.exception(e)
            self.statuslog.error("I couldn't connect to the voice channel. Check my permissions.")
            return
        except Exception as e:
            self.statuslog.error("Internal error connecting to voice, disconnecting.")
            logger.error("Error connecting to voice {}".format(e))
            return
    else:
        self.statuslog.error("You're not connected to a voice channel.")
        return
    self.vready = True
3.150619
3.103496
1.015184
async def msetup(self, text_channel):
    """Creates the gui

    Args:
        text_channel (discord.Channel): The channel for the embed ui to run in
    """
    # Guard against double initialisation and wrong lifecycle state
    if self.mready:
        logger.warning("Attempt to init music when already initialised")
        return
    if self.state != 'starting':
        logger.error("Attempt to init from wrong state ('{}'), must be 'starting'.".format(self.state))
        return
    self.logger.debug("Setting up gui")
    # Create gui
    self.mchannel = text_channel
    self.new_embed_ui()
    await self.embed.send()
    await self.embed.usend()
    await self.add_reactions()
    self.mready = True
6.712276
5.821869
1.152942
def new_embed_ui(self):
    """Create the embed UI object and save it to self"""
    self.logger.debug("Creating new embed ui object")
    # Initial queue display
    queue_display = []
    for i in range(self.queue_display):
        queue_display.append("{}. ---\n".format(str(i + 1)))
    # Initial datapacks: (field name, initial value, inline flag)
    datapacks = [
        ("Now playing", "---", False),
        ("Author", "---", True),
        ("Source", "---", True),
        ("Time", "```http\n" + _timebar.make_timebar() + "\n```", False),
        ("Queue", "```md\n{}\n```".format(''.join(queue_display)), False),
        ("Songs left in queue", "---", True),
        ("Volume", "{}%".format(self.volume), True),
        ("Status", "```---```", False)
    ]
    # Create embed UI object
    self.embed = ui_embed_tools.UI(
        self.mchannel,
        "",
        "",
        modulename=_data.modulename,
        colour=_data.modulecolor,
        datapacks=datapacks
    )
    # Add handlers to update gui; formatters wrap messages in the markup
    # each embed field expects
    noformatter = logging.Formatter("{message}", style="{")
    timeformatter = logging.Formatter("```http\n{message}\n```", style="{")
    mdformatter = logging.Formatter("```md\n{message}\n```", style="{")
    statusformatter = logging.Formatter("```__{levelname}__\n{message}\n```", style="{")
    volumeformatter = logging.Formatter("{message}%", style="{")
    # One handler per embed field; the integer index selects the datapack above
    nowplayinghandler = EmbedLogHandler(self, self.embed, 0)
    nowplayinghandler.setFormatter(noformatter)
    nowplayingauthorhandler = EmbedLogHandler(self, self.embed, 1)
    nowplayingauthorhandler.setFormatter(noformatter)
    nowplayingsourcehandler = EmbedLogHandler(self, self.embed, 2)
    nowplayingsourcehandler.setFormatter(noformatter)
    timehandler = EmbedLogHandler(self, self.embed, 3)
    timehandler.setFormatter(timeformatter)
    queuehandler = EmbedLogHandler(self, self.embed, 4)
    queuehandler.setFormatter(mdformatter)
    queuelenhandler = EmbedLogHandler(self, self.embed, 5)
    queuelenhandler.setFormatter(noformatter)
    volumehandler = EmbedLogHandler(self, self.embed, 6)
    volumehandler.setFormatter(volumeformatter)
    statushandler = EmbedLogHandler(self, self.embed, 7)
    statushandler.setFormatter(statusformatter)
    # Route the player's loggers into the embed fields
    self.nowplayinglog.addHandler(nowplayinghandler)
    self.nowplayingauthorlog.addHandler(nowplayingauthorhandler)
    self.nowplayingsourcelog.addHandler(nowplayingsourcehandler)
    self.timelog.addHandler(timehandler)
    self.queuelog.addHandler(queuehandler)
    self.queuelenlog.addHandler(queuelenhandler)
    self.volumelog.addHandler(volumehandler)
    self.statuslog.addHandler(statushandler)
2.497576
2.464261
1.01352
async def add_reactions(self):
    """Adds the reactions buttons to the current message"""
    self.statuslog.info("Loading buttons")
    # BUG FIX: the loop variable was named 'e', which the 'except ... as e'
    # clauses shadowed; use a distinct name for the emoji.
    for emoji in ("⏯", "⏮", "⏹", "⏭", "🔀", "🔉", "🔊"):
        try:
            if self.embed is not None:
                await client.add_reaction(self.embed.sent_embed, emoji)
        except discord.DiscordException as e:
            logger.exception(e)
            self.statuslog.error("I couldn't add the buttons. Check my permissions.")
        except Exception as e:
            logger.exception(e)
4.183209
3.883838
1.077081
async def enqueue(self, query, queue_index=None, stop_current=False, shuffle=False):
    """Queues songs based on either a YouTube search or a link

    Args:
        query (str): Either a search term or a link
        queue_index (str): The queue index to enqueue at (None for end)
        stop_current (bool): Whether to stop the current song after the songs are queued
        shuffle (bool): Whether to shuffle the added songs
    """
    # Nothing to do for an empty query
    if query is None or query == "":
        return
    self.statuslog.info("Parsing {}".format(query))
    self.logger.debug("Enqueueing from query")
    indexnum = None
    if queue_index is not None:
        try:
            # Convert the 1-based user index to a 0-based queue index
            indexnum = int(queue_index) - 1
        except (TypeError, ValueError):
            # FIX: the two failure modes were handled by identical duplicated
            # except blocks; merged into one.
            self.statuslog.error("Play index argument must be a number")
            return
    if not self.vready:
        # Voice not ready yet: parse synchronously so the queue is populated
        # before playback starts
        self.parse_query(query, indexnum, stop_current, shuffle)
    else:
        # Voice is live: parse in the background so playback isn't blocked
        parse_thread = threading.Thread(
            target=self.parse_query,
            args=[query, indexnum, stop_current, shuffle])
        parse_thread.start()
3.516958
3.566933
0.98599
def parse_query(self, query, index, stop_current, shuffle):
    """Parses a query and adds it to the queue

    Args:
        query (str): Either a search term or a link
        index (int): The index to enqueue at (None for end)
        stop_current (bool): Whether to stop the current song after the songs are queued
        shuffle (bool): Whether to shuffle the added songs
    """
    # Validate the requested insertion index against the current queue
    if index is not None and len(self.queue) > 0:
        if index < 0 or index >= len(self.queue):
            if len(self.queue) == 1:
                self.statuslog.error("Play index must be 1 (1 song in queue)")
                return
            else:
                self.statuslog.error("Play index must be between 1 and {}".format(len(self.queue)))
                return
    try:
        # Resolve the query into a list of [url, title] entries
        yt_videos = api_music.parse_query(query, self.statuslog)
        if shuffle:
            random.shuffle(yt_videos)
        if len(yt_videos) == 0:
            self.statuslog.error("No results for: {}".format(query))
            return
        if index is None:
            # Append to the end of the queue
            self.queue = self.queue + yt_videos
        else:
            if len(self.queue) > 0:
                # Splice the new songs in at the requested position
                self.queue = self.queue[:index] + yt_videos + self.queue[index:]
            else:
                self.queue = yt_videos
        self.update_queue()
        if stop_current:
            if self.streamer:
                self.streamer.stop()
    except Exception as e:
        logger.exception(e)
2.518652
2.453348
1.026618
def update_queue(self):
    """Refresh the queue display and queue-length fields in the player UI."""
    self.logger.debug("Updating queue display")
    display_lines = []
    for position in range(self.queue_display):
        try:
            title = self.queue[position][1]
            # Truncate long titles so the embed stays tidy
            if len(title) > 40:
                title = title[:37] + "..."
        except IndexError:
            # Fewer songs than display slots: pad with a placeholder
            title = "---"
        display_lines.append("{}. {}\n".format(str(position + 1), title))
    self.queuelog.debug(''.join(display_lines))
    self.queuelenlog.debug(str(len(self.queue)))
3.047887
2.772081
1.099494
async def set_topic(self, topic):
    """Sets the topic for the topic channel

    Args:
        topic (str): The topic text to set
    """
    self.topic = topic
    try:
        # Only push to Discord when a topic channel is configured
        if self.topicchannel:
            await client.edit_channel(self.topicchannel, topic=topic)
    except Exception as e:
        logger.exception(e)
3.990576
3.11085
1.282793
def clear_cache(self):
    """Removes all files from the songcache dir"""
    self.logger.debug("Clearing cache")
    if os.path.isdir(self.songcache_dir):
        for entry in os.listdir(self.songcache_dir):
            entry_path = os.path.join(self.songcache_dir, entry)
            try:
                # Only remove regular files; leave subdirectories untouched
                if os.path.isfile(entry_path):
                    os.unlink(entry_path)
            except PermissionError:
                # File locked (e.g. still being played); skip it
                pass
            except Exception as e:
                logger.exception(e)
    self.logger.debug("Cache cleared")
2.107954
1.804132
1.168403
def move_next_cache(self):
    """Moves files in the 'next' cache dir to the root"""
    if not os.path.isdir(self.songcache_next_dir):
        return
    logger.debug("Moving next cache")
    files = os.listdir(self.songcache_next_dir)
    for f in files:
        try:
            # FIX: build paths with os.path.join (portable, consistent with
            # clear_cache) instead of "{}/{}".format.
            os.rename(os.path.join(self.songcache_next_dir, f),
                      os.path.join(self.songcache_dir, f))
        except PermissionError:
            # File still in use; it will be picked up on a later pass
            pass
        except Exception as e:
            logger.exception(e)
    logger.debug("Next cache moved")
2.549497
2.360905
1.079881
def ytdl_progress_hook(self, d):
    """Called when youtube-dl updates progress

    Args:
        d (dict): youtube-dl progress dict ('status', byte counts, 'eta', ...)
    """
    if d['status'] == 'downloading':
        # Keep the Discord voice connection alive during long downloads
        self.play_empty()
        if "elapsed" in d:
            # Throttle UI updates to roughly every 4 seconds
            if d["elapsed"] > self.current_download_elapsed + 4:
                self.current_download_elapsed = d["elapsed"]
                current_download = 0
                current_download_total = 0
                current_download_eta = 0
                if "total_bytes" in d and d["total_bytes"] > 0:
                    current_download_total = d["total_bytes"]
                elif "total_bytes_estimate" in d and d["total_bytes_estimate"] > 0:
                    current_download_total = d["total_bytes_estimate"]
                if "downloaded_bytes" in d and d["downloaded_bytes"] > 0:
                    current_download = d["downloaded_bytes"]
                if "eta" in d and d["eta"] > 0:
                    current_download_eta = d["eta"]
                if current_download_total > 0:
                    percent = round(100 * (current_download / current_download_total))
                    if percent > 100:
                        percent = 100
                    elif percent < 0:
                        percent = 0
                    seconds = str(round(current_download_eta)) if current_download_eta > 0 else ""
                    # BUG FIX: 'seconds' is a string, so the old check
                    # `seconds != 1` was always true and never produced the
                    # singular "second"; compare the numeric eta instead.
                    eta = " ({} {} remaining)".format(
                        seconds, "second" if round(current_download_eta) == 1 else "seconds")
                    downloading = "Downloading song: {}%{}".format(percent, eta)
                    # Only log when the display text actually changed
                    if self.prev_time != downloading:
                        self.timelog.debug(downloading)
                        self.prev_time = downloading
    if d['status'] == 'error':
        self.statuslog.error("Error downloading song")
    elif d['status'] == 'finished':
        self.statuslog.info("Downloaded song")
        downloading = "Downloading song: {}%".format(100)
        if self.prev_time != downloading:
            self.timelog.debug(downloading)
            self.prev_time = downloading
        if "elapsed" in d:
            download_time = "{} {}".format(
                d["elapsed"] if d["elapsed"] > 0 else "<1",
                "seconds" if d["elapsed"] != 1 else "second")
            self.logger.debug("Downloaded song in {}".format(download_time))
        # Hand the finished file to the async FFmpeg player on the client loop
        future = asyncio.run_coroutine_threadsafe(self.create_ffmpeg_player(d['filename']), client.loop)
        try:
            future.result()
        except Exception as e:
            logger.exception(e)
            return
2.399318
2.378313
1.008832
def play_empty(self):
    """Play blank audio to let Discord know we're still here"""
    if self.vclient:
        # Mute the current streamer so the keep-alive blip is inaudible
        # NOTE(review): volume is not restored here — presumably reset by
        # setup_streamer on the next song; confirm
        if self.streamer:
            self.streamer.volume = 0
        self.vclient.play_audio("\n".encode(), encode=False)
11.196185
8.785538
1.274388
def download_next_song(self, song):
    """Downloads the next song and starts playing it

    Args:
        song (str): The url of the song to download and play
    """
    # Copy the shared youtube-dl options and attach our progress hook
    dl_ydl_opts = dict(ydl_opts)
    dl_ydl_opts["progress_hooks"] = [self.ytdl_progress_hook]
    dl_ydl_opts["outtmpl"] = self.output_format
    # Move the songs from the next cache to the current cache
    self.move_next_cache()
    self.state = 'ready'
    # Keep the voice connection alive while downloading
    self.play_empty()
    # Download the file and create the stream
    with youtube_dl.YoutubeDL(dl_ydl_opts) as ydl:
        try:
            ydl.download([song])
        except DownloadStreamException:
            # This is a livestream, use the appropriate player
            future = asyncio.run_coroutine_threadsafe(self.create_stream_player(song, dl_ydl_opts), client.loop)
            try:
                future.result()
            except Exception as e:
                logger.exception(e)
                self.vafter_ts()
            return
        except PermissionError:
            # File is still in use, it'll get cleared next time
            pass
        except youtube_dl.utils.DownloadError as e:
            self.logger.exception(e)
            self.statuslog.error(e)
            self.vafter_ts()
            return
        except Exception as e:
            self.logger.exception(e)
            self.vafter_ts()
            return
3.924067
3.876109
1.012373
def download_next_song_cache(self):
    """Downloads the next song in the queue to the cache"""
    if len(self.queue) == 0:
        return
    cache_ydl_opts = dict(ydl_opts)
    cache_ydl_opts["outtmpl"] = self.output_format_next
    with youtube_dl.YoutubeDL(cache_ydl_opts) as ydl:
        try:
            url = self.queue[0][0]
            ydl.download([url])
        except Exception as e:
            # FIX: was a bare 'except: pass', which also swallowed
            # KeyboardInterrupt/SystemExit. Caching stays best-effort, but
            # failures are now logged instead of silently discarded.
            logger.exception(e)
2.810804
2.681626
1.048172
async def create_ffmpeg_player(self, filepath):
    """Creates a streamer that plays from a file

    Args:
        filepath (str): The path of the downloaded file to play
    """
    # Reset the download-progress throttle for the next download
    self.current_download_elapsed = 0
    self.streamer = self.vclient.create_ffmpeg_player(filepath, after=self.vafter_ts)
    self.state = "ready"
    await self.setup_streamer()
    try:
        # Read from the info json written by youtube-dl alongside the media file
        info_filename = "{}.info.json".format(filepath)
        with open(info_filename, 'r') as file:
            info = json.load(file)
            self.nowplayinglog.debug(info["title"])
            self.is_live = False
            if "duration" in info and info["duration"] is not None:
                self.current_duration = info["duration"]
            else:
                self.current_duration = 0
            if "uploader" in info:
                self.nowplayingauthorlog.info(info["uploader"])
            else:
                self.nowplayingauthorlog.info("Unknown")
            self.nowplayingsourcelog.info(api_music.parse_source(info))
            play_state = "Streaming" if self.is_live else "Playing"
            await self.set_topic("{} {}".format(play_state, info["title"]))
            self.statuslog.debug(play_state)
    except Exception as e:
        # Metadata is best-effort; playback continues even if the json is missing
        logger.exception(e)
4.197605
4.088032
1.026803
async def create_stream_player(self, url, opts=ydl_opts):
    """Creates a streamer that plays from a URL

    Args:
        url (str): The url to stream from
        opts (dict): The youtube-dl options to use (defaults to the module-level ydl_opts)
    """
    # Reset the download-progress throttle
    self.current_download_elapsed = 0
    self.streamer = await self.vclient.create_ytdl_player(url, ytdl_options=opts, after=self.vafter_ts)
    self.state = "ready"
    await self.setup_streamer()
    self.nowplayinglog.debug(self.streamer.title)
    self.nowplayingauthorlog.debug(self.streamer.uploader if self.streamer.uploader is not None else "Unknown")
    # Livestreams have no fixed duration
    self.current_duration = 0
    self.is_live = True
    # Fetch source metadata without downloading the media
    info = self.streamer.yt.extract_info(url, download=False)
    self.nowplayingsourcelog.info(api_music.parse_source(info))
    play_state = "Streaming" if self.is_live else "Playing"
    await self.set_topic("{} {}".format(play_state, self.streamer.title))
    self.statuslog.debug(play_state)
4.882807
4.916753
0.993096
async def setup_streamer(self):
    """Sets up basic defaults for the streamer"""
    # Streamer volume is 0.0-2.0; self.volume is 0-200 (%)
    self.streamer.volume = self.volume / 100
    self.streamer.start()
    self.pause_time = None
    self.vclient_starttime = self.vclient.loop.time()
    # Cache next song in the background
    self.logger.debug("Caching next song")
    dl_thread = threading.Thread(target=self.download_next_song_cache)
    dl_thread.start()
5.781162
5.622324
1.028251
def vafter_ts(self):
    """Function that is called after a song finishes playing"""
    logger.debug("Song finishing")
    # Thread-safe bridge: schedule the async vafter() on the client's loop
    # (this callback runs on the streamer thread, not the event loop)
    future = asyncio.run_coroutine_threadsafe(self.vafter(), client.loop)
    try:
        future.result()
    except Exception as e:
        logger.exception(e)
5.363763
4.070711
1.317648
async def vafter(self):
    """Function that is called after a song finishes playing"""
    self.logger.debug("Finished playing a song")
    # Ignore the callback during teardown or before setup completed
    if self.state != 'ready':
        self.logger.debug("Returning because player is in state {}".format(self.state))
        return
    self.pause_time = None
    # Cancel any pending voice-client task before moving on
    if self.vclient_task:
        loop = asyncio.get_event_loop()
        loop.call_soon(self.vclient_task.cancel)
        self.vclient_task = None
    try:
        if self.streamer is None:
            await self.stop()
            return
        if self.streamer.error is None:
            # Song ended normally: advance to the next one
            await self.vplay()
        else:
            self.statuslog.error(self.streamer.error)
            await self.destroy()
    except Exception as e:
        logger.exception(e)
        # Best-effort teardown if even the error path failed
        try:
            await self.destroy()
        except Exception as e:
            logger.exception(e)
3.579737
3.300544
1.08459
async def on_message(message):
    """The on_message event handler for this module

    Args:
        message (discord.Message): Input message
    """
    # Simplify message info
    server = message.server
    author = message.author
    channel = message.channel
    content = message.content

    data = datatools.get_data()
    # Module must be activated for this server
    if not data["discord"]["servers"][server.id][_data.modulename]["activated"]:
        return

    # Only reply to server messages and don't reply to myself
    if server is not None and author != channel.server.me:
        # Retrieve replies from server data
        normal_replies = data["discord"]["servers"][server.id][_data.modulename]["normal"]
        tts_replies = data["discord"]["servers"][server.id][_data.modulename]["tts"]

        # FIX: hoisted loop-invariant normalization; the old code recomputed
        # content.lower().replace(' ', '') for every reply key.
        normalized = content.lower().replace(' ', '')

        # Check normal replies
        for r in normal_replies.keys():
            if r in normalized:
                await client.send_typing(channel)
                await client.send_message(channel, normal_replies[r])
        # Check tts replies
        # NOTE(review): tts replies are sent identically to normal ones —
        # presumably send_message should pass tts=True here; confirm intent.
        for r in tts_replies.keys():
            if r in normalized:
                await client.send_typing(channel)
                await client.send_message(channel, tts_replies[r])
2.890134
2.851386
1.013589
def build_yt_api():
    """Build the YouTube API for future use

    Returns:
        success (bool): Whether the YouTube discovery API was built successfully
    """
    data = datatools.get_data()
    if "google_api_key" not in data["discord"]["keys"]:
        logger.warning("No API key found with name 'google_api_key'")
        logger.info("Please add your Google API key with name 'google_api_key' "
                    "in data.json to use YouTube features of the music module")
        return False
    logger.debug("Building YouTube discovery API")
    ytdevkey = data["discord"]["keys"]["google_api_key"]
    try:
        # Stored in a module-level global so other helpers can reuse it
        global ytdiscoveryapi
        ytdiscoveryapi = googleapiclient.discovery.build("youtube", "v3", developerKey=ytdevkey)
        logger.debug("YouTube API build successful")
        return True
    except Exception as e:
        logger.exception(e)
        logger.warning("HTTP error connecting to YouTube API, YouTube won't be available")
        return False
3.972723
3.874193
1.025432
def build_sc_api():
    """Build the SoundCloud API for future use

    Returns:
        success (bool): Whether the SoundCloud client was built successfully
    """
    data = datatools.get_data()
    if "soundcloud_client_id" not in data["discord"]["keys"]:
        logger.warning("No API key found with name 'soundcloud_client_id'")
        logger.info("Please add your SoundCloud client id with name 'soundcloud_client_id' "
                    "in data.json to use Soundcloud features of the music module")
        return False
    try:
        # Stored in a module-level global so other helpers can reuse it
        global scclient
        scclient = soundcloud.Client(client_id=data["discord"]["keys"]["soundcloud_client_id"])
        logger.debug("SoundCloud build successful")
        return True
    except Exception as e:
        logger.exception(e)
        return False
4.241187
3.914825
1.083366
def build_spotify_api():
    """Build the Spotify API for future use

    Returns:
        success (bool): Whether the Spotify client was built successfully
    """
    data = datatools.get_data()
    # Spotify needs both a client id and a client secret
    if "spotify_client_id" not in data["discord"]["keys"]:
        logger.warning("No API key found with name 'spotify_client_id'")
        logger.info("Please add your Spotify client id with name 'spotify_client_id' "
                    "in data.json to use Spotify features of the music module")
        return False
    if "spotify_client_secret" not in data["discord"]["keys"]:
        logger.warning("No API key found with name 'spotify_client_secret'")
        logger.info("Please add your Spotify client secret with name 'spotify_client_secret' "
                    "in data.json to use Spotify features of the music module")
        return False
    try:
        # Stored in a module-level global so other helpers can reuse it
        global spclient
        client_credentials_manager = SpotifyClientCredentials(
            data["discord"]["keys"]["spotify_client_id"],
            data["discord"]["keys"]["spotify_client_secret"])
        spclient = spotipy.Spotify(client_credentials_manager=client_credentials_manager)
        logger.debug("Spotify build successful")
        return True
    except Exception as e:
        logger.exception(e)
        return False
2.39508
2.343126
1.022173
def get_ytvideos(query, ilogger):
    """Gets either a list of videos from a playlist or a single video, using the
    first result of a YouTube search

    Args:
        query (str): The YouTube search query
        ilogger (logging.logger): The logger to log API calls to

    Returns:
        queue (list): The items obtained from the YouTube search
    """
    queue = []
    # Search YouTube
    search_result = ytdiscoveryapi.search().list(
        q=query,
        part="id,snippet",
        maxResults=1,
        type="video,playlist"
    ).execute()
    if not search_result["items"]:
        return []
    # Get video/playlist title
    title = search_result["items"][0]["snippet"]["title"]
    ilogger.info("Queueing {}".format(title))
    # Queue video if video
    if search_result["items"][0]["id"]["kind"] == "youtube#video":
        # Get ID of video
        videoid = search_result["items"][0]["id"]["videoId"]
        # Append video to queue
        queue.append(["https://www.youtube.com/watch?v={}".format(videoid), title])
    # Queue playlist if playlist
    elif search_result["items"][0]["id"]["kind"] == "youtube#playlist":
        queue = get_queue_from_playlist(search_result["items"][0]["id"]["playlistId"])
    return queue
2.500821
2.49988
1.000377
def duration_to_string(duration):
    """Converts a duration to a string

    Args:
        duration (int): The duration in seconds to convert

    Returns:
        s (str): The duration formatted as H:MM:SS
    """
    # Split total seconds into hours, minutes, seconds
    minutes, seconds = divmod(duration, 60)
    hours, minutes = divmod(minutes, 60)
    return "%d:%02d:%02d" % (hours, minutes, seconds)
1.800046
1.907866
0.943487
def parse_source(info):
    """Parses the source info from an info dict generated by youtube-dl

    Args:
        info (dict): The info dict to parse

    Returns:
        source (str): The source of this song
    """
    if "extractor_key" in info:
        source = info["extractor_key"]
        lower_source = source.lower()
        # Map known extractor keys to display names
        # NOTE(review): nesting reconstructed from flattened source — the
        # 'if source != "Generic"' check is assumed to follow the loop; confirm.
        for key in SOURCE_TO_NAME:
            lower_key = key.lower()
            if lower_source == lower_key:
                source = SOURCE_TO_NAME[lower_key]
        if source != "Generic":
            return source
    # Fall back to the host name of the media URL
    if "url" in info and info["url"] is not None:
        p = urlparse(info["url"])
        if p and p.netloc:
            return p.netloc
    return "Unknown"
3.053095
2.768709
1.102714
def flipcheck(content):
    """Checks a string for anger and soothes said anger

    Args:
        content (str): The message to be flipchecked

    Returns:
        putitback (str): The righted table or text, or False if no flip was found
    """
    # Prevent tampering with flip detection by stripping 'punct' characters.
    # BUG FIX: the original read `punct = tamperdict = str.maketrans('', '', punct)`,
    # which makes 'punct' a local name that is read before assignment on the
    # same line -> UnboundLocalError at runtime. Keep the module-level 'punct'
    # untouched and only bind the translation table.
    tamperdict = str.maketrans('', '', punct)
    tamperproof = content.translate(tamperdict)

    # Unflip
    if "(╯°□°)╯︵" in tamperproof:
        # For tables
        if "┻┻" in tamperproof:
            # Calculate table length from the flipped-table characters
            length = 0
            for letter in content:
                if letter == "━":
                    length += 1.36
                elif letter == "─":
                    length += 1
                elif letter == "-":
                    length += 0.50
            # Construct table
            putitback = "┬"
            for i in range(int(length)):
                putitback += "─"
            putitback += "┬ ノ( ゜-゜ノ)"
            return putitback
        # For text
        else:
            # Create dictionary for flipping text
            flipdict = str.maketrans(
                'abcdefghijklmnopqrstuvwxyzɐqɔpǝɟbɥıظʞןɯuodbɹsʇnʌʍxʎz😅🙃😞😟😠😡☹🙁😱😨😰😦😧😢😓😥😭',
                'ɐqɔpǝɟbɥıظʞןɯuodbɹsʇnʌʍxʎzabcdefghijklmnopqrstuvwxyz😄🙂🙂🙂🙂🙂🙂😀😀🙂😄🙂🙂😄😄😄😁'
            )
            # Construct flipped text: take everything after the flip marker,
            # translate it, and reverse it
            flipstart = content.index('︵')
            flipped = content[flipstart+1:]
            flipped = str.lower(flipped).translate(flipdict)
            putitback = ''.join(list(reversed(list(flipped))))
            putitback += "ノ( ゜-゜ノ)"
            return putitback
    else:
        return False
4.304721
4.098611
1.050288
def get_botcust2():
    """Gets a botcust2, used to identify a speaker with Mitsuku

    Returns:
        botcust2 (str): The botcust2 identifier, or False if it could not be extracted
    """
    logger.debug("Getting new botcust2")
    # Set up http request packages
    params = {
        'botid': 'f6a012073e345a08',
        'amp;skin': 'chat'
    }
    headers = {
        'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8',
        'Accept-Encoding': 'gzip, deflate, sdch, br',
        'Accept-Language': 'en-US,en;q=0.8',
        'Connection': 'keep-alive',
        'DNT': '1',
        'Host': 'kakko.pandorabots.com',
        'Upgrade-Insecure-Requests': '1',
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) '
                      'AppleWebKit/537.36 (KHTML, like Gecko) '
                      'Chrome/58.0.3029.110 Safari/537.36'
    }
    # Get response from http POST request to url
    logger.debug("Sending POST request")
    response = requests.post(
        url,
        params=params,
        headers=headers
    )
    logger.debug("POST response {}".format(response))
    # Try to extract Mitsuku response from POST response
    try:
        # NOTE(review): fixed slice of the set-cookie header is brittle —
        # breaks if the cookie layout changes; confirm against live responses
        result = response.headers['set-cookie'][9:25]
        logger.debug("Getting botcust2 successful")
    except IndexError:
        result = False
        logger.critical("Getting botcust2 from html failed")
    return result
2.973293
2.831616
1.050034
def query(botcust2, message):
    """Sends a message to Mitsuku and retrieves the reply

    Args:
        botcust2 (str): The botcust2 identifier
        message (str): The message to send to Mitsuku

    Returns:
        reply (str): The message Mitsuku sent back, or False on parse failure
    """
    logger.debug("Getting Mitsuku reply")
    # Set up http request packages
    params = {
        'botid': 'f6a012073e345a08',
        'amp;skin': 'chat'
    }
    headers = {
        'Accept-Encoding': 'gzip, deflate, br',
        'Accept-Language': 'en-US,en;q=0.8',
        'Cache-Control': 'max-age=0',
        'Connection': 'keep-alive',
        'Content-Length': str(len(message) + 34),
        'Content-Type': 'application/x-www-form-urlencoded',
        'Cookie': 'botcust2=' + botcust2,
        'DNT': '1',
        'Host': 'kakko.pandorabots.com',
        'Origin': 'https://kakko.pandorabots.com',
        'Referer': 'https://kakko.pandorabots.com/pandora/talk?botid=f6a012073e345a08&amp;skin=chat',
        'Upgrade-Insecure-Requests': '1',
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) '
                      'AppleWebKit/537.36 (KHTML, like Gecko) '
                      'Chrome/58.0.3029.110 Safari/537.36'
    }
    data = {
        'botcust2': botcust2,
        'message': message
    }
    # Get response from http POST request to url
    logger.debug("Sending POST request")
    response = requests.post(
        url,
        params=params,
        headers=headers,
        data=data
    )
    logger.debug("POST response {}".format(response))
    # Parse response
    parsed = lxml.html.parse(io.StringIO(response.text)).getroot()
    try:
        # NOTE(review): hard-coded element path into the returned html —
        # brittle; breaks if the page layout changes
        result = parsed[1][2][0][2].tail[1:]
        logger.debug("Getting botcust2 successful")
    except IndexError:
        result = False
        logger.critical("Getting botcust2 from html failed")
    return result
2.589856
2.499697
1.036068
async def on_message(message):
    """The on_message event handler for this module

    Args:
        message (discord.Message): Input message
    """
    # Simplify message info
    server = message.server
    author = message.author
    channel = message.channel
    content = message.content

    data = datatools.get_data()
    # Module must be activated for this server
    if not data["discord"]["servers"][server.id][_data.modulename]["activated"]:
        return

    # Only reply to server messages and don't reply to myself
    if server is not None and author != channel.server.me:
        # Commands section
        prefix = data["discord"]["servers"][server.id]["prefix"]
        if content.startswith(prefix):
            # Parse message
            package = content.split(" ")
            command = package[0][len(prefix):]
            args = package[1:]
            arg = ' '.join(args)

            # Lock on to server if not yet locked
            if server.id not in _data.cache or _data.cache[server.id].state == 'destroyed':
                _data.cache[server.id] = _musicplayer.MusicPlayer(server.id)

            # Remove message (player commands are echoed by the embed UI instead)
            if command in ['play', 'playnext', 'playnow', 'playshuffle', 'insert', 'pause',
                           'resume', 'skip', 'remove', 'rewind', 'restart', 'shuffle', 'volume',
                           'stop', 'destroy', 'front', 'movehere', 'settopic', 'cleartopic',
                           'notopic', 'loop']:
                try:
                    await client.delete_message(message)
                except discord.errors.NotFound:
                    logger.warning("Could not delete music player command message - NotFound")
                except discord.errors.Forbidden:
                    logger.warning("Could not delete music player command message - Forbidden")

            # Commands
            # NOTE(review): the first five checks are plain 'if' (not 'elif'),
            # so the later 'elif' chain attaches to the 'insert' check; this
            # matches the original flattened source — confirm intended.
            if command == 'play':
                await _data.cache[server.id].play(author, channel, arg)
            if command == 'playnext':
                await _data.cache[server.id].play(author, channel, arg, index=1)
            if command == 'playnow':
                await _data.cache[server.id].play(author, channel, arg, index=1, stop_current=True)
            if command == 'playshuffle':
                await _data.cache[server.id].play(author, channel, arg, shuffle=True)
            if command == 'insert':
                if len(args) >= 2:
                    # First arg is the queue index, rest is the query
                    index = args[0]
                    query = ' '.join(args[1:])
                    await _data.cache[server.id].play(author, channel, query, index=index)
                else:
                    await _data.cache[server.id].play(author, channel, arg)
            elif command == 'pause':
                await _data.cache[server.id].pause()
            elif command == 'resume':
                await _data.cache[server.id].resume()
            elif command == 'skip':
                await _data.cache[server.id].skip(query=arg)
            elif command == 'remove':
                await _data.cache[server.id].remove(index=arg)
            elif command == 'rewind':
                await _data.cache[server.id].rewind(query=arg)
            elif command == 'restart':
                await _data.cache[server.id].rewind(query="0")
            elif command == 'shuffle':
                await _data.cache[server.id].shuffle()
            elif command == 'loop':
                await _data.cache[server.id].set_loop(arg)
            elif command == 'stop':
                await _data.cache[server.id].stop(log_stop=True)
            elif command == 'volume':
                await _data.cache[server.id].setvolume(arg)
            elif command == 'settopic':
                await _data.cache[server.id].set_topic_channel(channel)
            elif command == 'cleartopic' or command == 'notopic':
                await _data.cache[server.id].clear_topic_channel(channel)
            elif command == 'nowplaying':
                await _data.cache[server.id].nowplaying_info(channel)
            elif command == 'destroy':
                await _data.cache[server.id].destroy()
            elif command == 'front' or command == 'movehere':
                await _data.cache[server.id].movehere(channel)
2.301347
2.29653
1.002097
import discord import asyncio # Create client logger.debug("Creating Discord client") asyncio.set_event_loop(loop) client = discord.Client() from . import _client _client.client = client from .. import datatools if datatools.has_data(): data = datatools.get_data() else: # Create a blank data file data = {"discord": {}} # Save default server info to data if "servers" not in data["discord"]: data["discord"]["servers"] = {} # Save default key info to data if "keys" not in data["discord"]: data["discord"]["keys"] = {} # Save logger info to data if "log_level" not in data: data["log_level"] = "DEBUG" data["discord"]["token"] = token data["discord"]["client_id"] = client_id datatools.write_data(data) # Import event handlers logger.debug("Importing event handlers") event_handlers = _get_event_handlers() # Create event handler combiner logger.debug("Compiling event handlers") def create_event_handler(event_handler_type): async def func(*args, **kwargs): for module_event_handler in event_handlers[event_handler_type]: # Check for errors in the module event try: module_event_handler_func = getattr(module_event_handler, event_handler_type) await module_event_handler_func(*args, **kwargs) except Exception as module_exception: logger.error("An error occured in '{}'".format(module_event_handler)) logger.exception(module_exception) if on_ready_handler is not None and event_handler_type == "on_ready": await on_ready_handler() func.__name__ = event_handler_type return func # Register event handlers logger.debug("Registering event handlers into client") for event_handler in event_handlers.keys(): client.event(create_event_handler(event_handler)) # Run the client loop logger.info("Connecting to Discord") try: client.loop.run_until_complete(client.login(token)) except Exception as e: logger.exception(e) logger.critical("Could not connect to Discord") else: logger.debug("Running the bot") try: client.loop.run_until_complete(client.connect()) except KeyboardInterrupt: 
client.loop.run_until_complete(client.logout()) pending = asyncio.Task.all_tasks(loop=client.loop) gathered = asyncio.gather(*pending, loop=client.loop) try: gathered.cancel() client.loop.run_until_complete(gathered) # we want to retrieve any exceptions to make sure that # they don't nag us about it being un-retrieved. gathered.exception() except Exception as e: logger.exception(e) except Exception as e: logger.exception(e) pending = asyncio.Task.all_tasks(loop=client.loop) gathered = asyncio.gather(*pending, loop=client.loop) gathered.exception() finally: try: client.loop.run_until_complete(client.logout()) except Exception as e: logger.exception(e) logger.critical("Bot stopped\n") client.loop.close()
def start(token, client_id, loop, on_ready_handler=None)
Start the Discord client and log Modis into Discord.
2.357196
2.334225
1.009841
def _get_event_handlers():
    """Discover event handler implementations provided by each module.

    Scans every non-underscore folder under ``modules/`` for files named
    after a Discord event (e.g. ``on_message.py``) and imports each one.

    Returns:
        dict: event name -> list of imported handler modules
    """
    import os
    import importlib

    event_names = (
        "on_ready", "on_resume", "on_error", "on_message",
        "on_socket_raw_receive", "on_socket_raw_send",
        "on_message_delete", "on_message_edit",
        "on_reaction_add", "on_reaction_remove", "on_reaction_clear",
        "on_channel_delete", "on_channel_create", "on_channel_update",
        "on_member_join", "on_member_remove", "on_member_update",
        "on_server_join", "on_server_remove", "on_server_update",
        "on_server_role_create", "on_server_role_delete",
        "on_server_role_update", "on_server_emojis_update",
        "on_server_available", "on_server_unavailable",
        "on_voice_state_update", "on_member_ban", "on_member_unban",
        "on_typing", "on_group_join", "on_group_remove",
    )
    event_handlers = {name: [] for name in event_names}

    # Iterate through module folders
    database_dir = "{}/modules".format(
        os.path.dirname(os.path.realpath(__file__)))
    for module_name in os.listdir(database_dir):
        module_dir = "{}/{}".format(database_dir, module_name)

        # Underscore-prefixed entries and plain files are not modules
        if not os.path.isdir(module_dir) or module_name.startswith("_"):
            continue

        module_files = os.listdir(module_dir)
        for event_name in event_handlers:
            if "{}.py".format(event_name) not in module_files:
                continue
            import_name = ".discord_modis.modules.{}.{}".format(
                module_name, event_name)
            logger.debug("Found event handler {}".format(import_name[23:]))
            try:
                event_handlers[event_name].append(
                    importlib.import_module(import_name, "modis"))
            except Exception as e:
                # A broken module must not abort discovery of the rest
                logger.exception(e)

    return event_handlers
def _get_event_handlers()
Gets dictionary of event handlers and the modules that define them Returns: event_handlers (dict): Contains "all", "on_ready", "on_message", "on_reaction_add", "on_error"
1.949412
1.923614
1.013411
def add_api_key(key, value):
    """Add or update an API key in the bot's data file.

    Args:
        key (str): The name of the key to add
        value (str): The value for the key
    """
    # Fix: previously these validation failures were logged but execution
    # fell through, writing an empty/None key into the data file.
    if key is None or key == "":
        logger.error("Key cannot be empty")
        return
    if value is None or value == "":
        logger.error("Value cannot be empty")
        return

    from .. import datatools
    data = datatools.get_data()

    if "keys" not in data["discord"]:
        data["discord"]["keys"] = {}

    is_key_new = key not in data["discord"]["keys"]
    if not is_key_new and data["discord"]["keys"][key] == value:
        logger.info("API key '{}' already has value '{}'".format(key, value))
        return

    data["discord"]["keys"][key] = value
    datatools.write_data(data)

    key_text = "added" if is_key_new else "updated"
    logger.info("API key '{}' {} with value '{}'".format(key, key_text, value))
def add_api_key(key, value)
Adds a key to the bot's data Args: key: The name of the key to add value: The value for the key
2.463665
2.477262
0.994511
def success(channel, title, datapacks):
    """Build an embed UI for a successful result.

    Args:
        channel (discord.Channel): The Discord channel to bind the embed to
        title (str): The title of the embed
        datapacks (list): Embed field data

    Returns:
        ui_embed.UI: The embed UI object
    """
    return ui_embed.UI(
        channel,
        title,
        "",
        modulename=modulename,
        datapacks=datapacks,
    )
def success(channel, title, datapacks)
Creates an embed UI containing the help message Args: channel (discord.Channel): The Discord channel to bind the embed to title (str): The title of the embed datapacks (list): The hex value Returns: ui (ui_embed.UI): The embed UI object
10.057536
6.261783
1.606178
def http_exception(channel, title):
    """Build an embed UI for the 'help message too long' error.

    Args:
        channel (discord.Channel): The Discord channel to bind the embed to
        title (str): The title of the embed

    Returns:
        ui_embed.UI: The embed UI object
    """
    description = "{} is too helpful! Try trimming some of the help messages.".format(title)
    return ui_embed.UI(
        channel,
        "Too much help",
        description,
        modulename=modulename,
    )
def http_exception(channel, title)
Creates an embed UI containing the 'too long' error message Args: channel (discord.Channel): The Discord channel to bind the embed to title (str): The title of the embed Returns: ui (ui_embed.UI): The embed UI object
20.448622
10.698138
1.911419
puts("Configuring Tarbell. Press ctrl-c to bail out!") # Check if there's settings configured settings = Settings() path = settings.path prompt = True if len(args): prompt = False config = _get_or_create_config(path) if prompt or "drive" in args: config.update(_setup_google_spreadsheets(config, path, prompt)) if prompt or "s3" in args: config.update(_setup_s3(config, path, prompt)) if prompt or "path" in args: config.update(_setup_tarbell_project_path(config, path, prompt)) if prompt or "templates" in args: if "project_templates" in config: override_templates = raw_input("\nFound Base Template config. Would you like to override them? [Default: No, 'none' to skip]") if override_templates and override_templates != "No" and override_templates != "no" and override_templates != "N" and override_templates != "n": config.update(_setup_default_templates(config, path, prompt)) else: puts("\nPreserving Base Template config...") else: config.update(_setup_default_templates(config, path, prompt)) settings.config = config with open(path, 'w') as f: puts("\nWriting {0}".format(colored.green(path))) settings.save() if all: puts("\n- Done configuring Tarbell. Type `{0}` for help.\n" .format(colored.green("tarbell"))) return settings
def tarbell_configure(command, args)
Tarbell configuration routine.
4.385233
4.449092
0.985647
def _get_or_create_config(path, prompt=True):
    """Load the Tarbell configuration at *path*, creating its directory.

    An existing config file is backed up before being read. Returns the
    parsed YAML config, or an empty dict when no config file exists yet.
    """
    config_dir = os.path.dirname(path)
    config_file = os.path.basename(path)

    try:
        os.makedirs(config_dir)
    except OSError:
        pass  # directory already exists

    try:
        with open(path, 'r+') as f:
            if os.path.isfile(path):
                puts("{0} already exists, backing up".format(colored.green(path)))
                _backup(config_dir, config_file)
            # NOTE(review): yaml.load without an explicit Loader executes
            # arbitrary tags; consider yaml.safe_load for config files.
            return yaml.load(f)
    except IOError:
        # No config file yet; caller starts from scratch
        return {}
def _get_or_create_config(path, prompt=True)
Get or create a Tarbell configuration directory.
3.251756
3.308632
0.98281
ret = {} if prompt: use = raw_input("\nWould you like to use Google spreadsheets [Y/n]? ") if use.lower() != "y" and use != "": return settings dirname = os.path.dirname(path) path = os.path.join(dirname, "client_secrets.json") write_secrets = True if os.path.isfile(path): write_secrets_input = raw_input("client_secrets.json already exists. Would you like to overwrite it? [y/N] ") if not write_secrets_input.lower().startswith('y'): write_secrets = False if write_secrets: puts(("\nLogin in to Google and go to {0} to create an app and generate a " "\nclient_secrets authentication file. You should create credentials for an `installed app`. See " "\n{1} for more information." .format(colored.red("https://console.developers.google.com/project"), colored.red("http://tarbell.readthedocs.org/en/{0}/install.html#configure-google-spreadsheet-access-optional".format(LONG_VERSION)) ) )) secrets_path = raw_input(("\nWhere is your client secrets file? " "[~/Downloads/client_secrets.json] " )) if secrets_path == "": secrets_path = os.path.join("~", "Downloads/client_secrets.json") secrets_path = os.path.expanduser(secrets_path) puts("\nCopying {0} to {1}\n" .format(colored.green(secrets_path), colored.green(dirname)) ) _backup(dirname, "client_secrets.json") try: shutil.copy(secrets_path, os.path.join(dirname, 'client_secrets.json')) except shutil.Error as e: show_error(str(e)) # Now, try and obtain the API for the first time get_api = raw_input("Would you like to authenticate your client_secrets.json? [Y/n] ") if get_api == '' or get_api.lower().startswith('y'): get_drive_api_from_client_secrets(path, reset_creds=True) default_account = settings.get("google_account", "") account = raw_input(("What Google account(s) should have access to new spreadsheets? " "(e.g. 
somebody@gmail.com, leave blank to specify for each new " "project, separate multiple addresses with commas) [{0}] " .format(default_account) )) if default_account != "" and account == "": account = default_account if account != "": ret = { "google_account" : account } puts("\n- Done configuring Google spreadsheets.") return ret
def _setup_google_spreadsheets(settings, path, prompt=True)
Set up a Google spreadsheet.
4.190787
4.153405
1.009
def _setup_tarbell_project_path(settings, path, prompt=True):
    """Prompt the user for (and optionally create) the Tarbell projects directory."""
    default_path = os.path.expanduser(os.path.join("~", "tarbell"))
    projects_path = raw_input("\nWhat is your Tarbell projects path? [Default: {0}, 'none' to skip] ".format(default_path))

    if projects_path == "":
        projects_path = default_path
    if projects_path.lower() == 'none':
        puts("\n- Not creating projects directory.")
        return {}

    if os.path.isdir(projects_path):
        puts("\nDirectory exists!")
    else:
        puts("\nDirectory does not exist.")
        make = raw_input("\nWould you like to create it? [Y/n] ")
        if not make or make.lower() == "y":
            os.makedirs(projects_path)

    puts("\nProjects path is {0}".format(projects_path))
    puts("\n- Done setting up projects path.")
    return {"projects_path": projects_path}
def _setup_tarbell_project_path(settings, path, prompt=True)
Prompt user to set up project path.
3.132306
3.016257
1.038474
def _setup_default_templates(settings, path, prompt=True):
    """Register the built-in project templates and return them as config."""
    project_templates = [
        {"name": "Basic Bootstrap 3 template",
         "url": "https://github.com/tarbell-project/tarbell-template"},
        {"name": "Searchable map template",
         "url": "https://github.com/tarbell-project/tarbell-map-template"},
        {"name": "Tarbell template walkthrough",
         "url": "https://github.com/tarbell-project/tarbell-tutorial-template"},
    ]
    for template in project_templates:
        puts("+ Adding {0} ({1})".format(template["name"], template["url"]))
    puts("\n- Done configuring project templates.")
    return {"project_templates": project_templates}
def _setup_default_templates(settings, path, prompt=True)
Add some (hardcoded) default templates.
4.11486
3.868935
1.063564
def _backup(path, filename):
    """Copy ``path/filename`` to a timestamped hidden backup in the same directory.

    Does nothing when the file does not exist.
    """
    target = os.path.join(path, filename)
    if not os.path.isfile(target):
        return

    stamp = datetime.now().isoformat()
    destination = os.path.join(path, ".{0}.{1}.{2}".format(filename, stamp, "backup"))
    puts("- Backing up {0} to {1}".format(
        colored.cyan(target),
        colored.cyan(destination)
    ))
    shutil.copy(target, destination)
def _backup(path, filename)
Backup a file.
3.188492
3.161486
1.008542
def slughifi(value, overwrite_char_map=None):
    """High-fidelity slugify: transliterate special characters, then slugify.

    Args:
        value: text (or UTF-8 bytes) to slugify.
        overwrite_char_map (dict): optional character-mapping overrides.

    Returns:
        str: ASCII-only slug.
    """
    # unicodification: ensure we work on text, not bytes
    if type(value) != text_type:
        value = value.decode('utf-8', 'ignore')

    # Fix: mutable default argument ``overwrite_char_map={}`` replaced with
    # None. NOTE(review): applying overrides still mutates the module-level
    # char_map that replace_char reads, so overrides leak into later calls
    # — TODO confirm whether that is intended.
    if overwrite_char_map:
        char_map.update(overwrite_char_map)

    # Replace each char that is not alphanumeric/whitespace/hyphen
    value = re.sub('[^a-zA-Z0-9\\s\\-]{1}', replace_char, value)
    value = slugify(value)

    return value.encode('ascii', 'ignore').decode('ascii')
def slughifi(value, overwrite_char_map={})
High Fidelity slugify - slughifi.py, v 0.1 Examples : >>> text = 'C\'est déjà l\'été.' >>> slughifi(text) 'cest-deja-lete' >>> slughifi(text, overwrite_char_map={u'\': '-',}) 'c-est-deja-l-ete' >>> slughifi(text, do_slugify=False) "C'est deja l'ete." # Normal slugify removes accented characters >>> slugify(text) 'cest-dj-lt'
4.196535
4.150454
1.011103
def puts(s='', newline=True, stream=STDOUT):
    """Print via clint's puts, staying silent inside the Werkzeug reloader process.

    Falls back to an encoded write when stdout cannot represent ``s``.
    """
    if is_werkzeug_process():
        return  # avoid doubled output under the Werkzeug reloader
    try:
        return _puts(s, newline, stream)
    except UnicodeEncodeError:
        return _puts(s.encode(sys.stdout.encoding), newline, stream)
def puts(s='', newline=True, stream=STDOUT)
Wrap puts to avoid getting called twice by Werkzeug reloader.
4.139122
3.244381
1.275782
def list_get(l, idx, default=None):
    """Return ``l[idx]`` when present and truthy, else ``default``.

    Note: falsy elements (0, '', None) also yield ``default``.
    """
    try:
        value = l[idx]
    except IndexError:
        return default
    return value if value else default
def list_get(l, idx, default=None)
Get from a list with an optional default value.
2.678545
2.669108
1.003536
def split_sentences(s, pad=0):
    """Reformat *s* one sentence per line, padding continuation lines by *pad*."""
    lines = []
    for i, sentence in enumerate(s.split('. ')):
        indent = ' ' * (pad + 1) if i > 0 else ''
        if sentence.endswith('.'):
            sentence = sentence[:-1]
        lines.append('%s %s.' % (indent, sentence.strip()))
    return "\n".join(lines)
def split_sentences(s, pad=0)
Split sentences for formatting.
2.919621
2.766328
1.055414
def ensure_directory(path):
    """Ensure the parent directory for file path *path* exists.

    Fixes: a bare filename (empty dirname) previously raised OSError from
    ``os.makedirs('')``, and the exists-then-create pattern was racy.
    """
    dirname = os.path.dirname(path)
    if not dirname:
        return  # bare filename: the current directory always exists
    try:
        os.makedirs(dirname)
    except OSError:
        # Already exists (possibly created concurrently); re-raise real errors
        if not os.path.isdir(dirname):
            raise
def ensure_directory(path)
Ensure directory exists for a given file path.
1.911357
2.002734
0.954374
def show_error(msg):
    """Write an error message to stderr, flushing stdout first to keep output ordered."""
    sys.stdout.flush()
    label = colored.red("Error")
    sys.stderr.write("\n{0!s}: {1}".format(label, msg + '\n'))
def show_error(msg)
Displays error message.
6.797616
7.198706
0.944283
# Register the stream described by `policy` with the underlying libsrtp
# session; _srtp_assert raises when libsrtp reports an error code.
_srtp_assert(lib.srtp_add_stream(self._srtp[0], policy._policy))
def add_stream(self, policy)
Add a stream to the SRTP session, applying the given `policy` to the stream. :param policy: :class:`Policy`
14.794142
13.756671
1.075416
# Detach the stream with the given SSRC; the SSRC is converted to network
# byte order (htonl) before being handed to libsrtp. _srtp_assert raises
# when libsrtp reports an error code.
_srtp_assert(lib.srtp_remove_stream(self._srtp[0], htonl(ssrc)))
def remove_stream(self, ssrc)
Remove the stream with the given `ssrc` from the SRTP session. :param ssrc: :class:`int`
11.416483
11.375182
1.003631
def process_xlsx(content):
    """Turn Excel file contents into Tarbell worksheet data.

    Args:
        content (bytes): raw .xlsx file contents.

    Returns:
        dict: slugified worksheet name -> worksheet data.

    Raises:
        MergedCellError: if a worksheet contains merged cells.
    """
    data = {}
    workbook = xlrd.open_workbook(file_contents=content)

    # Worksheets prefixed with '_' are private and skipped.
    # (A redundant second startswith('_') check inside the loop was removed;
    # the comprehension already filters those names.)
    worksheets = [w for w in workbook.sheet_names() if not w.startswith('_')]
    for worksheet_name in worksheets:
        worksheet = workbook.sheet_by_name(worksheet_name)

        # Merged cells would silently corrupt row parsing, so refuse them
        merged_cells = worksheet.merged_cells
        if len(merged_cells):
            raise MergedCellError(worksheet.name, merged_cells)

        worksheet.name = slughifi(worksheet.name)
        headers = make_headers(worksheet)
        data[worksheet.name] = make_worksheet_data(headers, worksheet)

    return data
def process_xlsx(content)
Turn Excel file contents into Tarbell worksheet data
2.726949
2.599309
1.049105
def copy_global_values(data):
    """Promote entries of the 'values' worksheet into the top-level namespace.

    On a name collision the worksheet data wins and a warning is printed.
    Mutates and returns *data*.
    """
    for key, value in data['values'].items():
        if data.get(key):
            puts("There is both a worksheet and a "
                 "value named '{0}'. The worksheet data "
                 "will be preserved.".format(key))
        else:
            data[key] = value
    data.pop("values", None)
    return data
def copy_global_values(data)
Copy values worksheet into global namespace.
5.787867
4.788786
1.208629
def make_headers(worksheet):
    """Map column index -> slugified header from the worksheet's first row.

    Only text cells (xlrd cell type 1) become headers; headers slugifying
    to a leading underscore are treated as private and skipped.
    """
    headers = {}
    for col in range(worksheet.ncols):
        if worksheet.cell_type(0, col) != 1:
            continue
        header = slughifi(worksheet.cell_value(0, col))
        if not header.startswith("_"):
            headers[col] = header
    return headers
def make_headers(worksheet)
Make headers from worksheet
2.851074
2.778429
1.026146
data = [] row_idx = 1 while row_idx < worksheet.nrows: cell_idx = 0 row_dict = {} while cell_idx < worksheet.ncols: cell_type = worksheet.cell_type(row_idx, cell_idx) if cell_type in VALID_CELL_TYPES: cell_value = worksheet.cell_value(row_idx, cell_idx) try: if cell_type == 2 and cell_value.is_integer(): cell_value = int(cell_value) row_dict[headers[cell_idx]] = cell_value except KeyError: try: column = ascii_uppercase[cell_idx] except IndexError: column = cell_idx puts("There is no header for cell with value '{0}' in column '{1}' of '{2}'" .format( cell_value, column, worksheet.name )) cell_idx += 1 data.append(row_dict) row_idx += 1 # Magic key handling if 'key' in headers.values(): keyed_data = {} for row in data: if 'key' in row.keys(): key = slughifi(row['key']) if keyed_data.get(key): puts("There is already a key named '{0}' with value " "'{1}' in '{2}'. It is being overwritten with " "value '{3}'.".format(key, keyed_data.get(key), worksheet.name, row)) # Magic values worksheet if worksheet.name == "values": value = row.get('value') if value not in ("", None): keyed_data[key] = value else: keyed_data[key] = row data = keyed_data return data
def make_worksheet_data(headers, worksheet)
Make data from worksheet
2.841851
2.801182
1.014518
def never_cache_preview(self, response):
    """Mark *response* uncacheable so the preview server always re-renders."""
    cache = response.cache_control
    cache.max_age = 0
    cache.no_cache = True
    cache.must_revalidate = True
    cache.no_store = True
    return response
def never_cache_preview(self, response)
Ensure preview is never cached
1.772355
1.617481
1.09575
def call_hook(self, hook, *args, **kwargs):
    """Invoke every function registered for *hook* with the given arguments."""
    for registered in self.hooks[hook]:
        registered(*args, **kwargs)
def call_hook(self, hook, *args, **kwargs)
Calls each registered hook
5.587897
5.522636
1.011817
base = None # Slightly ugly DRY violation for backwards compatibility with old # "_base" convention if os.path.isdir(os.path.join(path, "_blueprint")): base_dir = os.path.join(path, "_blueprint/") # Get the blueprint template and register it as a blueprint if os.path.exists(os.path.join(base_dir, "blueprint.py")): filename, pathname, description = imp.find_module('blueprint', [base_dir]) base = imp.load_module('blueprint', filename, pathname, description) self.blueprint_name = "_blueprint" else: puts("No _blueprint/blueprint.py file found") elif os.path.isdir(os.path.join(path, "_base")): puts("Using old '_base' convention") base_dir = os.path.join(path, "_base/") if os.path.exists(os.path.join(base_dir, "base.py")): filename, pathname, description = imp.find_module('base', [base_dir]) base = imp.load_module('base', filename, pathname, description) self.blueprint_name = "_base" else: puts("No _base/base.py file found") if base: base.base_dir = base_dir if hasattr(base, 'blueprint') and isinstance(base.blueprint, Blueprint): self.app.register_blueprint(base.blueprint, site=self) return base
def _get_base(self, path)
Get project blueprint
2.571948
2.523041
1.019384
base = self._get_base(path) filename, pathname, description = imp.find_module('tarbell_config', [path]) project = imp.load_module('project', filename, pathname, description) try: self.key = project.SPREADSHEET_KEY self.client = get_drive_api() except AttributeError: self.key = None self.client = None try: project.CREATE_JSON except AttributeError: project.CREATE_JSON = False try: project.S3_BUCKETS except AttributeError: project.S3_BUCKETS = {} project.EXCLUDES = list(set(EXCLUDES + getattr(project, 'EXCLUDES', []) + getattr(base, 'EXCLUDES', []))) # merge project template types with defaults project.TEMPLATE_TYPES = set(getattr(project, 'TEMPLATE_TYPES', [])) | set(TEMPLATE_TYPES) try: project.DEFAULT_CONTEXT except AttributeError: project.DEFAULT_CONTEXT = {} project.DEFAULT_CONTEXT.update({ "PROJECT_PATH": self.path, "ROOT_URL": "127.0.0.1:5000", "SPREADSHEET_KEY": self.key, "BUCKETS": project.S3_BUCKETS, "SITE": self, }) # Set up template loaders template_dirs = [path] if base: template_dirs.append(base.base_dir) error_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'error_templates') template_dirs.append(error_path) self.app.jinja_loader = TarbellFileSystemLoader(template_dirs) # load the project blueprint, if it exists if hasattr(project, 'blueprint') and isinstance(project.blueprint, Blueprint): self.app.register_blueprint(project.blueprint, site=self) return project, base
def load_project(self, path)
Load a Tarbell project
3.178164
3.106509
1.023066
filepath = None mimetype = None for root, dirs, files in self.filter_files(self.path): # Does it exist in error path? error_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'error_templates', path) try: with open(error_path): mimetype, encoding = mimetypes.guess_type(error_path) filepath = error_path except IOError: pass # Does it exist in Tarbell blueprint? if self.base: basepath = os.path.join(root, self.blueprint_name, path) try: with open(basepath): mimetype, encoding = mimetypes.guess_type(basepath) filepath = basepath except IOError: pass # Does it exist under regular path? fullpath = os.path.join(root, path) try: with open(fullpath): mimetype, encoding = mimetypes.guess_type(fullpath) filepath = fullpath except IOError: pass return filepath, mimetype
def _resolve_path(self, path)
Resolve static file paths
2.692037
2.63463
1.02179
def data_json(self, extra_context=None, publish=False):
    """Serve the site context as JSON. Useful for debugging."""
    if not self.project.CREATE_JSON:
        # Feature disabled: empty body, but still the right mimetype
        return jsonify()
    if not self.data:
        # get_context populates self.data as a side effect for gdoc sources
        # NOTE(review): csv/xlsx context sources do not appear to set
        # self.data — confirm against get_context implementations.
        self.get_context(publish)
    return jsonify(self.data)
def data_json(self, extra_context=None, publish=False)
Serve site context as JSON. Useful for debugging.
20.696764
16.351294
1.265757
try: self.call_hook("preview", self) if path is None: path = 'index.html' # Detect files filepath, mimetype = self._resolve_path(path) # Serve dynamic if filepath and mimetype and mimetype in self.project.TEMPLATE_TYPES: context = self.get_context(publish) context.update({ "PATH": path, "PREVIEW_SERVER": not publish, "TIMESTAMP": int(time.time()), }) if extra_context: context.update(extra_context) rendered = render_template(path, **context) return Response(rendered, mimetype=mimetype) # Serve static if filepath: dir, filename = os.path.split(filepath) return send_from_directory(dir, filename) except Exception as e: ex_type, ex, tb = sys.exc_info() try: # Find template with name of error cls = e.__class__ ex_type, ex, tb = sys.exc_info() context = self.project.DEFAULT_CONTEXT context.update({ 'PATH': path, 'traceback': traceback.format_exception(ex_type, ex, tb), 'e': e, }) if extra_context: context.update(extra_context) try: error_path = '_{0}.{1}.html'.format(cls.__module__, cls.__name__) rendered = render_template(error_path, **context) except TemplateNotFound: # Find template without underscore prefix, @TODO remove in v1.1 error_path = '{0}.{1}.html'.format(cls.__module__, cls.__name__) rendered = render_template(error_path, **context) return Response(rendered, mimetype="text/html") except TemplateNotFound: # Otherwise raise old error reraise(ex_type, ex, tb) # Last ditch effort -- see if path has "index.html" underneath it if not path.endswith("index.html"): if not path.endswith("/"): path = "{0}/".format(path) path = "{0}{1}".format(path, "index.html") return self.preview(path) # It's a 404 if path.endswith('/index.html'): path = path[:-11] rendered = render_template("404.html", PATH=path) return Response(rendered, status=404)
def preview(self, path=None, extra_context=None, publish=False)
Serve up a project path
2.910424
2.904639
1.001992
def get_context(self, publish=False):
    """Build the template context from the configured data source.

    Uses CONTEXT_SOURCE_FILE (csv or excel; local path or http[s] URL)
    when set; otherwise falls back to the Google spreadsheet.
    """
    context = self.project.DEFAULT_CONTEXT
    try:
        source = self.project.CONTEXT_SOURCE_FILE
        # CSV source
        if re.search(r'(csv|CSV)$', source):
            context.update(self.get_context_from_csv())
        # Excel source
        if re.search(r'(xlsx|XLSX|xls|XLS)$', source):
            context.update(self.get_context_from_xlsx())
    except AttributeError:
        # No CONTEXT_SOURCE_FILE configured; use Google sheets
        context.update(self.get_context_from_gdoc())
    return context
def get_context(self, publish=False)
Use optional CONTEXT_SOURCE_FILE setting to determine data source. Return the parsed data. Can be an http|https url or local file. Supports csv and excel files.
3.720543
2.973658
1.251167
def get_context_from_xlsx(self):
    """Load context data from the Excel file named by CONTEXT_SOURCE_FILE."""
    source = self.project.CONTEXT_SOURCE_FILE
    if re.search('^(http|https)://', source):
        content = requests.get(source).content
    else:
        try:
            with open(source) as xlsxfile:
                content = xlsxfile.read()
        except IOError:
            # Fall back to a path relative to the project directory
            filepath = "%s/%s" % (os.path.abspath(self.path), source)
            with open(filepath) as xlsxfile:
                content = xlsxfile.read()

    data = process_xlsx(content)
    if 'values' in data:
        data = copy_global_values(data)
    return data
def get_context_from_xlsx(self)
Get context from an Excel file
2.964094
2.926321
1.012908
def get_context_from_csv(self):
    """Open CONTEXT_SOURCE_FILE as two-column CSV and return a key/value context."""
    def _to_dict(rows):
        # first column is the key, second the value
        return {row[0]: row[1] for row in rows}

    source = self.project.CONTEXT_SOURCE_FILE
    if re.search('^(http|https)://', source):
        data = requests.get(source)
        ret = _to_dict(csv.reader(data.iter_lines(), delimiter=',', quotechar='"'))
    else:
        try:
            with open(source) as csvfile:
                ret = _to_dict(csv.reader(csvfile, delimiter=',', quotechar='"'))
        except IOError:
            # Fall back to a path relative to the project directory
            fallback = "%s/%s" % (os.path.abspath(self.path), source)
            with open(fallback) as csvfile:
                ret = _to_dict(csv.reader(csvfile, delimiter=',', quotechar='"'))

    ret.update({
        "CONTEXT_SOURCE_FILE": source,
    })
    return ret
def get_context_from_csv(self)
Open CONTEXT_SOURCE_FILE, parse and return a context
2.124343
1.978838
1.073531
def get_context_from_gdoc(self):
    """Cached wrapper around fetching context from Google sheets.

    Refetches only after the configured TTL has elapsed; returns an empty
    dict when no SPREADSHEET_KEY is configured (AttributeError path).
    """
    try:
        now = int(time.time())
        if not self.data or now > self.expires:
            self.data = self._get_context_from_gdoc(self.project.SPREADSHEET_KEY)
            fetched_at = int(time.time())
            ttl = getattr(self.project, 'SPREADSHEET_CACHE_TTL',
                          SPREADSHEET_CACHE_TTL)
            self.expires = fetched_at + ttl
        return self.data
    except AttributeError:
        return {}
def get_context_from_gdoc(self)
Wrap getting context from Google sheets in a simple caching mechanism.
3.725437
3.382489
1.101389
def _get_context_from_gdoc(self, key):
    """Create a Jinja2 context dict from the Google spreadsheet *key*."""
    try:
        content = self.export_xlsx(key)
        data = process_xlsx(content)
        if 'values' in data:
            data = copy_global_values(data)
        return data
    except BadStatusLine:
        # Stale connection: rebuild the drive client and retry.
        # NOTE(review): recursion is unbounded if the connection keeps
        # resetting — consider a retry limit.
        puts("Connection reset, reloading drive API")
        self.client = get_drive_api()
        return self._get_context_from_gdoc(key)
def _get_context_from_gdoc(self, key)
Create a Jinja2 context from a Google spreadsheet.
7.99379
7.52238
1.062668
def export_xlsx(self, key):
    """Download the xlsx export of the Google spreadsheet *key* and return its bytes."""
    spreadsheet_file = self.client.files().get(fileId=key).execute()
    export_links = spreadsheet_file.get('exportLinks')
    download_url = export_links.get('application/vnd.openxmlformats-officedocument.spreadsheetml.sheet')
    resp, content = self.client._http.request(download_url)
    return content
def export_xlsx(self, key)
Download xlsx version of spreadsheet.
2.905639
2.645536
1.098318
def generate_static_site(self, output_root=None, extra_context=None):
    """Bake the site out as static files under *output_root*."""
    self.app.config['BUILD_PATH'] = output_root

    # Hooks may register extra URLs to freeze
    self.call_hook("generate", self, output_root, extra_context)

    if output_root is not None:
        # realpath, or output lands relative to the tarbell package
        self.app.config['FREEZER_DESTINATION'] = os.path.realpath(output_root)
    self.freezer.freeze()
def generate_static_site(self, output_root=None, extra_context=None)
Bake out static site
8.542717
8.413586
1.015348
excludes = r'|'.join([fnmatch.translate(x) for x in self.project.EXCLUDES]) or r'$.' for root, dirs, files in os.walk(path, topdown=True): dirs[:] = [d for d in dirs if not re.match(excludes, d)] dirs[:] = [os.path.join(root, d) for d in dirs] rel_path = os.path.relpath(root, path) paths = [] for f in files: if rel_path == '.': file_path = f else: file_path = os.path.join(rel_path, f) if not re.match(excludes, file_path): paths.append(f) files[:] = paths yield root, dirs, files
def filter_files(self, path)
Exclude files based on blueprint and project configuration as well as hidden files.
2.097207
2.054291
1.020891
# yield blueprint paths first if getattr(self, 'blueprint_name', None): for path in walk_directory(os.path.join(self.path, self.blueprint_name), ignore=self.project.EXCLUDES): yield 'preview', {'path': path} # then yield project paths for path in walk_directory(self.path, ignore=self.project.EXCLUDES): yield 'preview', {'path': path}
def find_files(self)
Find all file paths for publishing, yield (urlname, kwargs)
4.647479
3.817604
1.217381
def deploy_to_s3(self):
    """Deploy the directory to the s3 bucket.

    Returns:
        bool: True on completion.
    """
    self.tempdir = tempfile.mkdtemp('s3deploy')
    try:
        for keyname, absolute_path in self.find_file_paths():
            self.s3_upload(keyname, absolute_path)
    finally:
        # Fix: previously a failed upload leaked the temp directory
        shutil.rmtree(self.tempdir, True)
    return True
def deploy_to_s3(self)
Deploy a directory to an s3 bucket.
4.641601
4.341798
1.06905
mimetype = mimetypes.guess_type(absolute_path) options = {'Content-Type': mimetype[0]} if mimetype[0] is not None and mimetype[0].startswith('text/'): upload = open(absolute_path, 'rb') options['Content-Encoding'] = 'gzip' key_parts = keyname.split('/') filename = key_parts.pop() temp_path = os.path.join(self.tempdir, filename) gzfile = gzip.GzipFile(temp_path, 'wb', 9, None, GZIP_TIMESTAMP) gzfile.write(upload.read()) gzfile.close() absolute_path = temp_path hash = '"{0}"'.format(hashlib.md5(open(absolute_path, 'rb').read()).hexdigest()) key = "{0}/{1}".format(self.bucket.path, keyname) existing = self.connection.get_key(key) if self.force or not existing or (existing.etag != hash): k = Key(self.connection) k.key = key puts("+ Uploading {0}/{1}".format(self.bucket, keyname)) k.set_contents_from_filename(absolute_path, options, policy='public-read') else: puts("- Skipping {0}/{1}, files match".format(self.bucket, keyname))
def s3_upload(self, keyname, absolute_path)
Upload a file to s3
2.711093
2.73133
0.992591
def find_file_paths(self):
    """Recursively collect files under the upload directory.

    Returns:
        list: (relative keyname, absolute path) tuples for every file.
    """
    paths = []
    for root, dirs, files in os.walk(self.directory, topdown=True):
        rel_path = os.path.relpath(root, self.directory)
        for name in files:
            key = name if rel_path == '.' else os.path.join(rel_path, name)
            paths.append((key, os.path.join(root, name)))
    return paths
def find_file_paths(self)
A generator function that recursively finds all files in the upload directory.
1.77213
1.71744
1.031843
def get_drive_api():
    """Build a drive API client from stored credentials or client secrets.

    Returns None when neither is configured.
    """
    settings = Settings()
    if settings.credentials:
        api = get_drive_api_from_file(settings.credentials_path)
    elif settings.client_secrets:
        api = get_drive_api_from_client_secrets(settings.client_secrets_path)
    else:
        api = None
    return api
def get_drive_api()
Get drive API client based on settings.
2.911695
2.636927
1.1042
storage = keyring_storage.Storage('tarbell', getpass.getuser()) credentials = None if not reset_creds: credentials = storage.get() if path and not credentials: flow = client.flow_from_clientsecrets(path, scope=OAUTH_SCOPE) credentials = tools.run_flow(flow, storage, flags) storage.put(credentials) return _get_drive_api(credentials)
def get_drive_api_from_client_secrets(path, reset_creds=False)
Reads the local client secrets file if available (otherwise, opens a browser tab to walk through the OAuth 2.0 process, and stores the client secrets for future use) and then authorizes those credentials. Returns a Google Drive API service object.
3.957547
4.132975
0.957554
def get_drive_api_from_file(path):
    """Build a drive API client from a stored OAuth token file.

    Args:
        path (str): path to the JSON credentials file.
    """
    # Fix: the file handle was previously left open (no close / context manager)
    with open(path) as f:
        credentials = client.OAuth2Credentials.from_json(f.read())
    return _get_drive_api(credentials)
def get_drive_api_from_file(path)
Open file with OAuth tokens.
3.58801
2.943418
1.218994
def _get_drive_api(credentials):
    """Authorize *credentials* and return a drive v2 API service object."""
    http = credentials.authorize(httplib2.Http())
    service = discovery.build('drive', 'v2', http=http)
    # Duck-punch the credentials onto the service object for later reuse
    service.credentials = credentials
    return service
def _get_drive_api(credentials)
For a given set of credentials, return a drive API object.
4.306924
4.129331
1.043008
command = Command.lookup(args.get(0)) if len(args) == 0 or args.contains(('-h', '--help', 'help')): display_info(args) sys.exit(1) elif args.contains(('-v', '--version')): display_version() sys.exit(1) elif command: arg = args.get(0) args.remove(arg) command.__call__(command, args) sys.exit() else: show_error(colored.red('Error! Unknown command \'{0}\'.\n' .format(args.get(0)))) display_info(args) sys.exit(1)
def main()
Primary Tarbell command dispatch.
3.633242
3.429517
1.059404
def display_info(args):
    """Print the Tarbell banner, available commands, and a config warning."""
    puts('\nTarbell: Simple web publishing\n')
    puts('Usage: {0}\n'.format(colored.cyan('tarbell <command>')))
    puts('Commands:\n')
    for cmd in Command.all_commands():
        usage_text = cmd.usage or cmd.name
        help_text = cmd.help or ''
        # Left column is padded to 37 chars; help text wraps beside it.
        left = colored.yellow('{0: <37}'.format(usage_text))
        right = split_sentences(help_text, 37)
        puts('{0} {1}'.format(left, right))
    puts("")
    # Warn when no global Tarbell configuration file exists yet.
    if Settings().file_missing:
        puts('---\n{0}: {1}'.format(
            colored.red("Warning"),
            "No Tarbell configuration found. Run:"
        ))
        puts('\n{0}'.format(
            colored.green("tarbell configure")
        ))
        puts('\n{0}\n---'.format(
            "to configure Tarbell."
        ))
5.230482
4.854829
1.077377
def tarbell_generate(command, args, skip_args=False, extra_context=None, quiet=False):
    """
    Generate static files.

    Renders the current project into an output directory and returns
    the path it was built into.

    Args:
        command: The invoking Command object.
        args: Parsed command-line args (clint-style).
        skip_args: When True, ignore positional args and build into a
            fresh temp directory (used by `tarbell publish`).
        extra_context: Optional dict merged into the template context.
        quiet: Suppress progress output when True.

    Returns:
        The directory the site was generated into.
    """
    output_root = None
    with ensure_settings(command, args) as settings, ensure_project(command, args) as site:
        if not skip_args:
            # First positional argument is the target directory.
            output_root = list_get(args, 0, False)
            if output_root:
                is_folder = os.path.exists(output_root)
            else:
                puts("\nYou must specify an output directory (e.g. `{0}`)".format(
                    colored.cyan("tarbell generate _out")
                ))
                sys.exit()
        if quiet:
            site.quiet = True
        if not output_root:
            # No target specified (or skip_args): build into a temp dir.
            output_root = tempfile.mkdtemp(prefix="{0}-".format(site.project.__name__))
            is_folder = False
        if args.contains('--context'):
            site.project.CONTEXT_SOURCE_FILE = args.value_after('--context')
        if args.contains('--overwrite'):
            # Skip the interactive "already exists" prompt below.
            is_folder = False
        #check to see if the folder we're trying to create already exists
        if is_folder:
            output_file = raw_input(("\nA folder named {0} already exists! Do you want to delete it? (selecting 'N' will quit) [y/N] ").format(
                output_root
            ))
            if output_file and output_file.lower() == "y":
                puts(("\nDeleting {0}...\n").format(
                    colored.cyan(output_root)
                ))
                _delete_dir(output_root)
            else:
                puts("\nNot overwriting. See ya!")
                sys.exit()
        site.generate_static_site(output_root, extra_context)
        if not quiet:
            puts("\nCreated site in {0}".format(colored.cyan(output_root)))
        return output_root
4.209682
4.223467
0.996736
def tarbell_install(command, args):
    """
    Install a project.

    Clones a remote Tarbell project repository into the configured
    projects directory, installs its requirements, and runs the
    project's "install" hook.
    """
    with ensure_settings(command, args) as settings:
        project_url = args.get(0)
        puts("\n- Getting project information for {0}".format(project_url))
        project_name = project_url.split("/").pop()
        error = None
        # Create a tempdir and clone
        # Probe the repo with a shallow, bare clone first so we can
        # confirm it contains a tarbell_config.py before touching the
        # real projects directory.
        tempdir = tempfile.mkdtemp()
        try:
            testgit = sh.git.bake(_cwd=tempdir, _tty_in=True, _tty_out=False)  # _err_to_out=True)
            testclone = testgit.clone(project_url, '.', '--depth=1', '--bare')
            puts(testclone)
            config = testgit.show("HEAD:tarbell_config.py")
            puts("\n- Found tarbell_config.py")
            path = _get_path(_clean_suffix(project_name, ".git"), settings)
            _mkdir(path)
            git = sh.git.bake(_cwd=path)
            clone = git.clone(project_url, '.', _tty_in=True, _tty_out=False, _err_to_out=True)
            puts(clone)
            puts(git.submodule.update('--init', '--recursive', _tty_in=True, _tty_out=False, _err_to_out=True))
            _install_requirements(path)
            # Get site, run hook
            with ensure_project(command, args, path) as site:
                site.call_hook("install", site, git)
        except sh.ErrorReturnCode_128 as e:
            # Git exits 128 both for auth prompts and for bad repos;
            # distinguish by the message text.
            if e.message.endswith('Device not configured\n'):
                error = 'Git tried to prompt for a username or password.\n\nTarbell doesn\'t support interactive sessions. Please configure ssh key access to your Git repository. (See https://help.github.com/articles/generating-ssh-keys/)'
            else:
                error = 'Not a valid repository or Tarbell project'
        finally:
            _delete_dir(tempdir)
        if error:
            show_error(error)
        else:
            puts("\n- Done installing project in {0}".format(colored.yellow(path)))
4.668844
4.677901
0.998064
def tarbell_install_blueprint(command, args):
    """
    Install a project template (blueprint).

    Clones the blueprint repository into a temp directory, loads its
    blueprint.py to discover the template name, and registers the
    template URL in the Tarbell settings.
    """
    with ensure_settings(command, args) as settings:
        name = None
        error = None
        template_url = args.get(0)
        # Skip installation when the URL is already registered.
        matches = [template for template in settings.config["project_templates"] if template.get("url") == template_url]
        tempdir = tempfile.mkdtemp()
        if matches:
            puts("\n{0} already exists. Nothing more to do.\n".format(
                colored.yellow(template_url)
            ))
            sys.exit()
        try:
            puts("\nInstalling {0}".format(colored.cyan(template_url)))
            puts("\n- Cloning repo")
            git = sh.git.bake(_cwd=tempdir, _tty_in=True, _tty_out=False, _err_to_out=True)
            puts(git.clone(template_url, '.'))
            _install_requirements(tempdir)
            # Load blueprint.py from the clone to read its NAME.
            filename, pathname, description = imp.find_module('blueprint', [tempdir])
            blueprint = imp.load_module('blueprint', filename, pathname, description)
            puts("\n- Found _blueprint/blueprint.py")
            name = blueprint.NAME
            puts("\n- Name specified in blueprint.py: {0}".format(colored.yellow(name)))
            settings.config["project_templates"].append({"name": name, "url": template_url})
            settings.save()
        except AttributeError:
            # blueprint.py exists but has no NAME: fall back to the
            # last path segment of the URL.
            name = template_url.split("/")[-1]
            error = "\n- No name specified in blueprint.py, using '{0}'".format(colored.yellow(name))
        except ImportError:
            error = 'No blueprint.py found'
        except sh.ErrorReturnCode_128 as e:
            if e.stdout.strip('\n').endswith('Device not configured'):
                error = 'Git tried to prompt for a username or password.\n\nTarbell doesn\'t support interactive sessions. Please configure ssh key access to your Git repository. (See https://help.github.com/articles/generating-ssh-keys/)'
            else:
                error = 'Not a valid repository or Tarbell project'
        finally:
            _delete_dir(tempdir)
        if error:
            show_error(error)
        else:
            puts("\n+ Added new project template: {0}".format(colored.yellow(name)))
4.079836
4.000357
1.019868
def tarbell_list(command, args):
    """
    List tarbell projects.

    Scans the configured projects directory for subdirectories that
    contain a loadable tarbell_config.py and prints their titles and
    directory names in aligned columns.
    """
    with ensure_settings(command, args) as settings:
        projects_path = settings.config.get("projects_path")
        if not projects_path:
            show_error("{0} does not exist".format(projects_path))
            sys.exit()
        puts("Listing projects in {0}\n".format(
            colored.yellow(projects_path)
        ))
        longest_title = 0
        projects = []
        for directory in os.listdir(projects_path):
            project_path = os.path.join(projects_path, directory)
            try:
                # Load the project's tarbell_config.py to read its title.
                filename, pathname, description = imp.find_module('tarbell_config', [project_path])
                config = imp.load_module(directory, filename, pathname, description)
                title = config.DEFAULT_CONTEXT.get("title", directory)
                projects.append((directory, title))
                # Track the widest title for column alignment below.
                if len(title) > longest_title:
                    longest_title = len(title)
            except ImportError:
                # Not a Tarbell project (no tarbell_config.py); skip it.
                pass
        if len(projects):
            fmt = "{0: <"+str(longest_title+1)+"} {1}"
            puts(fmt.format(
                'title',
                'project name'
            ))
            for projectname, title in projects:
                title = codecs.encode(title, 'utf8')
                puts(colored.yellow(fmt.format(
                    title,
                    colored.cyan(projectname)
                )))
            puts("\nUse {0} to switch to a project".format(
                colored.green("tarbell switch <project name>")
            ))
        else:
            puts("No projects found")
2.901896
2.889205
1.004393
def tarbell_list_templates(command, args):
    """Print the project blueprints registered in the Tarbell settings."""
    with ensure_settings(command, args) as settings:
        # Blank lines frame the template listing.
        puts("\nAvailable project templates\n")
        _list_templates(settings)
        puts("")
9.86818
11.020662
0.895425
def tarbell_publish(command, args):
    """
    Publish to s3.

    Generates the site into a temp directory, resolves S3 credentials
    (per-bucket config, then defaults, then AWS_* environment
    variables), syncs the output to the bucket, and runs the project's
    "publish" hook. The first positional arg names the bucket config
    (default "staging").
    """
    with ensure_settings(command, args) as settings, ensure_project(command, args) as site:
        bucket_name = list_get(args, 0, "staging")
        try:
            bucket_url = S3Url(site.project.S3_BUCKETS[bucket_name])
        except KeyError:
            show_error(
                "\nThere's no bucket configuration called '{0}' in "
                "tarbell_config.py.".format(colored.yellow(bucket_name)))
            sys.exit(1)
        extra_context = {
            "ROOT_URL": bucket_url,
            "S3_BUCKET": bucket_url.root,
            "BUCKET_NAME": bucket_name,
        }
        # Build the site into a temp dir (trailing slash for the sync).
        tempdir = "{0}/".format(tarbell_generate(command, args, extra_context=extra_context, skip_args=True, quiet=True))
        try:
            title = site.project.DEFAULT_CONTEXT.get("title", "")
            puts("\nDeploying {0} to {1} ({2})\n".format(
                colored.yellow(title),
                colored.red(bucket_name),
                colored.green(bucket_url)
            ))
            # Get creds
            if settings.config:
                # If settings has a config section, use it
                kwargs = settings.config['s3_credentials'].get(bucket_url.root)
                if not kwargs:
                    kwargs = {
                        'access_key_id': settings.config.get('default_s3_access_key_id'),
                        'secret_access_key': settings.config.get('default_s3_secret_access_key'),
                    }
                    puts("Using default bucket credentials")
                else:
                    puts("Using custom bucket configuration for {0}".format(bucket_url.root))
            else:
                # If no configuration exists, read from environment variables if possible
                puts("Attemping to use AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY")
                kwargs = {
                    'access_key_id': os.environ["AWS_ACCESS_KEY_ID"],
                    'secret_access_key': os.environ["AWS_SECRET_ACCESS_KEY"],
                }
            if not kwargs.get('access_key_id') and not kwargs.get('secret_access_key'):
                show_error('S3 access is not configured. Set up S3 with {0} to publish.'
                           .format(colored.green('tarbell configure')))
                sys.exit()
            s3 = S3Sync(tempdir, bucket_url, **kwargs)
            s3.deploy_to_s3()
            site.call_hook("publish", site, s3)
            puts("\nIf you have website hosting enabled, you can see your project at:")
            puts(colored.green("http://{0}\n".format(bucket_url)))
        except KeyboardInterrupt:
            show_error("ctrl-c pressed, bailing out!")
        finally:
            # Always remove the generated temp output, even on failure.
            _delete_dir(tempdir)
3.726612
3.72419
1.00065
def tarbell_newproject(command, args):
    """Create a new Tarbell project directory and scaffold it."""
    with ensure_settings(command, args) as settings:
        project_name = _get_project_name(args)
        puts("Creating {0}".format(colored.cyan(project_name)))
        project_path = _get_path(project_name, settings)
        _mkdir(project_path)
        try:
            _newproject(command, project_path, project_name, settings)
        except KeyboardInterrupt:
            # User bailed: clean up the half-made directory.
            _delete_dir(project_path)
            show_error("ctrl-c pressed, not creating new project.")
        except:
            # Anything else: clean up, report, and re-raise so the
            # traceback stays visible.
            _delete_dir(project_path)
            show_error("Unexpected error: {0}".format(sys.exc_info()[0]))
            raise
4.317109
4.398706
0.98145
def tarbell_serve(command, args):
    """Run the current Tarbell project on a local development server."""
    with ensure_project(command, args) as site, ensure_settings(command, args) as settings:
        # Optional first arg is "ip:port"; fall back to configured defaults.
        host_spec = list_get(args, 0, "").split(":")
        ip = list_get(host_spec, 0, settings.config['default_server_ip'])
        port = int(list_get(host_spec, 1, settings.config['default_server_port']))
        puts("\n * Running local server. Press {0} to stop the server".format(colored.red("ctrl-c")))
        puts(" * Edit this project's templates at {0}".format(colored.yellow(site.path)))
        try:
            # Only fire the hooks when not inside a werkzeug-spawned
            # process (presumably the reloader child — see
            # is_werkzeug_process), so they run exactly once.
            if not is_werkzeug_process():
                site.call_hook("server_start", site)
            site.app.run(ip, port=port)
            if not is_werkzeug_process():
                site.call_hook("server_stop", site)
        except socket.error:
            show_error("Address {0} is already in use, please try another port or address."
                       .format(colored.yellow("{0}:{1}".format(ip, port))))
4.887783
4.878518
1.001899
def tarbell_switch(command, args):
    """Change into a project's directory and serve it."""
    with ensure_settings(command, args) as settings:
        projects_path = settings.config.get("projects_path")
        if not projects_path:
            show_error("{0} does not exist".format(projects_path))
            sys.exit()
        # First positional argument is the project name; remove it so
        # the remaining args pass straight through to the server.
        project = args.get(0)
        args.remove(project)
        target = os.path.join(projects_path, project)
        if not os.path.isdir(target):
            show_error("{0} isn't a tarbell project".format(target))
        else:
            os.chdir(target)
            puts("\nSwitching to {0}".format(colored.red(project)))
            tarbell_serve(command, args)
3.115329
3.132308
0.994579
def tarbell_update(command, args):
    """Pull the latest blueprint changes into the current project."""
    with ensure_settings(command, args) as settings, ensure_project(command, args) as site:
        puts("Updating to latest blueprint\n")
        blueprint_git = sh.git.bake(_cwd=site.base.base_dir)
        # Park any local edits, fast-forward, then restore them.
        puts(colored.yellow("Stashing local changes"))
        puts(blueprint_git.stash())
        puts(colored.yellow("Pull latest changes"))
        puts(blueprint_git.pull())
        # Only pop when the stash is non-empty; note this may behave
        # oddly if older changes were already stashed.
        if blueprint_git.stash.list():
            puts(blueprint_git.stash.pop())
7.858861
8.158681
0.963251
def tarbell_unpublish(command, args):
    """Delete a published project (not yet supported)."""
    with ensure_settings(command, args) as settings, ensure_project(command, args) as site:
        # Placeholder: unpublishing has not been built yet.
        show_error("Not implemented!")
11.554045
13.661534
0.845736
def tarbell_spreadsheet(command, args):
    """
    Open the project's context spreadsheet in the default application.

    Prefers the Google Spreadsheet configured via SPREADSHEET_KEY and
    falls back to CONTEXT_SOURCE_FILE; shows an error when neither is
    configured.
    """
    with ensure_settings(command, args) as settings, ensure_project(command, args) as site:
        try:
            # First, try to get the Google Spreadsheet URL
            spreadsheet_url = _google_spreadsheet_url(site.project.SPREADSHEET_KEY)
        except AttributeError:
            # The project doesn't seem to be using a Google Spreadsheet.
            # Try the URL or path specified in the CONTEXT_SOURCE_FILE setting
            try:
                spreadsheet_url = _context_source_file_url(
                    site.project.CONTEXT_SOURCE_FILE)
                # NOTE: removed a stray debug `print(spreadsheet_url)`
                # left over from development; user-facing output in this
                # module goes through puts().
            except AttributeError:
                puts(colored.red("No Google spreadsheet or context source file "
                                 "has been configured.\n"))
                return
        # Use the webbrowser package to try to open the file whether it's a
        # remote URL on the web, or a local file. On some platforms it will
        # successfully open local files in the default application.
        # This seems preferable to trying to do os detection and calling
        # the system-specific command for opening files in default
        # applications.
        # See
        # http://stackoverflow.com/questions/434597/open-document-with-default-application-in-python
        webbrowser.open(spreadsheet_url)
6.002113
5.938141
1.010773
def _context_source_file_url(path_or_url):
    """
    Return a URL for a remote or local context source file.

    Args:
        path_or_url: An http(s) URL to a remote file, an absolute
            filesystem path, or a path relative to the current working
            directory.

    Returns:
        The URL unchanged for remote files; otherwise a ``file://`` URL
        pointing at the absolute local path.
    """
    if path_or_url.startswith('http'):
        # Remote file. Just return the URL
        return path_or_url
    if os.path.isabs(path_or_url):
        # Absolute path. os.path.isabs also recognizes Windows drive
        # paths, which the previous startswith('/') check missed.
        return "file://" + path_or_url
    # Relative path: anchor it to the real current working directory.
    return "file://" + os.path.join(os.path.realpath(os.getcwd()), path_or_url)
2.930269
2.458476
1.191904
def _newproject(command, path, name, settings):
    """
    Helper to create a new project at *path*.

    Initializes a git repo; when the chosen template has a URL, wires
    up the blueprint submodule, creates the context spreadsheet, and
    copies the blueprint's top-level HTML files (otherwise writes an
    empty index.html). Then writes the config file, makes the initial
    commit, installs requirements, and runs the "newproject" hook.
    """
    key = None
    title = _get_project_title()
    template = _get_template(settings)
    # Init repo
    git = sh.git.bake(_cwd=path)
    puts(git.init())
    if template.get("url"):
        # Create submodule
        puts(git.submodule.add(template['url'], '_blueprint'))
        puts(git.submodule.update(*['--init']))
        # Create spreadsheet
        key = _create_spreadsheet(name, title, path, settings)
        # Copy html files
        puts(colored.green("\nCopying html files..."))
        html_files = glob.iglob(os.path.join(path, "_blueprint", "*.html"))
        # Renamed loop vars from `file`/`dir` to avoid shadowing builtins.
        for html_file in html_files:
            if os.path.isfile(html_file):
                _, filename = os.path.split(html_file)
                # Skip partials (_foo.html) and hidden files.
                if not filename.startswith("_") and not filename.startswith("."):
                    puts("Copying {0} to {1}".format(filename, path))
                    shutil.copy2(html_file, path)
        ignore = os.path.join(path, "_blueprint", ".gitignore")
        if os.path.isfile(ignore):
            shutil.copy2(ignore, path)
    else:
        # No blueprint: start from an empty index page. Close the
        # handle explicitly (the original leaked it).
        empty_index_path = os.path.join(path, "index.html")
        open(empty_index_path, "w").close()
    # Create config file
    _copy_config_template(name, title, template, path, key, settings)
    # Commit
    puts(colored.green("\nInitial commit"))
    puts(git.add('.'))
    puts(git.commit(m='Created {0} from {1}'.format(name, template['name'])))
    _install_requirements(path)
    # Get site, run hook (`args` here is the module-level args object)
    with ensure_project(command, args, path) as site:
        site.call_hook("newproject", site, git)
    # Messages
    puts("\nAll done! To preview your new project, type:\n")
    puts("{0} {1}".format(colored.green("tarbell switch"), colored.green(name)))
    puts("\nor\n")
    puts("{0}".format(colored.green("cd %s" % path)))
    puts("{0}".format(colored.green("tarbell serve\n")))
    puts("\nYou got this!\n")
4.04471
4.02488
1.004927
def _install_requirements(path):
    """
    Offer to install a blueprint's requirements.txt with pip.

    Checks the project's _blueprint and _base subdirectories and the
    project root itself. Returns False when the user declines an
    install, True otherwise.
    """
    candidates = [os.path.join(path, "_blueprint"), os.path.join(path, "_base"), path]
    success = True
    for location in candidates:
        requirements = os.path.join(location, "requirements.txt")
        try:
            # Opening the file doubles as the existence check (EAFP).
            with open(requirements):
                puts("\nRequirements file found at {0}".format(requirements))
                install_reqs = raw_input("Install requirements now with pip install -r requirements.txt? [Y/n] ")
                if not install_reqs or install_reqs.lower() == 'y':
                    pip = sh.pip.bake(_cwd=location)
                    puts("\nInstalling requirements...")
                    puts(pip("install", "-r", "requirements.txt"))
                else:
                    success = False
                    puts("Not installing requirements. This may break everything! Vaya con dios.")
        except IOError:
            # No requirements.txt at this location; keep looking.
            pass
    return success
3.882927
3.691368
1.051894