Dataset columns (type and value/length range):
id: int32 (0 to 252k)
repo: string (length 7 to 55)
path: string (length 4 to 127)
func_name: string (length 1 to 88)
original_string: string (length 75 to 19.8k)
language: string (1 class)
code: string (length 75 to 19.8k)
code_tokens: list
docstring: string (length 3 to 17.3k)
docstring_tokens: list
sha: string (length 40)
url: string (length 87 to 242)
9,400
ellmetha/django-machina
machina/apps/forum_conversation/forum_polls/views.py
TopicPollVoteView.form_invalid
python
def form_invalid(self, form):
    """ Handles an invalid form. """
    messages.error(self.request, form.errors[NON_FIELD_ERRORS])
    return redirect(
        reverse(
            'forum_conversation:topic',
            kwargs={
                'forum_slug': self.object.topic.forum.slug,
                'forum_pk': self.object.topic.forum.pk,
                'slug': self.object.topic.slug,
                'pk': self.object.topic.pk
            },
        ),
    )
Handles an invalid form.
89ac083c1eaf1cfdeae6686ee094cc86362e8c69
https://github.com/ellmetha/django-machina/blob/89ac083c1eaf1cfdeae6686ee094cc86362e8c69/machina/apps/forum_conversation/forum_polls/views.py#L62-L75
9,401
ellmetha/django-machina
machina/templatetags/forum_polls_tags.py
has_been_completed_by
python
def has_been_completed_by(poll, user):
    """ This will return a boolean indicating if the passed user has already voted in the
        given poll.

        Usage::

            {% if poll|has_been_completed_by:user %}...{% endif %}

    """
    user_votes = TopicPollVote.objects.filter(poll_option__poll=poll)
    if user.is_anonymous:
        forum_key = get_anonymous_user_forum_key(user)
        user_votes = user_votes.filter(anonymous_key=forum_key) if forum_key \
            else user_votes.none()
    else:
        user_votes = user_votes.filter(voter=user)
    return user_votes.exists()
This will return a boolean indicating if the passed user has already voted in the given poll. Usage:: {% if poll|has_been_completed_by:user %}...{% endif %}
89ac083c1eaf1cfdeae6686ee094cc86362e8c69
https://github.com/ellmetha/django-machina/blob/89ac083c1eaf1cfdeae6686ee094cc86362e8c69/machina/templatetags/forum_polls_tags.py#L17-L33
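Note on the row above: the docstring shows the template-side usage, but a function only becomes usable as a template filter once it is registered in a template Library. That registration is not part of this row; the following is a minimal sketch of the standard Django pattern, assuming the usual module-level register object that templatetags modules expose, not copied from forum_polls_tags.py itself.

# Minimal registration sketch (standard Django pattern; assumed, not taken
# verbatim from machina/templatetags/forum_polls_tags.py).
from django import template

register = template.Library()


@register.filter
def has_been_completed_by(poll, user):
    ...  # body as shown in the row above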
9,402
ellmetha/django-machina
machina/apps/forum_tracking/managers.py
ForumReadTrackManager.get_unread_forums_from_list
python
def get_unread_forums_from_list(self, forums, user):
    """ Filter a list of forums and return only those which are unread.

    Given a list of forums, finds and returns the list of forums that are unread for the
    passed user. If a forum is unread, all of its ancestors are also unread and will be
    included in the final list.

    """
    unread_forums = []
    visibility_contents = ForumVisibilityContentTree.from_forums(forums)
    forum_ids_to_visibility_nodes = visibility_contents.as_dict

    tracks = super().get_queryset().select_related('forum').filter(
        user=user, forum__in=forums)
    tracked_forums = []

    for track in tracks:
        forum_last_post_on = forum_ids_to_visibility_nodes[track.forum_id].last_post_on
        if (forum_last_post_on and track.mark_time < forum_last_post_on) \
                and track.forum not in unread_forums:
            unread_forums.extend(track.forum.get_ancestors(include_self=True))
        tracked_forums.append(track.forum)

    for forum in forums:
        if forum not in tracked_forums and forum not in unread_forums \
                and forum.direct_topics_count > 0:
            unread_forums.extend(forum.get_ancestors(include_self=True))

    return list(set(unread_forums))
Filter a list of forums and return only those which are unread. Given a list of forums, finds and returns the list of forums that are unread for the passed user. If a forum is unread, all of its ancestors are also unread and will be included in the final list.
89ac083c1eaf1cfdeae6686ee094cc86362e8c69
https://github.com/ellmetha/django-machina/blob/89ac083c1eaf1cfdeae6686ee094cc86362e8c69/machina/apps/forum_tracking/managers.py#L20-L48
9,403
ellmetha/django-machina
machina/apps/forum_member/receivers.py
increase_posts_count
python
def increase_posts_count(sender, instance, **kwargs):
    """ Increases the member's post count after a post save.

    This receiver handles the update of the profile related to the user who is the poster of
    the forum post being created or updated.

    """
    if instance.poster is None:
        # An anonymous post is considered. No profile can be updated in
        # that case.
        return

    profile, dummy = ForumProfile.objects.get_or_create(user=instance.poster)
    increase_posts_count = False

    if instance.pk:
        try:
            old_instance = instance.__class__._default_manager.get(pk=instance.pk)
        except ObjectDoesNotExist:  # pragma: no cover
            # This should never happen (except with django loaddata command)
            increase_posts_count = True
            old_instance = None
        if old_instance and old_instance.approved is False and instance.approved is True:
            increase_posts_count = True
    elif instance.approved:
        increase_posts_count = True

    if increase_posts_count:
        profile.posts_count = F('posts_count') + 1
        profile.save()
Increases the member's post count after a post save. This receiver handles the update of the profile related to the user who is the poster of the forum post being created or updated.
89ac083c1eaf1cfdeae6686ee094cc86362e8c69
https://github.com/ellmetha/django-machina/blob/89ac083c1eaf1cfdeae6686ee094cc86362e8c69/machina/apps/forum_member/receivers.py#L25-L54
9,404
ellmetha/django-machina
machina/apps/forum_member/receivers.py
decrease_posts_count_after_post_unaproval
python
def decrease_posts_count_after_post_unaproval(sender, instance, **kwargs):
    """ Decreases the member's post count after a post unapproval.

    This receiver handles the unapproval of a forum post: the posts count associated with
    the post's author is decreased.

    """
    if not instance.pk:
        # Do not consider posts being created.
        return

    profile, dummy = ForumProfile.objects.get_or_create(user=instance.poster)

    try:
        old_instance = instance.__class__._default_manager.get(pk=instance.pk)
    except ObjectDoesNotExist:  # pragma: no cover
        # This should never happen (except with django loaddata command)
        return

    if old_instance and old_instance.approved is True and instance.approved is False:
        profile.posts_count = F('posts_count') - 1
        profile.save()
Decreases the member's post count after a post unapproval. This receiver handles the unapproval of a forum post: the posts count associated with the post's author is decreased.
89ac083c1eaf1cfdeae6686ee094cc86362e8c69
https://github.com/ellmetha/django-machina/blob/89ac083c1eaf1cfdeae6686ee094cc86362e8c69/machina/apps/forum_member/receivers.py#L58-L78
9,405
ellmetha/django-machina
machina/apps/forum_member/receivers.py
decrease_posts_count_after_post_deletion
python
def decrease_posts_count_after_post_deletion(sender, instance, **kwargs):
    """ Decreases the member's post count after a post deletion.

    This receiver handles the deletion of a forum post: the posts count related to the
    post's author is decreased.

    """
    if not instance.approved:
        # If a post has not been approved, it has not been counted.
        # So do not decrement count
        return

    try:
        assert instance.poster_id is not None
        poster = User.objects.get(pk=instance.poster_id)
    except AssertionError:
        # An anonymous post is considered. No profile can be updated in
        # that case.
        return
    except ObjectDoesNotExist:  # pragma: no cover
        # This can happen if a User instance is deleted. In that case the
        # User instance is not available and the receiver should return.
        return

    profile, dummy = ForumProfile.objects.get_or_create(user=poster)

    if profile.posts_count:
        profile.posts_count = F('posts_count') - 1
        profile.save()
Decreases the member's post count after a post deletion. This receiver handles the deletion of a forum post: the posts count related to the post's author is decreased.
89ac083c1eaf1cfdeae6686ee094cc86362e8c69
https://github.com/ellmetha/django-machina/blob/89ac083c1eaf1cfdeae6686ee094cc86362e8c69/machina/apps/forum_member/receivers.py#L82-L108
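Note on the three receiver rows above: these functions only run once they are connected to model signals, and the actual wiring lives elsewhere in django-machina and is not included in these rows. The sketch below is a hedged illustration of the standard Django way of connecting such receivers inside an AppConfig; the module paths, app label, and the exact signals used (pre_save is chosen here because the count receivers compare the incoming instance against the row currently stored in the database) are assumptions, not taken from django-machina's source.

# Hypothetical wiring sketch only; names and signal choices are assumptions.
from django.apps import AppConfig
from django.db.models.signals import post_delete, pre_save


class ForumMemberConfig(AppConfig):
    name = 'forum_member'  # placeholder app label

    def ready(self):
        from . import receivers  # module holding the functions shown above
        from forum_conversation.models import Post  # placeholder import path

        # dispatch_uid prevents double registration if ready() runs twice.
        pre_save.connect(receivers.increase_posts_count, sender=Post,
                         dispatch_uid='increase_posts_count')
        pre_save.connect(receivers.decrease_posts_count_after_post_unaproval, sender=Post,
                         dispatch_uid='decrease_posts_count_after_post_unaproval')
        post_delete.connect(receivers.decrease_posts_count_after_post_deletion, sender=Post,
                            dispatch_uid='decrease_posts_count_after_post_deletion')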
9,406
ellmetha/django-machina
machina/apps/forum_moderation/views.py
TopicLockView.lock
python
def lock(self, request, *args, **kwargs):
    """ Locks the considered topic and redirects the user to the success URL. """
    self.object = self.get_object()
    success_url = self.get_success_url()
    self.object.status = Topic.TOPIC_LOCKED
    self.object.save()
    messages.success(self.request, self.success_message)
    return HttpResponseRedirect(success_url)
Locks the considered topic and redirects the user to the success URL.
89ac083c1eaf1cfdeae6686ee094cc86362e8c69
https://github.com/ellmetha/django-machina/blob/89ac083c1eaf1cfdeae6686ee094cc86362e8c69/machina/apps/forum_moderation/views.py#L42-L49
9,407
ellmetha/django-machina
machina/apps/forum_moderation/views.py
TopicUnlockView.unlock
python
def unlock(self, request, *args, **kwargs):
    """ Unlocks the considered topic and redirects the user to the success URL. """
    self.object = self.get_object()
    success_url = self.get_success_url()
    self.object.status = Topic.TOPIC_UNLOCKED
    self.object.save()
    messages.success(self.request, self.success_message)
    return HttpResponseRedirect(success_url)
Unlocks the considered topic and redirects the user to the success URL.
89ac083c1eaf1cfdeae6686ee094cc86362e8c69
https://github.com/ellmetha/django-machina/blob/89ac083c1eaf1cfdeae6686ee094cc86362e8c69/machina/apps/forum_moderation/views.py#L91-L98
9,408
ellmetha/django-machina
machina/apps/forum_moderation/views.py
TopicUpdateTypeBaseView.update_type
python
def update_type(self, request, *args, **kwargs):
    """ Updates the type of the considered topic and redirects the user to the success URL. """
    self.object = self.get_object()
    success_url = self.get_success_url()
    self.object.type = self.target_type
    self.object.save()
    messages.success(self.request, self.success_message)
    return HttpResponseRedirect(success_url)
Updates the type of the considered topic and redirects the user to the success URL.
89ac083c1eaf1cfdeae6686ee094cc86362e8c69
https://github.com/ellmetha/django-machina/blob/89ac083c1eaf1cfdeae6686ee094cc86362e8c69/machina/apps/forum_moderation/views.py#L257-L265
9,409
ellmetha/django-machina
machina/apps/forum_moderation/views.py
PostApproveView.approve
python
def approve(self, request, *args, **kwargs):
    """ Approves the considered post and redirects the user to the success URL. """
    self.object = self.get_object()
    success_url = self.get_success_url()
    self.object.approved = True
    self.object.save()
    messages.success(self.request, self.success_message)
    return HttpResponseRedirect(success_url)
Approves the considered post and redirects the user to the success URL.
89ac083c1eaf1cfdeae6686ee094cc86362e8c69
https://github.com/ellmetha/django-machina/blob/89ac083c1eaf1cfdeae6686ee094cc86362e8c69/machina/apps/forum_moderation/views.py#L404-L411
9,410
ellmetha/django-machina
machina/apps/forum_moderation/views.py
PostDisapproveView.disapprove
python
def disapprove(self, request, *args, **kwargs):
    """ Disapproves the considered post and redirects the user to the success URL. """
    self.object = self.get_object()
    success_url = self.get_success_url()
    self.object.delete()
    messages.success(self.request, self.success_message)
    return HttpResponseRedirect(success_url)
Disapproves the considered post and redirects the user to the success URL.
89ac083c1eaf1cfdeae6686ee094cc86362e8c69
https://github.com/ellmetha/django-machina/blob/89ac083c1eaf1cfdeae6686ee094cc86362e8c69/machina/apps/forum_moderation/views.py#L446-L452
9,411
ellmetha/django-machina
machina/apps/forum_member/views.py
UserPostsView.poster
python
def poster(self):
    """ Returns the considered user. """
    user_model = get_user_model()
    return get_object_or_404(user_model, pk=self.kwargs[self.user_pk_url_kwarg])
Returns the considered user.
89ac083c1eaf1cfdeae6686ee094cc86362e8c69
https://github.com/ellmetha/django-machina/blob/89ac083c1eaf1cfdeae6686ee094cc86362e8c69/machina/apps/forum_member/views.py#L66-L69
9,412
ellmetha/django-machina
machina/apps/forum_member/views.py
TopicSubscribeView.subscribe
python
def subscribe(self, request, *args, **kwargs):
    """ Performs the subscribe action. """
    self.object = self.get_object()
    self.object.subscribers.add(request.user)
    messages.success(self.request, self.success_message)
    return HttpResponseRedirect(self.get_success_url())
Performs the subscribe action.
89ac083c1eaf1cfdeae6686ee094cc86362e8c69
https://github.com/ellmetha/django-machina/blob/89ac083c1eaf1cfdeae6686ee094cc86362e8c69/machina/apps/forum_member/views.py#L145-L150
9,413
ellmetha/django-machina
machina/apps/forum_member/views.py
TopicUnsubscribeView.unsubscribe
python
def unsubscribe(self, request, *args, **kwargs):
    """ Performs the unsubscribe action. """
    self.object = self.get_object()
    self.object.subscribers.remove(request.user)
    messages.success(self.request, self.success_message)
    return HttpResponseRedirect(self.get_success_url())
Performs the unsubscribe action.
89ac083c1eaf1cfdeae6686ee094cc86362e8c69
https://github.com/ellmetha/django-machina/blob/89ac083c1eaf1cfdeae6686ee094cc86362e8c69/machina/apps/forum_member/views.py#L191-L196
9,414
ellmetha/django-machina
machina/apps/forum_conversation/receivers.py
update_topic_counter
python
def update_topic_counter(sender, topic, user, request, response, **kwargs):
    """ Handles the update of the views counter associated with topics. """
    topic.__class__._default_manager.filter(id=topic.id).update(views_count=F('views_count') + 1)
Handles the update of the views counter associated with topics.
89ac083c1eaf1cfdeae6686ee094cc86362e8c69
https://github.com/ellmetha/django-machina/blob/89ac083c1eaf1cfdeae6686ee094cc86362e8c69/machina/apps/forum_conversation/receivers.py#L16-L18
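Note on the row above: the .update(views_count=F('views_count') + 1) call pushes the increment into a single SQL UPDATE, so concurrent page views do not overwrite each other the way a read-modify-write on a loaded instance can. A small, hedged illustration of the same pattern follows; the helper name and the way the model is passed in are made up for this sketch.

# Illustrative only; topic_model stands in for any model with an integer
# views_count field, not necessarily django-machina's Topic.
from django.db.models import F


def bump_views(topic_model, topic_id):
    # Runs: UPDATE ... SET views_count = views_count + 1 WHERE id = %s
    topic_model._default_manager.filter(id=topic_id).update(
        views_count=F('views_count') + 1,
    )

# The racy alternative it avoids (two concurrent requests can both read the
# same value, and one increment is lost):
#   topic = topic_model._default_manager.get(id=topic_id)
#   topic.views_count += 1
#   topic.save()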
9,415
ellmetha/django-machina
machina/apps/forum_conversation/views.py
TopicView.get_topic
python
def get_topic(self):
    """ Returns the topic to consider. """
    if not hasattr(self, 'topic'):
        self.topic = get_object_or_404(
            Topic.objects.select_related('forum').all(),
            pk=self.kwargs['pk'],
        )
    return self.topic
Returns the topic to consider.
89ac083c1eaf1cfdeae6686ee094cc86362e8c69
https://github.com/ellmetha/django-machina/blob/89ac083c1eaf1cfdeae6686ee094cc86362e8c69/machina/apps/forum_conversation/views.py#L73-L79
9,416
ellmetha/django-machina
machina/apps/forum_conversation/views.py
BasePostFormView.init_attachment_cache
python
def init_attachment_cache(self):
    """ Initializes the attachment cache for the current view. """
    if self.request.method == 'GET':
        # Invalidates previous attachments
        attachments_cache.delete(self.get_attachments_cache_key(self.request))
        return

    # Try to restore previously uploaded attachments if applicable
    attachments_cache_key = self.get_attachments_cache_key(self.request)
    restored_attachments_dict = attachments_cache.get(attachments_cache_key)
    if restored_attachments_dict:
        restored_attachments_dict.update(self.request.FILES)
        self.request._files = restored_attachments_dict

    # Updates the attachment cache if files are available
    if self.request.FILES:
        attachments_cache.set(attachments_cache_key, self.request.FILES)
Initializes the attachment cache for the current view.
89ac083c1eaf1cfdeae6686ee094cc86362e8c69
https://github.com/ellmetha/django-machina/blob/89ac083c1eaf1cfdeae6686ee094cc86362e8c69/machina/apps/forum_conversation/views.py#L179-L195
9,417
ellmetha/django-machina
machina/apps/forum_conversation/views.py
BasePostFormView.get_post_form_kwargs
python
def get_post_form_kwargs(self):
    """ Returns the keyword arguments for instantiating the post form. """
    kwargs = {
        'user': self.request.user,
        'forum': self.get_forum(),
        'topic': self.get_topic(),
    }

    post = self.get_post()
    if post:
        kwargs.update({'instance': post})

    if self.request.method in ('POST', 'PUT'):
        kwargs.update({
            'data': self.request.POST,
            'files': self.request.FILES,
        })

    return kwargs
Returns the keyword arguments for instantiating the post form.
89ac083c1eaf1cfdeae6686ee094cc86362e8c69
https://github.com/ellmetha/django-machina/blob/89ac083c1eaf1cfdeae6686ee094cc86362e8c69/machina/apps/forum_conversation/views.py#L209-L226
9,418
ellmetha/django-machina
machina/apps/forum_conversation/views.py
BasePostFormView.get_attachment_formset
python
def get_attachment_formset(self, formset_class):
    """ Returns an instance of the attachment formset to be used in the view. """
    if (
        self.request.forum_permission_handler.can_attach_files(
            self.get_forum(), self.request.user,
        )
    ):
        return formset_class(**self.get_attachment_formset_kwargs())
Returns an instance of the attachment formset to be used in the view.
89ac083c1eaf1cfdeae6686ee094cc86362e8c69
https://github.com/ellmetha/django-machina/blob/89ac083c1eaf1cfdeae6686ee094cc86362e8c69/machina/apps/forum_conversation/views.py#L228-L235
9,419
ellmetha/django-machina
machina/apps/forum_conversation/views.py
BasePostFormView.get_attachment_formset_kwargs
python
def get_attachment_formset_kwargs(self):
    """ Returns the keyword arguments for instantiating the attachment formset. """
    kwargs = {
        'prefix': 'attachment',
    }

    if self.request.method in ('POST', 'PUT'):
        kwargs.update({
            'data': self.request.POST,
            'files': self.request.FILES,
        })
    else:
        post = self.get_post()
        attachment_queryset = Attachment.objects.filter(post=post)
        kwargs.update({
            'queryset': attachment_queryset,
        })

    return kwargs
Returns the keyword arguments for instantiating the attachment formset.
89ac083c1eaf1cfdeae6686ee094cc86362e8c69
https://github.com/ellmetha/django-machina/blob/89ac083c1eaf1cfdeae6686ee094cc86362e8c69/machina/apps/forum_conversation/views.py#L241-L258
9,420
ellmetha/django-machina
machina/apps/forum_conversation/views.py
BasePostFormView.get_forum
python
def get_forum(self):
    """ Returns the considered forum. """
    pk = self.kwargs.get(self.forum_pk_url_kwarg, None)
    if not pk:  # pragma: no cover
        # This should never happen
        return
    if not hasattr(self, '_forum'):
        self._forum = get_object_or_404(Forum, pk=pk)
    return self._forum
Returns the considered forum.
89ac083c1eaf1cfdeae6686ee094cc86362e8c69
https://github.com/ellmetha/django-machina/blob/89ac083c1eaf1cfdeae6686ee094cc86362e8c69/machina/apps/forum_conversation/views.py#L295-L303
9,421
ellmetha/django-machina
machina/apps/forum_conversation/views.py
BasePostFormView.get_topic
python
def get_topic(self):
    """ Returns the considered topic if applicable. """
    pk = self.kwargs.get(self.topic_pk_url_kwarg, None)
    if not pk:
        return
    if not hasattr(self, '_topic'):
        self._topic = get_object_or_404(Topic, pk=pk)
    return self._topic
Returns the considered topic if applicable.
89ac083c1eaf1cfdeae6686ee094cc86362e8c69
https://github.com/ellmetha/django-machina/blob/89ac083c1eaf1cfdeae6686ee094cc86362e8c69/machina/apps/forum_conversation/views.py#L305-L312
9,422
ellmetha/django-machina
machina/apps/forum_conversation/views.py
BasePostFormView.get_post
python
def get_post(self):
    """ Returns the considered post if applicable. """
    pk = self.kwargs.get(self.post_pk_url_kwarg, None)
    if not pk:
        return
    if not hasattr(self, '_forum_post'):
        self._forum_post = get_object_or_404(Post, pk=pk)
    return self._forum_post
Returns the considered post if applicable.
89ac083c1eaf1cfdeae6686ee094cc86362e8c69
https://github.com/ellmetha/django-machina/blob/89ac083c1eaf1cfdeae6686ee094cc86362e8c69/machina/apps/forum_conversation/views.py#L314-L321
9,423
ellmetha/django-machina
machina/apps/forum_conversation/views.py
BaseTopicFormView.get_poll_option_formset
python
def get_poll_option_formset(self, formset_class):
    """ Returns an instance of the poll option formset to be used in the view. """
    if self.request.forum_permission_handler.can_create_polls(
        self.get_forum(), self.request.user,
    ):
        return formset_class(**self.get_poll_option_formset_kwargs())
Returns an instance of the poll option formset to be used in the view.
89ac083c1eaf1cfdeae6686ee094cc86362e8c69
https://github.com/ellmetha/django-machina/blob/89ac083c1eaf1cfdeae6686ee094cc86362e8c69/machina/apps/forum_conversation/views.py#L448-L453
9,424
ellmetha/django-machina
machina/apps/forum_conversation/views.py
BaseTopicFormView.get_poll_option_formset_kwargs
python
def get_poll_option_formset_kwargs(self):
    """ Returns the keyword arguments for instantiating the poll option formset. """
    kwargs = {
        'prefix': 'poll',
    }

    if self.request.method in ('POST', 'PUT'):
        kwargs.update({
            'data': self.request.POST,
            'files': self.request.FILES,
        })
    else:
        topic = self.get_topic()
        poll_option_queryset = TopicPollOption.objects.filter(poll__topic=topic)
        kwargs.update({
            'queryset': poll_option_queryset,
        })

    return kwargs
Returns the keyword arguments for instantiating the poll option formset.
89ac083c1eaf1cfdeae6686ee094cc86362e8c69
https://github.com/ellmetha/django-machina/blob/89ac083c1eaf1cfdeae6686ee094cc86362e8c69/machina/apps/forum_conversation/views.py#L459-L476
9,425
e1ven/Robohash
robohash/robohash.py
Robohash._remove_exts
python
def _remove_exts(self, string):
    """ Strips any recognized image extension from the string used to create the Robohash. """
    # If the user hasn't disabled it, we will detect image extensions, such as .png, .jpg, etc.
    # We'll remove them from the string before hashing.
    # This ensures that /Bear.png and /Bear.bmp will send back the same image, in different formats.
    if string.lower().endswith(('.png', '.gif', '.jpg', '.bmp', '.jpeg', '.ppm', '.datauri')):
        format = string[string.rfind('.') + 1:len(string)]
        if format.lower() == 'jpg':
            format = 'jpeg'
        self.format = format
        string = string[0:string.rfind('.')]
    return string
Strips any recognized image extension from the string used to create the Robohash.
8dbbf9e69948ae2abc93c27511ef04f90b56c4d3
https://github.com/e1ven/Robohash/blob/8dbbf9e69948ae2abc93c27511ef04f90b56c4d3/robohash/robohash.py#L46-L61
9,426
e1ven/Robohash
robohash/robohash.py
Robohash._get_list_of_files
python
def _get_list_of_files(self, path):
    """ Go through each subdirectory of `path`, and choose one file from each to use in our hash.

    Continue to increase self.iter, so we use a different 'slot' of randomness each time.

    """
    chosen_files = []

    # Get a list of all subdirectories
    directories = []
    for root, dirs, files in natsort.natsorted(os.walk(path, topdown=False)):
        for name in dirs:
            if name[:1] != '.':
                directories.append(os.path.join(root, name))
    directories = natsort.natsorted(directories)

    # Go through each directory in the list, and choose one file from each.
    # Add this file to our master list of robotparts.
    for directory in directories:
        files_in_dir = []
        for imagefile in natsort.natsorted(os.listdir(directory)):
            files_in_dir.append(os.path.join(directory, imagefile))
        files_in_dir = natsort.natsorted(files_in_dir)

        # Use some of our hash bits to choose which file
        element_in_list = self.hasharray[self.iter] % len(files_in_dir)
        chosen_files.append(files_in_dir[element_in_list])
        self.iter += 1

    return chosen_files
Go through each subdirectory of `path`, and choose one file from each to use in our hash. Continue to increase self.iter, so we use a different 'slot' of randomness each time.
8dbbf9e69948ae2abc93c27511ef04f90b56c4d3
https://github.com/e1ven/Robohash/blob/8dbbf9e69948ae2abc93c27511ef04f90b56c4d3/robohash/robohash.py#L85-L113
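Note on the row above: _get_list_of_files depends on self.hasharray and self.iter, which are built elsewhere in the Robohash class and are not part of this row. The sketch below is only a rough, hypothetical illustration of the underlying idea (split a digest into integer "slots", then use each slot to index into a sorted list of choices); it is not Robohash's actual hashing code, and the names and slot count are made up.

# Standalone illustration of hash-driven, deterministic part selection.
import hashlib


def hash_slots(text, slots=11):
    # Split a sha512 hex digest into equal chunks and turn each chunk into an
    # integer, giving several reusable sources of deterministic "randomness".
    digest = hashlib.sha512(text.encode('utf-8')).hexdigest()
    step = len(digest) // slots
    return [int(digest[i * step:(i + 1) * step], 16) for i in range(slots)]


parts = ['body', 'face', 'eyes', 'mouth', 'accessory']
choices = {part: ['a.png', 'b.png', 'c.png'] for part in parts}
slots = hash_slots('example@example.com')

# One file per category, always the same for a given input string.
picked = [choices[part][slots[i] % len(choices[part])] for i, part in enumerate(parts)]
print(picked)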
9,427
e1ven/Robohash
robohash/robohash.py
Robohash.assemble
def assemble(self,roboset=None,color=None,format=None,bgset=None,sizex=300,sizey=300): """ Build our Robot! Returns the robot image itself. """ # Allow users to manually specify a robot 'set' that they like. # Ensure that this is one of the allowed choices, or allow all # If they don't set one, take the first entry from sets above. if roboset == 'any': roboset = self.sets[self.hasharray[1] % len(self.sets) ] elif roboset in self.sets: roboset = roboset else: roboset = self.sets[0] # Only set1 is setup to be color-seletable. The others don't have enough pieces in various colors. # This could/should probably be expanded at some point.. # Right now, this feature is almost never used. ( It was < 44 requests this year, out of 78M reqs ) if roboset == 'set1': if color in self.colors: roboset = 'set1/' + color else: randomcolor = self.colors[self.hasharray[0] % len(self.colors) ] roboset = 'set1/' + randomcolor # If they specified a background, ensure it's legal, then give it to them. if bgset in self.bgsets: bgset = bgset elif bgset == 'any': bgset = self.bgsets[ self.hasharray[2] % len(self.bgsets) ] # If we set a format based on extension earlier, use that. Otherwise, PNG. if format is None: format = self.format # Each directory in our set represents one piece of the Robot, such as the eyes, nose, mouth, etc. # Each directory is named with two numbers - The number before the # is the sort order. # This ensures that they always go in the same order when choosing pieces, regardless of OS. # The second number is the order in which to apply the pieces. # For instance, the head has to go down BEFORE the eyes, or the eyes would be hidden. # First, we'll get a list of parts of our robot. roboparts = self._get_list_of_files(self.resourcedir + 'sets/' + roboset) # Now that we've sorted them by the first number, we need to sort each sub-category by the second. roboparts.sort(key=lambda x: x.split("#")[1]) if bgset is not None: bglist = [] backgrounds = natsort.natsorted(os.listdir(self.resourcedir + 'backgrounds/' + bgset)) backgrounds.sort() for ls in backgrounds: if not ls.startswith("."): bglist.append(self.resourcedir + 'backgrounds/' + bgset + "/" + ls) background = bglist[self.hasharray[3] % len(bglist)] # Paste in each piece of the Robot. roboimg = Image.open(roboparts[0]) roboimg = roboimg.resize((1024,1024)) for png in roboparts: img = Image.open(png) img = img.resize((1024,1024)) roboimg.paste(img,(0,0),img) # If we're a BMP, flatten the image. if format == 'bmp': #Flatten bmps r, g, b, a = roboimg.split() roboimg = Image.merge("RGB", (r, g, b)) if bgset is not None: bg = Image.open(background) bg = bg.resize((1024,1024)) bg.paste(roboimg,(0,0),roboimg) roboimg = bg self.img = roboimg.resize((sizex,sizey),Image.ANTIALIAS) self.format = format
python
def assemble(self,roboset=None,color=None,format=None,bgset=None,sizex=300,sizey=300): """ Build our Robot! Returns the robot image itself. """ # Allow users to manually specify a robot 'set' that they like. # Ensure that this is one of the allowed choices, or allow all # If they don't set one, take the first entry from sets above. if roboset == 'any': roboset = self.sets[self.hasharray[1] % len(self.sets) ] elif roboset in self.sets: roboset = roboset else: roboset = self.sets[0] # Only set1 is setup to be color-seletable. The others don't have enough pieces in various colors. # This could/should probably be expanded at some point.. # Right now, this feature is almost never used. ( It was < 44 requests this year, out of 78M reqs ) if roboset == 'set1': if color in self.colors: roboset = 'set1/' + color else: randomcolor = self.colors[self.hasharray[0] % len(self.colors) ] roboset = 'set1/' + randomcolor # If they specified a background, ensure it's legal, then give it to them. if bgset in self.bgsets: bgset = bgset elif bgset == 'any': bgset = self.bgsets[ self.hasharray[2] % len(self.bgsets) ] # If we set a format based on extension earlier, use that. Otherwise, PNG. if format is None: format = self.format # Each directory in our set represents one piece of the Robot, such as the eyes, nose, mouth, etc. # Each directory is named with two numbers - The number before the # is the sort order. # This ensures that they always go in the same order when choosing pieces, regardless of OS. # The second number is the order in which to apply the pieces. # For instance, the head has to go down BEFORE the eyes, or the eyes would be hidden. # First, we'll get a list of parts of our robot. roboparts = self._get_list_of_files(self.resourcedir + 'sets/' + roboset) # Now that we've sorted them by the first number, we need to sort each sub-category by the second. roboparts.sort(key=lambda x: x.split("#")[1]) if bgset is not None: bglist = [] backgrounds = natsort.natsorted(os.listdir(self.resourcedir + 'backgrounds/' + bgset)) backgrounds.sort() for ls in backgrounds: if not ls.startswith("."): bglist.append(self.resourcedir + 'backgrounds/' + bgset + "/" + ls) background = bglist[self.hasharray[3] % len(bglist)] # Paste in each piece of the Robot. roboimg = Image.open(roboparts[0]) roboimg = roboimg.resize((1024,1024)) for png in roboparts: img = Image.open(png) img = img.resize((1024,1024)) roboimg.paste(img,(0,0),img) # If we're a BMP, flatten the image. if format == 'bmp': #Flatten bmps r, g, b, a = roboimg.split() roboimg = Image.merge("RGB", (r, g, b)) if bgset is not None: bg = Image.open(background) bg = bg.resize((1024,1024)) bg.paste(roboimg,(0,0),roboimg) roboimg = bg self.img = roboimg.resize((sizex,sizey),Image.ANTIALIAS) self.format = format
[ "def", "assemble", "(", "self", ",", "roboset", "=", "None", ",", "color", "=", "None", ",", "format", "=", "None", ",", "bgset", "=", "None", ",", "sizex", "=", "300", ",", "sizey", "=", "300", ")", ":", "# Allow users to manually specify a robot 'set' that they like.", "# Ensure that this is one of the allowed choices, or allow all", "# If they don't set one, take the first entry from sets above.", "if", "roboset", "==", "'any'", ":", "roboset", "=", "self", ".", "sets", "[", "self", ".", "hasharray", "[", "1", "]", "%", "len", "(", "self", ".", "sets", ")", "]", "elif", "roboset", "in", "self", ".", "sets", ":", "roboset", "=", "roboset", "else", ":", "roboset", "=", "self", ".", "sets", "[", "0", "]", "# Only set1 is setup to be color-seletable. The others don't have enough pieces in various colors.", "# This could/should probably be expanded at some point..", "# Right now, this feature is almost never used. ( It was < 44 requests this year, out of 78M reqs )", "if", "roboset", "==", "'set1'", ":", "if", "color", "in", "self", ".", "colors", ":", "roboset", "=", "'set1/'", "+", "color", "else", ":", "randomcolor", "=", "self", ".", "colors", "[", "self", ".", "hasharray", "[", "0", "]", "%", "len", "(", "self", ".", "colors", ")", "]", "roboset", "=", "'set1/'", "+", "randomcolor", "# If they specified a background, ensure it's legal, then give it to them.", "if", "bgset", "in", "self", ".", "bgsets", ":", "bgset", "=", "bgset", "elif", "bgset", "==", "'any'", ":", "bgset", "=", "self", ".", "bgsets", "[", "self", ".", "hasharray", "[", "2", "]", "%", "len", "(", "self", ".", "bgsets", ")", "]", "# If we set a format based on extension earlier, use that. Otherwise, PNG.", "if", "format", "is", "None", ":", "format", "=", "self", ".", "format", "# Each directory in our set represents one piece of the Robot, such as the eyes, nose, mouth, etc.", "# Each directory is named with two numbers - The number before the # is the sort order.", "# This ensures that they always go in the same order when choosing pieces, regardless of OS.", "# The second number is the order in which to apply the pieces.", "# For instance, the head has to go down BEFORE the eyes, or the eyes would be hidden.", "# First, we'll get a list of parts of our robot.", "roboparts", "=", "self", ".", "_get_list_of_files", "(", "self", ".", "resourcedir", "+", "'sets/'", "+", "roboset", ")", "# Now that we've sorted them by the first number, we need to sort each sub-category by the second.", "roboparts", ".", "sort", "(", "key", "=", "lambda", "x", ":", "x", ".", "split", "(", "\"#\"", ")", "[", "1", "]", ")", "if", "bgset", "is", "not", "None", ":", "bglist", "=", "[", "]", "backgrounds", "=", "natsort", ".", "natsorted", "(", "os", ".", "listdir", "(", "self", ".", "resourcedir", "+", "'backgrounds/'", "+", "bgset", ")", ")", "backgrounds", ".", "sort", "(", ")", "for", "ls", "in", "backgrounds", ":", "if", "not", "ls", ".", "startswith", "(", "\".\"", ")", ":", "bglist", ".", "append", "(", "self", ".", "resourcedir", "+", "'backgrounds/'", "+", "bgset", "+", "\"/\"", "+", "ls", ")", "background", "=", "bglist", "[", "self", ".", "hasharray", "[", "3", "]", "%", "len", "(", "bglist", ")", "]", "# Paste in each piece of the Robot.", "roboimg", "=", "Image", ".", "open", "(", "roboparts", "[", "0", "]", ")", "roboimg", "=", "roboimg", ".", "resize", "(", "(", "1024", ",", "1024", ")", ")", "for", "png", "in", "roboparts", ":", "img", "=", "Image", ".", "open", "(", "png", ")", "img", "=", "img", ".", "resize", "(", "(", 
"1024", ",", "1024", ")", ")", "roboimg", ".", "paste", "(", "img", ",", "(", "0", ",", "0", ")", ",", "img", ")", "# If we're a BMP, flatten the image.", "if", "format", "==", "'bmp'", ":", "#Flatten bmps", "r", ",", "g", ",", "b", ",", "a", "=", "roboimg", ".", "split", "(", ")", "roboimg", "=", "Image", ".", "merge", "(", "\"RGB\"", ",", "(", "r", ",", "g", ",", "b", ")", ")", "if", "bgset", "is", "not", "None", ":", "bg", "=", "Image", ".", "open", "(", "background", ")", "bg", "=", "bg", ".", "resize", "(", "(", "1024", ",", "1024", ")", ")", "bg", ".", "paste", "(", "roboimg", ",", "(", "0", ",", "0", ")", ",", "roboimg", ")", "roboimg", "=", "bg", "self", ".", "img", "=", "roboimg", ".", "resize", "(", "(", "sizex", ",", "sizey", ")", ",", "Image", ".", "ANTIALIAS", ")", "self", ".", "format", "=", "format" ]
Build our Robot! Returns the robot image itself.
[ "Build", "our", "Robot!", "Returns", "the", "robot", "image", "itself", "." ]
8dbbf9e69948ae2abc93c27511ef04f90b56c4d3
https://github.com/e1ven/Robohash/blob/8dbbf9e69948ae2abc93c27511ef04f90b56c4d3/robohash/robohash.py#L115-L198
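For context, a minimal usage sketch of the API documented in the record above. It follows the upstream Robohash README rather than anything stored in this record, and the output filename is a hypothetical placeholder.

from robohash import Robohash

rh = Robohash("any text you want to hash")
rh.assemble(roboset='any', sizex=300, sizey=300)  # builds rh.img via the method above
with open("robot.png", "wb") as f:                # "robot.png" is a placeholder path
    rh.img.save(f, format="png")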
9,428
tensorflow/skflow
scripts/docs/docs.py
collect_members
def collect_members(module_to_name): """Collect all symbols from a list of modules. Args: module_to_name: Dictionary mapping modules to short names. Returns: Dictionary mapping name to (fullname, member) pairs. """ members = {} for module, module_name in module_to_name.items(): all_names = getattr(module, "__all__", None) for name, member in inspect.getmembers(module): if ((inspect.isfunction(member) or inspect.isclass(member)) and not _always_drop_symbol_re.match(name) and (all_names is None or name in all_names)): fullname = '%s.%s' % (module_name, name) if name in members: other_fullname, other_member = members[name] if member is not other_member: raise RuntimeError("Short name collision between %s and %s" % (fullname, other_fullname)) if len(fullname) == len(other_fullname): raise RuntimeError("Can't decide whether to use %s or %s for %s: " "both full names have length %d" % (fullname, other_fullname, name, len(fullname))) if len(fullname) > len(other_fullname): continue # Use the shorter full name members[name] = fullname, member return members
python
def collect_members(module_to_name): """Collect all symbols from a list of modules. Args: module_to_name: Dictionary mapping modules to short names. Returns: Dictionary mapping name to (fullname, member) pairs. """ members = {} for module, module_name in module_to_name.items(): all_names = getattr(module, "__all__", None) for name, member in inspect.getmembers(module): if ((inspect.isfunction(member) or inspect.isclass(member)) and not _always_drop_symbol_re.match(name) and (all_names is None or name in all_names)): fullname = '%s.%s' % (module_name, name) if name in members: other_fullname, other_member = members[name] if member is not other_member: raise RuntimeError("Short name collision between %s and %s" % (fullname, other_fullname)) if len(fullname) == len(other_fullname): raise RuntimeError("Can't decide whether to use %s or %s for %s: " "both full names have length %d" % (fullname, other_fullname, name, len(fullname))) if len(fullname) > len(other_fullname): continue # Use the shorter full name members[name] = fullname, member return members
[ "def", "collect_members", "(", "module_to_name", ")", ":", "members", "=", "{", "}", "for", "module", ",", "module_name", "in", "module_to_name", ".", "items", "(", ")", ":", "all_names", "=", "getattr", "(", "module", ",", "\"__all__\"", ",", "None", ")", "for", "name", ",", "member", "in", "inspect", ".", "getmembers", "(", "module", ")", ":", "if", "(", "(", "inspect", ".", "isfunction", "(", "member", ")", "or", "inspect", ".", "isclass", "(", "member", ")", ")", "and", "not", "_always_drop_symbol_re", ".", "match", "(", "name", ")", "and", "(", "all_names", "is", "None", "or", "name", "in", "all_names", ")", ")", ":", "fullname", "=", "'%s.%s'", "%", "(", "module_name", ",", "name", ")", "if", "name", "in", "members", ":", "other_fullname", ",", "other_member", "=", "members", "[", "name", "]", "if", "member", "is", "not", "other_member", ":", "raise", "RuntimeError", "(", "\"Short name collision between %s and %s\"", "%", "(", "fullname", ",", "other_fullname", ")", ")", "if", "len", "(", "fullname", ")", "==", "len", "(", "other_fullname", ")", ":", "raise", "RuntimeError", "(", "\"Can't decide whether to use %s or %s for %s: \"", "\"both full names have length %d\"", "%", "(", "fullname", ",", "other_fullname", ",", "name", ",", "len", "(", "fullname", ")", ")", ")", "if", "len", "(", "fullname", ")", ">", "len", "(", "other_fullname", ")", ":", "continue", "# Use the shorter full name", "members", "[", "name", "]", "=", "fullname", ",", "member", "return", "members" ]
Collect all symbols from a list of modules. Args: module_to_name: Dictionary mapping modules to short names. Returns: Dictionary mapping name to (fullname, member) pairs.
[ "Collect", "all", "symbols", "from", "a", "list", "of", "modules", "." ]
f8da498a1abb7562f57dfc7010941578103061b6
https://github.com/tensorflow/skflow/blob/f8da498a1abb7562f57dfc7010941578103061b6/scripts/docs/docs.py#L103-L132
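To make the collision handling in collect_members concrete, a hedged sketch; it assumes the docs script above is importable as `docs`, and uses the standard-library json package purely as an illustration.

import json, json.decoder
from docs import collect_members  # hypothetical import path for the script above

module_to_name = {json: "json", json.decoder: "json.decoder"}
members = collect_members(module_to_name)
# "JSONDecoder" is exported by both modules but is the same class object,
# so no collision is raised and the shorter full name wins:
#   members["JSONDecoder"][0] == "json.JSONDecoder"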
9,429
tensorflow/skflow
scripts/docs/docs.py
_get_anchor
def _get_anchor(module_to_name, fullname): """Turn a full member name into an anchor. Args: module_to_name: Dictionary mapping modules to short names. fullname: Fully qualified name of symbol. Returns: HTML anchor string. The longest module name prefix of fullname is removed to make the anchor. Raises: ValueError: If fullname uses characters invalid in an anchor. """ if not _anchor_re.match(fullname): raise ValueError("'%s' is not a valid anchor" % fullname) anchor = fullname for module_name in module_to_name.values(): if fullname.startswith(module_name + "."): rest = fullname[len(module_name)+1:] # Use this prefix iff it is longer than any found before if len(anchor) > len(rest): anchor = rest return anchor
python
def _get_anchor(module_to_name, fullname): """Turn a full member name into an anchor. Args: module_to_name: Dictionary mapping modules to short names. fullname: Fully qualified name of symbol. Returns: HTML anchor string. The longest module name prefix of fullname is removed to make the anchor. Raises: ValueError: If fullname uses characters invalid in an anchor. """ if not _anchor_re.match(fullname): raise ValueError("'%s' is not a valid anchor" % fullname) anchor = fullname for module_name in module_to_name.values(): if fullname.startswith(module_name + "."): rest = fullname[len(module_name)+1:] # Use this prefix iff it is longer than any found before if len(anchor) > len(rest): anchor = rest return anchor
[ "def", "_get_anchor", "(", "module_to_name", ",", "fullname", ")", ":", "if", "not", "_anchor_re", ".", "match", "(", "fullname", ")", ":", "raise", "ValueError", "(", "\"'%s' is not a valid anchor\"", "%", "fullname", ")", "anchor", "=", "fullname", "for", "module_name", "in", "module_to_name", ".", "values", "(", ")", ":", "if", "fullname", ".", "startswith", "(", "module_name", "+", "\".\"", ")", ":", "rest", "=", "fullname", "[", "len", "(", "module_name", ")", "+", "1", ":", "]", "# Use this prefix iff it is longer than any found before", "if", "len", "(", "anchor", ")", ">", "len", "(", "rest", ")", ":", "anchor", "=", "rest", "return", "anchor" ]
Turn a full member name into an anchor. Args: module_to_name: Dictionary mapping modules to short names. fullname: Fully qualified name of symbol. Returns: HTML anchor string. The longest module name prefix of fullname is removed to make the anchor. Raises: ValueError: If fullname uses characters invalid in an anchor.
[ "Turn", "a", "full", "member", "name", "into", "an", "anchor", "." ]
f8da498a1abb7562f57dfc7010941578103061b6
https://github.com/tensorflow/skflow/blob/f8da498a1abb7562f57dfc7010941578103061b6/scripts/docs/docs.py#L135-L158
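A small worked example of the anchor computation above, under the same hypothetical import assumption; the module names are illustrative only.

import json, json.decoder
from docs import _get_anchor  # hypothetical import path for the script above

module_to_name = {json: "json", json.decoder: "json.decoder"}
# The longest matching module-name prefix is stripped from the full name:
assert _get_anchor(module_to_name, "json.decoder.JSONDecoder") == "JSONDecoder"
assert _get_anchor(module_to_name, "json.dumps") == "dumps"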
9,430
tensorflow/skflow
scripts/docs/docs.py
write_libraries
def write_libraries(dir, libraries): """Write a list of libraries to disk. Args: dir: Output directory. libraries: List of (filename, library) pairs. """ files = [open(os.path.join(dir, k), "w") for k, _ in libraries] # Document mentioned symbols for all libraries for f, (_, v) in zip(files, libraries): v.write_markdown_to_file(f) # Document symbols that no library mentioned. We do this after writing # out all libraries so that earlier libraries know what later libraries # documented. for f, (_, v) in zip(files, libraries): v.write_other_members(f) f.close()
python
def write_libraries(dir, libraries): """Write a list of libraries to disk. Args: dir: Output directory. libraries: List of (filename, library) pairs. """ files = [open(os.path.join(dir, k), "w") for k, _ in libraries] # Document mentioned symbols for all libraries for f, (_, v) in zip(files, libraries): v.write_markdown_to_file(f) # Document symbols that no library mentioned. We do this after writing # out all libraries so that earlier libraries know what later libraries # documented. for f, (_, v) in zip(files, libraries): v.write_other_members(f) f.close()
[ "def", "write_libraries", "(", "dir", ",", "libraries", ")", ":", "files", "=", "[", "open", "(", "os", ".", "path", ".", "join", "(", "dir", ",", "k", ")", ",", "\"w\"", ")", "for", "k", ",", "_", "in", "libraries", "]", "# Document mentioned symbols for all libraries", "for", "f", ",", "(", "_", ",", "v", ")", "in", "zip", "(", "files", ",", "libraries", ")", ":", "v", ".", "write_markdown_to_file", "(", "f", ")", "# Document symbols that no library mentioned. We do this after writing", "# out all libraries so that earlier libraries know what later libraries", "# documented.", "for", "f", ",", "(", "_", ",", "v", ")", "in", "zip", "(", "files", ",", "libraries", ")", ":", "v", ".", "write_other_members", "(", "f", ")", "f", ".", "close", "(", ")" ]
Write a list of libraries to disk. Args: dir: Output directory. libraries: List of (filename, library) pairs.
[ "Write", "a", "list", "of", "libraries", "to", "disk", "." ]
f8da498a1abb7562f57dfc7010941578103061b6
https://github.com/tensorflow/skflow/blob/f8da498a1abb7562f57dfc7010941578103061b6/scripts/docs/docs.py#L514-L530
9,431
tensorflow/skflow
scripts/docs/docs.py
Index.write_markdown_to_file
def write_markdown_to_file(self, f): """Writes this index to file `f`. The output is formatted as an unordered list. Each list element contains the title of the library, followed by a list of symbols in that library hyperlinked to the corresponding anchor in that library. Args: f: The output file. """ print("---", file=f) print("---", file=f) print("<!-- This file is machine generated: DO NOT EDIT! -->", file=f) print("", file=f) print("# TensorFlow Python reference documentation", file=f) print("", file=f) fullname_f = lambda name: self._members[name][0] anchor_f = lambda name: _get_anchor(self._module_to_name, fullname_f(name)) for filename, library in self._filename_to_library_map: sorted_names = sorted(library.mentioned, key=lambda x: (str.lower(x), x)) member_names = [n for n in sorted_names if n in self._members] # TODO: This is a hack that should be removed as soon as the website code # allows it. full_filename = self._path_prefix + filename links = ["[`%s`](%s#%s)" % (name, full_filename[:-3], anchor_f(name)) for name in member_names] if links: print("* **[%s](%s)**:" % (library.title, full_filename[:-3]), file=f) for link in links: print(" * %s" % link, file=f) print("", file=f)
python
def write_markdown_to_file(self, f): """Writes this index to file `f`. The output is formatted as an unordered list. Each list element contains the title of the library, followed by a list of symbols in that library hyperlinked to the corresponding anchor in that library. Args: f: The output file. """ print("---", file=f) print("---", file=f) print("<!-- This file is machine generated: DO NOT EDIT! -->", file=f) print("", file=f) print("# TensorFlow Python reference documentation", file=f) print("", file=f) fullname_f = lambda name: self._members[name][0] anchor_f = lambda name: _get_anchor(self._module_to_name, fullname_f(name)) for filename, library in self._filename_to_library_map: sorted_names = sorted(library.mentioned, key=lambda x: (str.lower(x), x)) member_names = [n for n in sorted_names if n in self._members] # TODO: This is a hack that should be removed as soon as the website code # allows it. full_filename = self._path_prefix + filename links = ["[`%s`](%s#%s)" % (name, full_filename[:-3], anchor_f(name)) for name in member_names] if links: print("* **[%s](%s)**:" % (library.title, full_filename[:-3]), file=f) for link in links: print(" * %s" % link, file=f) print("", file=f)
[ "def", "write_markdown_to_file", "(", "self", ",", "f", ")", ":", "print", "(", "\"---\"", ",", "file", "=", "f", ")", "print", "(", "\"---\"", ",", "file", "=", "f", ")", "print", "(", "\"<!-- This file is machine generated: DO NOT EDIT! -->\"", ",", "file", "=", "f", ")", "print", "(", "\"\"", ",", "file", "=", "f", ")", "print", "(", "\"# TensorFlow Python reference documentation\"", ",", "file", "=", "f", ")", "print", "(", "\"\"", ",", "file", "=", "f", ")", "fullname_f", "=", "lambda", "name", ":", "self", ".", "_members", "[", "name", "]", "[", "0", "]", "anchor_f", "=", "lambda", "name", ":", "_get_anchor", "(", "self", ".", "_module_to_name", ",", "fullname_f", "(", "name", ")", ")", "for", "filename", ",", "library", "in", "self", ".", "_filename_to_library_map", ":", "sorted_names", "=", "sorted", "(", "library", ".", "mentioned", ",", "key", "=", "lambda", "x", ":", "(", "str", ".", "lower", "(", "x", ")", ",", "x", ")", ")", "member_names", "=", "[", "n", "for", "n", "in", "sorted_names", "if", "n", "in", "self", ".", "_members", "]", "# TODO: This is a hack that should be removed as soon as the website code", "# allows it.", "full_filename", "=", "self", ".", "_path_prefix", "+", "filename", "links", "=", "[", "\"[`%s`](%s#%s)\"", "%", "(", "name", ",", "full_filename", "[", ":", "-", "3", "]", ",", "anchor_f", "(", "name", ")", ")", "for", "name", "in", "member_names", "]", "if", "links", ":", "print", "(", "\"* **[%s](%s)**:\"", "%", "(", "library", ".", "title", ",", "full_filename", "[", ":", "-", "3", "]", ")", ",", "file", "=", "f", ")", "for", "link", "in", "links", ":", "print", "(", "\" * %s\"", "%", "link", ",", "file", "=", "f", ")", "print", "(", "\"\"", ",", "file", "=", "f", ")" ]
Writes this index to file `f`. The output is formatted as an unordered list. Each list element contains the title of the library, followed by a list of symbols in that library hyperlinked to the corresponding anchor in that library. Args: f: The output file.
[ "Writes", "this", "index", "to", "file", "f", "." ]
f8da498a1abb7562f57dfc7010941578103061b6
https://github.com/tensorflow/skflow/blob/f8da498a1abb7562f57dfc7010941578103061b6/scripts/docs/docs.py#L68-L100
9,432
tensorflow/skflow
scripts/docs/docs.py
Library._should_include_member
def _should_include_member(self, name, member): """Returns True if this member should be included in the document.""" # Always exclude symbols matching _always_drop_symbol_re. if _always_drop_symbol_re.match(name): return False # Finally, exclude any specifically-excluded symbols. if name in self._exclude_symbols: return False return True
python
def _should_include_member(self, name, member): """Returns True if this member should be included in the document.""" # Always exclude symbols matching _always_drop_symbol_re. if _always_drop_symbol_re.match(name): return False # Finally, exclude any specifically-excluded symbols. if name in self._exclude_symbols: return False return True
[ "def", "_should_include_member", "(", "self", ",", "name", ",", "member", ")", ":", "# Always exclude symbols matching _always_drop_symbol_re.", "if", "_always_drop_symbol_re", ".", "match", "(", "name", ")", ":", "return", "False", "# Finally, exclude any specifically-excluded symbols.", "if", "name", "in", "self", ".", "_exclude_symbols", ":", "return", "False", "return", "True" ]
Returns True if this member should be included in the document.
[ "Returns", "True", "if", "this", "member", "should", "be", "included", "in", "the", "document", "." ]
f8da498a1abb7562f57dfc7010941578103061b6
https://github.com/tensorflow/skflow/blob/f8da498a1abb7562f57dfc7010941578103061b6/scripts/docs/docs.py#L209-L217
9,433
tensorflow/skflow
scripts/docs/docs.py
Library.get_imported_modules
def get_imported_modules(self, module): """Returns the list of modules imported from `module`.""" for name, member in inspect.getmembers(module): if inspect.ismodule(member): yield name, member
python
def get_imported_modules(self, module): """Returns the list of modules imported from `module`.""" for name, member in inspect.getmembers(module): if inspect.ismodule(member): yield name, member
[ "def", "get_imported_modules", "(", "self", ",", "module", ")", ":", "for", "name", ",", "member", "in", "inspect", ".", "getmembers", "(", "module", ")", ":", "if", "inspect", ".", "ismodule", "(", "member", ")", ":", "yield", "name", ",", "member" ]
Returns the list of modules imported from `module`.
[ "Returns", "the", "list", "of", "modules", "imported", "from", "module", "." ]
f8da498a1abb7562f57dfc7010941578103061b6
https://github.com/tensorflow/skflow/blob/f8da498a1abb7562f57dfc7010941578103061b6/scripts/docs/docs.py#L219-L223
9,434
tensorflow/skflow
scripts/docs/docs.py
Library.get_class_members
def get_class_members(self, cls_name, cls): """Returns the list of class members to document in `cls`. This function filters the class member to ONLY return those defined by the class. It drops the inherited ones. Args: cls_name: Qualified name of `cls`. cls: An inspect object of type 'class'. Yields: name, member tuples. """ for name, member in inspect.getmembers(cls): # Only show methods and properties presently. In Python 3, # methods register as isfunction. is_method = inspect.ismethod(member) or inspect.isfunction(member) if not (is_method or isinstance(member, property)): continue if ((is_method and member.__name__ == "__init__") or self._should_include_member(name, member)): yield name, ("%s.%s" % (cls_name, name), member)
python
def get_class_members(self, cls_name, cls): """Returns the list of class members to document in `cls`. This function filters the class member to ONLY return those defined by the class. It drops the inherited ones. Args: cls_name: Qualified name of `cls`. cls: An inspect object of type 'class'. Yields: name, member tuples. """ for name, member in inspect.getmembers(cls): # Only show methods and properties presently. In Python 3, # methods register as isfunction. is_method = inspect.ismethod(member) or inspect.isfunction(member) if not (is_method or isinstance(member, property)): continue if ((is_method and member.__name__ == "__init__") or self._should_include_member(name, member)): yield name, ("%s.%s" % (cls_name, name), member)
[ "def", "get_class_members", "(", "self", ",", "cls_name", ",", "cls", ")", ":", "for", "name", ",", "member", "in", "inspect", ".", "getmembers", "(", "cls", ")", ":", "# Only show methods and properties presently. In Python 3,", "# methods register as isfunction.", "is_method", "=", "inspect", ".", "ismethod", "(", "member", ")", "or", "inspect", ".", "isfunction", "(", "member", ")", "if", "not", "(", "is_method", "or", "isinstance", "(", "member", ",", "property", ")", ")", ":", "continue", "if", "(", "(", "is_method", "and", "member", ".", "__name__", "==", "\"__init__\"", ")", "or", "self", ".", "_should_include_member", "(", "name", ",", "member", ")", ")", ":", "yield", "name", ",", "(", "\"%s.%s\"", "%", "(", "cls_name", ",", "name", ")", ",", "member", ")" ]
Returns the list of class members to document in `cls`. This function filters the class member to ONLY return those defined by the class. It drops the inherited ones. Args: cls_name: Qualified name of `cls`. cls: An inspect object of type 'class'. Yields: name, member tuples.
[ "Returns", "the", "list", "of", "class", "members", "to", "document", "in", "cls", "." ]
f8da498a1abb7562f57dfc7010941578103061b6
https://github.com/tensorflow/skflow/blob/f8da498a1abb7562f57dfc7010941578103061b6/scripts/docs/docs.py#L225-L246
9,435
tensorflow/skflow
scripts/docs/docs.py
Library._generate_signature_for_function
def _generate_signature_for_function(self, func): """Given a function, returns a string representing its args.""" args_list = [] argspec = inspect.getargspec(func) first_arg_with_default = ( len(argspec.args or []) - len(argspec.defaults or [])) for arg in argspec.args[:first_arg_with_default]: if arg == "self": # Python documentation typically skips `self` when printing method # signatures. continue args_list.append(arg) # TODO(mrry): This is a workaround for documenting signature of # functions that have the @contextlib.contextmanager decorator. # We should do something better. if argspec.varargs == "args" and argspec.keywords == "kwds": original_func = func.__closure__[0].cell_contents return self._generate_signature_for_function(original_func) if argspec.defaults: for arg, default in zip( argspec.args[first_arg_with_default:], argspec.defaults): if callable(default): args_list.append("%s=%s" % (arg, default.__name__)) else: args_list.append("%s=%r" % (arg, default)) if argspec.varargs: args_list.append("*" + argspec.varargs) if argspec.keywords: args_list.append("**" + argspec.keywords) return "(" + ", ".join(args_list) + ")"
python
def _generate_signature_for_function(self, func): """Given a function, returns a string representing its args.""" args_list = [] argspec = inspect.getargspec(func) first_arg_with_default = ( len(argspec.args or []) - len(argspec.defaults or [])) for arg in argspec.args[:first_arg_with_default]: if arg == "self": # Python documentation typically skips `self` when printing method # signatures. continue args_list.append(arg) # TODO(mrry): This is a workaround for documenting signature of # functions that have the @contextlib.contextmanager decorator. # We should do something better. if argspec.varargs == "args" and argspec.keywords == "kwds": original_func = func.__closure__[0].cell_contents return self._generate_signature_for_function(original_func) if argspec.defaults: for arg, default in zip( argspec.args[first_arg_with_default:], argspec.defaults): if callable(default): args_list.append("%s=%s" % (arg, default.__name__)) else: args_list.append("%s=%r" % (arg, default)) if argspec.varargs: args_list.append("*" + argspec.varargs) if argspec.keywords: args_list.append("**" + argspec.keywords) return "(" + ", ".join(args_list) + ")"
[ "def", "_generate_signature_for_function", "(", "self", ",", "func", ")", ":", "args_list", "=", "[", "]", "argspec", "=", "inspect", ".", "getargspec", "(", "func", ")", "first_arg_with_default", "=", "(", "len", "(", "argspec", ".", "args", "or", "[", "]", ")", "-", "len", "(", "argspec", ".", "defaults", "or", "[", "]", ")", ")", "for", "arg", "in", "argspec", ".", "args", "[", ":", "first_arg_with_default", "]", ":", "if", "arg", "==", "\"self\"", ":", "# Python documentation typically skips `self` when printing method", "# signatures.", "continue", "args_list", ".", "append", "(", "arg", ")", "# TODO(mrry): This is a workaround for documenting signature of", "# functions that have the @contextlib.contextmanager decorator.", "# We should do something better.", "if", "argspec", ".", "varargs", "==", "\"args\"", "and", "argspec", ".", "keywords", "==", "\"kwds\"", ":", "original_func", "=", "func", ".", "__closure__", "[", "0", "]", ".", "cell_contents", "return", "self", ".", "_generate_signature_for_function", "(", "original_func", ")", "if", "argspec", ".", "defaults", ":", "for", "arg", ",", "default", "in", "zip", "(", "argspec", ".", "args", "[", "first_arg_with_default", ":", "]", ",", "argspec", ".", "defaults", ")", ":", "if", "callable", "(", "default", ")", ":", "args_list", ".", "append", "(", "\"%s=%s\"", "%", "(", "arg", ",", "default", ".", "__name__", ")", ")", "else", ":", "args_list", ".", "append", "(", "\"%s=%r\"", "%", "(", "arg", ",", "default", ")", ")", "if", "argspec", ".", "varargs", ":", "args_list", ".", "append", "(", "\"*\"", "+", "argspec", ".", "varargs", ")", "if", "argspec", ".", "keywords", ":", "args_list", ".", "append", "(", "\"**\"", "+", "argspec", ".", "keywords", ")", "return", "\"(\"", "+", "\", \"", ".", "join", "(", "args_list", ")", "+", "\")\"" ]
Given a function, returns a string representing its args.
[ "Given", "a", "function", "returns", "a", "string", "representing", "its", "args", "." ]
f8da498a1abb7562f57dfc7010941578103061b6
https://github.com/tensorflow/skflow/blob/f8da498a1abb7562f57dfc7010941578103061b6/scripts/docs/docs.py#L248-L279
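To clarify what kind of string the signature helper above produces, a small hypothetical example (the function below is not from the repository):

def example(self, x, axis=0, name=None, *args, **kwargs):
    pass

# _generate_signature_for_function(example) would yield roughly
#   "(x, axis=0, name=None, *args, **kwargs)"
# i.e. `self` is dropped, defaults are rendered with repr(), and the
# varargs/keywords names keep their * and ** prefixes.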
9,436
tensorflow/skflow
scripts/docs/docs.py
Library._remove_docstring_indent
def _remove_docstring_indent(self, docstring): """Remove indenting. We follow Python's convention and remove the minimum indent of the lines after the first, see: https://www.python.org/dev/peps/pep-0257/#handling-docstring-indentation preserving relative indentation. Args: docstring: A docstring. Returns: A list of strings, one per line, with the minimum indent stripped. """ docstring = docstring or "" lines = docstring.strip().split("\n") min_indent = len(docstring) for l in lines[1:]: l = l.rstrip() if l: i = 0 while i < len(l) and l[i] == " ": i += 1 if i < min_indent: min_indent = i for i in range(1, len(lines)): l = lines[i].rstrip() if len(l) >= min_indent: l = l[min_indent:] lines[i] = l return lines
python
def _remove_docstring_indent(self, docstring): """Remove indenting. We follow Python's convention and remove the minimum indent of the lines after the first, see: https://www.python.org/dev/peps/pep-0257/#handling-docstring-indentation preserving relative indentation. Args: docstring: A docstring. Returns: A list of strings, one per line, with the minimum indent stripped. """ docstring = docstring or "" lines = docstring.strip().split("\n") min_indent = len(docstring) for l in lines[1:]: l = l.rstrip() if l: i = 0 while i < len(l) and l[i] == " ": i += 1 if i < min_indent: min_indent = i for i in range(1, len(lines)): l = lines[i].rstrip() if len(l) >= min_indent: l = l[min_indent:] lines[i] = l return lines
[ "def", "_remove_docstring_indent", "(", "self", ",", "docstring", ")", ":", "docstring", "=", "docstring", "or", "\"\"", "lines", "=", "docstring", ".", "strip", "(", ")", ".", "split", "(", "\"\\n\"", ")", "min_indent", "=", "len", "(", "docstring", ")", "for", "l", "in", "lines", "[", "1", ":", "]", ":", "l", "=", "l", ".", "rstrip", "(", ")", "if", "l", ":", "i", "=", "0", "while", "i", "<", "len", "(", "l", ")", "and", "l", "[", "i", "]", "==", "\" \"", ":", "i", "+=", "1", "if", "i", "<", "min_indent", ":", "min_indent", "=", "i", "for", "i", "in", "range", "(", "1", ",", "len", "(", "lines", ")", ")", ":", "l", "=", "lines", "[", "i", "]", ".", "rstrip", "(", ")", "if", "len", "(", "l", ")", ">=", "min_indent", ":", "l", "=", "l", "[", "min_indent", ":", "]", "lines", "[", "i", "]", "=", "l", "return", "lines" ]
Remove indenting. We follow Python's convention and remove the minimum indent of the lines after the first, see: https://www.python.org/dev/peps/pep-0257/#handling-docstring-indentation preserving relative indentation. Args: docstring: A docstring. Returns: A list of strings, one per line, with the minimum indent stripped.
[ "Remove", "indenting", "." ]
f8da498a1abb7562f57dfc7010941578103061b6
https://github.com/tensorflow/skflow/blob/f8da498a1abb7562f57dfc7010941578103061b6/scripts/docs/docs.py#L281-L311
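A short illustration of the de-indenting rule above, using a hypothetical docstring:

doc = """Summary line.

    Args:
      x: something.
"""
# The minimum indent of the lines after the first is 4 spaces here, so the
# method returns:
#   ["Summary line.", "", "Args:", "  x: something."]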
9,437
tensorflow/skflow
scripts/docs/docs.py
Library._print_formatted_docstring
def _print_formatted_docstring(self, docstring, f): """Formats the given `docstring` as Markdown and prints it to `f`.""" lines = self._remove_docstring_indent(docstring) # Output the lines, identifying "Args" and other section blocks. i = 0 def _at_start_of_section(): """Returns the header if lines[i] is at start of a docstring section.""" l = lines[i] match = _section_re.match(l) if match and i + 1 < len( lines) and lines[i + 1].startswith(" "): return match.group(1) else: return None while i < len(lines): l = lines[i] section_header = _at_start_of_section() if section_header: if i == 0 or lines[i-1]: print("", file=f) # Use at least H4 to keep these out of the TOC. print("##### " + section_header + ":", file=f) print("", file=f) i += 1 outputting_list = False while i < len(lines): l = lines[i] # A new section header terminates the section. if _at_start_of_section(): break match = _arg_re.match(l) if match: if not outputting_list: # We need to start a list. In Markdown, a blank line needs to # precede a list. print("", file=f) outputting_list = True suffix = l[len(match.group()):].lstrip() print("* <b>`" + match.group(1) + "`</b>: " + suffix, file=f) else: # For lines that don't start with _arg_re, continue the list if it # has enough indentation. outputting_list &= l.startswith(" ") print(l, file=f) i += 1 else: print(l, file=f) i += 1
python
def _print_formatted_docstring(self, docstring, f): """Formats the given `docstring` as Markdown and prints it to `f`.""" lines = self._remove_docstring_indent(docstring) # Output the lines, identifying "Args" and other section blocks. i = 0 def _at_start_of_section(): """Returns the header if lines[i] is at start of a docstring section.""" l = lines[i] match = _section_re.match(l) if match and i + 1 < len( lines) and lines[i + 1].startswith(" "): return match.group(1) else: return None while i < len(lines): l = lines[i] section_header = _at_start_of_section() if section_header: if i == 0 or lines[i-1]: print("", file=f) # Use at least H4 to keep these out of the TOC. print("##### " + section_header + ":", file=f) print("", file=f) i += 1 outputting_list = False while i < len(lines): l = lines[i] # A new section header terminates the section. if _at_start_of_section(): break match = _arg_re.match(l) if match: if not outputting_list: # We need to start a list. In Markdown, a blank line needs to # precede a list. print("", file=f) outputting_list = True suffix = l[len(match.group()):].lstrip() print("* <b>`" + match.group(1) + "`</b>: " + suffix, file=f) else: # For lines that don't start with _arg_re, continue the list if it # has enough indentation. outputting_list &= l.startswith(" ") print(l, file=f) i += 1 else: print(l, file=f) i += 1
[ "def", "_print_formatted_docstring", "(", "self", ",", "docstring", ",", "f", ")", ":", "lines", "=", "self", ".", "_remove_docstring_indent", "(", "docstring", ")", "# Output the lines, identifying \"Args\" and other section blocks.", "i", "=", "0", "def", "_at_start_of_section", "(", ")", ":", "\"\"\"Returns the header if lines[i] is at start of a docstring section.\"\"\"", "l", "=", "lines", "[", "i", "]", "match", "=", "_section_re", ".", "match", "(", "l", ")", "if", "match", "and", "i", "+", "1", "<", "len", "(", "lines", ")", "and", "lines", "[", "i", "+", "1", "]", ".", "startswith", "(", "\" \"", ")", ":", "return", "match", ".", "group", "(", "1", ")", "else", ":", "return", "None", "while", "i", "<", "len", "(", "lines", ")", ":", "l", "=", "lines", "[", "i", "]", "section_header", "=", "_at_start_of_section", "(", ")", "if", "section_header", ":", "if", "i", "==", "0", "or", "lines", "[", "i", "-", "1", "]", ":", "print", "(", "\"\"", ",", "file", "=", "f", ")", "# Use at least H4 to keep these out of the TOC.", "print", "(", "\"##### \"", "+", "section_header", "+", "\":\"", ",", "file", "=", "f", ")", "print", "(", "\"\"", ",", "file", "=", "f", ")", "i", "+=", "1", "outputting_list", "=", "False", "while", "i", "<", "len", "(", "lines", ")", ":", "l", "=", "lines", "[", "i", "]", "# A new section header terminates the section.", "if", "_at_start_of_section", "(", ")", ":", "break", "match", "=", "_arg_re", ".", "match", "(", "l", ")", "if", "match", ":", "if", "not", "outputting_list", ":", "# We need to start a list. In Markdown, a blank line needs to", "# precede a list.", "print", "(", "\"\"", ",", "file", "=", "f", ")", "outputting_list", "=", "True", "suffix", "=", "l", "[", "len", "(", "match", ".", "group", "(", ")", ")", ":", "]", ".", "lstrip", "(", ")", "print", "(", "\"* <b>`\"", "+", "match", ".", "group", "(", "1", ")", "+", "\"`</b>: \"", "+", "suffix", ",", "file", "=", "f", ")", "else", ":", "# For lines that don't start with _arg_re, continue the list if it", "# has enough indentation.", "outputting_list", "&=", "l", ".", "startswith", "(", "\" \"", ")", "print", "(", "l", ",", "file", "=", "f", ")", "i", "+=", "1", "else", ":", "print", "(", "l", ",", "file", "=", "f", ")", "i", "+=", "1" ]
Formats the given `docstring` as Markdown and prints it to `f`.
[ "Formats", "the", "given", "docstring", "as", "Markdown", "and", "prints", "it", "to", "f", "." ]
f8da498a1abb7562f57dfc7010941578103061b6
https://github.com/tensorflow/skflow/blob/f8da498a1abb7562f57dfc7010941578103061b6/scripts/docs/docs.py#L313-L364
9,438
tensorflow/skflow
scripts/docs/docs.py
Library._print_function
def _print_function(self, f, prefix, fullname, func): """Prints the given function to `f`.""" heading = prefix + " `" + fullname if not isinstance(func, property): heading += self._generate_signature_for_function(func) heading += "` {#%s}" % _get_anchor(self._module_to_name, fullname) print(heading, file=f) print("", file=f) self._print_formatted_docstring(inspect.getdoc(func), f) print("", file=f)
python
def _print_function(self, f, prefix, fullname, func): """Prints the given function to `f`.""" heading = prefix + " `" + fullname if not isinstance(func, property): heading += self._generate_signature_for_function(func) heading += "` {#%s}" % _get_anchor(self._module_to_name, fullname) print(heading, file=f) print("", file=f) self._print_formatted_docstring(inspect.getdoc(func), f) print("", file=f)
[ "def", "_print_function", "(", "self", ",", "f", ",", "prefix", ",", "fullname", ",", "func", ")", ":", "heading", "=", "prefix", "+", "\" `\"", "+", "fullname", "if", "not", "isinstance", "(", "func", ",", "property", ")", ":", "heading", "+=", "self", ".", "_generate_signature_for_function", "(", "func", ")", "heading", "+=", "\"` {#%s}\"", "%", "_get_anchor", "(", "self", ".", "_module_to_name", ",", "fullname", ")", "print", "(", "heading", ",", "file", "=", "f", ")", "print", "(", "\"\"", ",", "file", "=", "f", ")", "self", ".", "_print_formatted_docstring", "(", "inspect", ".", "getdoc", "(", "func", ")", ",", "f", ")", "print", "(", "\"\"", ",", "file", "=", "f", ")" ]
Prints the given function to `f`.
[ "Prints", "the", "given", "function", "to", "f", "." ]
f8da498a1abb7562f57dfc7010941578103061b6
https://github.com/tensorflow/skflow/blob/f8da498a1abb7562f57dfc7010941578103061b6/scripts/docs/docs.py#L366-L375
9,439
tensorflow/skflow
scripts/docs/docs.py
Library._write_member_markdown_to_file
def _write_member_markdown_to_file(self, f, prefix, name, member): """Print `member` to `f`.""" if (inspect.isfunction(member) or inspect.ismethod(member) or isinstance(member, property)): print("- - -", file=f) print("", file=f) self._print_function(f, prefix, name, member) print("", file=f) elif inspect.isclass(member): print("- - -", file=f) print("", file=f) print("%s `class %s` {#%s}" % (prefix, name, _get_anchor(self._module_to_name, name)), file=f) print("", file=f) self._write_class_markdown_to_file(f, name, member) print("", file=f) else: raise RuntimeError("Member %s has unknown type %s" % (name, type(member)))
python
def _write_member_markdown_to_file(self, f, prefix, name, member): """Print `member` to `f`.""" if (inspect.isfunction(member) or inspect.ismethod(member) or isinstance(member, property)): print("- - -", file=f) print("", file=f) self._print_function(f, prefix, name, member) print("", file=f) elif inspect.isclass(member): print("- - -", file=f) print("", file=f) print("%s `class %s` {#%s}" % (prefix, name, _get_anchor(self._module_to_name, name)), file=f) print("", file=f) self._write_class_markdown_to_file(f, name, member) print("", file=f) else: raise RuntimeError("Member %s has unknown type %s" % (name, type(member)))
[ "def", "_write_member_markdown_to_file", "(", "self", ",", "f", ",", "prefix", ",", "name", ",", "member", ")", ":", "if", "(", "inspect", ".", "isfunction", "(", "member", ")", "or", "inspect", ".", "ismethod", "(", "member", ")", "or", "isinstance", "(", "member", ",", "property", ")", ")", ":", "print", "(", "\"- - -\"", ",", "file", "=", "f", ")", "print", "(", "\"\"", ",", "file", "=", "f", ")", "self", ".", "_print_function", "(", "f", ",", "prefix", ",", "name", ",", "member", ")", "print", "(", "\"\"", ",", "file", "=", "f", ")", "elif", "inspect", ".", "isclass", "(", "member", ")", ":", "print", "(", "\"- - -\"", ",", "file", "=", "f", ")", "print", "(", "\"\"", ",", "file", "=", "f", ")", "print", "(", "\"%s `class %s` {#%s}\"", "%", "(", "prefix", ",", "name", ",", "_get_anchor", "(", "self", ".", "_module_to_name", ",", "name", ")", ")", ",", "file", "=", "f", ")", "print", "(", "\"\"", ",", "file", "=", "f", ")", "self", ".", "_write_class_markdown_to_file", "(", "f", ",", "name", ",", "member", ")", "print", "(", "\"\"", ",", "file", "=", "f", ")", "else", ":", "raise", "RuntimeError", "(", "\"Member %s has unknown type %s\"", "%", "(", "name", ",", "type", "(", "member", ")", ")", ")" ]
Print `member` to `f`.
[ "Print", "member", "to", "f", "." ]
f8da498a1abb7562f57dfc7010941578103061b6
https://github.com/tensorflow/skflow/blob/f8da498a1abb7562f57dfc7010941578103061b6/scripts/docs/docs.py#L377-L395
9,440
tensorflow/skflow
scripts/docs/docs.py
Library._write_class_markdown_to_file
def _write_class_markdown_to_file(self, f, name, cls): """Write the class doc to `f`. Args: f: File to write to. prefix: Prefix for names. cls: class object. name: name to use. """ # Build the list of class methods to document. methods = dict(self.get_class_members(name, cls)) # Used later to check if any methods were called out in the class # docstring. num_methods = len(methods) try: self._write_docstring_markdown_to_file(f, "####", inspect.getdoc(cls), methods, {}) except ValueError as e: raise ValueError(str(e) + " in class `%s`" % cls.__name__) # If some methods were not described, describe them now if they are # defined by the class itself (not inherited). If NO methods were # described, describe all methods. # # TODO(touts): when all methods have been categorized make it an error # if some methods are not categorized. any_method_called_out = (len(methods) != num_methods) if any_method_called_out: other_methods = {n: m for n, m in methods.items() if n in cls.__dict__} if other_methods: print("\n#### Other Methods", file=f) else: other_methods = methods for name in sorted(other_methods): self._write_member_markdown_to_file(f, "####", *other_methods[name])
python
def _write_class_markdown_to_file(self, f, name, cls): """Write the class doc to `f`. Args: f: File to write to. prefix: Prefix for names. cls: class object. name: name to use. """ # Build the list of class methods to document. methods = dict(self.get_class_members(name, cls)) # Used later to check if any methods were called out in the class # docstring. num_methods = len(methods) try: self._write_docstring_markdown_to_file(f, "####", inspect.getdoc(cls), methods, {}) except ValueError as e: raise ValueError(str(e) + " in class `%s`" % cls.__name__) # If some methods were not described, describe them now if they are # defined by the class itself (not inherited). If NO methods were # described, describe all methods. # # TODO(touts): when all methods have been categorized make it an error # if some methods are not categorized. any_method_called_out = (len(methods) != num_methods) if any_method_called_out: other_methods = {n: m for n, m in methods.items() if n in cls.__dict__} if other_methods: print("\n#### Other Methods", file=f) else: other_methods = methods for name in sorted(other_methods): self._write_member_markdown_to_file(f, "####", *other_methods[name])
[ "def", "_write_class_markdown_to_file", "(", "self", ",", "f", ",", "name", ",", "cls", ")", ":", "# Build the list of class methods to document.", "methods", "=", "dict", "(", "self", ".", "get_class_members", "(", "name", ",", "cls", ")", ")", "# Used later to check if any methods were called out in the class", "# docstring.", "num_methods", "=", "len", "(", "methods", ")", "try", ":", "self", ".", "_write_docstring_markdown_to_file", "(", "f", ",", "\"####\"", ",", "inspect", ".", "getdoc", "(", "cls", ")", ",", "methods", ",", "{", "}", ")", "except", "ValueError", "as", "e", ":", "raise", "ValueError", "(", "str", "(", "e", ")", "+", "\" in class `%s`\"", "%", "cls", ".", "__name__", ")", "# If some methods were not described, describe them now if they are", "# defined by the class itself (not inherited). If NO methods were", "# described, describe all methods.", "#", "# TODO(touts): when all methods have been categorized make it an error", "# if some methods are not categorized.", "any_method_called_out", "=", "(", "len", "(", "methods", ")", "!=", "num_methods", ")", "if", "any_method_called_out", ":", "other_methods", "=", "{", "n", ":", "m", "for", "n", ",", "m", "in", "methods", ".", "items", "(", ")", "if", "n", "in", "cls", ".", "__dict__", "}", "if", "other_methods", ":", "print", "(", "\"\\n#### Other Methods\"", ",", "file", "=", "f", ")", "else", ":", "other_methods", "=", "methods", "for", "name", "in", "sorted", "(", "other_methods", ")", ":", "self", ".", "_write_member_markdown_to_file", "(", "f", ",", "\"####\"", ",", "*", "other_methods", "[", "name", "]", ")" ]
Write the class doc to `f`. Args: f: File to write to. prefix: Prefix for names. cls: class object. name: name to use.
[ "Write", "the", "class", "doc", "to", "f", "." ]
f8da498a1abb7562f57dfc7010941578103061b6
https://github.com/tensorflow/skflow/blob/f8da498a1abb7562f57dfc7010941578103061b6/scripts/docs/docs.py#L414-L448
9,441
tensorflow/skflow
scripts/docs/docs.py
Library.write_markdown_to_file
def write_markdown_to_file(self, f): """Prints this library to file `f`. Args: f: File to write to. Returns: Dictionary of documented members. """ print("---", file=f) print("---", file=f) print("<!-- This file is machine generated: DO NOT EDIT! -->", file=f) print("", file=f) # TODO(touts): Do not insert these. Let the doc writer put them in # the module docstring explicitly. print("#", self._title, file=f) if self._prefix: print(self._prefix, file=f) print("[TOC]", file=f) print("", file=f) if self._module is not None: self._write_module_markdown_to_file(f, self._module)
python
def write_markdown_to_file(self, f): """Prints this library to file `f`. Args: f: File to write to. Returns: Dictionary of documented members. """ print("---", file=f) print("---", file=f) print("<!-- This file is machine generated: DO NOT EDIT! -->", file=f) print("", file=f) # TODO(touts): Do not insert these. Let the doc writer put them in # the module docstring explicitly. print("#", self._title, file=f) if self._prefix: print(self._prefix, file=f) print("[TOC]", file=f) print("", file=f) if self._module is not None: self._write_module_markdown_to_file(f, self._module)
[ "def", "write_markdown_to_file", "(", "self", ",", "f", ")", ":", "print", "(", "\"---\"", ",", "file", "=", "f", ")", "print", "(", "\"---\"", ",", "file", "=", "f", ")", "print", "(", "\"<!-- This file is machine generated: DO NOT EDIT! -->\"", ",", "file", "=", "f", ")", "print", "(", "\"\"", ",", "file", "=", "f", ")", "# TODO(touts): Do not insert these. Let the doc writer put them in", "# the module docstring explicitly.", "print", "(", "\"#\"", ",", "self", ".", "_title", ",", "file", "=", "f", ")", "if", "self", ".", "_prefix", ":", "print", "(", "self", ".", "_prefix", ",", "file", "=", "f", ")", "print", "(", "\"[TOC]\"", ",", "file", "=", "f", ")", "print", "(", "\"\"", ",", "file", "=", "f", ")", "if", "self", ".", "_module", "is", "not", "None", ":", "self", ".", "_write_module_markdown_to_file", "(", "f", ",", "self", ".", "_module", ")" ]
Prints this library to file `f`. Args: f: File to write to. Returns: Dictionary of documented members.
[ "Prints", "this", "library", "to", "file", "f", "." ]
f8da498a1abb7562f57dfc7010941578103061b6
https://github.com/tensorflow/skflow/blob/f8da498a1abb7562f57dfc7010941578103061b6/scripts/docs/docs.py#L455-L476
9,442
tensorflow/skflow
scripts/docs/docs.py
Library.write_other_members
def write_other_members(self, f, catch_all=False): """Writes the leftover members to `f`. Args: f: File to write to. catch_all: If true, document all missing symbols from any module. Otherwise, document missing symbols from just this module. """ if catch_all: names = self._members.items() else: names = inspect.getmembers(self._module) leftovers = [] for name, _ in names: if name in self._members and name not in self._documented: leftovers.append(name) if leftovers: print("%s: undocumented members: %d" % (self._title, len(leftovers))) print("\n## Other Functions and Classes", file=f) for name in sorted(leftovers): print(" %s" % name) self._documented.add(name) self._mentioned.add(name) self._write_member_markdown_to_file(f, "###", *self._members[name])
python
def write_other_members(self, f, catch_all=False): """Writes the leftover members to `f`. Args: f: File to write to. catch_all: If true, document all missing symbols from any module. Otherwise, document missing symbols from just this module. """ if catch_all: names = self._members.items() else: names = inspect.getmembers(self._module) leftovers = [] for name, _ in names: if name in self._members and name not in self._documented: leftovers.append(name) if leftovers: print("%s: undocumented members: %d" % (self._title, len(leftovers))) print("\n## Other Functions and Classes", file=f) for name in sorted(leftovers): print(" %s" % name) self._documented.add(name) self._mentioned.add(name) self._write_member_markdown_to_file(f, "###", *self._members[name])
[ "def", "write_other_members", "(", "self", ",", "f", ",", "catch_all", "=", "False", ")", ":", "if", "catch_all", ":", "names", "=", "self", ".", "_members", ".", "items", "(", ")", "else", ":", "names", "=", "inspect", ".", "getmembers", "(", "self", ".", "_module", ")", "leftovers", "=", "[", "]", "for", "name", ",", "_", "in", "names", ":", "if", "name", "in", "self", ".", "_members", "and", "name", "not", "in", "self", ".", "_documented", ":", "leftovers", ".", "append", "(", "name", ")", "if", "leftovers", ":", "print", "(", "\"%s: undocumented members: %d\"", "%", "(", "self", ".", "_title", ",", "len", "(", "leftovers", ")", ")", ")", "print", "(", "\"\\n## Other Functions and Classes\"", ",", "file", "=", "f", ")", "for", "name", "in", "sorted", "(", "leftovers", ")", ":", "print", "(", "\" %s\"", "%", "name", ")", "self", ".", "_documented", ".", "add", "(", "name", ")", "self", ".", "_mentioned", ".", "add", "(", "name", ")", "self", ".", "_write_member_markdown_to_file", "(", "f", ",", "\"###\"", ",", "*", "self", ".", "_members", "[", "name", "]", ")" ]
Writes the leftover members to `f`. Args: f: File to write to. catch_all: If true, document all missing symbols from any module. Otherwise, document missing symbols from just this module.
[ "Writes", "the", "leftover", "members", "to", "f", "." ]
f8da498a1abb7562f57dfc7010941578103061b6
https://github.com/tensorflow/skflow/blob/f8da498a1abb7562f57dfc7010941578103061b6/scripts/docs/docs.py#L478-L501
9,443
tensorflow/skflow
scripts/docs/docs.py
Library.assert_no_leftovers
def assert_no_leftovers(self): """Generate an error if there are leftover members.""" leftovers = [] for name in self._members.keys(): if name in self._members and name not in self._documented: leftovers.append(name) if leftovers: raise RuntimeError("%s: undocumented members: %s" % (self._title, ", ".join(leftovers)))
python
def assert_no_leftovers(self): """Generate an error if there are leftover members.""" leftovers = [] for name in self._members.keys(): if name in self._members and name not in self._documented: leftovers.append(name) if leftovers: raise RuntimeError("%s: undocumented members: %s" % (self._title, ", ".join(leftovers)))
[ "def", "assert_no_leftovers", "(", "self", ")", ":", "leftovers", "=", "[", "]", "for", "name", "in", "self", ".", "_members", ".", "keys", "(", ")", ":", "if", "name", "in", "self", ".", "_members", "and", "name", "not", "in", "self", ".", "_documented", ":", "leftovers", ".", "append", "(", "name", ")", "if", "leftovers", ":", "raise", "RuntimeError", "(", "\"%s: undocumented members: %s\"", "%", "(", "self", ".", "_title", ",", "\", \"", ".", "join", "(", "leftovers", ")", ")", ")" ]
Generate an error if there are leftover members.
[ "Generate", "an", "error", "if", "there", "are", "leftover", "members", "." ]
f8da498a1abb7562f57dfc7010941578103061b6
https://github.com/tensorflow/skflow/blob/f8da498a1abb7562f57dfc7010941578103061b6/scripts/docs/docs.py#L503-L511
9,444
rycus86/prometheus_flask_exporter
prometheus_flask_exporter/multiprocess.py
MultiprocessPrometheusMetrics.start_http_server
def start_http_server(self, port, host='0.0.0.0', endpoint=None): """ Start an HTTP server for exposing the metrics, if the `should_start_http_server` function says we should, otherwise just return. Uses the implementation from `prometheus_client` rather than a Flask app. :param port: the HTTP port to expose the metrics endpoint on :param host: the HTTP host to listen on (default: `0.0.0.0`) :param endpoint: **ignored**, the HTTP server will respond on any path """ if self.should_start_http_server(): pc_start_http_server(port, host, registry=self.registry)
python
def start_http_server(self, port, host='0.0.0.0', endpoint=None): """ Start an HTTP server for exposing the metrics, if the `should_start_http_server` function says we should, otherwise just return. Uses the implementation from `prometheus_client` rather than a Flask app. :param port: the HTTP port to expose the metrics endpoint on :param host: the HTTP host to listen on (default: `0.0.0.0`) :param endpoint: **ignored**, the HTTP server will respond on any path """ if self.should_start_http_server(): pc_start_http_server(port, host, registry=self.registry)
[ "def", "start_http_server", "(", "self", ",", "port", ",", "host", "=", "'0.0.0.0'", ",", "endpoint", "=", "None", ")", ":", "if", "self", ".", "should_start_http_server", "(", ")", ":", "pc_start_http_server", "(", "port", ",", "host", ",", "registry", "=", "self", ".", "registry", ")" ]
Start an HTTP server for exposing the metrics, if the `should_start_http_server` function says we should, otherwise just return. Uses the implementation from `prometheus_client` rather than a Flask app. :param port: the HTTP port to expose the metrics endpoint on :param host: the HTTP host to listen on (default: `0.0.0.0`) :param endpoint: **ignored**, the HTTP server will respond on any path
[ "Start", "an", "HTTP", "server", "for", "exposing", "the", "metrics", "if", "the", "should_start_http_server", "function", "says", "we", "should", "otherwise", "just", "return", ".", "Uses", "the", "implementation", "from", "prometheus_client", "rather", "than", "a", "Flask", "app", "." ]
678dbf3097e82a0ddb697268406004cc1f4a26bc
https://github.com/rycus86/prometheus_flask_exporter/blob/678dbf3097e82a0ddb697268406004cc1f4a26bc/prometheus_flask_exporter/multiprocess.py#L75-L87
9,445
rycus86/prometheus_flask_exporter
prometheus_flask_exporter/__init__.py
PrometheusMetrics.init_app
def init_app(self, app): """ This callback can be used to initialize an application for the use with this prometheus reporter setup. This is usually used with a flask "app factory" configuration. Please see: http://flask.pocoo.org/docs/1.0/patterns/appfactories/ Note, that you need to use `PrometheusMetrics(app=None, ...)` for this mode, otherwise it is called automatically. :param app: the Flask application """ if self.path: self.register_endpoint(self.path, app) if self._export_defaults: self.export_defaults( self.buckets, self.group_by, self._defaults_prefix, app )
python
def init_app(self, app): """ This callback can be used to initialize an application for the use with this prometheus reporter setup. This is usually used with a flask "app factory" configuration. Please see: http://flask.pocoo.org/docs/1.0/patterns/appfactories/ Note, that you need to use `PrometheusMetrics(app=None, ...)` for this mode, otherwise it is called automatically. :param app: the Flask application """ if self.path: self.register_endpoint(self.path, app) if self._export_defaults: self.export_defaults( self.buckets, self.group_by, self._defaults_prefix, app )
[ "def", "init_app", "(", "self", ",", "app", ")", ":", "if", "self", ".", "path", ":", "self", ".", "register_endpoint", "(", "self", ".", "path", ",", "app", ")", "if", "self", ".", "_export_defaults", ":", "self", ".", "export_defaults", "(", "self", ".", "buckets", ",", "self", ".", "group_by", ",", "self", ".", "_defaults_prefix", ",", "app", ")" ]
This callback can be used to initialize an application for the use with this prometheus reporter setup. This is usually used with a flask "app factory" configuration. Please see: http://flask.pocoo.org/docs/1.0/patterns/appfactories/ Note, that you need to use `PrometheusMetrics(app=None, ...)` for this mode, otherwise it is called automatically. :param app: the Flask application
[ "This", "callback", "can", "be", "used", "to", "initialize", "an", "application", "for", "the", "use", "with", "this", "prometheus", "reporter", "setup", "." ]
678dbf3097e82a0ddb697268406004cc1f4a26bc
https://github.com/rycus86/prometheus_flask_exporter/blob/678dbf3097e82a0ddb697268406004cc1f4a26bc/prometheus_flask_exporter/__init__.py#L133-L154
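The `init_app` docstring above points at Flask's application-factory pattern; a minimal sketch of that wiring, assuming prometheus_flask_exporter is installed and using an illustrative factory name `create_app` (not something from the rows above):

    from flask import Flask
    from prometheus_flask_exporter import PrometheusMetrics

    metrics = PrometheusMetrics(app=None)    # defer binding, as the docstring suggests

    def create_app():
        app = Flask(__name__)
        metrics.init_app(app)                # registers /metrics and the default metrics
        return app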
9,446
rycus86/prometheus_flask_exporter
prometheus_flask_exporter/__init__.py
PrometheusMetrics.register_endpoint
def register_endpoint(self, path, app=None): """ Register the metrics endpoint on the Flask application. :param path: the path of the endpoint :param app: the Flask application to register the endpoint on (by default it is the application registered with this class) """ if is_running_from_reloader() and not os.environ.get('DEBUG_METRICS'): return if app is None: app = self.app or current_app @app.route(path) @self.do_not_track() def prometheus_metrics(): # import these here so they don't clash with our own multiprocess module from prometheus_client import multiprocess, CollectorRegistry if 'prometheus_multiproc_dir' in os.environ: registry = CollectorRegistry() else: registry = self.registry if 'name[]' in request.args: registry = registry.restricted_registry(request.args.getlist('name[]')) if 'prometheus_multiproc_dir' in os.environ: multiprocess.MultiProcessCollector(registry) headers = {'Content-Type': CONTENT_TYPE_LATEST} return generate_latest(registry), 200, headers
python
def register_endpoint(self, path, app=None): """ Register the metrics endpoint on the Flask application. :param path: the path of the endpoint :param app: the Flask application to register the endpoint on (by default it is the application registered with this class) """ if is_running_from_reloader() and not os.environ.get('DEBUG_METRICS'): return if app is None: app = self.app or current_app @app.route(path) @self.do_not_track() def prometheus_metrics(): # import these here so they don't clash with our own multiprocess module from prometheus_client import multiprocess, CollectorRegistry if 'prometheus_multiproc_dir' in os.environ: registry = CollectorRegistry() else: registry = self.registry if 'name[]' in request.args: registry = registry.restricted_registry(request.args.getlist('name[]')) if 'prometheus_multiproc_dir' in os.environ: multiprocess.MultiProcessCollector(registry) headers = {'Content-Type': CONTENT_TYPE_LATEST} return generate_latest(registry), 200, headers
[ "def", "register_endpoint", "(", "self", ",", "path", ",", "app", "=", "None", ")", ":", "if", "is_running_from_reloader", "(", ")", "and", "not", "os", ".", "environ", ".", "get", "(", "'DEBUG_METRICS'", ")", ":", "return", "if", "app", "is", "None", ":", "app", "=", "self", ".", "app", "or", "current_app", "@", "app", ".", "route", "(", "path", ")", "@", "self", ".", "do_not_track", "(", ")", "def", "prometheus_metrics", "(", ")", ":", "# import these here so they don't clash with our own multiprocess module", "from", "prometheus_client", "import", "multiprocess", ",", "CollectorRegistry", "if", "'prometheus_multiproc_dir'", "in", "os", ".", "environ", ":", "registry", "=", "CollectorRegistry", "(", ")", "else", ":", "registry", "=", "self", ".", "registry", "if", "'name[]'", "in", "request", ".", "args", ":", "registry", "=", "registry", ".", "restricted_registry", "(", "request", ".", "args", ".", "getlist", "(", "'name[]'", ")", ")", "if", "'prometheus_multiproc_dir'", "in", "os", ".", "environ", ":", "multiprocess", ".", "MultiProcessCollector", "(", "registry", ")", "headers", "=", "{", "'Content-Type'", ":", "CONTENT_TYPE_LATEST", "}", "return", "generate_latest", "(", "registry", ")", ",", "200", ",", "headers" ]
Register the metrics endpoint on the Flask application. :param path: the path of the endpoint :param app: the Flask application to register the endpoint on (by default it is the application registered with this class)
[ "Register", "the", "metrics", "endpoint", "on", "the", "Flask", "application", "." ]
678dbf3097e82a0ddb697268406004cc1f4a26bc
https://github.com/rycus86/prometheus_flask_exporter/blob/678dbf3097e82a0ddb697268406004cc1f4a26bc/prometheus_flask_exporter/__init__.py#L156-L189
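The `register_endpoint` body above restricts the registry when a `name[]` query argument is present; a small sketch of exercising that through Flask's test client (the metric name in the query string is a placeholder, not necessarily one of the defaults):

    from flask import Flask
    from prometheus_flask_exporter import PrometheusMetrics

    app = Flask(__name__)
    metrics = PrometheusMetrics(app)         # registers the /metrics endpoint shown above

    with app.test_client() as client:
        everything = client.get('/metrics')                          # full registry
        subset = client.get('/metrics?name[]=example_metric_total')  # restricted via name[]
        print(everything.status_code, subset.status_code)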
9,447
rycus86/prometheus_flask_exporter
prometheus_flask_exporter/__init__.py
PrometheusMetrics.start_http_server
def start_http_server(self, port, host='0.0.0.0', endpoint='/metrics'): """ Start an HTTP server for exposing the metrics. This will be an individual Flask application, not the one registered with this class. :param port: the HTTP port to expose the metrics endpoint on :param host: the HTTP host to listen on (default: `0.0.0.0`) :param endpoint: the URL path to expose the endpoint on (default: `/metrics`) """ if is_running_from_reloader(): return app = Flask('prometheus-flask-exporter-%d' % port) self.register_endpoint(endpoint, app) def run_app(): app.run(host=host, port=port) thread = threading.Thread(target=run_app) thread.setDaemon(True) thread.start()
python
def start_http_server(self, port, host='0.0.0.0', endpoint='/metrics'): """ Start an HTTP server for exposing the metrics. This will be an individual Flask application, not the one registered with this class. :param port: the HTTP port to expose the metrics endpoint on :param host: the HTTP host to listen on (default: `0.0.0.0`) :param endpoint: the URL path to expose the endpoint on (default: `/metrics`) """ if is_running_from_reloader(): return app = Flask('prometheus-flask-exporter-%d' % port) self.register_endpoint(endpoint, app) def run_app(): app.run(host=host, port=port) thread = threading.Thread(target=run_app) thread.setDaemon(True) thread.start()
[ "def", "start_http_server", "(", "self", ",", "port", ",", "host", "=", "'0.0.0.0'", ",", "endpoint", "=", "'/metrics'", ")", ":", "if", "is_running_from_reloader", "(", ")", ":", "return", "app", "=", "Flask", "(", "'prometheus-flask-exporter-%d'", "%", "port", ")", "self", ".", "register_endpoint", "(", "endpoint", ",", "app", ")", "def", "run_app", "(", ")", ":", "app", ".", "run", "(", "host", "=", "host", ",", "port", "=", "port", ")", "thread", "=", "threading", ".", "Thread", "(", "target", "=", "run_app", ")", "thread", ".", "setDaemon", "(", "True", ")", "thread", ".", "start", "(", ")" ]
Start an HTTP server for exposing the metrics. This will be an individual Flask application, not the one registered with this class. :param port: the HTTP port to expose the metrics endpoint on :param host: the HTTP host to listen on (default: `0.0.0.0`) :param endpoint: the URL path to expose the endpoint on (default: `/metrics`)
[ "Start", "an", "HTTP", "server", "for", "exposing", "the", "metrics", ".", "This", "will", "be", "an", "individual", "Flask", "application", "not", "the", "one", "registered", "with", "this", "class", "." ]
678dbf3097e82a0ddb697268406004cc1f4a26bc
https://github.com/rycus86/prometheus_flask_exporter/blob/678dbf3097e82a0ddb697268406004cc1f4a26bc/prometheus_flask_exporter/__init__.py#L191-L214
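A sketch of pairing this method with a metrics object that skips the in-app endpoint; the port number is arbitrary, and `path=None` relies on the `init_app` behavior shown a few rows above:

    from flask import Flask
    from prometheus_flask_exporter import PrometheusMetrics

    app = Flask(__name__)
    metrics = PrometheusMetrics(app, path=None)   # no endpoint on the main app
    metrics.start_http_server(9100)               # daemon thread serving /metrics on port 9100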
9,448
rycus86/prometheus_flask_exporter
prometheus_flask_exporter/__init__.py
PrometheusMetrics.histogram
def histogram(self, name, description, labels=None, **kwargs): """ Use a Histogram to track the execution time and invocation count of the method. :param name: the name of the metric :param description: the description of the metric :param labels: a dictionary of `{labelname: callable_or_value}` for labels :param kwargs: additional keyword arguments for creating the Histogram """ return self._track( Histogram, lambda metric, time: metric.observe(time), kwargs, name, description, labels, registry=self.registry )
python
def histogram(self, name, description, labels=None, **kwargs): """ Use a Histogram to track the execution time and invocation count of the method. :param name: the name of the metric :param description: the description of the metric :param labels: a dictionary of `{labelname: callable_or_value}` for labels :param kwargs: additional keyword arguments for creating the Histogram """ return self._track( Histogram, lambda metric, time: metric.observe(time), kwargs, name, description, labels, registry=self.registry )
[ "def", "histogram", "(", "self", ",", "name", ",", "description", ",", "labels", "=", "None", ",", "*", "*", "kwargs", ")", ":", "return", "self", ".", "_track", "(", "Histogram", ",", "lambda", "metric", ",", "time", ":", "metric", ".", "observe", "(", "time", ")", ",", "kwargs", ",", "name", ",", "description", ",", "labels", ",", "registry", "=", "self", ".", "registry", ")" ]
Use a Histogram to track the execution time and invocation count of the method. :param name: the name of the metric :param description: the description of the metric :param labels: a dictionary of `{labelname: callable_or_value}` for labels :param kwargs: additional keyword arguments for creating the Histogram
[ "Use", "a", "Histogram", "to", "track", "the", "execution", "time", "and", "invocation", "count", "of", "the", "method", "." ]
678dbf3097e82a0ddb697268406004cc1f4a26bc
https://github.com/rycus86/prometheus_flask_exporter/blob/678dbf3097e82a0ddb697268406004cc1f4a26bc/prometheus_flask_exporter/__init__.py#L317-L333
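The decorator returned by `histogram` wraps a view function; a usage sketch under the library's usual decorator ordering, with illustrative metric and label names:

    from flask import Flask
    from prometheus_flask_exporter import PrometheusMetrics

    app = Flask(__name__)
    metrics = PrometheusMetrics(app)

    @app.route('/work')
    @metrics.histogram('work_latency_seconds', 'Latency of /work by status',
                       labels={'status': lambda r: r.status_code})
    def work():
        return 'done'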
9,449
rycus86/prometheus_flask_exporter
prometheus_flask_exporter/__init__.py
PrometheusMetrics.summary
def summary(self, name, description, labels=None, **kwargs): """ Use a Summary to track the execution time and invocation count of the method. :param name: the name of the metric :param description: the description of the metric :param labels: a dictionary of `{labelname: callable_or_value}` for labels :param kwargs: additional keyword arguments for creating the Summary """ return self._track( Summary, lambda metric, time: metric.observe(time), kwargs, name, description, labels, registry=self.registry )
python
def summary(self, name, description, labels=None, **kwargs): """ Use a Summary to track the execution time and invocation count of the method. :param name: the name of the metric :param description: the description of the metric :param labels: a dictionary of `{labelname: callable_or_value}` for labels :param kwargs: additional keyword arguments for creating the Summary """ return self._track( Summary, lambda metric, time: metric.observe(time), kwargs, name, description, labels, registry=self.registry )
[ "def", "summary", "(", "self", ",", "name", ",", "description", ",", "labels", "=", "None", ",", "*", "*", "kwargs", ")", ":", "return", "self", ".", "_track", "(", "Summary", ",", "lambda", "metric", ",", "time", ":", "metric", ".", "observe", "(", "time", ")", ",", "kwargs", ",", "name", ",", "description", ",", "labels", ",", "registry", "=", "self", ".", "registry", ")" ]
Use a Summary to track the execution time and invocation count of the method. :param name: the name of the metric :param description: the description of the metric :param labels: a dictionary of `{labelname: callable_or_value}` for labels :param kwargs: additional keyword arguments for creating the Summary
[ "Use", "a", "Summary", "to", "track", "the", "execution", "time", "and", "invocation", "count", "of", "the", "method", "." ]
678dbf3097e82a0ddb697268406004cc1f4a26bc
https://github.com/rycus86/prometheus_flask_exporter/blob/678dbf3097e82a0ddb697268406004cc1f4a26bc/prometheus_flask_exporter/__init__.py#L335-L351
9,450
rycus86/prometheus_flask_exporter
prometheus_flask_exporter/__init__.py
PrometheusMetrics.gauge
def gauge(self, name, description, labels=None, **kwargs): """ Use a Gauge to track the number of invocations in progress for the method. :param name: the name of the metric :param description: the description of the metric :param labels: a dictionary of `{labelname: callable_or_value}` for labels :param kwargs: additional keyword arguments for creating the Gauge """ return self._track( Gauge, lambda metric, time: metric.dec(), kwargs, name, description, labels, registry=self.registry, before=lambda metric: metric.inc() )
python
def gauge(self, name, description, labels=None, **kwargs): """ Use a Gauge to track the number of invocations in progress for the method. :param name: the name of the metric :param description: the description of the metric :param labels: a dictionary of `{labelname: callable_or_value}` for labels :param kwargs: additional keyword arguments for creating the Gauge """ return self._track( Gauge, lambda metric, time: metric.dec(), kwargs, name, description, labels, registry=self.registry, before=lambda metric: metric.inc() )
[ "def", "gauge", "(", "self", ",", "name", ",", "description", ",", "labels", "=", "None", ",", "*", "*", "kwargs", ")", ":", "return", "self", ".", "_track", "(", "Gauge", ",", "lambda", "metric", ",", "time", ":", "metric", ".", "dec", "(", ")", ",", "kwargs", ",", "name", ",", "description", ",", "labels", ",", "registry", "=", "self", ".", "registry", ",", "before", "=", "lambda", "metric", ":", "metric", ".", "inc", "(", ")", ")" ]
Use a Gauge to track the number of invocations in progress for the method. :param name: the name of the metric :param description: the description of the metric :param labels: a dictionary of `{labelname: callable_or_value}` for labels :param kwargs: additional keyword arguments for creating the Gauge
[ "Use", "a", "Gauge", "to", "track", "the", "number", "of", "invocations", "in", "progress", "for", "the", "method", "." ]
678dbf3097e82a0ddb697268406004cc1f4a26bc
https://github.com/rycus86/prometheus_flask_exporter/blob/678dbf3097e82a0ddb697268406004cc1f4a26bc/prometheus_flask_exporter/__init__.py#L353-L370
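Because `gauge` increments before the handler runs (via `before`) and decrements afterwards, it naturally tracks in-progress requests; a short sketch with an illustrative metric name:

    from flask import Flask
    from prometheus_flask_exporter import PrometheusMetrics

    app = Flask(__name__)
    metrics = PrometheusMetrics(app)

    @app.route('/long-task')
    @metrics.gauge('long_task_in_progress', 'Number of /long-task requests being served')
    def long_task():
        return 'ok'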
9,451
rycus86/prometheus_flask_exporter
prometheus_flask_exporter/__init__.py
PrometheusMetrics.counter
def counter(self, name, description, labels=None, **kwargs): """ Use a Counter to track the total number of invocations of the method. :param name: the name of the metric :param description: the description of the metric :param labels: a dictionary of `{labelname: callable_or_value}` for labels :param kwargs: additional keyword arguments for creating the Counter """ return self._track( Counter, lambda metric, time: metric.inc(), kwargs, name, description, labels, registry=self.registry )
python
def counter(self, name, description, labels=None, **kwargs): """ Use a Counter to track the total number of invocations of the method. :param name: the name of the metric :param description: the description of the metric :param labels: a dictionary of `{labelname: callable_or_value}` for labels :param kwargs: additional keyword arguments for creating the Counter """ return self._track( Counter, lambda metric, time: metric.inc(), kwargs, name, description, labels, registry=self.registry )
[ "def", "counter", "(", "self", ",", "name", ",", "description", ",", "labels", "=", "None", ",", "*", "*", "kwargs", ")", ":", "return", "self", ".", "_track", "(", "Counter", ",", "lambda", "metric", ",", "time", ":", "metric", ".", "inc", "(", ")", ",", "kwargs", ",", "name", ",", "description", ",", "labels", ",", "registry", "=", "self", ".", "registry", ")" ]
Use a Counter to track the total number of invocations of the method. :param name: the name of the metric :param description: the description of the metric :param labels: a dictionary of `{labelname: callable_or_value}` for labels :param kwargs: additional keyword arguments for creating the Counter
[ "Use", "a", "Counter", "to", "track", "the", "total", "number", "of", "invocations", "of", "the", "method", "." ]
678dbf3097e82a0ddb697268406004cc1f4a26bc
https://github.com/rycus86/prometheus_flask_exporter/blob/678dbf3097e82a0ddb697268406004cc1f4a26bc/prometheus_flask_exporter/__init__.py#L372-L387
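A matching sketch for `counter`, again with illustrative names; the `status` label callable receives the response object, as the `_track` row below spells out:

    from flask import Flask
    from prometheus_flask_exporter import PrometheusMetrics

    app = Flask(__name__)
    metrics = PrometheusMetrics(app)

    @app.route('/collection')
    @metrics.counter('collection_invocations_total', 'Invocations of the collection endpoint',
                     labels={'status': lambda r: r.status_code})
    def collection():
        return 'ok'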
9,452
rycus86/prometheus_flask_exporter
prometheus_flask_exporter/__init__.py
PrometheusMetrics._track
def _track(metric_type, metric_call, metric_kwargs, name, description, labels, registry, before=None): """ Internal method decorator logic. :param metric_type: the type of the metric from the `prometheus_client` library :param metric_call: the invocation to execute as a callable with `(metric, time)` :param metric_kwargs: additional keyword arguments for creating the metric :param name: the name of the metric :param description: the description of the metric :param labels: a dictionary of `{labelname: callable_or_value}` for labels :param before: an optional callable to invoke before executing the request handler method accepting the single `metric` argument :param registry: the Prometheus Registry to use """ if labels is not None and not isinstance(labels, dict): raise TypeError('labels needs to be a dictionary of {labelname: callable}') label_names = labels.keys() if labels else tuple() parent_metric = metric_type( name, description, labelnames=label_names, registry=registry, **metric_kwargs ) def argspec(func): if hasattr(inspect, 'getfullargspec'): return inspect.getfullargspec(func) else: return inspect.getargspec(func) def label_value(f): if not callable(f): return lambda x: f if argspec(f).args: return lambda x: f(x) else: return lambda x: f() label_generator = tuple( (key, label_value(call)) for key, call in labels.items() ) if labels else tuple() def get_metric(response): if label_names: return parent_metric.labels( **{key: call(response) for key, call in label_generator} ) else: return parent_metric def decorator(f): @functools.wraps(f) def func(*args, **kwargs): if before: metric = get_metric(None) before(metric) else: metric = None start_time = default_timer() try: response = f(*args, **kwargs) except HTTPException as ex: response = ex except Exception as ex: response = make_response('Exception: %s' % ex, 500) total_time = max(default_timer() - start_time, 0) if not metric: response_for_metric = response if not isinstance(response, Response): if request.endpoint == f.__name__: # we are in a request handler method response_for_metric = make_response(response) metric = get_metric(response_for_metric) metric_call(metric, time=total_time) return response return func return decorator
python
def _track(metric_type, metric_call, metric_kwargs, name, description, labels, registry, before=None): """ Internal method decorator logic. :param metric_type: the type of the metric from the `prometheus_client` library :param metric_call: the invocation to execute as a callable with `(metric, time)` :param metric_kwargs: additional keyword arguments for creating the metric :param name: the name of the metric :param description: the description of the metric :param labels: a dictionary of `{labelname: callable_or_value}` for labels :param before: an optional callable to invoke before executing the request handler method accepting the single `metric` argument :param registry: the Prometheus Registry to use """ if labels is not None and not isinstance(labels, dict): raise TypeError('labels needs to be a dictionary of {labelname: callable}') label_names = labels.keys() if labels else tuple() parent_metric = metric_type( name, description, labelnames=label_names, registry=registry, **metric_kwargs ) def argspec(func): if hasattr(inspect, 'getfullargspec'): return inspect.getfullargspec(func) else: return inspect.getargspec(func) def label_value(f): if not callable(f): return lambda x: f if argspec(f).args: return lambda x: f(x) else: return lambda x: f() label_generator = tuple( (key, label_value(call)) for key, call in labels.items() ) if labels else tuple() def get_metric(response): if label_names: return parent_metric.labels( **{key: call(response) for key, call in label_generator} ) else: return parent_metric def decorator(f): @functools.wraps(f) def func(*args, **kwargs): if before: metric = get_metric(None) before(metric) else: metric = None start_time = default_timer() try: response = f(*args, **kwargs) except HTTPException as ex: response = ex except Exception as ex: response = make_response('Exception: %s' % ex, 500) total_time = max(default_timer() - start_time, 0) if not metric: response_for_metric = response if not isinstance(response, Response): if request.endpoint == f.__name__: # we are in a request handler method response_for_metric = make_response(response) metric = get_metric(response_for_metric) metric_call(metric, time=total_time) return response return func return decorator
[ "def", "_track", "(", "metric_type", ",", "metric_call", ",", "metric_kwargs", ",", "name", ",", "description", ",", "labels", ",", "registry", ",", "before", "=", "None", ")", ":", "if", "labels", "is", "not", "None", "and", "not", "isinstance", "(", "labels", ",", "dict", ")", ":", "raise", "TypeError", "(", "'labels needs to be a dictionary of {labelname: callable}'", ")", "label_names", "=", "labels", ".", "keys", "(", ")", "if", "labels", "else", "tuple", "(", ")", "parent_metric", "=", "metric_type", "(", "name", ",", "description", ",", "labelnames", "=", "label_names", ",", "registry", "=", "registry", ",", "*", "*", "metric_kwargs", ")", "def", "argspec", "(", "func", ")", ":", "if", "hasattr", "(", "inspect", ",", "'getfullargspec'", ")", ":", "return", "inspect", ".", "getfullargspec", "(", "func", ")", "else", ":", "return", "inspect", ".", "getargspec", "(", "func", ")", "def", "label_value", "(", "f", ")", ":", "if", "not", "callable", "(", "f", ")", ":", "return", "lambda", "x", ":", "f", "if", "argspec", "(", "f", ")", ".", "args", ":", "return", "lambda", "x", ":", "f", "(", "x", ")", "else", ":", "return", "lambda", "x", ":", "f", "(", ")", "label_generator", "=", "tuple", "(", "(", "key", ",", "label_value", "(", "call", ")", ")", "for", "key", ",", "call", "in", "labels", ".", "items", "(", ")", ")", "if", "labels", "else", "tuple", "(", ")", "def", "get_metric", "(", "response", ")", ":", "if", "label_names", ":", "return", "parent_metric", ".", "labels", "(", "*", "*", "{", "key", ":", "call", "(", "response", ")", "for", "key", ",", "call", "in", "label_generator", "}", ")", "else", ":", "return", "parent_metric", "def", "decorator", "(", "f", ")", ":", "@", "functools", ".", "wraps", "(", "f", ")", "def", "func", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "if", "before", ":", "metric", "=", "get_metric", "(", "None", ")", "before", "(", "metric", ")", "else", ":", "metric", "=", "None", "start_time", "=", "default_timer", "(", ")", "try", ":", "response", "=", "f", "(", "*", "args", ",", "*", "*", "kwargs", ")", "except", "HTTPException", "as", "ex", ":", "response", "=", "ex", "except", "Exception", "as", "ex", ":", "response", "=", "make_response", "(", "'Exception: %s'", "%", "ex", ",", "500", ")", "total_time", "=", "max", "(", "default_timer", "(", ")", "-", "start_time", ",", "0", ")", "if", "not", "metric", ":", "response_for_metric", "=", "response", "if", "not", "isinstance", "(", "response", ",", "Response", ")", ":", "if", "request", ".", "endpoint", "==", "f", ".", "__name__", ":", "# we are in a request handler method", "response_for_metric", "=", "make_response", "(", "response", ")", "metric", "=", "get_metric", "(", "response_for_metric", ")", "metric_call", "(", "metric", ",", "time", "=", "total_time", ")", "return", "response", "return", "func", "return", "decorator" ]
Internal method decorator logic. :param metric_type: the type of the metric from the `prometheus_client` library :param metric_call: the invocation to execute as a callable with `(metric, time)` :param metric_kwargs: additional keyword arguments for creating the metric :param name: the name of the metric :param description: the description of the metric :param labels: a dictionary of `{labelname: callable_or_value}` for labels :param before: an optional callable to invoke before executing the request handler method accepting the single `metric` argument :param registry: the Prometheus Registry to use
[ "Internal", "method", "decorator", "logic", "." ]
678dbf3097e82a0ddb697268406004cc1f4a26bc
https://github.com/rycus86/prometheus_flask_exporter/blob/678dbf3097e82a0ddb697268406004cc1f4a26bc/prometheus_flask_exporter/__init__.py#L390-L477
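The `label_value` helper inside `_track` accepts three shapes of label value; a compact sketch of all three, derived from that branch logic (label names and values are illustrative):

    import os

    # Three label forms accepted by the decorators that _track backs:
    labels = {
        'service': 'checkout',                    # plain constant
        'worker': lambda: os.getpid(),            # zero-argument callable, invoked per request
        'status': lambda resp: resp.status_code,  # callable given the (possibly wrapped) response
    }
    # e.g. @metrics.counter('checkout_requests_total', 'Checkout requests', labels=labels)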
9,453
rycus86/prometheus_flask_exporter
prometheus_flask_exporter/__init__.py
PrometheusMetrics.do_not_track
def do_not_track(): """ Decorator to skip the default metrics collection for the method. *Note*: explicit metrics decorators will still collect the data """ def decorator(f): @functools.wraps(f) def func(*args, **kwargs): request.prom_do_not_track = True return f(*args, **kwargs) return func return decorator
python
def do_not_track(): """ Decorator to skip the default metrics collection for the method. *Note*: explicit metrics decorators will still collect the data """ def decorator(f): @functools.wraps(f) def func(*args, **kwargs): request.prom_do_not_track = True return f(*args, **kwargs) return func return decorator
[ "def", "do_not_track", "(", ")", ":", "def", "decorator", "(", "f", ")", ":", "@", "functools", ".", "wraps", "(", "f", ")", "def", "func", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "request", ".", "prom_do_not_track", "=", "True", "return", "f", "(", "*", "args", ",", "*", "*", "kwargs", ")", "return", "func", "return", "decorator" ]
Decorator to skip the default metrics collection for the method. *Note*: explicit metrics decorators will still collect the data
[ "Decorator", "to", "skip", "the", "default", "metrics", "collection", "for", "the", "method", "." ]
678dbf3097e82a0ddb697268406004cc1f4a26bc
https://github.com/rycus86/prometheus_flask_exporter/blob/678dbf3097e82a0ddb697268406004cc1f4a26bc/prometheus_flask_exporter/__init__.py#L480-L495
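`do_not_track` only flags the current request, so explicitly declared metrics on the same view still fire; a sketch of the typical health-check use, with illustrative names:

    from flask import Flask
    from prometheus_flask_exporter import PrometheusMetrics

    app = Flask(__name__)
    metrics = PrometheusMetrics(app)

    @app.route('/health')
    @metrics.do_not_track()   # skip the default request metrics for this endpoint
    def health():
        return 'up'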
9,454
rycus86/prometheus_flask_exporter
prometheus_flask_exporter/__init__.py
PrometheusMetrics.info
def info(self, name, description, labelnames=None, labelvalues=None, **labels): """ Report any information as a Prometheus metric. This will create a `Gauge` with the initial value of 1. The easiest way to use it is: metrics = PrometheusMetrics(app) metrics.info( 'app_info', 'Application info', version='1.0', major=1, minor=0 ) If the order of the labels matters: metrics = PrometheusMetrics(app) metrics.info( 'app_info', 'Application info', ('version', 'major', 'minor'), ('1.0', 1, 0) ) :param name: the name of the metric :param description: the description of the metric :param labelnames: the names of the labels :param labelvalues: the values of the labels :param labels: the names and values of the labels :return: the newly created `Gauge` metric """ if labels and labelnames: raise ValueError( 'Cannot have labels defined as `dict` ' 'and collections of names and values' ) if labelnames is None and labels: labelnames = labels.keys() elif labelnames and labelvalues: for idx, label_name in enumerate(labelnames): labels[label_name] = labelvalues[idx] gauge = Gauge( name, description, labelnames or tuple(), registry=self.registry ) if labels: gauge = gauge.labels(**labels) gauge.set(1) return gauge
python
def info(self, name, description, labelnames=None, labelvalues=None, **labels): """ Report any information as a Prometheus metric. This will create a `Gauge` with the initial value of 1. The easiest way to use it is: metrics = PrometheusMetrics(app) metrics.info( 'app_info', 'Application info', version='1.0', major=1, minor=0 ) If the order of the labels matters: metrics = PrometheusMetrics(app) metrics.info( 'app_info', 'Application info', ('version', 'major', 'minor'), ('1.0', 1, 0) ) :param name: the name of the metric :param description: the description of the metric :param labelnames: the names of the labels :param labelvalues: the values of the labels :param labels: the names and values of the labels :return: the newly created `Gauge` metric """ if labels and labelnames: raise ValueError( 'Cannot have labels defined as `dict` ' 'and collections of names and values' ) if labelnames is None and labels: labelnames = labels.keys() elif labelnames and labelvalues: for idx, label_name in enumerate(labelnames): labels[label_name] = labelvalues[idx] gauge = Gauge( name, description, labelnames or tuple(), registry=self.registry ) if labels: gauge = gauge.labels(**labels) gauge.set(1) return gauge
[ "def", "info", "(", "self", ",", "name", ",", "description", ",", "labelnames", "=", "None", ",", "labelvalues", "=", "None", ",", "*", "*", "labels", ")", ":", "if", "labels", "and", "labelnames", ":", "raise", "ValueError", "(", "'Cannot have labels defined as `dict` '", "'and collections of names and values'", ")", "if", "labelnames", "is", "None", "and", "labels", ":", "labelnames", "=", "labels", ".", "keys", "(", ")", "elif", "labelnames", "and", "labelvalues", ":", "for", "idx", ",", "label_name", "in", "enumerate", "(", "labelnames", ")", ":", "labels", "[", "label_name", "]", "=", "labelvalues", "[", "idx", "]", "gauge", "=", "Gauge", "(", "name", ",", "description", ",", "labelnames", "or", "tuple", "(", ")", ",", "registry", "=", "self", ".", "registry", ")", "if", "labels", ":", "gauge", "=", "gauge", ".", "labels", "(", "*", "*", "labels", ")", "gauge", ".", "set", "(", "1", ")", "return", "gauge" ]
Report any information as a Prometheus metric. This will create a `Gauge` with the initial value of 1. The easiest way to use it is: metrics = PrometheusMetrics(app) metrics.info( 'app_info', 'Application info', version='1.0', major=1, minor=0 ) If the order of the labels matters: metrics = PrometheusMetrics(app) metrics.info( 'app_info', 'Application info', ('version', 'major', 'minor'), ('1.0', 1, 0) ) :param name: the name of the metric :param description: the description of the metric :param labelnames: the names of the labels :param labelvalues: the values of the labels :param labels: the names and values of the labels :return: the newly created `Gauge` metric
[ "Report", "any", "information", "as", "a", "Prometheus", "metric", ".", "This", "will", "create", "a", "Gauge", "with", "the", "initial", "value", "of", "1", "." ]
678dbf3097e82a0ddb697268406004cc1f4a26bc
https://github.com/rycus86/prometheus_flask_exporter/blob/678dbf3097e82a0ddb697268406004cc1f4a26bc/prometheus_flask_exporter/__init__.py#L497-L550
9,455
berdario/pew
pew/pew.py
inve
def inve(env, command, *args, **kwargs): """Run a command in the given virtual environment. Pass additional keyword arguments to ``subprocess.check_call()``.""" # we don't strictly need to restore the environment, since pew runs in # its own process, but it feels like the right thing to do with temp_environ(): os.environ['VIRTUAL_ENV'] = str(workon_home / env) os.environ['PATH'] = compute_path(env) unsetenv('PYTHONHOME') unsetenv('__PYVENV_LAUNCHER__') try: return check_call([command] + list(args), shell=windows, **kwargs) # need to have shell=True on windows, otherwise the PYTHONPATH # won't inherit the PATH except OSError as e: if e.errno == 2: err('Unable to find', command) return 2 else: raise
python
def inve(env, command, *args, **kwargs): """Run a command in the given virtual environment. Pass additional keyword arguments to ``subprocess.check_call()``.""" # we don't strictly need to restore the environment, since pew runs in # its own process, but it feels like the right thing to do with temp_environ(): os.environ['VIRTUAL_ENV'] = str(workon_home / env) os.environ['PATH'] = compute_path(env) unsetenv('PYTHONHOME') unsetenv('__PYVENV_LAUNCHER__') try: return check_call([command] + list(args), shell=windows, **kwargs) # need to have shell=True on windows, otherwise the PYTHONPATH # won't inherit the PATH except OSError as e: if e.errno == 2: err('Unable to find', command) return 2 else: raise
[ "def", "inve", "(", "env", ",", "command", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "# we don't strictly need to restore the environment, since pew runs in", "# its own process, but it feels like the right thing to do", "with", "temp_environ", "(", ")", ":", "os", ".", "environ", "[", "'VIRTUAL_ENV'", "]", "=", "str", "(", "workon_home", "/", "env", ")", "os", ".", "environ", "[", "'PATH'", "]", "=", "compute_path", "(", "env", ")", "unsetenv", "(", "'PYTHONHOME'", ")", "unsetenv", "(", "'__PYVENV_LAUNCHER__'", ")", "try", ":", "return", "check_call", "(", "[", "command", "]", "+", "list", "(", "args", ")", ",", "shell", "=", "windows", ",", "*", "*", "kwargs", ")", "# need to have shell=True on windows, otherwise the PYTHONPATH", "# won't inherit the PATH", "except", "OSError", "as", "e", ":", "if", "e", ".", "errno", "==", "2", ":", "err", "(", "'Unable to find'", ",", "command", ")", "return", "2", "else", ":", "raise" ]
Run a command in the given virtual environment. Pass additional keyword arguments to ``subprocess.check_call()``.
[ "Run", "a", "command", "in", "the", "given", "virtual", "environment", "." ]
37d9ff79342336b8ef6437d9a551008be07afe9b
https://github.com/berdario/pew/blob/37d9ff79342336b8ef6437d9a551008be07afe9b/pew/pew.py#L130-L152
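A call sketch for `inve`, assuming an existing environment named `myenv` (a placeholder) under `workon_home`; extra keyword arguments such as `cwd` are forwarded to `subprocess.check_call()` per the docstring:

    from pew.pew import inve

    # Run pip inside the "myenv" environment; returns check_call's result,
    # or 2 if the command cannot be found.
    exit_code = inve('myenv', 'pip', 'list', cwd='/tmp')
    print(exit_code)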
9,456
berdario/pew
pew/pew.py
ls_cmd
def ls_cmd(argv): """List available environments.""" parser = argparse.ArgumentParser() p_group = parser.add_mutually_exclusive_group() p_group.add_argument('-b', '--brief', action='store_false') p_group.add_argument('-l', '--long', action='store_true') args = parser.parse_args(argv) lsvirtualenv(args.long)
python
def ls_cmd(argv): """List available environments.""" parser = argparse.ArgumentParser() p_group = parser.add_mutually_exclusive_group() p_group.add_argument('-b', '--brief', action='store_false') p_group.add_argument('-l', '--long', action='store_true') args = parser.parse_args(argv) lsvirtualenv(args.long)
[ "def", "ls_cmd", "(", "argv", ")", ":", "parser", "=", "argparse", ".", "ArgumentParser", "(", ")", "p_group", "=", "parser", ".", "add_mutually_exclusive_group", "(", ")", "p_group", ".", "add_argument", "(", "'-b'", ",", "'--brief'", ",", "action", "=", "'store_false'", ")", "p_group", ".", "add_argument", "(", "'-l'", ",", "'--long'", ",", "action", "=", "'store_true'", ")", "args", "=", "parser", ".", "parse_args", "(", "argv", ")", "lsvirtualenv", "(", "args", ".", "long", ")" ]
List available environments.
[ "List", "available", "environments", "." ]
37d9ff79342336b8ef6437d9a551008be07afe9b
https://github.com/berdario/pew/blob/37d9ff79342336b8ef6437d9a551008be07afe9b/pew/pew.py#L345-L352
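Each `*_cmd` function in pew/pew.py receives the argument list that follows the subcommand; a direct-call sketch, assuming the usual mapping of function names to `pew` subcommands:

    from pew.pew import ls_cmd, workon_cmd

    ls_cmd(['--long'])       # roughly: pew ls --long
    # workon_cmd(['myenv'])  # roughly: pew workon myenv (spawns a subshell in that env)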
9,457
berdario/pew
pew/pew.py
workon_cmd
def workon_cmd(argv): """List or change working virtual environments.""" parser = argparse.ArgumentParser(prog='pew workon') parser.add_argument('envname', nargs='?') parser.add_argument( '-n', '--no-cd', action='store_true', help=('Do not change working directory to project directory after ' 'activating virtualenv.') ) args = parser.parse_args(argv) def list_and_exit(): lsvirtualenv(False) sys.exit(0) env = parse_envname([args.envname], list_and_exit) # Check if the virtualenv has an associated project directory and in # this case, use it as the current working directory. project_dir = get_project_dir(env) if project_dir is None or args.no_cd: project_dir = os.getcwd() return shell(env, cwd=project_dir)
python
def workon_cmd(argv): """List or change working virtual environments.""" parser = argparse.ArgumentParser(prog='pew workon') parser.add_argument('envname', nargs='?') parser.add_argument( '-n', '--no-cd', action='store_true', help=('Do not change working directory to project directory after ' 'activating virtualenv.') ) args = parser.parse_args(argv) def list_and_exit(): lsvirtualenv(False) sys.exit(0) env = parse_envname([args.envname], list_and_exit) # Check if the virtualenv has an associated project directory and in # this case, use it as the current working directory. project_dir = get_project_dir(env) if project_dir is None or args.no_cd: project_dir = os.getcwd() return shell(env, cwd=project_dir)
[ "def", "workon_cmd", "(", "argv", ")", ":", "parser", "=", "argparse", ".", "ArgumentParser", "(", "prog", "=", "'pew workon'", ")", "parser", ".", "add_argument", "(", "'envname'", ",", "nargs", "=", "'?'", ")", "parser", ".", "add_argument", "(", "'-n'", ",", "'--no-cd'", ",", "action", "=", "'store_true'", ",", "help", "=", "(", "'Do not change working directory to project directory after '", "'activating virtualenv.'", ")", ")", "args", "=", "parser", ".", "parse_args", "(", "argv", ")", "def", "list_and_exit", "(", ")", ":", "lsvirtualenv", "(", "False", ")", "sys", ".", "exit", "(", "0", ")", "env", "=", "parse_envname", "(", "[", "args", ".", "envname", "]", ",", "list_and_exit", ")", "# Check if the virtualenv has an associated project directory and in", "# this case, use it as the current working directory.", "project_dir", "=", "get_project_dir", "(", "env", ")", "if", "project_dir", "is", "None", "or", "args", ".", "no_cd", ":", "project_dir", "=", "os", ".", "getcwd", "(", ")", "return", "shell", "(", "env", ",", "cwd", "=", "project_dir", ")" ]
List or change working virtual environments.
[ "List", "or", "change", "working", "virtual", "environments", "." ]
37d9ff79342336b8ef6437d9a551008be07afe9b
https://github.com/berdario/pew/blob/37d9ff79342336b8ef6437d9a551008be07afe9b/pew/pew.py#L367-L390
9,458
berdario/pew
pew/pew.py
add_cmd
def add_cmd(argv): """Add the specified directories to the Python path for the currently active virtualenv. This will be done by placing the directory names in a path file named "virtualenv_path_extensions.pth" inside the virtualenv's site-packages directory; if this file does not exists, it will be created first. """ parser = argparse.ArgumentParser() parser.add_argument('-d', dest='remove', action='store_true') parser.add_argument('dirs', nargs='+') args = parser.parse_args(argv) extra_paths = sitepackages_dir() / '_virtualenv_path_extensions.pth' new_paths = [os.path.abspath(d) + "\n" for d in args.dirs] if not extra_paths.exists(): with extra_paths.open('w') as extra: extra.write('''import sys; sys.__plen = len(sys.path) import sys; new=sys.path[sys.__plen:]; del sys.path[sys.__plen:]; p=getattr(sys,'__egginsert',0); sys.path[p:p]=new; sys.__egginsert = p+len(new) ''') def rewrite(f): with extra_paths.open('r+') as extra: to_write = f(extra.readlines()) extra.seek(0) extra.truncate() extra.writelines(to_write) if args.remove: rewrite(lambda ls: [line for line in ls if line not in new_paths]) else: rewrite(lambda lines: lines[0:1] + new_paths + lines[1:])
python
def add_cmd(argv): """Add the specified directories to the Python path for the currently active virtualenv. This will be done by placing the directory names in a path file named "virtualenv_path_extensions.pth" inside the virtualenv's site-packages directory; if this file does not exists, it will be created first. """ parser = argparse.ArgumentParser() parser.add_argument('-d', dest='remove', action='store_true') parser.add_argument('dirs', nargs='+') args = parser.parse_args(argv) extra_paths = sitepackages_dir() / '_virtualenv_path_extensions.pth' new_paths = [os.path.abspath(d) + "\n" for d in args.dirs] if not extra_paths.exists(): with extra_paths.open('w') as extra: extra.write('''import sys; sys.__plen = len(sys.path) import sys; new=sys.path[sys.__plen:]; del sys.path[sys.__plen:]; p=getattr(sys,'__egginsert',0); sys.path[p:p]=new; sys.__egginsert = p+len(new) ''') def rewrite(f): with extra_paths.open('r+') as extra: to_write = f(extra.readlines()) extra.seek(0) extra.truncate() extra.writelines(to_write) if args.remove: rewrite(lambda ls: [line for line in ls if line not in new_paths]) else: rewrite(lambda lines: lines[0:1] + new_paths + lines[1:])
[ "def", "add_cmd", "(", "argv", ")", ":", "parser", "=", "argparse", ".", "ArgumentParser", "(", ")", "parser", ".", "add_argument", "(", "'-d'", ",", "dest", "=", "'remove'", ",", "action", "=", "'store_true'", ")", "parser", ".", "add_argument", "(", "'dirs'", ",", "nargs", "=", "'+'", ")", "args", "=", "parser", ".", "parse_args", "(", "argv", ")", "extra_paths", "=", "sitepackages_dir", "(", ")", "/", "'_virtualenv_path_extensions.pth'", "new_paths", "=", "[", "os", ".", "path", ".", "abspath", "(", "d", ")", "+", "\"\\n\"", "for", "d", "in", "args", ".", "dirs", "]", "if", "not", "extra_paths", ".", "exists", "(", ")", ":", "with", "extra_paths", ".", "open", "(", "'w'", ")", "as", "extra", ":", "extra", ".", "write", "(", "'''import sys; sys.__plen = len(sys.path)\nimport sys; new=sys.path[sys.__plen:]; del sys.path[sys.__plen:]; p=getattr(sys,'__egginsert',0); sys.path[p:p]=new; sys.__egginsert = p+len(new)\n '''", ")", "def", "rewrite", "(", "f", ")", ":", "with", "extra_paths", ".", "open", "(", "'r+'", ")", "as", "extra", ":", "to_write", "=", "f", "(", "extra", ".", "readlines", "(", ")", ")", "extra", ".", "seek", "(", "0", ")", "extra", ".", "truncate", "(", ")", "extra", ".", "writelines", "(", "to_write", ")", "if", "args", ".", "remove", ":", "rewrite", "(", "lambda", "ls", ":", "[", "line", "for", "line", "in", "ls", "if", "line", "not", "in", "new_paths", "]", ")", "else", ":", "rewrite", "(", "lambda", "lines", ":", "lines", "[", "0", ":", "1", "]", "+", "new_paths", "+", "lines", "[", "1", ":", "]", ")" ]
Add the specified directories to the Python path for the currently active virtualenv. This will be done by placing the directory names in a path file named "virtualenv_path_extensions.pth" inside the virtualenv's site-packages directory; if this file does not exists, it will be created first.
[ "Add", "the", "specified", "directories", "to", "the", "Python", "path", "for", "the", "currently", "active", "virtualenv", "." ]
37d9ff79342336b8ef6437d9a551008be07afe9b
https://github.com/berdario/pew/blob/37d9ff79342336b8ef6437d9a551008be07afe9b/pew/pew.py#L402-L433
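A sketch of driving `add_cmd` directly from Python, with placeholder directory names, following the argparse definition above (`-d` flips the call into removal mode):

    from pew.pew import add_cmd

    add_cmd(['./src', './vendor'])   # append both directories to the extensions .pth file
    add_cmd(['-d', './vendor'])      # -d removes previously added directories instead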
9,459
berdario/pew
pew/pew.py
lssitepackages_cmd
def lssitepackages_cmd(argv): """Show the content of the site-packages directory of the current virtualenv.""" site = sitepackages_dir() print(*sorted(site.iterdir()), sep=os.linesep) extra_paths = site / '_virtualenv_path_extensions.pth' if extra_paths.exists(): print('from _virtualenv_path_extensions.pth:') with extra_paths.open() as extra: print(''.join(extra.readlines()))
python
def lssitepackages_cmd(argv): """Show the content of the site-packages directory of the current virtualenv.""" site = sitepackages_dir() print(*sorted(site.iterdir()), sep=os.linesep) extra_paths = site / '_virtualenv_path_extensions.pth' if extra_paths.exists(): print('from _virtualenv_path_extensions.pth:') with extra_paths.open() as extra: print(''.join(extra.readlines()))
[ "def", "lssitepackages_cmd", "(", "argv", ")", ":", "site", "=", "sitepackages_dir", "(", ")", "print", "(", "*", "sorted", "(", "site", ".", "iterdir", "(", ")", ")", ",", "sep", "=", "os", ".", "linesep", ")", "extra_paths", "=", "site", "/", "'_virtualenv_path_extensions.pth'", "if", "extra_paths", ".", "exists", "(", ")", ":", "print", "(", "'from _virtualenv_path_extensions.pth:'", ")", "with", "extra_paths", ".", "open", "(", ")", "as", "extra", ":", "print", "(", "''", ".", "join", "(", "extra", ".", "readlines", "(", ")", ")", ")" ]
Show the content of the site-packages directory of the current virtualenv.
[ "Show", "the", "content", "of", "the", "site", "-", "packages", "directory", "of", "the", "current", "virtualenv", "." ]
37d9ff79342336b8ef6437d9a551008be07afe9b
https://github.com/berdario/pew/blob/37d9ff79342336b8ef6437d9a551008be07afe9b/pew/pew.py#L440-L448
9,460
berdario/pew
pew/pew.py
toggleglobalsitepackages_cmd
def toggleglobalsitepackages_cmd(argv): """Toggle the current virtualenv between having and not having access to the global site-packages.""" quiet = argv == ['-q'] site = sitepackages_dir() ngsp_file = site.parent / 'no-global-site-packages.txt' if ngsp_file.exists(): ngsp_file.unlink() if not quiet: print('Enabled global site-packages') else: with ngsp_file.open('w'): if not quiet: print('Disabled global site-packages')
python
def toggleglobalsitepackages_cmd(argv): """Toggle the current virtualenv between having and not having access to the global site-packages.""" quiet = argv == ['-q'] site = sitepackages_dir() ngsp_file = site.parent / 'no-global-site-packages.txt' if ngsp_file.exists(): ngsp_file.unlink() if not quiet: print('Enabled global site-packages') else: with ngsp_file.open('w'): if not quiet: print('Disabled global site-packages')
[ "def", "toggleglobalsitepackages_cmd", "(", "argv", ")", ":", "quiet", "=", "argv", "==", "[", "'-q'", "]", "site", "=", "sitepackages_dir", "(", ")", "ngsp_file", "=", "site", ".", "parent", "/", "'no-global-site-packages.txt'", "if", "ngsp_file", ".", "exists", "(", ")", ":", "ngsp_file", ".", "unlink", "(", ")", "if", "not", "quiet", ":", "print", "(", "'Enabled global site-packages'", ")", "else", ":", "with", "ngsp_file", ".", "open", "(", "'w'", ")", ":", "if", "not", "quiet", ":", "print", "(", "'Disabled global site-packages'", ")" ]
Toggle the current virtualenv between having and not having access to the global site-packages.
[ "Toggle", "the", "current", "virtualenv", "between", "having", "and", "not", "having", "access", "to", "the", "global", "site", "-", "packages", "." ]
37d9ff79342336b8ef6437d9a551008be07afe9b
https://github.com/berdario/pew/blob/37d9ff79342336b8ef6437d9a551008be07afe9b/pew/pew.py#L451-L463
9,461
berdario/pew
pew/pew.py
cp_cmd
def cp_cmd(argv): """Duplicate the named virtualenv to make a new one.""" parser = argparse.ArgumentParser() parser.add_argument('source') parser.add_argument('target', nargs='?') parser.add_argument('-d', '--dont-activate', action='store_false', default=True, dest='activate', help="After \ creation, continue with the existing shell (don't \ activate the new environment).") args = parser.parse_args(argv) target_name = copy_virtualenv_project(args.source, args.target) if args.activate: shell(target_name)
python
def cp_cmd(argv): """Duplicate the named virtualenv to make a new one.""" parser = argparse.ArgumentParser() parser.add_argument('source') parser.add_argument('target', nargs='?') parser.add_argument('-d', '--dont-activate', action='store_false', default=True, dest='activate', help="After \ creation, continue with the existing shell (don't \ activate the new environment).") args = parser.parse_args(argv) target_name = copy_virtualenv_project(args.source, args.target) if args.activate: shell(target_name)
[ "def", "cp_cmd", "(", "argv", ")", ":", "parser", "=", "argparse", ".", "ArgumentParser", "(", ")", "parser", ".", "add_argument", "(", "'source'", ")", "parser", ".", "add_argument", "(", "'target'", ",", "nargs", "=", "'?'", ")", "parser", ".", "add_argument", "(", "'-d'", ",", "'--dont-activate'", ",", "action", "=", "'store_false'", ",", "default", "=", "True", ",", "dest", "=", "'activate'", ",", "help", "=", "\"After \\\n creation, continue with the existing shell (don't \\\n activate the new environment).\"", ")", "args", "=", "parser", ".", "parse_args", "(", "argv", ")", "target_name", "=", "copy_virtualenv_project", "(", "args", ".", "source", ",", "args", ".", "target", ")", "if", "args", ".", "activate", ":", "shell", "(", "target_name", ")" ]
Duplicate the named virtualenv to make a new one.
[ "Duplicate", "the", "named", "virtualenv", "to", "make", "a", "new", "one", "." ]
37d9ff79342336b8ef6437d9a551008be07afe9b
https://github.com/berdario/pew/blob/37d9ff79342336b8ef6437d9a551008be07afe9b/pew/pew.py#L466-L479
9,462
berdario/pew
pew/pew.py
rename_cmd
def rename_cmd(argv): """Rename a virtualenv""" parser = argparse.ArgumentParser() parser.add_argument('source') parser.add_argument('target') pargs = parser.parse_args(argv) copy_virtualenv_project(pargs.source, pargs.target) return rmvirtualenvs([pargs.source])
python
def rename_cmd(argv): """Rename a virtualenv""" parser = argparse.ArgumentParser() parser.add_argument('source') parser.add_argument('target') pargs = parser.parse_args(argv) copy_virtualenv_project(pargs.source, pargs.target) return rmvirtualenvs([pargs.source])
[ "def", "rename_cmd", "(", "argv", ")", ":", "parser", "=", "argparse", ".", "ArgumentParser", "(", ")", "parser", ".", "add_argument", "(", "'source'", ")", "parser", ".", "add_argument", "(", "'target'", ")", "pargs", "=", "parser", ".", "parse_args", "(", "argv", ")", "copy_virtualenv_project", "(", "pargs", ".", "source", ",", "pargs", ".", "target", ")", "return", "rmvirtualenvs", "(", "[", "pargs", ".", "source", "]", ")" ]
Rename a virtualenv
[ "Rename", "a", "virtualenv" ]
37d9ff79342336b8ef6437d9a551008be07afe9b
https://github.com/berdario/pew/blob/37d9ff79342336b8ef6437d9a551008be07afe9b/pew/pew.py#L503-L510
9,463
berdario/pew
pew/pew.py
setproject_cmd
def setproject_cmd(argv): """Given a virtualenv directory and a project directory, set the \ virtualenv up to be associated with the project.""" args = dict(enumerate(argv)) project = os.path.abspath(args.get(1, '.')) env = args.get(0, os.environ.get('VIRTUAL_ENV')) if not env: sys.exit('pew setproject [virtualenv] [project_path]') if not (workon_home / env).exists(): sys.exit("Environment '%s' doesn't exist." % env) if not os.path.isdir(project): sys.exit('pew setproject: %s does not exist' % project) setvirtualenvproject(env, project)
python
def setproject_cmd(argv): """Given a virtualenv directory and a project directory, set the \ virtualenv up to be associated with the project.""" args = dict(enumerate(argv)) project = os.path.abspath(args.get(1, '.')) env = args.get(0, os.environ.get('VIRTUAL_ENV')) if not env: sys.exit('pew setproject [virtualenv] [project_path]') if not (workon_home / env).exists(): sys.exit("Environment '%s' doesn't exist." % env) if not os.path.isdir(project): sys.exit('pew setproject: %s does not exist' % project) setvirtualenvproject(env, project)
[ "def", "setproject_cmd", "(", "argv", ")", ":", "args", "=", "dict", "(", "enumerate", "(", "argv", ")", ")", "project", "=", "os", ".", "path", ".", "abspath", "(", "args", ".", "get", "(", "1", ",", "'.'", ")", ")", "env", "=", "args", ".", "get", "(", "0", ",", "os", ".", "environ", ".", "get", "(", "'VIRTUAL_ENV'", ")", ")", "if", "not", "env", ":", "sys", ".", "exit", "(", "'pew setproject [virtualenv] [project_path]'", ")", "if", "not", "(", "workon_home", "/", "env", ")", ".", "exists", "(", ")", ":", "sys", ".", "exit", "(", "\"Environment '%s' doesn't exist.\"", "%", "env", ")", "if", "not", "os", ".", "path", ".", "isdir", "(", "project", ")", ":", "sys", ".", "exit", "(", "'pew setproject: %s does not exist'", "%", "project", ")", "setvirtualenvproject", "(", "env", ",", "project", ")" ]
Given a virtualenv directory and a project directory, set the \ virtualenv up to be associated with the project.
[ "Given", "a", "virtualenv", "directory", "and", "a", "project", "directory", "set", "the", "\\", "virtualenv", "up", "to", "be", "associated", "with", "the", "project", "." ]
37d9ff79342336b8ef6437d9a551008be07afe9b
https://github.com/berdario/pew/blob/37d9ff79342336b8ef6437d9a551008be07afe9b/pew/pew.py#L519-L531
9,464
berdario/pew
pew/pew.py
getproject_cmd
def getproject_cmd(argv): """Print a virtualenv's project directory, if set. If called without providing a virtualenv name as argument, print the current virtualenv's project directory. """ # Parse command line arguments parser = argparse.ArgumentParser( description="Print an environment's project directory.", ) parser.add_argument( 'envname', nargs='?', default=os.environ.get('VIRTUAL_ENV'), help=( 'The name of the environment to return the project directory ' 'for. If omitted, will use the currently active environment.' ), ) args = parser.parse_args(argv) # Now, do the actual work if not args.envname: sys.exit('ERROR: no virtualenv active') if not (workon_home / args.envname).exists(): sys.exit("ERROR: Environment '{0}' does not exist." .format(args.envname)) project_dir = get_project_dir(args.envname) if project_dir is None: sys.exit("ERROR: no project directory set for Environment '{0}'" .format(args.envname)) print(project_dir)
python
def getproject_cmd(argv): """Print a virtualenv's project directory, if set. If called without providing a virtualenv name as argument, print the current virtualenv's project directory. """ # Parse command line arguments parser = argparse.ArgumentParser( description="Print an environment's project directory.", ) parser.add_argument( 'envname', nargs='?', default=os.environ.get('VIRTUAL_ENV'), help=( 'The name of the environment to return the project directory ' 'for. If omitted, will use the currently active environment.' ), ) args = parser.parse_args(argv) # Now, do the actual work if not args.envname: sys.exit('ERROR: no virtualenv active') if not (workon_home / args.envname).exists(): sys.exit("ERROR: Environment '{0}' does not exist." .format(args.envname)) project_dir = get_project_dir(args.envname) if project_dir is None: sys.exit("ERROR: no project directory set for Environment '{0}'" .format(args.envname)) print(project_dir)
[ "def", "getproject_cmd", "(", "argv", ")", ":", "# Parse command line arguments", "parser", "=", "argparse", ".", "ArgumentParser", "(", "description", "=", "\"Print an environment's project directory.\"", ",", ")", "parser", ".", "add_argument", "(", "'envname'", ",", "nargs", "=", "'?'", ",", "default", "=", "os", ".", "environ", ".", "get", "(", "'VIRTUAL_ENV'", ")", ",", "help", "=", "(", "'The name of the environment to return the project directory '", "'for. If omitted, will use the currently active environment.'", ")", ",", ")", "args", "=", "parser", ".", "parse_args", "(", "argv", ")", "# Now, do the actual work", "if", "not", "args", ".", "envname", ":", "sys", ".", "exit", "(", "'ERROR: no virtualenv active'", ")", "if", "not", "(", "workon_home", "/", "args", ".", "envname", ")", ".", "exists", "(", ")", ":", "sys", ".", "exit", "(", "\"ERROR: Environment '{0}' does not exist.\"", ".", "format", "(", "args", ".", "envname", ")", ")", "project_dir", "=", "get_project_dir", "(", "args", ".", "envname", ")", "if", "project_dir", "is", "None", ":", "sys", ".", "exit", "(", "\"ERROR: no project directory set for Environment '{0}'\"", ".", "format", "(", "args", ".", "envname", ")", ")", "print", "(", "project_dir", ")" ]
Print a virtualenv's project directory, if set. If called without providing a virtualenv name as argument, print the current virtualenv's project directory.
[ "Print", "a", "virtualenv", "s", "project", "directory", "if", "set", "." ]
37d9ff79342336b8ef6437d9a551008be07afe9b
https://github.com/berdario/pew/blob/37d9ff79342336b8ef6437d9a551008be07afe9b/pew/pew.py#L534-L564
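A sketch pairing `getproject_cmd` with `setproject_cmd` from the previous row; the environment name and project path are placeholders and must already exist, otherwise the functions exit with an error as shown above:

    from pew.pew import setproject_cmd, getproject_cmd

    setproject_cmd(['myenv', '/home/user/projects/myapp'])  # bind the env to a project dir
    getproject_cmd(['myenv'])                               # prints the directory bound above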
9,465
berdario/pew
pew/pew.py
mkproject_cmd
def mkproject_cmd(argv): """Create a new project directory and its associated virtualenv.""" if '-l' in argv or '--list' in argv: templates = [t.name[9:] for t in workon_home.glob("template_*")] print("Available project templates:", *templates, sep='\n') return parser = mkvirtualenv_argparser() parser.add_argument('envname') parser.add_argument( '-t', action='append', default=[], dest='templates', help='Multiple \ templates may be selected. They are applied in the order specified on the \ command line.') parser.add_argument( '-l', '--list', action='store_true', help='List available templates.') args, rest = parser.parse_known_args(argv) projects_home = Path(os.environ.get('PROJECT_HOME', '.')) if not projects_home.exists(): sys.exit('ERROR: Projects directory %s does not exist. \ Create it or set PROJECT_HOME to an existing directory.' % projects_home) project = (projects_home / args.envname).absolute() if project.exists(): sys.exit('Project %s already exists.' % args.envname) mkvirtualenv(args.envname, args.python, args.packages, project.absolute(), args.requirements, rest) project.mkdir() for template_name in args.templates: template = workon_home / ("template_" + template_name) inve(args.envname, str(template), args.envname, str(project)) if args.activate: shell(args.envname, cwd=str(project))
python
def mkproject_cmd(argv): """Create a new project directory and its associated virtualenv.""" if '-l' in argv or '--list' in argv: templates = [t.name[9:] for t in workon_home.glob("template_*")] print("Available project templates:", *templates, sep='\n') return parser = mkvirtualenv_argparser() parser.add_argument('envname') parser.add_argument( '-t', action='append', default=[], dest='templates', help='Multiple \ templates may be selected. They are applied in the order specified on the \ command line.') parser.add_argument( '-l', '--list', action='store_true', help='List available templates.') args, rest = parser.parse_known_args(argv) projects_home = Path(os.environ.get('PROJECT_HOME', '.')) if not projects_home.exists(): sys.exit('ERROR: Projects directory %s does not exist. \ Create it or set PROJECT_HOME to an existing directory.' % projects_home) project = (projects_home / args.envname).absolute() if project.exists(): sys.exit('Project %s already exists.' % args.envname) mkvirtualenv(args.envname, args.python, args.packages, project.absolute(), args.requirements, rest) project.mkdir() for template_name in args.templates: template = workon_home / ("template_" + template_name) inve(args.envname, str(template), args.envname, str(project)) if args.activate: shell(args.envname, cwd=str(project))
[ "def", "mkproject_cmd", "(", "argv", ")", ":", "if", "'-l'", "in", "argv", "or", "'--list'", "in", "argv", ":", "templates", "=", "[", "t", ".", "name", "[", "9", ":", "]", "for", "t", "in", "workon_home", ".", "glob", "(", "\"template_*\"", ")", "]", "print", "(", "\"Available project templates:\"", ",", "*", "templates", ",", "sep", "=", "'\\n'", ")", "return", "parser", "=", "mkvirtualenv_argparser", "(", ")", "parser", ".", "add_argument", "(", "'envname'", ")", "parser", ".", "add_argument", "(", "'-t'", ",", "action", "=", "'append'", ",", "default", "=", "[", "]", ",", "dest", "=", "'templates'", ",", "help", "=", "'Multiple \\\ntemplates may be selected. They are applied in the order specified on the \\\ncommand line.'", ")", "parser", ".", "add_argument", "(", "'-l'", ",", "'--list'", ",", "action", "=", "'store_true'", ",", "help", "=", "'List available templates.'", ")", "args", ",", "rest", "=", "parser", ".", "parse_known_args", "(", "argv", ")", "projects_home", "=", "Path", "(", "os", ".", "environ", ".", "get", "(", "'PROJECT_HOME'", ",", "'.'", ")", ")", "if", "not", "projects_home", ".", "exists", "(", ")", ":", "sys", ".", "exit", "(", "'ERROR: Projects directory %s does not exist. \\\nCreate it or set PROJECT_HOME to an existing directory.'", "%", "projects_home", ")", "project", "=", "(", "projects_home", "/", "args", ".", "envname", ")", ".", "absolute", "(", ")", "if", "project", ".", "exists", "(", ")", ":", "sys", ".", "exit", "(", "'Project %s already exists.'", "%", "args", ".", "envname", ")", "mkvirtualenv", "(", "args", ".", "envname", ",", "args", ".", "python", ",", "args", ".", "packages", ",", "project", ".", "absolute", "(", ")", ",", "args", ".", "requirements", ",", "rest", ")", "project", ".", "mkdir", "(", ")", "for", "template_name", "in", "args", ".", "templates", ":", "template", "=", "workon_home", "/", "(", "\"template_\"", "+", "template_name", ")", "inve", "(", "args", ".", "envname", ",", "str", "(", "template", ")", ",", "args", ".", "envname", ",", "str", "(", "project", ")", ")", "if", "args", ".", "activate", ":", "shell", "(", "args", ".", "envname", ",", "cwd", "=", "str", "(", "project", ")", ")" ]
Create a new project directory and its associated virtualenv.
[ "Create", "a", "new", "project", "directory", "and", "its", "associated", "virtualenv", "." ]
37d9ff79342336b8ef6437d9a551008be07afe9b
https://github.com/berdario/pew/blob/37d9ff79342336b8ef6437d9a551008be07afe9b/pew/pew.py#L567-L603
9,466
berdario/pew
pew/pew.py
mktmpenv_cmd
def mktmpenv_cmd(argv): """Create a temporary virtualenv.""" parser = mkvirtualenv_argparser() env = '.' while (workon_home / env).exists(): env = hex(random.getrandbits(64))[2:-1] args, rest = parser.parse_known_args(argv) mkvirtualenv(env, args.python, args.packages, requirements=args.requirements, rest=rest) print('This is a temporary environment. It will be deleted when you exit') try: if args.activate: # only used for testing on windows shell(env) finally: return rmvirtualenvs([env])
python
def mktmpenv_cmd(argv): """Create a temporary virtualenv.""" parser = mkvirtualenv_argparser() env = '.' while (workon_home / env).exists(): env = hex(random.getrandbits(64))[2:-1] args, rest = parser.parse_known_args(argv) mkvirtualenv(env, args.python, args.packages, requirements=args.requirements, rest=rest) print('This is a temporary environment. It will be deleted when you exit') try: if args.activate: # only used for testing on windows shell(env) finally: return rmvirtualenvs([env])
[ "def", "mktmpenv_cmd", "(", "argv", ")", ":", "parser", "=", "mkvirtualenv_argparser", "(", ")", "env", "=", "'.'", "while", "(", "workon_home", "/", "env", ")", ".", "exists", "(", ")", ":", "env", "=", "hex", "(", "random", ".", "getrandbits", "(", "64", ")", ")", "[", "2", ":", "-", "1", "]", "args", ",", "rest", "=", "parser", ".", "parse_known_args", "(", "argv", ")", "mkvirtualenv", "(", "env", ",", "args", ".", "python", ",", "args", ".", "packages", ",", "requirements", "=", "args", ".", "requirements", ",", "rest", "=", "rest", ")", "print", "(", "'This is a temporary environment. It will be deleted when you exit'", ")", "try", ":", "if", "args", ".", "activate", ":", "# only used for testing on windows", "shell", "(", "env", ")", "finally", ":", "return", "rmvirtualenvs", "(", "[", "env", "]", ")" ]
Create a temporary virtualenv.
[ "Create", "a", "temporary", "virtualenv", "." ]
37d9ff79342336b8ef6437d9a551008be07afe9b
https://github.com/berdario/pew/blob/37d9ff79342336b8ef6437d9a551008be07afe9b/pew/pew.py#L606-L623
9,467
berdario/pew
pew/pew.py
inall_cmd
def inall_cmd(argv): """Run a command in each virtualenv.""" envs = lsenvs() errors = False for env in envs: print("\n%s:" % env) try: inve(env, *argv) except CalledProcessError as e: errors = True err(e) sys.exit(errors)
python
def inall_cmd(argv): """Run a command in each virtualenv.""" envs = lsenvs() errors = False for env in envs: print("\n%s:" % env) try: inve(env, *argv) except CalledProcessError as e: errors = True err(e) sys.exit(errors)
[ "def", "inall_cmd", "(", "argv", ")", ":", "envs", "=", "lsenvs", "(", ")", "errors", "=", "False", "for", "env", "in", "envs", ":", "print", "(", "\"\\n%s:\"", "%", "env", ")", "try", ":", "inve", "(", "env", ",", "*", "argv", ")", "except", "CalledProcessError", "as", "e", ":", "errors", "=", "True", "err", "(", "e", ")", "sys", ".", "exit", "(", "errors", ")" ]
Run a command in each virtualenv.
[ "Run", "a", "command", "in", "each", "virtualenv", "." ]
37d9ff79342336b8ef6437d9a551008be07afe9b
https://github.com/berdario/pew/blob/37d9ff79342336b8ef6437d9a551008be07afe9b/pew/pew.py#L649-L660
9,468
berdario/pew
pew/pew.py
in_cmd
def in_cmd(argv): """Run a command in the given virtualenv.""" if len(argv) == 1: return workon_cmd(argv) parse_envname(argv, lambda : sys.exit('You must provide a valid virtualenv to target')) return inve(*argv)
python
def in_cmd(argv): """Run a command in the given virtualenv.""" if len(argv) == 1: return workon_cmd(argv) parse_envname(argv, lambda : sys.exit('You must provide a valid virtualenv to target')) return inve(*argv)
[ "def", "in_cmd", "(", "argv", ")", ":", "if", "len", "(", "argv", ")", "==", "1", ":", "return", "workon_cmd", "(", "argv", ")", "parse_envname", "(", "argv", ",", "lambda", ":", "sys", ".", "exit", "(", "'You must provide a valid virtualenv to target'", ")", ")", "return", "inve", "(", "*", "argv", ")" ]
Run a command in the given virtualenv.
[ "Run", "a", "command", "in", "the", "given", "virtualenv", "." ]
37d9ff79342336b8ef6437d9a551008be07afe9b
https://github.com/berdario/pew/blob/37d9ff79342336b8ef6437d9a551008be07afe9b/pew/pew.py#L663-L671
9,469
berdario/pew
pew/pew.py
restore_cmd
def restore_cmd(argv): """Try to restore a broken virtualenv by reinstalling the same python version on top of it""" if len(argv) < 1: sys.exit('You must provide a valid virtualenv to target') env = argv[0] path = workon_home / env py = path / env_bin_dir / ('python.exe' if windows else 'python') exact_py = py.resolve().name return check_call([sys.executable, "-m", "virtualenv", str(path.absolute()), "--python=%s" % exact_py])
python
def restore_cmd(argv): """Try to restore a broken virtualenv by reinstalling the same python version on top of it""" if len(argv) < 1: sys.exit('You must provide a valid virtualenv to target') env = argv[0] path = workon_home / env py = path / env_bin_dir / ('python.exe' if windows else 'python') exact_py = py.resolve().name return check_call([sys.executable, "-m", "virtualenv", str(path.absolute()), "--python=%s" % exact_py])
[ "def", "restore_cmd", "(", "argv", ")", ":", "if", "len", "(", "argv", ")", "<", "1", ":", "sys", ".", "exit", "(", "'You must provide a valid virtualenv to target'", ")", "env", "=", "argv", "[", "0", "]", "path", "=", "workon_home", "/", "env", "py", "=", "path", "/", "env_bin_dir", "/", "(", "'python.exe'", "if", "windows", "else", "'python'", ")", "exact_py", "=", "py", ".", "resolve", "(", ")", ".", "name", "return", "check_call", "(", "[", "sys", ".", "executable", ",", "\"-m\"", ",", "\"virtualenv\"", ",", "str", "(", "path", ".", "absolute", "(", ")", ")", ",", "\"--python=%s\"", "%", "exact_py", "]", ")" ]
Try to restore a broken virtualenv by reinstalling the same python version on top of it
[ "Try", "to", "restore", "a", "broken", "virtualenv", "by", "reinstalling", "the", "same", "python", "version", "on", "top", "of", "it" ]
37d9ff79342336b8ef6437d9a551008be07afe9b
https://github.com/berdario/pew/blob/37d9ff79342336b8ef6437d9a551008be07afe9b/pew/pew.py#L674-L685
9,470
berdario/pew
pew/pew.py
dir_cmd
def dir_cmd(argv): """Print the path for the virtualenv directory""" env = parse_envname(argv, lambda : sys.exit('You must provide a valid virtualenv to target')) print(workon_home / env)
python
def dir_cmd(argv): """Print the path for the virtualenv directory""" env = parse_envname(argv, lambda : sys.exit('You must provide a valid virtualenv to target')) print(workon_home / env)
[ "def", "dir_cmd", "(", "argv", ")", ":", "env", "=", "parse_envname", "(", "argv", ",", "lambda", ":", "sys", ".", "exit", "(", "'You must provide a valid virtualenv to target'", ")", ")", "print", "(", "workon_home", "/", "env", ")" ]
Print the path for the virtualenv directory
[ "Print", "the", "path", "for", "the", "virtualenv", "directory" ]
37d9ff79342336b8ef6437d9a551008be07afe9b
https://github.com/berdario/pew/blob/37d9ff79342336b8ef6437d9a551008be07afe9b/pew/pew.py#L688-L691
9,471
berdario/pew
pew/pew.py
install_cmd
def install_cmd(argv): '''Use Pythonz to download and build the specified Python version''' installer = InstallCommand() options, versions = installer.parser.parse_args(argv) if len(versions) != 1: installer.parser.print_help() sys.exit(1) else: try: actual_installer = PythonInstaller.get_installer(versions[0], options) return actual_installer.install() except AlreadyInstalledError as e: print(e)
python
def install_cmd(argv): '''Use Pythonz to download and build the specified Python version''' installer = InstallCommand() options, versions = installer.parser.parse_args(argv) if len(versions) != 1: installer.parser.print_help() sys.exit(1) else: try: actual_installer = PythonInstaller.get_installer(versions[0], options) return actual_installer.install() except AlreadyInstalledError as e: print(e)
[ "def", "install_cmd", "(", "argv", ")", ":", "installer", "=", "InstallCommand", "(", ")", "options", ",", "versions", "=", "installer", ".", "parser", ".", "parse_args", "(", "argv", ")", "if", "len", "(", "versions", ")", "!=", "1", ":", "installer", ".", "parser", ".", "print_help", "(", ")", "sys", ".", "exit", "(", "1", ")", "else", ":", "try", ":", "actual_installer", "=", "PythonInstaller", ".", "get_installer", "(", "versions", "[", "0", "]", ",", "options", ")", "return", "actual_installer", ".", "install", "(", ")", "except", "AlreadyInstalledError", "as", "e", ":", "print", "(", "e", ")" ]
Use Pythonz to download and build the specified Python version
[ "Use", "Pythonz", "to", "download", "and", "build", "the", "specified", "Python", "version" ]
37d9ff79342336b8ef6437d9a551008be07afe9b
https://github.com/berdario/pew/blob/37d9ff79342336b8ef6437d9a551008be07afe9b/pew/pew.py#L694-L706
9,472
berdario/pew
pew/pew.py
version_cmd
def version_cmd(argv): """Prints current pew version""" import pkg_resources try: __version__ = pkg_resources.get_distribution('pew').version except pkg_resources.DistributionNotFound: __version__ = 'unknown' print('Setuptools has some issues here, failed to get our own package.', file=sys.stderr) print(__version__)
python
def version_cmd(argv): """Prints current pew version""" import pkg_resources try: __version__ = pkg_resources.get_distribution('pew').version except pkg_resources.DistributionNotFound: __version__ = 'unknown' print('Setuptools has some issues here, failed to get our own package.', file=sys.stderr) print(__version__)
[ "def", "version_cmd", "(", "argv", ")", ":", "import", "pkg_resources", "try", ":", "__version__", "=", "pkg_resources", ".", "get_distribution", "(", "'pew'", ")", ".", "version", "except", "pkg_resources", ".", "DistributionNotFound", ":", "__version__", "=", "'unknown'", "print", "(", "'Setuptools has some issues here, failed to get our own package.'", ",", "file", "=", "sys", ".", "stderr", ")", "print", "(", "__version__", ")" ]
Prints current pew version
[ "Prints", "current", "pew", "version" ]
37d9ff79342336b8ef6437d9a551008be07afe9b
https://github.com/berdario/pew/blob/37d9ff79342336b8ef6437d9a551008be07afe9b/pew/pew.py#L724-L734
9,473
peterbe/premailer
premailer/merge_style.py
csstext_to_pairs
def csstext_to_pairs(csstext): """ csstext_to_pairs takes css text and make it to list of tuple of key,value. """ # The lock is required to avoid ``cssutils`` concurrency # issues documented in issue #65 with csstext_to_pairs._lock: return sorted( [ (prop.name.strip(), format_value(prop)) for prop in cssutils.parseStyle(csstext) ], key=itemgetter(0), )
python
def csstext_to_pairs(csstext): """ csstext_to_pairs takes css text and make it to list of tuple of key,value. """ # The lock is required to avoid ``cssutils`` concurrency # issues documented in issue #65 with csstext_to_pairs._lock: return sorted( [ (prop.name.strip(), format_value(prop)) for prop in cssutils.parseStyle(csstext) ], key=itemgetter(0), )
[ "def", "csstext_to_pairs", "(", "csstext", ")", ":", "# The lock is required to avoid ``cssutils`` concurrency", "# issues documented in issue #65", "with", "csstext_to_pairs", ".", "_lock", ":", "return", "sorted", "(", "[", "(", "prop", ".", "name", ".", "strip", "(", ")", ",", "format_value", "(", "prop", ")", ")", "for", "prop", "in", "cssutils", ".", "parseStyle", "(", "csstext", ")", "]", ",", "key", "=", "itemgetter", "(", "0", ")", ",", ")" ]
csstext_to_pairs takes css text and make it to list of tuple of key,value.
[ "csstext_to_pairs", "takes", "css", "text", "and", "make", "it", "to", "list", "of", "tuple", "of", "key", "value", "." ]
4d74656fb12e8e44683fa787ae71c0735282376b
https://github.com/peterbe/premailer/blob/4d74656fb12e8e44683fa787ae71c0735282376b/premailer/merge_style.py#L23-L37
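A minimal usage sketch for the csstext_to_pairs record above. It is not taken from the repository; it assumes premailer is installed and that the import path mirrors the path field (premailer/merge_style.py):

from premailer.merge_style import csstext_to_pairs

# Parses one CSS declaration block and returns (name, value) pairs sorted by property name.
pairs = csstext_to_pairs("font-size: 12px; color: red")
print(pairs)  # expected to be along the lines of [('color', 'red'), ('font-size', '12px')]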
9,474
peterbe/premailer
premailer/merge_style.py
merge_styles
def merge_styles(inline_style, new_styles, classes, remove_unset_properties=False): """ This will merge all new styles where the order is important The last one will override the first When that is done it will apply old inline style again The old inline style is always important and override all new ones. The inline style must be valid. Args: inline_style(str): the old inline style of the element if there is one new_styles: a list of new styles, each element should be a list of tuple classes: a list of classes which maps new_styles, important! remove_unset_properties(bool): Allow us to remove certain CSS properties with rules that set their value to 'unset' Returns: str: the final style """ # building classes styles = OrderedDict([("", OrderedDict())]) for pc in set(classes): styles[pc] = OrderedDict() for i, style in enumerate(new_styles): for k, v in style: styles[classes[i]][k] = v # keep always the old inline style if inline_style: # inline should be a declaration list as I understand # ie property-name:property-value;... for k, v in csstext_to_pairs(inline_style): styles[""][k] = v normal_styles = [] pseudo_styles = [] for pseudoclass, kv in styles.items(): if remove_unset_properties: # Remove rules that we were going to have value 'unset' because # they effectively are the same as not saying anything about the # property when inlined kv = OrderedDict( (k, v) for (k, v) in kv.items() if not v.lower() == "unset" ) if not kv: continue if pseudoclass: pseudo_styles.append( "%s{%s}" % (pseudoclass, "; ".join("%s:%s" % (k, v) for k, v in kv.items())) ) else: normal_styles.append("; ".join("%s:%s" % (k, v) for k, v in kv.items())) if pseudo_styles: # if we do or code thing correct this should not happen # inline style definition: declarations without braces all_styles = ( (["{%s}" % "".join(normal_styles)] + pseudo_styles) if normal_styles else pseudo_styles ) else: all_styles = normal_styles return " ".join(all_styles).strip()
python
def merge_styles(inline_style, new_styles, classes, remove_unset_properties=False): """ This will merge all new styles where the order is important The last one will override the first When that is done it will apply old inline style again The old inline style is always important and override all new ones. The inline style must be valid. Args: inline_style(str): the old inline style of the element if there is one new_styles: a list of new styles, each element should be a list of tuple classes: a list of classes which maps new_styles, important! remove_unset_properties(bool): Allow us to remove certain CSS properties with rules that set their value to 'unset' Returns: str: the final style """ # building classes styles = OrderedDict([("", OrderedDict())]) for pc in set(classes): styles[pc] = OrderedDict() for i, style in enumerate(new_styles): for k, v in style: styles[classes[i]][k] = v # keep always the old inline style if inline_style: # inline should be a declaration list as I understand # ie property-name:property-value;... for k, v in csstext_to_pairs(inline_style): styles[""][k] = v normal_styles = [] pseudo_styles = [] for pseudoclass, kv in styles.items(): if remove_unset_properties: # Remove rules that we were going to have value 'unset' because # they effectively are the same as not saying anything about the # property when inlined kv = OrderedDict( (k, v) for (k, v) in kv.items() if not v.lower() == "unset" ) if not kv: continue if pseudoclass: pseudo_styles.append( "%s{%s}" % (pseudoclass, "; ".join("%s:%s" % (k, v) for k, v in kv.items())) ) else: normal_styles.append("; ".join("%s:%s" % (k, v) for k, v in kv.items())) if pseudo_styles: # if we do or code thing correct this should not happen # inline style definition: declarations without braces all_styles = ( (["{%s}" % "".join(normal_styles)] + pseudo_styles) if normal_styles else pseudo_styles ) else: all_styles = normal_styles return " ".join(all_styles).strip()
[ "def", "merge_styles", "(", "inline_style", ",", "new_styles", ",", "classes", ",", "remove_unset_properties", "=", "False", ")", ":", "# building classes", "styles", "=", "OrderedDict", "(", "[", "(", "\"\"", ",", "OrderedDict", "(", ")", ")", "]", ")", "for", "pc", "in", "set", "(", "classes", ")", ":", "styles", "[", "pc", "]", "=", "OrderedDict", "(", ")", "for", "i", ",", "style", "in", "enumerate", "(", "new_styles", ")", ":", "for", "k", ",", "v", "in", "style", ":", "styles", "[", "classes", "[", "i", "]", "]", "[", "k", "]", "=", "v", "# keep always the old inline style", "if", "inline_style", ":", "# inline should be a declaration list as I understand", "# ie property-name:property-value;...", "for", "k", ",", "v", "in", "csstext_to_pairs", "(", "inline_style", ")", ":", "styles", "[", "\"\"", "]", "[", "k", "]", "=", "v", "normal_styles", "=", "[", "]", "pseudo_styles", "=", "[", "]", "for", "pseudoclass", ",", "kv", "in", "styles", ".", "items", "(", ")", ":", "if", "remove_unset_properties", ":", "# Remove rules that we were going to have value 'unset' because", "# they effectively are the same as not saying anything about the", "# property when inlined", "kv", "=", "OrderedDict", "(", "(", "k", ",", "v", ")", "for", "(", "k", ",", "v", ")", "in", "kv", ".", "items", "(", ")", "if", "not", "v", ".", "lower", "(", ")", "==", "\"unset\"", ")", "if", "not", "kv", ":", "continue", "if", "pseudoclass", ":", "pseudo_styles", ".", "append", "(", "\"%s{%s}\"", "%", "(", "pseudoclass", ",", "\"; \"", ".", "join", "(", "\"%s:%s\"", "%", "(", "k", ",", "v", ")", "for", "k", ",", "v", "in", "kv", ".", "items", "(", ")", ")", ")", ")", "else", ":", "normal_styles", ".", "append", "(", "\"; \"", ".", "join", "(", "\"%s:%s\"", "%", "(", "k", ",", "v", ")", "for", "k", ",", "v", "in", "kv", ".", "items", "(", ")", ")", ")", "if", "pseudo_styles", ":", "# if we do or code thing correct this should not happen", "# inline style definition: declarations without braces", "all_styles", "=", "(", "(", "[", "\"{%s}\"", "%", "\"\"", ".", "join", "(", "normal_styles", ")", "]", "+", "pseudo_styles", ")", "if", "normal_styles", "else", "pseudo_styles", ")", "else", ":", "all_styles", "=", "normal_styles", "return", "\" \"", ".", "join", "(", "all_styles", ")", ".", "strip", "(", ")" ]
This will merge all new styles where the order is important The last one will override the first When that is done it will apply old inline style again The old inline style is always important and override all new ones. The inline style must be valid. Args: inline_style(str): the old inline style of the element if there is one new_styles: a list of new styles, each element should be a list of tuple classes: a list of classes which maps new_styles, important! remove_unset_properties(bool): Allow us to remove certain CSS properties with rules that set their value to 'unset' Returns: str: the final style
[ "This", "will", "merge", "all", "new", "styles", "where", "the", "order", "is", "important", "The", "last", "one", "will", "override", "the", "first", "When", "that", "is", "done", "it", "will", "apply", "old", "inline", "style", "again", "The", "old", "inline", "style", "is", "always", "important", "and", "override", "all", "new", "ones", ".", "The", "inline", "style", "must", "be", "valid", "." ]
4d74656fb12e8e44683fa787ae71c0735282376b
https://github.com/peterbe/premailer/blob/4d74656fb12e8e44683fa787ae71c0735282376b/premailer/merge_style.py#L43-L110
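A short, hypothetical example of how merge_styles combines a stylesheet rule with an existing inline style; it assumes the same premailer.merge_style module as above:

from premailer.merge_style import csstext_to_pairs, merge_styles

new_styles = [csstext_to_pairs("color: red; margin: 0")]  # one parsed rule
classes = [""]  # one pseudoclass entry per element of new_styles; "" means no pseudoclass
merged = merge_styles("color: blue", new_styles, classes)
print(merged)  # roughly "color:blue; margin:0"

Because the old inline style is applied last, its color:blue overrides the rule's color:red, which is exactly the precedence the docstring describes.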
9,475
peterbe/premailer
premailer/premailer.py
make_important
def make_important(bulk): """makes every property in a string !important. """ return ";".join( "%s !important" % p if not p.endswith("!important") else p for p in bulk.split(";") )
python
def make_important(bulk): """makes every property in a string !important. """ return ";".join( "%s !important" % p if not p.endswith("!important") else p for p in bulk.split(";") )
[ "def", "make_important", "(", "bulk", ")", ":", "return", "\";\"", ".", "join", "(", "\"%s !important\"", "%", "p", "if", "not", "p", ".", "endswith", "(", "\"!important\"", ")", "else", "p", "for", "p", "in", "bulk", ".", "split", "(", "\";\"", ")", ")" ]
makes every property in a string !important.
[ "makes", "every", "property", "in", "a", "string", "!important", "." ]
4d74656fb12e8e44683fa787ae71c0735282376b
https://github.com/peterbe/premailer/blob/4d74656fb12e8e44683fa787ae71c0735282376b/premailer/premailer.py#L53-L59
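A tiny illustrative call for make_important (assumed import path premailer.premailer, matching the path field):

from premailer.premailer import make_important

print(make_important("color:red; width:100px !important"))
# -> "color:red !important; width:100px !important"
# declarations that already end in !important are left untouched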
9,476
peterbe/premailer
premailer/premailer.py
capitalize_float_margin
def capitalize_float_margin(css_body): """Capitalize float and margin CSS property names """ def _capitalize_property(match): return "{0}:{1}{2}".format( match.group("property").capitalize(), match.group("value"), match.group("terminator"), ) return _lowercase_margin_float_rule.sub(_capitalize_property, css_body)
python
def capitalize_float_margin(css_body): """Capitalize float and margin CSS property names """ def _capitalize_property(match): return "{0}:{1}{2}".format( match.group("property").capitalize(), match.group("value"), match.group("terminator"), ) return _lowercase_margin_float_rule.sub(_capitalize_property, css_body)
[ "def", "capitalize_float_margin", "(", "css_body", ")", ":", "def", "_capitalize_property", "(", "match", ")", ":", "return", "\"{0}:{1}{2}\"", ".", "format", "(", "match", ".", "group", "(", "\"property\"", ")", ".", "capitalize", "(", ")", ",", "match", ".", "group", "(", "\"value\"", ")", ",", "match", ".", "group", "(", "\"terminator\"", ")", ",", ")", "return", "_lowercase_margin_float_rule", ".", "sub", "(", "_capitalize_property", ",", "css_body", ")" ]
Capitalize float and margin CSS property names
[ "Capitalize", "float", "and", "margin", "CSS", "property", "names" ]
4d74656fb12e8e44683fa787ae71c0735282376b
https://github.com/peterbe/premailer/blob/4d74656fb12e8e44683fa787ae71c0735282376b/premailer/premailer.py#L100-L111
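An illustrative call for capitalize_float_margin; the exact output depends on the _lowercase_margin_float_rule regex, so treat the expected value as an assumption:

from premailer.premailer import capitalize_float_margin

print(capitalize_float_margin("float:left; margin:0"))
# presumably "Float:left; Margin:0" - only the float/margin property names are capitalized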
9,477
peterbe/premailer
premailer/premailer.py
Premailer._load_external
def _load_external(self, url): """loads an external stylesheet from a remote url or local path """ if url.startswith("//"): # then we have to rely on the base_url if self.base_url and "https://" in self.base_url: url = "https:" + url else: url = "http:" + url if url.startswith("http://") or url.startswith("https://"): css_body = self._load_external_url(url) else: stylefile = url if not os.path.isabs(stylefile): stylefile = os.path.abspath( os.path.join(self.base_path or "", stylefile) ) if os.path.exists(stylefile): with codecs.open(stylefile, encoding="utf-8") as f: css_body = f.read() elif self.base_url: url = urljoin(self.base_url, url) return self._load_external(url) else: raise ExternalNotFoundError(stylefile) return css_body
python
def _load_external(self, url): """loads an external stylesheet from a remote url or local path """ if url.startswith("//"): # then we have to rely on the base_url if self.base_url and "https://" in self.base_url: url = "https:" + url else: url = "http:" + url if url.startswith("http://") or url.startswith("https://"): css_body = self._load_external_url(url) else: stylefile = url if not os.path.isabs(stylefile): stylefile = os.path.abspath( os.path.join(self.base_path or "", stylefile) ) if os.path.exists(stylefile): with codecs.open(stylefile, encoding="utf-8") as f: css_body = f.read() elif self.base_url: url = urljoin(self.base_url, url) return self._load_external(url) else: raise ExternalNotFoundError(stylefile) return css_body
[ "def", "_load_external", "(", "self", ",", "url", ")", ":", "if", "url", ".", "startswith", "(", "\"//\"", ")", ":", "# then we have to rely on the base_url", "if", "self", ".", "base_url", "and", "\"https://\"", "in", "self", ".", "base_url", ":", "url", "=", "\"https:\"", "+", "url", "else", ":", "url", "=", "\"http:\"", "+", "url", "if", "url", ".", "startswith", "(", "\"http://\"", ")", "or", "url", ".", "startswith", "(", "\"https://\"", ")", ":", "css_body", "=", "self", ".", "_load_external_url", "(", "url", ")", "else", ":", "stylefile", "=", "url", "if", "not", "os", ".", "path", ".", "isabs", "(", "stylefile", ")", ":", "stylefile", "=", "os", ".", "path", ".", "abspath", "(", "os", ".", "path", ".", "join", "(", "self", ".", "base_path", "or", "\"\"", ",", "stylefile", ")", ")", "if", "os", ".", "path", ".", "exists", "(", "stylefile", ")", ":", "with", "codecs", ".", "open", "(", "stylefile", ",", "encoding", "=", "\"utf-8\"", ")", "as", "f", ":", "css_body", "=", "f", ".", "read", "(", ")", "elif", "self", ".", "base_url", ":", "url", "=", "urljoin", "(", "self", ".", "base_url", ",", "url", ")", "return", "self", ".", "_load_external", "(", "url", ")", "else", ":", "raise", "ExternalNotFoundError", "(", "stylefile", ")", "return", "css_body" ]
loads an external stylesheet from a remote url or local path
[ "loads", "an", "external", "stylesheet", "from", "a", "remote", "url", "or", "local", "path" ]
4d74656fb12e8e44683fa787ae71c0735282376b
https://github.com/peterbe/premailer/blob/4d74656fb12e8e44683fa787ae71c0735282376b/premailer/premailer.py#L546-L573
9,478
peterbe/premailer
premailer/premailer.py
Premailer._css_rules_to_string
def _css_rules_to_string(self, rules): """given a list of css rules returns a css string """ lines = [] for item in rules: if isinstance(item, tuple): k, v = item lines.append("%s {%s}" % (k, make_important(v))) # media rule else: for rule in item.cssRules: if isinstance( rule, ( cssutils.css.csscomment.CSSComment, cssutils.css.cssunknownrule.CSSUnknownRule, ), ): continue for key in rule.style.keys(): rule.style[key] = ( rule.style.getPropertyValue(key, False), "!important", ) lines.append(item.cssText) return "\n".join(lines)
python
def _css_rules_to_string(self, rules): """given a list of css rules returns a css string """ lines = [] for item in rules: if isinstance(item, tuple): k, v = item lines.append("%s {%s}" % (k, make_important(v))) # media rule else: for rule in item.cssRules: if isinstance( rule, ( cssutils.css.csscomment.CSSComment, cssutils.css.cssunknownrule.CSSUnknownRule, ), ): continue for key in rule.style.keys(): rule.style[key] = ( rule.style.getPropertyValue(key, False), "!important", ) lines.append(item.cssText) return "\n".join(lines)
[ "def", "_css_rules_to_string", "(", "self", ",", "rules", ")", ":", "lines", "=", "[", "]", "for", "item", "in", "rules", ":", "if", "isinstance", "(", "item", ",", "tuple", ")", ":", "k", ",", "v", "=", "item", "lines", ".", "append", "(", "\"%s {%s}\"", "%", "(", "k", ",", "make_important", "(", "v", ")", ")", ")", "# media rule", "else", ":", "for", "rule", "in", "item", ".", "cssRules", ":", "if", "isinstance", "(", "rule", ",", "(", "cssutils", ".", "css", ".", "csscomment", ".", "CSSComment", ",", "cssutils", ".", "css", ".", "cssunknownrule", ".", "CSSUnknownRule", ",", ")", ",", ")", ":", "continue", "for", "key", "in", "rule", ".", "style", ".", "keys", "(", ")", ":", "rule", ".", "style", "[", "key", "]", "=", "(", "rule", ".", "style", ".", "getPropertyValue", "(", "key", ",", "False", ")", ",", "\"!important\"", ",", ")", "lines", ".", "append", "(", "item", ".", "cssText", ")", "return", "\"\\n\"", ".", "join", "(", "lines", ")" ]
given a list of css rules returns a css string
[ "given", "a", "list", "of", "css", "rules", "returns", "a", "css", "string" ]
4d74656fb12e8e44683fa787ae71c0735282376b
https://github.com/peterbe/premailer/blob/4d74656fb12e8e44683fa787ae71c0735282376b/premailer/premailer.py#L631-L656
9,479
vatlab/SoS
src/sos/workers.py
WorkerManager.check_workers
def check_workers(self): '''Kill workers that have been pending for a while and check if all workers are alive. ''' if time.time() - self._worker_alive_time > 5: self._worker_alive_time = time.time() # join processes if they are now gone, it should not do anything bad # if the process is still running [worker.join() for worker in self._workers if not worker.is_alive()] self._workers = [ worker for worker in self._workers if worker.is_alive() ] if len(self._workers) < self._num_workers: raise ProcessKilled('One of the workers has been killed.')
python
def check_workers(self): '''Kill workers that have been pending for a while and check if all workers are alive. ''' if time.time() - self._worker_alive_time > 5: self._worker_alive_time = time.time() # join processes if they are now gone, it should not do anything bad # if the process is still running [worker.join() for worker in self._workers if not worker.is_alive()] self._workers = [ worker for worker in self._workers if worker.is_alive() ] if len(self._workers) < self._num_workers: raise ProcessKilled('One of the workers has been killed.')
[ "def", "check_workers", "(", "self", ")", ":", "if", "time", ".", "time", "(", ")", "-", "self", ".", "_worker_alive_time", ">", "5", ":", "self", ".", "_worker_alive_time", "=", "time", ".", "time", "(", ")", "# join processes if they are now gone, it should not do anything bad", "# if the process is still running", "[", "worker", ".", "join", "(", ")", "for", "worker", "in", "self", ".", "_workers", "if", "not", "worker", ".", "is_alive", "(", ")", "]", "self", ".", "_workers", "=", "[", "worker", "for", "worker", "in", "self", ".", "_workers", "if", "worker", ".", "is_alive", "(", ")", "]", "if", "len", "(", "self", ".", "_workers", ")", "<", "self", ".", "_num_workers", ":", "raise", "ProcessKilled", "(", "'One of the workers has been killed.'", ")" ]
Kill workers that have been pending for a while and check if all workers are alive.
[ "Kill", "workers", "that", "have", "been", "pending", "for", "a", "while", "and", "check", "if", "all", "workers", "are", "alive", "." ]
6b60ed0770916d135e17322e469520d778e9d4e7
https://github.com/vatlab/SoS/blob/6b60ed0770916d135e17322e469520d778e9d4e7/src/sos/workers.py#L515-L527
9,480
vatlab/SoS
src/sos/workers.py
WorkerManager.kill_all
def kill_all(self): '''Kill all workers''' while self._num_workers > 0 and self._worker_backend_socket.poll(1000): msg = self._worker_backend_socket.recv_pyobj() self._worker_backend_socket.send_pyobj(None) self._num_workers -= 1 self.report(f'Kill {msg[1:]}') # join all processes [worker.join() for worker in self._workers]
python
def kill_all(self): '''Kill all workers''' while self._num_workers > 0 and self._worker_backend_socket.poll(1000): msg = self._worker_backend_socket.recv_pyobj() self._worker_backend_socket.send_pyobj(None) self._num_workers -= 1 self.report(f'Kill {msg[1:]}') # join all processes [worker.join() for worker in self._workers]
[ "def", "kill_all", "(", "self", ")", ":", "while", "self", ".", "_num_workers", ">", "0", "and", "self", ".", "_worker_backend_socket", ".", "poll", "(", "1000", ")", ":", "msg", "=", "self", ".", "_worker_backend_socket", ".", "recv_pyobj", "(", ")", "self", ".", "_worker_backend_socket", ".", "send_pyobj", "(", "None", ")", "self", ".", "_num_workers", "-=", "1", "self", ".", "report", "(", "f'Kill {msg[1:]}'", ")", "# join all processes", "[", "worker", ".", "join", "(", ")", "for", "worker", "in", "self", ".", "_workers", "]" ]
Kill all workers
[ "Kill", "all", "workers" ]
6b60ed0770916d135e17322e469520d778e9d4e7
https://github.com/vatlab/SoS/blob/6b60ed0770916d135e17322e469520d778e9d4e7/src/sos/workers.py#L529-L537
9,481
vatlab/SoS
src/sos/targets_python.py
Py_Module._install
def _install(self, name, autoinstall): '''Check existence of Python module and install it using command pip install if necessary.''' import importlib import pkg_resources spam_spec = importlib.util.find_spec(name) reinstall = False if spam_spec is not None: if self._version: mod = importlib.__import__(name) if hasattr(mod, '__version__'): ver = mod.__version__ else: try: ver = pkg_resources.get_distribution(name).version except Exception as e: env.logger.debug( f'Failed to get version of {name}: {e}') env.logger.debug( f'Comparing exiting version {ver} against requested version {self._version}' ) if self._version.startswith( '==') and pkg_resources.parse_version( ver) == pkg_resources.parse_version( self._version[2:]): pass elif self._version.startswith( '<=') and pkg_resources.parse_version( ver) <= pkg_resources.parse_version( self._version[2:]): pass elif self._version.startswith( '<') and not self._version.startswith( '<=') and pkg_resources.parse_version( ver) < pkg_resources.parse_version( self._version[1:]): pass elif self._version.startswith( '>=') and pkg_resources.parse_version( ver) >= pkg_resources.parse_version( self._version[2:]): pass # the case of > elif self._version.startswith( '>') and not self._version.startswith( '>=') and pkg_resources.parse_version( ver) > pkg_resources.parse_version( self._version[1:]): pass elif self._version.startswith( '!=') and pkg_resources.parse_version( ver) != pkg_resources.parse_version( self._version[2:]): pass elif self._version[0] not in ( '=', '>', '<', '!') and pkg_resources.parse_version( ver) == pkg_resources.parse_version(self._version): pass else: env.logger.warning( f'Version {ver} of installed {name} does not match specified version {self._version}.' ) reinstall = True if spam_spec and not reinstall: return True if not autoinstall: return False # try to install it? import subprocess cmd = ['pip', 'install'] + ([] if self._version else ['-U']) + [ self._module + (self._version if self._version else '') if self._autoinstall is True else self._autoinstall ] env.logger.info( f'Installing python module {name} with command {" ".join(cmd)}') ret = subprocess.call(cmd) if reinstall: import sys importlib.reload(sys.modules[name]) # try to check version return ret == 0 and self._install(name, False)
python
def _install(self, name, autoinstall): '''Check existence of Python module and install it using command pip install if necessary.''' import importlib import pkg_resources spam_spec = importlib.util.find_spec(name) reinstall = False if spam_spec is not None: if self._version: mod = importlib.__import__(name) if hasattr(mod, '__version__'): ver = mod.__version__ else: try: ver = pkg_resources.get_distribution(name).version except Exception as e: env.logger.debug( f'Failed to get version of {name}: {e}') env.logger.debug( f'Comparing exiting version {ver} against requested version {self._version}' ) if self._version.startswith( '==') and pkg_resources.parse_version( ver) == pkg_resources.parse_version( self._version[2:]): pass elif self._version.startswith( '<=') and pkg_resources.parse_version( ver) <= pkg_resources.parse_version( self._version[2:]): pass elif self._version.startswith( '<') and not self._version.startswith( '<=') and pkg_resources.parse_version( ver) < pkg_resources.parse_version( self._version[1:]): pass elif self._version.startswith( '>=') and pkg_resources.parse_version( ver) >= pkg_resources.parse_version( self._version[2:]): pass # the case of > elif self._version.startswith( '>') and not self._version.startswith( '>=') and pkg_resources.parse_version( ver) > pkg_resources.parse_version( self._version[1:]): pass elif self._version.startswith( '!=') and pkg_resources.parse_version( ver) != pkg_resources.parse_version( self._version[2:]): pass elif self._version[0] not in ( '=', '>', '<', '!') and pkg_resources.parse_version( ver) == pkg_resources.parse_version(self._version): pass else: env.logger.warning( f'Version {ver} of installed {name} does not match specified version {self._version}.' ) reinstall = True if spam_spec and not reinstall: return True if not autoinstall: return False # try to install it? import subprocess cmd = ['pip', 'install'] + ([] if self._version else ['-U']) + [ self._module + (self._version if self._version else '') if self._autoinstall is True else self._autoinstall ] env.logger.info( f'Installing python module {name} with command {" ".join(cmd)}') ret = subprocess.call(cmd) if reinstall: import sys importlib.reload(sys.modules[name]) # try to check version return ret == 0 and self._install(name, False)
[ "def", "_install", "(", "self", ",", "name", ",", "autoinstall", ")", ":", "import", "importlib", "import", "pkg_resources", "spam_spec", "=", "importlib", ".", "util", ".", "find_spec", "(", "name", ")", "reinstall", "=", "False", "if", "spam_spec", "is", "not", "None", ":", "if", "self", ".", "_version", ":", "mod", "=", "importlib", ".", "__import__", "(", "name", ")", "if", "hasattr", "(", "mod", ",", "'__version__'", ")", ":", "ver", "=", "mod", ".", "__version__", "else", ":", "try", ":", "ver", "=", "pkg_resources", ".", "get_distribution", "(", "name", ")", ".", "version", "except", "Exception", "as", "e", ":", "env", ".", "logger", ".", "debug", "(", "f'Failed to get version of {name}: {e}'", ")", "env", ".", "logger", ".", "debug", "(", "f'Comparing exiting version {ver} against requested version {self._version}'", ")", "if", "self", ".", "_version", ".", "startswith", "(", "'=='", ")", "and", "pkg_resources", ".", "parse_version", "(", "ver", ")", "==", "pkg_resources", ".", "parse_version", "(", "self", ".", "_version", "[", "2", ":", "]", ")", ":", "pass", "elif", "self", ".", "_version", ".", "startswith", "(", "'<='", ")", "and", "pkg_resources", ".", "parse_version", "(", "ver", ")", "<=", "pkg_resources", ".", "parse_version", "(", "self", ".", "_version", "[", "2", ":", "]", ")", ":", "pass", "elif", "self", ".", "_version", ".", "startswith", "(", "'<'", ")", "and", "not", "self", ".", "_version", ".", "startswith", "(", "'<='", ")", "and", "pkg_resources", ".", "parse_version", "(", "ver", ")", "<", "pkg_resources", ".", "parse_version", "(", "self", ".", "_version", "[", "1", ":", "]", ")", ":", "pass", "elif", "self", ".", "_version", ".", "startswith", "(", "'>='", ")", "and", "pkg_resources", ".", "parse_version", "(", "ver", ")", ">=", "pkg_resources", ".", "parse_version", "(", "self", ".", "_version", "[", "2", ":", "]", ")", ":", "pass", "# the case of >", "elif", "self", ".", "_version", ".", "startswith", "(", "'>'", ")", "and", "not", "self", ".", "_version", ".", "startswith", "(", "'>='", ")", "and", "pkg_resources", ".", "parse_version", "(", "ver", ")", ">", "pkg_resources", ".", "parse_version", "(", "self", ".", "_version", "[", "1", ":", "]", ")", ":", "pass", "elif", "self", ".", "_version", ".", "startswith", "(", "'!='", ")", "and", "pkg_resources", ".", "parse_version", "(", "ver", ")", "!=", "pkg_resources", ".", "parse_version", "(", "self", ".", "_version", "[", "2", ":", "]", ")", ":", "pass", "elif", "self", ".", "_version", "[", "0", "]", "not", "in", "(", "'='", ",", "'>'", ",", "'<'", ",", "'!'", ")", "and", "pkg_resources", ".", "parse_version", "(", "ver", ")", "==", "pkg_resources", ".", "parse_version", "(", "self", ".", "_version", ")", ":", "pass", "else", ":", "env", ".", "logger", ".", "warning", "(", "f'Version {ver} of installed {name} does not match specified version {self._version}.'", ")", "reinstall", "=", "True", "if", "spam_spec", "and", "not", "reinstall", ":", "return", "True", "if", "not", "autoinstall", ":", "return", "False", "# try to install it?", "import", "subprocess", "cmd", "=", "[", "'pip'", ",", "'install'", "]", "+", "(", "[", "]", "if", "self", ".", "_version", "else", "[", "'-U'", "]", ")", "+", "[", "self", ".", "_module", "+", "(", "self", ".", "_version", "if", "self", ".", "_version", "else", "''", ")", "if", "self", ".", "_autoinstall", "is", "True", "else", "self", ".", "_autoinstall", "]", "env", ".", "logger", ".", "info", "(", "f'Installing python module {name} with command {\" \".join(cmd)}'", 
")", "ret", "=", "subprocess", ".", "call", "(", "cmd", ")", "if", "reinstall", ":", "import", "sys", "importlib", ".", "reload", "(", "sys", ".", "modules", "[", "name", "]", ")", "# try to check version", "return", "ret", "==", "0", "and", "self", ".", "_install", "(", "name", ",", "False", ")" ]
Check existence of Python module and install it using command pip install if necessary.
[ "Check", "existence", "of", "Python", "module", "and", "install", "it", "using", "command", "pip", "install", "if", "necessary", "." ]
6b60ed0770916d135e17322e469520d778e9d4e7
https://github.com/vatlab/SoS/blob/6b60ed0770916d135e17322e469520d778e9d4e7/src/sos/targets_python.py#L38-L118
9,482
vatlab/SoS
src/sos/task_executor.py
execute_task
def execute_task(task_id, verbosity=None, runmode='run', sigmode=None, monitor_interval=5, resource_monitor_interval=60): '''Execute single or master task, return a dictionary''' tf = TaskFile(task_id) # this will automatically create a pulse file tf.status = 'running' # write result file try: signal.signal(signal.SIGTERM, signal_handler) res = _execute_task(task_id, verbosity, runmode, sigmode, monitor_interval, resource_monitor_interval) except KeyboardInterrupt: tf.status = 'aborted' raise except ProcessKilled: tf.status = 'aborted' raise ProcessKilled('task interrupted') finally: signal.signal(signal.SIGTERM, signal.SIG_DFL) if res['ret_code'] != 0 and 'exception' in res: with open( os.path.join( os.path.expanduser('~'), '.sos', 'tasks', task_id + '.err'), 'a') as err: err.write(f'Task {task_id} exits with code {res["ret_code"]}') if res.get('skipped', False): # a special mode for skipped to set running time to zero tf.status = 'skipped' else: tf.add_outputs() sig = res.get('signature', {}) res.pop('signature', None) tf.add_result(res) if sig: tf.add_signature(sig) # **after** result file is created, remove other files # # NOTE: if the pulse is not removed. When another sos process checkes # the task is started very quickly so the task has satus 'pending', # the task might be considered already running. tf.status = 'completed' if res['ret_code'] == 0 else 'failed' return res['ret_code']
python
def execute_task(task_id, verbosity=None, runmode='run', sigmode=None, monitor_interval=5, resource_monitor_interval=60): '''Execute single or master task, return a dictionary''' tf = TaskFile(task_id) # this will automatically create a pulse file tf.status = 'running' # write result file try: signal.signal(signal.SIGTERM, signal_handler) res = _execute_task(task_id, verbosity, runmode, sigmode, monitor_interval, resource_monitor_interval) except KeyboardInterrupt: tf.status = 'aborted' raise except ProcessKilled: tf.status = 'aborted' raise ProcessKilled('task interrupted') finally: signal.signal(signal.SIGTERM, signal.SIG_DFL) if res['ret_code'] != 0 and 'exception' in res: with open( os.path.join( os.path.expanduser('~'), '.sos', 'tasks', task_id + '.err'), 'a') as err: err.write(f'Task {task_id} exits with code {res["ret_code"]}') if res.get('skipped', False): # a special mode for skipped to set running time to zero tf.status = 'skipped' else: tf.add_outputs() sig = res.get('signature', {}) res.pop('signature', None) tf.add_result(res) if sig: tf.add_signature(sig) # **after** result file is created, remove other files # # NOTE: if the pulse is not removed. When another sos process checkes # the task is started very quickly so the task has satus 'pending', # the task might be considered already running. tf.status = 'completed' if res['ret_code'] == 0 else 'failed' return res['ret_code']
[ "def", "execute_task", "(", "task_id", ",", "verbosity", "=", "None", ",", "runmode", "=", "'run'", ",", "sigmode", "=", "None", ",", "monitor_interval", "=", "5", ",", "resource_monitor_interval", "=", "60", ")", ":", "tf", "=", "TaskFile", "(", "task_id", ")", "# this will automatically create a pulse file", "tf", ".", "status", "=", "'running'", "# write result file", "try", ":", "signal", ".", "signal", "(", "signal", ".", "SIGTERM", ",", "signal_handler", ")", "res", "=", "_execute_task", "(", "task_id", ",", "verbosity", ",", "runmode", ",", "sigmode", ",", "monitor_interval", ",", "resource_monitor_interval", ")", "except", "KeyboardInterrupt", ":", "tf", ".", "status", "=", "'aborted'", "raise", "except", "ProcessKilled", ":", "tf", ".", "status", "=", "'aborted'", "raise", "ProcessKilled", "(", "'task interrupted'", ")", "finally", ":", "signal", ".", "signal", "(", "signal", ".", "SIGTERM", ",", "signal", ".", "SIG_DFL", ")", "if", "res", "[", "'ret_code'", "]", "!=", "0", "and", "'exception'", "in", "res", ":", "with", "open", "(", "os", ".", "path", ".", "join", "(", "os", ".", "path", ".", "expanduser", "(", "'~'", ")", ",", "'.sos'", ",", "'tasks'", ",", "task_id", "+", "'.err'", ")", ",", "'a'", ")", "as", "err", ":", "err", ".", "write", "(", "f'Task {task_id} exits with code {res[\"ret_code\"]}'", ")", "if", "res", ".", "get", "(", "'skipped'", ",", "False", ")", ":", "# a special mode for skipped to set running time to zero", "tf", ".", "status", "=", "'skipped'", "else", ":", "tf", ".", "add_outputs", "(", ")", "sig", "=", "res", ".", "get", "(", "'signature'", ",", "{", "}", ")", "res", ".", "pop", "(", "'signature'", ",", "None", ")", "tf", ".", "add_result", "(", "res", ")", "if", "sig", ":", "tf", ".", "add_signature", "(", "sig", ")", "# **after** result file is created, remove other files", "#", "# NOTE: if the pulse is not removed. When another sos process checkes", "# the task is started very quickly so the task has satus 'pending',", "# the task might be considered already running.", "tf", ".", "status", "=", "'completed'", "if", "res", "[", "'ret_code'", "]", "==", "0", "else", "'failed'", "return", "res", "[", "'ret_code'", "]" ]
Execute single or master task, return a dictionary
[ "Execute", "single", "or", "master", "task", "return", "a", "dictionary" ]
6b60ed0770916d135e17322e469520d778e9d4e7
https://github.com/vatlab/SoS/blob/6b60ed0770916d135e17322e469520d778e9d4e7/src/sos/task_executor.py#L129-L178
9,483
vatlab/SoS
src/sos/targets.py
textMD5
def textMD5(text): '''Get md5 of a piece of text''' m = hash_md5() if isinstance(text, str): m.update(text.encode()) else: m.update(text) return m.hexdigest()
python
def textMD5(text): '''Get md5 of a piece of text''' m = hash_md5() if isinstance(text, str): m.update(text.encode()) else: m.update(text) return m.hexdigest()
[ "def", "textMD5", "(", "text", ")", ":", "m", "=", "hash_md5", "(", ")", "if", "isinstance", "(", "text", ",", "str", ")", ":", "m", ".", "update", "(", "text", ".", "encode", "(", ")", ")", "else", ":", "m", ".", "update", "(", "text", ")", "return", "m", ".", "hexdigest", "(", ")" ]
Get md5 of a piece of text
[ "Get", "md5", "of", "a", "piece", "of", "text" ]
6b60ed0770916d135e17322e469520d778e9d4e7
https://github.com/vatlab/SoS/blob/6b60ed0770916d135e17322e469520d778e9d4e7/src/sos/targets.py#L89-L96
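A hypothetical use of textMD5, assuming the sos package is installed and the module path mirrors src/sos/targets.py:

from sos.targets import textMD5

print(textMD5('hello world'))    # hex digest of the UTF-8 encoded text
print(textMD5(b'hello world'))   # bytes are hashed as-is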
9,484
vatlab/SoS
src/sos/targets.py
objectMD5
def objectMD5(obj): '''Get md5 of an object''' if hasattr(obj, 'target_name'): return obj.target_name() try: return textMD5(pickle.dumps(obj)) except: return ''
python
def objectMD5(obj): '''Get md5 of an object''' if hasattr(obj, 'target_name'): return obj.target_name() try: return textMD5(pickle.dumps(obj)) except: return ''
[ "def", "objectMD5", "(", "obj", ")", ":", "if", "hasattr", "(", "obj", ",", "'target_name'", ")", ":", "return", "obj", ".", "target_name", "(", ")", "try", ":", "return", "textMD5", "(", "pickle", ".", "dumps", "(", "obj", ")", ")", "except", ":", "return", "''" ]
Get md5 of an object
[ "Get", "md5", "of", "an", "object" ]
6b60ed0770916d135e17322e469520d778e9d4e7
https://github.com/vatlab/SoS/blob/6b60ed0770916d135e17322e469520d778e9d4e7/src/sos/targets.py#L99-L106
9,485
vatlab/SoS
src/sos/targets.py
fileMD5
def fileMD5(filename, partial=True): '''Calculate partial MD5, basically the first and last 8M of the file for large files. This should signicicantly reduce the time spent on the creation and comparison of file signature when dealing with large bioinformat ics datasets. ''' filesize = os.path.getsize(filename) # calculate md5 for specified file md5 = hash_md5() block_size = 2**20 # buffer of 1M try: # 2**24 = 16M if (not partial) or filesize < 2**24: with open(filename, 'rb') as f: while True: data = f.read(block_size) if not data: break md5.update(data) else: count = 16 # otherwise, use the first and last 8M with open(filename, 'rb') as f: while True: data = f.read(block_size) count -= 1 if count == 8: # 2**23 = 8M f.seek(-2**23, 2) if not data or count == 0: break md5.update(data) except IOError as e: sys.exit(f'Failed to read {filename}: {e}') return md5.hexdigest()
python
def fileMD5(filename, partial=True): '''Calculate partial MD5, basically the first and last 8M of the file for large files. This should signicicantly reduce the time spent on the creation and comparison of file signature when dealing with large bioinformat ics datasets. ''' filesize = os.path.getsize(filename) # calculate md5 for specified file md5 = hash_md5() block_size = 2**20 # buffer of 1M try: # 2**24 = 16M if (not partial) or filesize < 2**24: with open(filename, 'rb') as f: while True: data = f.read(block_size) if not data: break md5.update(data) else: count = 16 # otherwise, use the first and last 8M with open(filename, 'rb') as f: while True: data = f.read(block_size) count -= 1 if count == 8: # 2**23 = 8M f.seek(-2**23, 2) if not data or count == 0: break md5.update(data) except IOError as e: sys.exit(f'Failed to read {filename}: {e}') return md5.hexdigest()
[ "def", "fileMD5", "(", "filename", ",", "partial", "=", "True", ")", ":", "filesize", "=", "os", ".", "path", ".", "getsize", "(", "filename", ")", "# calculate md5 for specified file", "md5", "=", "hash_md5", "(", ")", "block_size", "=", "2", "**", "20", "# buffer of 1M", "try", ":", "# 2**24 = 16M", "if", "(", "not", "partial", ")", "or", "filesize", "<", "2", "**", "24", ":", "with", "open", "(", "filename", ",", "'rb'", ")", "as", "f", ":", "while", "True", ":", "data", "=", "f", ".", "read", "(", "block_size", ")", "if", "not", "data", ":", "break", "md5", ".", "update", "(", "data", ")", "else", ":", "count", "=", "16", "# otherwise, use the first and last 8M", "with", "open", "(", "filename", ",", "'rb'", ")", "as", "f", ":", "while", "True", ":", "data", "=", "f", ".", "read", "(", "block_size", ")", "count", "-=", "1", "if", "count", "==", "8", ":", "# 2**23 = 8M", "f", ".", "seek", "(", "-", "2", "**", "23", ",", "2", ")", "if", "not", "data", "or", "count", "==", "0", ":", "break", "md5", ".", "update", "(", "data", ")", "except", "IOError", "as", "e", ":", "sys", ".", "exit", "(", "f'Failed to read {filename}: {e}'", ")", "return", "md5", ".", "hexdigest", "(", ")" ]
Calculate partial MD5, basically the first and last 8M of the file for large files. This should signicicantly reduce the time spent on the creation and comparison of file signature when dealing with large bioinformat ics datasets.
[ "Calculate", "partial", "MD5", "basically", "the", "first", "and", "last", "8M", "of", "the", "file", "for", "large", "files", ".", "This", "should", "signicicantly", "reduce", "the", "time", "spent", "on", "the", "creation", "and", "comparison", "of", "file", "signature", "when", "dealing", "with", "large", "bioinformat", "ics", "datasets", "." ]
6b60ed0770916d135e17322e469520d778e9d4e7
https://github.com/vatlab/SoS/blob/6b60ed0770916d135e17322e469520d778e9d4e7/src/sos/targets.py#L109-L142
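A hypothetical use of fileMD5 (same assumed sos.targets import; the file name is made up):

from sos.targets import fileMD5

print(fileMD5('reads.bam'))                  # partial hash: first and last 8M once the file reaches 16M
print(fileMD5('reads.bam', partial=False))   # hash the whole file regardless of size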
9,486
vatlab/SoS
src/sos/targets.py
file_target.target_signature
def target_signature(self): '''Return file signature''' if self.exists(): if not self._md5: self._md5 = fileMD5(self) return (os.path.getmtime(self), os.path.getsize(self), self._md5) elif (self + '.zapped').is_file(): with open(self + '.zapped') as sig: line = sig.readline() _, mtime, size, md5 = line.strip().rsplit('\t', 3) self._md5 = md5 return (float(mtime), int(size), md5) else: raise ValueError(f'{self} does not exist.')
python
def target_signature(self): '''Return file signature''' if self.exists(): if not self._md5: self._md5 = fileMD5(self) return (os.path.getmtime(self), os.path.getsize(self), self._md5) elif (self + '.zapped').is_file(): with open(self + '.zapped') as sig: line = sig.readline() _, mtime, size, md5 = line.strip().rsplit('\t', 3) self._md5 = md5 return (float(mtime), int(size), md5) else: raise ValueError(f'{self} does not exist.')
[ "def", "target_signature", "(", "self", ")", ":", "if", "self", ".", "exists", "(", ")", ":", "if", "not", "self", ".", "_md5", ":", "self", ".", "_md5", "=", "fileMD5", "(", "self", ")", "return", "(", "os", ".", "path", ".", "getmtime", "(", "self", ")", ",", "os", ".", "path", ".", "getsize", "(", "self", ")", ",", "self", ".", "_md5", ")", "elif", "(", "self", "+", "'.zapped'", ")", ".", "is_file", "(", ")", ":", "with", "open", "(", "self", "+", "'.zapped'", ")", "as", "sig", ":", "line", "=", "sig", ".", "readline", "(", ")", "_", ",", "mtime", ",", "size", ",", "md5", "=", "line", ".", "strip", "(", ")", ".", "rsplit", "(", "'\\t'", ",", "3", ")", "self", ".", "_md5", "=", "md5", "return", "(", "float", "(", "mtime", ")", ",", "int", "(", "size", ")", ",", "md5", ")", "else", ":", "raise", "ValueError", "(", "f'{self} does not exist.'", ")" ]
Return file signature
[ "Return", "file", "signature" ]
6b60ed0770916d135e17322e469520d778e9d4e7
https://github.com/vatlab/SoS/blob/6b60ed0770916d135e17322e469520d778e9d4e7/src/sos/targets.py#L747-L760
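A hedged sketch of how the signature tuple above might be obtained (same assumed import path; the file path is hypothetical):

from sos.targets import file_target

out = file_target('results/summary.csv')
if out.exists():
    # (os.path.getmtime, os.path.getsize, fileMD5 digest), with the digest cached on the target
    mtime, size, md5 = out.target_signature()
    print(mtime, size, md5)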
9,487
vatlab/SoS
src/sos/targets.py
file_target.validate
def validate(self, sig=None): '''Check if file matches its signature''' if sig is not None: sig_mtime, sig_size, sig_md5 = sig else: try: with open(self.sig_file()) as sig: sig_mtime, sig_size, sig_md5 = sig.read().strip().split() except: return False if not self.exists(): if (self + '.zapped').is_file(): with open(self + '.zapped') as sig: line = sig.readline() return sig_md5 == line.strip().rsplit('\t', 3)[-1] else: return False if sig_mtime == os.path.getmtime(self) and sig_size == os.path.getsize( self): return True return fileMD5(self) == sig_md5
python
def validate(self, sig=None): '''Check if file matches its signature''' if sig is not None: sig_mtime, sig_size, sig_md5 = sig else: try: with open(self.sig_file()) as sig: sig_mtime, sig_size, sig_md5 = sig.read().strip().split() except: return False if not self.exists(): if (self + '.zapped').is_file(): with open(self + '.zapped') as sig: line = sig.readline() return sig_md5 == line.strip().rsplit('\t', 3)[-1] else: return False if sig_mtime == os.path.getmtime(self) and sig_size == os.path.getsize( self): return True return fileMD5(self) == sig_md5
[ "def", "validate", "(", "self", ",", "sig", "=", "None", ")", ":", "if", "sig", "is", "not", "None", ":", "sig_mtime", ",", "sig_size", ",", "sig_md5", "=", "sig", "else", ":", "try", ":", "with", "open", "(", "self", ".", "sig_file", "(", ")", ")", "as", "sig", ":", "sig_mtime", ",", "sig_size", ",", "sig_md5", "=", "sig", ".", "read", "(", ")", ".", "strip", "(", ")", ".", "split", "(", ")", "except", ":", "return", "False", "if", "not", "self", ".", "exists", "(", ")", ":", "if", "(", "self", "+", "'.zapped'", ")", ".", "is_file", "(", ")", ":", "with", "open", "(", "self", "+", "'.zapped'", ")", "as", "sig", ":", "line", "=", "sig", ".", "readline", "(", ")", "return", "sig_md5", "==", "line", ".", "strip", "(", ")", ".", "rsplit", "(", "'\\t'", ",", "3", ")", "[", "-", "1", "]", "else", ":", "return", "False", "if", "sig_mtime", "==", "os", ".", "path", ".", "getmtime", "(", "self", ")", "and", "sig_size", "==", "os", ".", "path", ".", "getsize", "(", "self", ")", ":", "return", "True", "return", "fileMD5", "(", "self", ")", "==", "sig_md5" ]
Check if file matches its signature
[ "Check", "if", "file", "matches", "its", "signature" ]
6b60ed0770916d135e17322e469520d778e9d4e7
https://github.com/vatlab/SoS/blob/6b60ed0770916d135e17322e469520d778e9d4e7/src/sos/targets.py#L766-L786
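A sketch of validate() used together with target_signature() (same assumptions as above; the step in between is purely illustrative):

from sos.targets import file_target

out = file_target('results/summary.csv')
sig = out.target_signature()   # record (mtime, size, md5) while the file is known to be good

# ... a step runs and may or may not rewrite results/summary.csv ...

if out.validate(sig):
    print('unchanged: mtime/size still match, or the MD5 re-check passed')
else:
    print('changed, missing, or only a mismatching .zapped record remains')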
9,488
vatlab/SoS
src/sos/targets.py
file_target.write_sig
def write_sig(self): '''Write signature to sig store''' if not self._md5: self._md5 = fileMD5(self) with open(self.sig_file(), 'w') as sig: sig.write( f'{os.path.getmtime(self)}\t{os.path.getsize(self)}\t{self._md5}' )
python
def write_sig(self): '''Write signature to sig store''' if not self._md5: self._md5 = fileMD5(self) with open(self.sig_file(), 'w') as sig: sig.write( f'{os.path.getmtime(self)}\t{os.path.getsize(self)}\t{self._md5}' )
[ "def", "write_sig", "(", "self", ")", ":", "if", "not", "self", ".", "_md5", ":", "self", ".", "_md5", "=", "fileMD5", "(", "self", ")", "with", "open", "(", "self", ".", "sig_file", "(", ")", ",", "'w'", ")", "as", "sig", ":", "sig", ".", "write", "(", "f'{os.path.getmtime(self)}\\t{os.path.getsize(self)}\\t{self._md5}'", ")" ]
Write signature to sig store
[ "Write", "signature", "to", "sig", "store" ]
6b60ed0770916d135e17322e469520d778e9d4e7
https://github.com/vatlab/SoS/blob/6b60ed0770916d135e17322e469520d778e9d4e7/src/sos/targets.py#L788-L795
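A small sketch of what write_sig() stores (assuming the signature store directory already exists); the tab-separated line is the format validate() parses back when no explicit signature is passed:

from sos.targets import file_target

out = file_target('results/summary.csv')   # hypothetical path
out.write_sig()                            # writes "mtime<TAB>size<TAB>md5" to out.sig_file()
with open(out.sig_file()) as sig:
    print(sig.read())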
9,489
vatlab/SoS
src/sos/targets.py
sos_targets.remove_targets
def remove_targets(self, type, kept=None): '''Remove targets of certain type''' if kept is None: kept = [ i for i, x in enumerate(self._targets) if not isinstance(x, type) ] if len(kept) == len(self._targets): return self self._targets = [self._targets[x] for x in kept] self._labels = [self._labels[x] for x in kept] if not self._groups: return self index_map = { o_idx: n_idx for n_idx, o_idx in zip(range(len(self._targets)), kept) } kept = set(kept) for idx, grp in enumerate(self._groups): self._groups[idx] = _sos_group( [index_map[x] for x in grp._indexes if x in kept], [y for x, y in zip(grp._indexes, grp._labels) if x in kept ]).set(**grp._dict) return self
python
def remove_targets(self, type, kept=None): '''Remove targets of certain type''' if kept is None: kept = [ i for i, x in enumerate(self._targets) if not isinstance(x, type) ] if len(kept) == len(self._targets): return self self._targets = [self._targets[x] for x in kept] self._labels = [self._labels[x] for x in kept] if not self._groups: return self index_map = { o_idx: n_idx for n_idx, o_idx in zip(range(len(self._targets)), kept) } kept = set(kept) for idx, grp in enumerate(self._groups): self._groups[idx] = _sos_group( [index_map[x] for x in grp._indexes if x in kept], [y for x, y in zip(grp._indexes, grp._labels) if x in kept ]).set(**grp._dict) return self
[ "def", "remove_targets", "(", "self", ",", "type", ",", "kept", "=", "None", ")", ":", "if", "kept", "is", "None", ":", "kept", "=", "[", "i", "for", "i", ",", "x", "in", "enumerate", "(", "self", ".", "_targets", ")", "if", "not", "isinstance", "(", "x", ",", "type", ")", "]", "if", "len", "(", "kept", ")", "==", "len", "(", "self", ".", "_targets", ")", ":", "return", "self", "self", ".", "_targets", "=", "[", "self", ".", "_targets", "[", "x", "]", "for", "x", "in", "kept", "]", "self", ".", "_labels", "=", "[", "self", ".", "_labels", "[", "x", "]", "for", "x", "in", "kept", "]", "if", "not", "self", ".", "_groups", ":", "return", "self", "index_map", "=", "{", "o_idx", ":", "n_idx", "for", "n_idx", ",", "o_idx", "in", "zip", "(", "range", "(", "len", "(", "self", ".", "_targets", ")", ")", ",", "kept", ")", "}", "kept", "=", "set", "(", "kept", ")", "for", "idx", ",", "grp", "in", "enumerate", "(", "self", ".", "_groups", ")", ":", "self", ".", "_groups", "[", "idx", "]", "=", "_sos_group", "(", "[", "index_map", "[", "x", "]", "for", "x", "in", "grp", ".", "_indexes", "if", "x", "in", "kept", "]", ",", "[", "y", "for", "x", ",", "y", "in", "zip", "(", "grp", ".", "_indexes", ",", "grp", ".", "_labels", ")", "if", "x", "in", "kept", "]", ")", ".", "set", "(", "*", "*", "grp", ".", "_dict", ")", "return", "self" ]
Remove targets of certain type
[ "Remove", "targets", "of", "certain", "type" ]
6b60ed0770916d135e17322e469520d778e9d4e7
https://github.com/vatlab/SoS/blob/6b60ed0770916d135e17322e469520d778e9d4e7/src/sos/targets.py#L1406-L1429
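A hedged sketch of remove_targets(); it assumes sos_targets accepts a list of paths (as in initialize_dag further down) and that remote, referenced by resolve_remote in the next record, can wrap a path-like argument:

from sos.targets import sos_targets, remote

targets = sos_targets(['local_a.txt', 'local_b.txt', remote('server:/data/big.bam')])
# Drop every target that is an instance of the given type; labels and any
# existing groups are re-indexed so they keep pointing at the surviving targets.
targets = targets.remove_targets(type=remote)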
9,490
vatlab/SoS
src/sos/targets.py
sos_targets.resolve_remote
def resolve_remote(self): '''If target is of remote type, resolve it''' for idx, target in enumerate(self._targets): if isinstance(target, remote): resolved = target.resolve() if isinstance(resolved, str): resolved = interpolate(resolved, env.sos_dict.dict()) self._targets[idx] = file_target(resolved).set(**target._dict) return self
python
def resolve_remote(self): '''If target is of remote type, resolve it''' for idx, target in enumerate(self._targets): if isinstance(target, remote): resolved = target.resolve() if isinstance(resolved, str): resolved = interpolate(resolved, env.sos_dict.dict()) self._targets[idx] = file_target(resolved).set(**target._dict) return self
[ "def", "resolve_remote", "(", "self", ")", ":", "for", "idx", ",", "target", "in", "enumerate", "(", "self", ".", "_targets", ")", ":", "if", "isinstance", "(", "target", ",", "remote", ")", ":", "resolved", "=", "target", ".", "resolve", "(", ")", "if", "isinstance", "(", "resolved", ",", "str", ")", ":", "resolved", "=", "interpolate", "(", "resolved", ",", "env", ".", "sos_dict", ".", "dict", "(", ")", ")", "self", ".", "_targets", "[", "idx", "]", "=", "file_target", "(", "resolved", ")", ".", "set", "(", "*", "*", "target", ".", "_dict", ")", "return", "self" ]
If target is of remote type, resolve it
[ "If", "target", "is", "of", "remote", "type", "resolve", "it" ]
6b60ed0770916d135e17322e469520d778e9d4e7
https://github.com/vatlab/SoS/blob/6b60ed0770916d135e17322e469520d778e9d4e7/src/sos/targets.py#L1431-L1439
9,491
vatlab/SoS
src/sos/targets.py
sos_targets._handle_paired_with
def _handle_paired_with(self, paired_with): '''Handle input option paired_with''' if paired_with is None or not paired_with: var_name = [] var_value = [] elif isinstance(paired_with, str): var_name = ['_' + paired_with] if paired_with not in env.sos_dict: raise ValueError(f'Variable {paired_with} does not exist.') var_value = [env.sos_dict[paired_with]] elif isinstance(paired_with, dict): var_name = [] var_value = [] for k, v in paired_with.items(): var_name.append(k) var_value.append(v) elif isinstance(paired_with, Iterable): try: var_name = ['_' + x for x in paired_with] except Exception: raise ValueError( f'Invalud value for option paired_with {paired_with}') var_value = [] for vn in var_name: if vn[1:] not in env.sos_dict: raise ValueError(f'Variable {vn[1:]} does not exist.') var_value.append(env.sos_dict[vn[1:]]) else: raise ValueError( f'Unacceptable value for parameter paired_with: {paired_with}') # for vn, vv in zip(var_name, var_value): # set paired with values to step_input self.paired_with(vn, vv)
python
def _handle_paired_with(self, paired_with): '''Handle input option paired_with''' if paired_with is None or not paired_with: var_name = [] var_value = [] elif isinstance(paired_with, str): var_name = ['_' + paired_with] if paired_with not in env.sos_dict: raise ValueError(f'Variable {paired_with} does not exist.') var_value = [env.sos_dict[paired_with]] elif isinstance(paired_with, dict): var_name = [] var_value = [] for k, v in paired_with.items(): var_name.append(k) var_value.append(v) elif isinstance(paired_with, Iterable): try: var_name = ['_' + x for x in paired_with] except Exception: raise ValueError( f'Invalud value for option paired_with {paired_with}') var_value = [] for vn in var_name: if vn[1:] not in env.sos_dict: raise ValueError(f'Variable {vn[1:]} does not exist.') var_value.append(env.sos_dict[vn[1:]]) else: raise ValueError( f'Unacceptable value for parameter paired_with: {paired_with}') # for vn, vv in zip(var_name, var_value): # set paired with values to step_input self.paired_with(vn, vv)
[ "def", "_handle_paired_with", "(", "self", ",", "paired_with", ")", ":", "if", "paired_with", "is", "None", "or", "not", "paired_with", ":", "var_name", "=", "[", "]", "var_value", "=", "[", "]", "elif", "isinstance", "(", "paired_with", ",", "str", ")", ":", "var_name", "=", "[", "'_'", "+", "paired_with", "]", "if", "paired_with", "not", "in", "env", ".", "sos_dict", ":", "raise", "ValueError", "(", "f'Variable {paired_with} does not exist.'", ")", "var_value", "=", "[", "env", ".", "sos_dict", "[", "paired_with", "]", "]", "elif", "isinstance", "(", "paired_with", ",", "dict", ")", ":", "var_name", "=", "[", "]", "var_value", "=", "[", "]", "for", "k", ",", "v", "in", "paired_with", ".", "items", "(", ")", ":", "var_name", ".", "append", "(", "k", ")", "var_value", ".", "append", "(", "v", ")", "elif", "isinstance", "(", "paired_with", ",", "Iterable", ")", ":", "try", ":", "var_name", "=", "[", "'_'", "+", "x", "for", "x", "in", "paired_with", "]", "except", "Exception", ":", "raise", "ValueError", "(", "f'Invalud value for option paired_with {paired_with}'", ")", "var_value", "=", "[", "]", "for", "vn", "in", "var_name", ":", "if", "vn", "[", "1", ":", "]", "not", "in", "env", ".", "sos_dict", ":", "raise", "ValueError", "(", "f'Variable {vn[1:]} does not exist.'", ")", "var_value", ".", "append", "(", "env", ".", "sos_dict", "[", "vn", "[", "1", ":", "]", "]", ")", "else", ":", "raise", "ValueError", "(", "f'Unacceptable value for parameter paired_with: {paired_with}'", ")", "#", "for", "vn", ",", "vv", "in", "zip", "(", "var_name", ",", "var_value", ")", ":", "# set paired with values to step_input", "self", ".", "paired_with", "(", "vn", ",", "vv", ")" ]
Handle input option paired_with
[ "Handle", "input", "option", "paired_with" ]
6b60ed0770916d135e17322e469520d778e9d4e7
https://github.com/vatlab/SoS/blob/6b60ed0770916d135e17322e469520d778e9d4e7/src/sos/targets.py#L1685-L1718
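The handler above accepts a single variable name, a dict of ready-made name/value pairs, or an iterable of names; _handle_group_with in the next record accepts the same three forms. A hedged sketch of how this surfaces in SoS step syntax (not standalone Python; sample and batch are hypothetical step variables):

# paired_with as a single name: pairs the existing variable `sample` item-by-item
# with the input files and exposes it as `_sample` inside each substep
input: 'case1.bam', 'case2.bam', paired_with='sample'

# paired_with as a dict: keys are used verbatim as the paired variable names
input: 'case1.bam', 'case2.bam', paired_with={'_sample': sample, '_batch': batch}

# paired_with as an iterable of names
input: 'case1.bam', 'case2.bam', paired_with=['sample', 'batch']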
9,492
vatlab/SoS
src/sos/targets.py
sos_targets._handle_group_with
def _handle_group_with(self, group_with): '''Handle input option group_with''' if group_with is None or not group_with: var_name = [] var_value = [] elif isinstance(group_with, str): var_name = ['_' + group_with] if group_with not in env.sos_dict: raise ValueError(f'Variable {group_with} does not exist.') var_value = [env.sos_dict[group_with]] elif isinstance(group_with, dict): var_name = [] var_value = [] for k, v in group_with.items(): var_name.append(k) var_value.append(v) elif isinstance(group_with, Iterable): try: var_name = ['_' + x for x in group_with] except Exception: raise ValueError( f'Invalud value for option group_with {group_with}') var_value = [] for vn in var_name: if vn[1:] not in env.sos_dict: raise ValueError(f'Variable {vn[1:]} does not exist.') var_value.append(env.sos_dict[vn[1:]]) else: raise ValueError( f'Unacceptable value for parameter group_with: {group_with}') # for vn, vv in zip(var_name, var_value): self.group_with(vn, vv)
python
def _handle_group_with(self, group_with): '''Handle input option group_with''' if group_with is None or not group_with: var_name = [] var_value = [] elif isinstance(group_with, str): var_name = ['_' + group_with] if group_with not in env.sos_dict: raise ValueError(f'Variable {group_with} does not exist.') var_value = [env.sos_dict[group_with]] elif isinstance(group_with, dict): var_name = [] var_value = [] for k, v in group_with.items(): var_name.append(k) var_value.append(v) elif isinstance(group_with, Iterable): try: var_name = ['_' + x for x in group_with] except Exception: raise ValueError( f'Invalud value for option group_with {group_with}') var_value = [] for vn in var_name: if vn[1:] not in env.sos_dict: raise ValueError(f'Variable {vn[1:]} does not exist.') var_value.append(env.sos_dict[vn[1:]]) else: raise ValueError( f'Unacceptable value for parameter group_with: {group_with}') # for vn, vv in zip(var_name, var_value): self.group_with(vn, vv)
[ "def", "_handle_group_with", "(", "self", ",", "group_with", ")", ":", "if", "group_with", "is", "None", "or", "not", "group_with", ":", "var_name", "=", "[", "]", "var_value", "=", "[", "]", "elif", "isinstance", "(", "group_with", ",", "str", ")", ":", "var_name", "=", "[", "'_'", "+", "group_with", "]", "if", "group_with", "not", "in", "env", ".", "sos_dict", ":", "raise", "ValueError", "(", "f'Variable {group_with} does not exist.'", ")", "var_value", "=", "[", "env", ".", "sos_dict", "[", "group_with", "]", "]", "elif", "isinstance", "(", "group_with", ",", "dict", ")", ":", "var_name", "=", "[", "]", "var_value", "=", "[", "]", "for", "k", ",", "v", "in", "group_with", ".", "items", "(", ")", ":", "var_name", ".", "append", "(", "k", ")", "var_value", ".", "append", "(", "v", ")", "elif", "isinstance", "(", "group_with", ",", "Iterable", ")", ":", "try", ":", "var_name", "=", "[", "'_'", "+", "x", "for", "x", "in", "group_with", "]", "except", "Exception", ":", "raise", "ValueError", "(", "f'Invalud value for option group_with {group_with}'", ")", "var_value", "=", "[", "]", "for", "vn", "in", "var_name", ":", "if", "vn", "[", "1", ":", "]", "not", "in", "env", ".", "sos_dict", ":", "raise", "ValueError", "(", "f'Variable {vn[1:]} does not exist.'", ")", "var_value", ".", "append", "(", "env", ".", "sos_dict", "[", "vn", "[", "1", ":", "]", "]", ")", "else", ":", "raise", "ValueError", "(", "f'Unacceptable value for parameter group_with: {group_with}'", ")", "#", "for", "vn", ",", "vv", "in", "zip", "(", "var_name", ",", "var_value", ")", ":", "self", ".", "group_with", "(", "vn", ",", "vv", ")" ]
Handle input option group_with
[ "Handle", "input", "option", "group_with" ]
6b60ed0770916d135e17322e469520d778e9d4e7
https://github.com/vatlab/SoS/blob/6b60ed0770916d135e17322e469520d778e9d4e7/src/sos/targets.py#L1720-L1752
9,493
vatlab/SoS
src/sos/targets.py
sos_targets._handle_extract_pattern
def _handle_extract_pattern(self, pattern): '''Handle input option pattern''' if pattern is None or not pattern: patterns = [] elif isinstance(pattern, str): patterns = [pattern] elif isinstance(pattern, Iterable): patterns = pattern else: raise ValueError( f'Unacceptable value for parameter pattern: {pattern}') # for pattern in patterns: res = extract_pattern(pattern, self._targets) self.set(**res) # also make k, v pair with _input self._handle_paired_with({'_' + x: y for x, y in res.items()})
python
def _handle_extract_pattern(self, pattern): '''Handle input option pattern''' if pattern is None or not pattern: patterns = [] elif isinstance(pattern, str): patterns = [pattern] elif isinstance(pattern, Iterable): patterns = pattern else: raise ValueError( f'Unacceptable value for parameter pattern: {pattern}') # for pattern in patterns: res = extract_pattern(pattern, self._targets) self.set(**res) # also make k, v pair with _input self._handle_paired_with({'_' + x: y for x, y in res.items()})
[ "def", "_handle_extract_pattern", "(", "self", ",", "pattern", ")", ":", "if", "pattern", "is", "None", "or", "not", "pattern", ":", "patterns", "=", "[", "]", "elif", "isinstance", "(", "pattern", ",", "str", ")", ":", "patterns", "=", "[", "pattern", "]", "elif", "isinstance", "(", "pattern", ",", "Iterable", ")", ":", "patterns", "=", "pattern", "else", ":", "raise", "ValueError", "(", "f'Unacceptable value for parameter pattern: {pattern}'", ")", "#", "for", "pattern", "in", "patterns", ":", "res", "=", "extract_pattern", "(", "pattern", ",", "self", ".", "_targets", ")", "self", ".", "set", "(", "*", "*", "res", ")", "# also make k, v pair with _input", "self", ".", "_handle_paired_with", "(", "{", "'_'", "+", "x", ":", "y", "for", "x", ",", "y", "in", "res", ".", "items", "(", ")", "}", ")" ]
Handle input option pattern
[ "Handle", "input", "option", "pattern" ]
6b60ed0770916d135e17322e469520d778e9d4e7
https://github.com/vatlab/SoS/blob/6b60ed0770916d135e17322e469520d778e9d4e7/src/sos/targets.py#L1754-L1770
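A hedged sketch of the pattern option in SoS step syntax (names hypothetical): each named field is extracted from the input paths via extract_pattern, set on the step, and also paired with the inputs through _handle_paired_with as an underscore-prefixed variable:

input: 'case_1.bam', 'control_2.bam', pattern='{name}_{idx}.bam'
# afterwards: name == ['case', 'control'], idx == ['1', '2'],
# and _name / _idx are paired with the individual input files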
9,494
vatlab/SoS
src/sos/targets.py
RuntimeInfo.write
def write(self): '''Write signature file with signature of script, input, output and dependent files. Because local input and output files can only be determined after the execution of workflow. They are not part of the construction. ''' if not self.output_files.valid(): raise ValueError( f'Cannot write signature with undetermined output {self.output_files}' ) else: if 'TARGET' in env.config['SOS_DEBUG'] or 'ALL' in env.config['SOS_DEBUG']: env.log_to_file( 'TARGET', f'write signature {self.sig_id} with output {self.output_files}' ) ret = super(RuntimeInfo, self).write() if ret is False: env.logger.debug(f'Failed to write signature {self.sig_id}') return ret send_message_to_controller(['step_sig', self.sig_id, ret]) send_message_to_controller([ 'workflow_sig', 'tracked_files', self.sig_id, repr({ 'input_files': [ str(f.resolve()) for f in self.input_files if isinstance(f, file_target) ], 'dependent_files': [ str(f.resolve()) for f in self.dependent_files if isinstance(f, file_target) ], 'output_files': [ str(f.resolve()) for f in self.output_files if isinstance(f, file_target) ] }) ]) return True
python
def write(self): '''Write signature file with signature of script, input, output and dependent files. Because local input and output files can only be determined after the execution of workflow. They are not part of the construction. ''' if not self.output_files.valid(): raise ValueError( f'Cannot write signature with undetermined output {self.output_files}' ) else: if 'TARGET' in env.config['SOS_DEBUG'] or 'ALL' in env.config['SOS_DEBUG']: env.log_to_file( 'TARGET', f'write signature {self.sig_id} with output {self.output_files}' ) ret = super(RuntimeInfo, self).write() if ret is False: env.logger.debug(f'Failed to write signature {self.sig_id}') return ret send_message_to_controller(['step_sig', self.sig_id, ret]) send_message_to_controller([ 'workflow_sig', 'tracked_files', self.sig_id, repr({ 'input_files': [ str(f.resolve()) for f in self.input_files if isinstance(f, file_target) ], 'dependent_files': [ str(f.resolve()) for f in self.dependent_files if isinstance(f, file_target) ], 'output_files': [ str(f.resolve()) for f in self.output_files if isinstance(f, file_target) ] }) ]) return True
[ "def", "write", "(", "self", ")", ":", "if", "not", "self", ".", "output_files", ".", "valid", "(", ")", ":", "raise", "ValueError", "(", "f'Cannot write signature with undetermined output {self.output_files}'", ")", "else", ":", "if", "'TARGET'", "in", "env", ".", "config", "[", "'SOS_DEBUG'", "]", "or", "'ALL'", "in", "env", ".", "config", "[", "'SOS_DEBUG'", "]", ":", "env", ".", "log_to_file", "(", "'TARGET'", ",", "f'write signature {self.sig_id} with output {self.output_files}'", ")", "ret", "=", "super", "(", "RuntimeInfo", ",", "self", ")", ".", "write", "(", ")", "if", "ret", "is", "False", ":", "env", ".", "logger", ".", "debug", "(", "f'Failed to write signature {self.sig_id}'", ")", "return", "ret", "send_message_to_controller", "(", "[", "'step_sig'", ",", "self", ".", "sig_id", ",", "ret", "]", ")", "send_message_to_controller", "(", "[", "'workflow_sig'", ",", "'tracked_files'", ",", "self", ".", "sig_id", ",", "repr", "(", "{", "'input_files'", ":", "[", "str", "(", "f", ".", "resolve", "(", ")", ")", "for", "f", "in", "self", ".", "input_files", "if", "isinstance", "(", "f", ",", "file_target", ")", "]", ",", "'dependent_files'", ":", "[", "str", "(", "f", ".", "resolve", "(", ")", ")", "for", "f", "in", "self", ".", "dependent_files", "if", "isinstance", "(", "f", ",", "file_target", ")", "]", ",", "'output_files'", ":", "[", "str", "(", "f", ".", "resolve", "(", ")", ")", "for", "f", "in", "self", ".", "output_files", "if", "isinstance", "(", "f", ",", "file_target", ")", "]", "}", ")", "]", ")", "return", "True" ]
Write signature file with signature of script, input, output and dependent files. Because local input and output files can only be determined after the execution of the workflow, they are not part of the construction.
[ "Write", "signature", "file", "with", "signature", "of", "script", "input", "output", "and", "dependent", "files", ".", "Because", "local", "input", "and", "output", "files", "can", "only", "be", "determined", "after", "the", "execution", "of", "the", "workflow", "they", "are", "not", "part", "of", "the", "construction", "." ]
6b60ed0770916d135e17322e469520d778e9d4e7
https://github.com/vatlab/SoS/blob/6b60ed0770916d135e17322e469520d778e9d4e7/src/sos/targets.py#L2219-L2259
9,495
vatlab/SoS
src/sos/executor_utils.py
clear_output
def clear_output(output=None): ''' Remove file targets in `_output` when a step fails to complete ''' for target in env.sos_dict['_output'] if output is None else output: if isinstance(target, file_target) and target.exists(): try: target.unlink() except Exception as e: env.logger.warning(f'Failed to remove {target}: {e}')
python
def clear_output(output=None): ''' Remove file targets in `_output` when a step fails to complete ''' for target in env.sos_dict['_output'] if output is None else output: if isinstance(target, file_target) and target.exists(): try: target.unlink() except Exception as e: env.logger.warning(f'Failed to remove {target}: {e}')
[ "def", "clear_output", "(", "output", "=", "None", ")", ":", "for", "target", "in", "env", ".", "sos_dict", "[", "'_output'", "]", "if", "output", "is", "None", "else", "output", ":", "if", "isinstance", "(", "target", ",", "file_target", ")", "and", "target", ".", "exists", "(", ")", ":", "try", ":", "target", ".", "unlink", "(", ")", "except", "Exception", "as", "e", ":", "env", ".", "logger", ".", "warning", "(", "f'Failed to remove {target}: {e}'", ")" ]
Remove file targets in `_output` when a step fails to complete
[ "Remove", "file", "targets", "in", "_output", "when", "a", "step", "fails", "to", "complete" ]
6b60ed0770916d135e17322e469520d778e9d4e7
https://github.com/vatlab/SoS/blob/6b60ed0770916d135e17322e469520d778e9d4e7/src/sos/executor_utils.py#L121-L130
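A small sketch of calling clear_output() with an explicit list instead of relying on env.sos_dict['_output'] (import paths assumed from the record locations; file names hypothetical):

from sos.targets import file_target
from sos.executor_utils import clear_output

clear_output(output=[file_target('partial.csv'), file_target('partial.csv.idx')])
# each existing file_target in the list is unlinked; removal failures only trigger a warning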
9,496
vatlab/SoS
src/sos/workflow_executor.py
Base_Executor.add_forward_workflow
def add_forward_workflow(self, dag, sections, satisfies=None): '''Add a forward-workflow, return number of nodes added ''' dag.new_forward_workflow() if 'DAG' in env.config['SOS_DEBUG'] or 'ALL' in env.config['SOS_DEBUG']: env.log_to_file( 'DAG', f'Adding mini-workflow with {len(sections)} sections') default_input: sos_targets = sos_targets([]) for idx, section in enumerate(sections): # res = analyze_section(section, default_input=default_input) environ_vars = res['environ_vars'] signature_vars = res['signature_vars'] changed_vars = res['changed_vars'] # parameters, if used in the step, should be considered environmental environ_vars |= env.parameter_vars & signature_vars # add shared to targets if res['changed_vars']: if 'provides' in section.options: if isinstance(section.options['provides'], str): section.options.set('provides', [section.options['provides']]) else: section.options.set('provides', []) # section.options.set( 'provides', section.options['provides'] + [sos_variable(var) for var in changed_vars]) context = { '__signature_vars__': signature_vars, '__environ_vars__': environ_vars, '__changed_vars__': changed_vars, '__dynamic_depends__': res['dynamic_depends'], '__dynamic_input__': res['dynamic_input'] } # for nested workflow, the input is specified by sos_run, not None. if idx == 0: context['__step_output__'] = env.sos_dict['__step_output__'] # can be the only step if idx == len(sections) - 1 and satisfies is not None: res['step_output'].extend(satisfies) dag.add_step( section.uuid, section.step_name(), idx, res['step_input'], res['step_depends'], res['step_output'], context=context) default_input = res['step_output'] return len(sections)
python
def add_forward_workflow(self, dag, sections, satisfies=None): '''Add a forward-workflow, return number of nodes added ''' dag.new_forward_workflow() if 'DAG' in env.config['SOS_DEBUG'] or 'ALL' in env.config['SOS_DEBUG']: env.log_to_file( 'DAG', f'Adding mini-workflow with {len(sections)} sections') default_input: sos_targets = sos_targets([]) for idx, section in enumerate(sections): # res = analyze_section(section, default_input=default_input) environ_vars = res['environ_vars'] signature_vars = res['signature_vars'] changed_vars = res['changed_vars'] # parameters, if used in the step, should be considered environmental environ_vars |= env.parameter_vars & signature_vars # add shared to targets if res['changed_vars']: if 'provides' in section.options: if isinstance(section.options['provides'], str): section.options.set('provides', [section.options['provides']]) else: section.options.set('provides', []) # section.options.set( 'provides', section.options['provides'] + [sos_variable(var) for var in changed_vars]) context = { '__signature_vars__': signature_vars, '__environ_vars__': environ_vars, '__changed_vars__': changed_vars, '__dynamic_depends__': res['dynamic_depends'], '__dynamic_input__': res['dynamic_input'] } # for nested workflow, the input is specified by sos_run, not None. if idx == 0: context['__step_output__'] = env.sos_dict['__step_output__'] # can be the only step if idx == len(sections) - 1 and satisfies is not None: res['step_output'].extend(satisfies) dag.add_step( section.uuid, section.step_name(), idx, res['step_input'], res['step_depends'], res['step_output'], context=context) default_input = res['step_output'] return len(sections)
[ "def", "add_forward_workflow", "(", "self", ",", "dag", ",", "sections", ",", "satisfies", "=", "None", ")", ":", "dag", ".", "new_forward_workflow", "(", ")", "if", "'DAG'", "in", "env", ".", "config", "[", "'SOS_DEBUG'", "]", "or", "'ALL'", "in", "env", ".", "config", "[", "'SOS_DEBUG'", "]", ":", "env", ".", "log_to_file", "(", "'DAG'", ",", "f'Adding mini-workflow with {len(sections)} sections'", ")", "default_input", ":", "sos_targets", "=", "sos_targets", "(", "[", "]", ")", "for", "idx", ",", "section", "in", "enumerate", "(", "sections", ")", ":", "#", "res", "=", "analyze_section", "(", "section", ",", "default_input", "=", "default_input", ")", "environ_vars", "=", "res", "[", "'environ_vars'", "]", "signature_vars", "=", "res", "[", "'signature_vars'", "]", "changed_vars", "=", "res", "[", "'changed_vars'", "]", "# parameters, if used in the step, should be considered environmental", "environ_vars", "|=", "env", ".", "parameter_vars", "&", "signature_vars", "# add shared to targets", "if", "res", "[", "'changed_vars'", "]", ":", "if", "'provides'", "in", "section", ".", "options", ":", "if", "isinstance", "(", "section", ".", "options", "[", "'provides'", "]", ",", "str", ")", ":", "section", ".", "options", ".", "set", "(", "'provides'", ",", "[", "section", ".", "options", "[", "'provides'", "]", "]", ")", "else", ":", "section", ".", "options", ".", "set", "(", "'provides'", ",", "[", "]", ")", "#", "section", ".", "options", ".", "set", "(", "'provides'", ",", "section", ".", "options", "[", "'provides'", "]", "+", "[", "sos_variable", "(", "var", ")", "for", "var", "in", "changed_vars", "]", ")", "context", "=", "{", "'__signature_vars__'", ":", "signature_vars", ",", "'__environ_vars__'", ":", "environ_vars", ",", "'__changed_vars__'", ":", "changed_vars", ",", "'__dynamic_depends__'", ":", "res", "[", "'dynamic_depends'", "]", ",", "'__dynamic_input__'", ":", "res", "[", "'dynamic_input'", "]", "}", "# for nested workflow, the input is specified by sos_run, not None.", "if", "idx", "==", "0", ":", "context", "[", "'__step_output__'", "]", "=", "env", ".", "sos_dict", "[", "'__step_output__'", "]", "# can be the only step", "if", "idx", "==", "len", "(", "sections", ")", "-", "1", "and", "satisfies", "is", "not", "None", ":", "res", "[", "'step_output'", "]", ".", "extend", "(", "satisfies", ")", "dag", ".", "add_step", "(", "section", ".", "uuid", ",", "section", ".", "step_name", "(", ")", ",", "idx", ",", "res", "[", "'step_input'", "]", ",", "res", "[", "'step_depends'", "]", ",", "res", "[", "'step_output'", "]", ",", "context", "=", "context", ")", "default_input", "=", "res", "[", "'step_output'", "]", "return", "len", "(", "sections", ")" ]
Add a forward-workflow, return number of nodes added
[ "Add", "a", "forward", "-", "workflow", "return", "number", "of", "nodes", "added" ]
6b60ed0770916d135e17322e469520d778e9d4e7
https://github.com/vatlab/SoS/blob/6b60ed0770916d135e17322e469520d778e9d4e7/src/sos/workflow_executor.py#L702-L758
9,497
vatlab/SoS
src/sos/workflow_executor.py
Base_Executor.initialize_dag
def initialize_dag(self, targets: Optional[List[str]] = [], nested: bool = False) -> SoS_DAG: '''Create a DAG by analyzing sections statically.''' self.reset_dict() dag = SoS_DAG(name=self.md5) targets = sos_targets(targets) self.add_forward_workflow(dag, self.workflow.sections) # if self.resolve_dangling_targets(dag, targets) == 0: if targets: raise UnknownTarget(f'No step to generate target {targets}.') # now, there should be no dangling targets, let us connect nodes dag.build() # dag.show_nodes() # trim the DAG if targets are specified if targets: dag = dag.subgraph_from(targets) # check error cycle = dag.circular_dependencies() if cycle: raise RuntimeError( f'Circular dependency detected {cycle}. It is likely a later step produces input of a previous step.' ) dag.save(env.config['output_dag']) return dag
python
def initialize_dag(self, targets: Optional[List[str]] = [], nested: bool = False) -> SoS_DAG: '''Create a DAG by analyzing sections statically.''' self.reset_dict() dag = SoS_DAG(name=self.md5) targets = sos_targets(targets) self.add_forward_workflow(dag, self.workflow.sections) # if self.resolve_dangling_targets(dag, targets) == 0: if targets: raise UnknownTarget(f'No step to generate target {targets}.') # now, there should be no dangling targets, let us connect nodes dag.build() # dag.show_nodes() # trim the DAG if targets are specified if targets: dag = dag.subgraph_from(targets) # check error cycle = dag.circular_dependencies() if cycle: raise RuntimeError( f'Circular dependency detected {cycle}. It is likely a later step produces input of a previous step.' ) dag.save(env.config['output_dag']) return dag
[ "def", "initialize_dag", "(", "self", ",", "targets", ":", "Optional", "[", "List", "[", "str", "]", "]", "=", "[", "]", ",", "nested", ":", "bool", "=", "False", ")", "->", "SoS_DAG", ":", "self", ".", "reset_dict", "(", ")", "dag", "=", "SoS_DAG", "(", "name", "=", "self", ".", "md5", ")", "targets", "=", "sos_targets", "(", "targets", ")", "self", ".", "add_forward_workflow", "(", "dag", ",", "self", ".", "workflow", ".", "sections", ")", "#", "if", "self", ".", "resolve_dangling_targets", "(", "dag", ",", "targets", ")", "==", "0", ":", "if", "targets", ":", "raise", "UnknownTarget", "(", "f'No step to generate target {targets}.'", ")", "# now, there should be no dangling targets, let us connect nodes", "dag", ".", "build", "(", ")", "# dag.show_nodes()", "# trim the DAG if targets are specified", "if", "targets", ":", "dag", "=", "dag", ".", "subgraph_from", "(", "targets", ")", "# check error", "cycle", "=", "dag", ".", "circular_dependencies", "(", ")", "if", "cycle", ":", "raise", "RuntimeError", "(", "f'Circular dependency detected {cycle}. It is likely a later step produces input of a previous step.'", ")", "dag", ".", "save", "(", "env", ".", "config", "[", "'output_dag'", "]", ")", "return", "dag" ]
Create a DAG by analyzing sections statically.
[ "Create", "a", "DAG", "by", "analyzing", "sections", "statically", "." ]
6b60ed0770916d135e17322e469520d778e9d4e7
https://github.com/vatlab/SoS/blob/6b60ed0770916d135e17322e469520d778e9d4e7/src/sos/workflow_executor.py#L831-L859
9,498
vatlab/SoS
src/sos/utils.py
short_repr
def short_repr(obj, noneAsNA=False): '''Return a short representation of obj for clarity.''' if obj is None: return 'unspecified' if noneAsNA else 'None' elif isinstance(obj, str) and len(obj) > 80: return '{}...{}'.format(obj[:60].replace('\n', '\\n'), obj[-20:].replace('\n', '\\n')) elif isinstance(obj, (str, int, float, bool)): return repr(obj) elif hasattr(obj, '__short_repr__'): return obj.__short_repr__() elif isinstance(obj, Sequence): # should be a list or tuple if len(obj) == 0: return '[]' elif len(obj) == 1: return f'{short_repr(obj[0])}' elif len(obj) == 2: return f'{short_repr(obj[0])}, {short_repr(obj[1])}' else: return f'{short_repr(obj[0])}, {short_repr(obj[1])}, ... ({len(obj)} items)' elif isinstance(obj, dict): if not obj: return '' elif len(obj) == 1: first_key = list(obj.keys())[0] return f'{short_repr(first_key)!r}:{short_repr(obj[first_key])!r}' else: first_key = list(obj.keys())[0] return f'{short_repr(first_key)}:{short_repr(obj[first_key])}, ... ({len(obj)} items)' elif isinstance(obj, KeysView): if not obj: return '' elif len(obj) == 1: return short_repr(next(iter(obj))) else: return f'{short_repr(next(iter(obj)))}, ... ({len(obj)} items)' #elif hasattr(obj, 'target_name'): # return obj.target_name() else: ret = str(obj) if len(ret) > 40: return f'{repr(obj)[:35]}...' else: return ret
python
def short_repr(obj, noneAsNA=False): '''Return a short representation of obj for clarity.''' if obj is None: return 'unspecified' if noneAsNA else 'None' elif isinstance(obj, str) and len(obj) > 80: return '{}...{}'.format(obj[:60].replace('\n', '\\n'), obj[-20:].replace('\n', '\\n')) elif isinstance(obj, (str, int, float, bool)): return repr(obj) elif hasattr(obj, '__short_repr__'): return obj.__short_repr__() elif isinstance(obj, Sequence): # should be a list or tuple if len(obj) == 0: return '[]' elif len(obj) == 1: return f'{short_repr(obj[0])}' elif len(obj) == 2: return f'{short_repr(obj[0])}, {short_repr(obj[1])}' else: return f'{short_repr(obj[0])}, {short_repr(obj[1])}, ... ({len(obj)} items)' elif isinstance(obj, dict): if not obj: return '' elif len(obj) == 1: first_key = list(obj.keys())[0] return f'{short_repr(first_key)!r}:{short_repr(obj[first_key])!r}' else: first_key = list(obj.keys())[0] return f'{short_repr(first_key)}:{short_repr(obj[first_key])}, ... ({len(obj)} items)' elif isinstance(obj, KeysView): if not obj: return '' elif len(obj) == 1: return short_repr(next(iter(obj))) else: return f'{short_repr(next(iter(obj)))}, ... ({len(obj)} items)' #elif hasattr(obj, 'target_name'): # return obj.target_name() else: ret = str(obj) if len(ret) > 40: return f'{repr(obj)[:35]}...' else: return ret
[ "def", "short_repr", "(", "obj", ",", "noneAsNA", "=", "False", ")", ":", "if", "obj", "is", "None", ":", "return", "'unspecified'", "if", "noneAsNA", "else", "'None'", "elif", "isinstance", "(", "obj", ",", "str", ")", "and", "len", "(", "obj", ")", ">", "80", ":", "return", "'{}...{}'", ".", "format", "(", "obj", "[", ":", "60", "]", ".", "replace", "(", "'\\n'", ",", "'\\\\n'", ")", ",", "obj", "[", "-", "20", ":", "]", ".", "replace", "(", "'\\n'", ",", "'\\\\n'", ")", ")", "elif", "isinstance", "(", "obj", ",", "(", "str", ",", "int", ",", "float", ",", "bool", ")", ")", ":", "return", "repr", "(", "obj", ")", "elif", "hasattr", "(", "obj", ",", "'__short_repr__'", ")", ":", "return", "obj", ".", "__short_repr__", "(", ")", "elif", "isinstance", "(", "obj", ",", "Sequence", ")", ":", "# should be a list or tuple", "if", "len", "(", "obj", ")", "==", "0", ":", "return", "'[]'", "elif", "len", "(", "obj", ")", "==", "1", ":", "return", "f'{short_repr(obj[0])}'", "elif", "len", "(", "obj", ")", "==", "2", ":", "return", "f'{short_repr(obj[0])}, {short_repr(obj[1])}'", "else", ":", "return", "f'{short_repr(obj[0])}, {short_repr(obj[1])}, ... ({len(obj)} items)'", "elif", "isinstance", "(", "obj", ",", "dict", ")", ":", "if", "not", "obj", ":", "return", "''", "elif", "len", "(", "obj", ")", "==", "1", ":", "first_key", "=", "list", "(", "obj", ".", "keys", "(", ")", ")", "[", "0", "]", "return", "f'{short_repr(first_key)!r}:{short_repr(obj[first_key])!r}'", "else", ":", "first_key", "=", "list", "(", "obj", ".", "keys", "(", ")", ")", "[", "0", "]", "return", "f'{short_repr(first_key)}:{short_repr(obj[first_key])}, ... ({len(obj)} items)'", "elif", "isinstance", "(", "obj", ",", "KeysView", ")", ":", "if", "not", "obj", ":", "return", "''", "elif", "len", "(", "obj", ")", "==", "1", ":", "return", "short_repr", "(", "next", "(", "iter", "(", "obj", ")", ")", ")", "else", ":", "return", "f'{short_repr(next(iter(obj)))}, ... ({len(obj)} items)'", "#elif hasattr(obj, 'target_name'):", "# return obj.target_name()", "else", ":", "ret", "=", "str", "(", "obj", ")", "if", "len", "(", "ret", ")", ">", "40", ":", "return", "f'{repr(obj)[:35]}...'", "else", ":", "return", "ret" ]
Return a short representation of obj for clarity.
[ "Return", "a", "short", "representation", "of", "obj", "for", "clarity", "." ]
6b60ed0770916d135e17322e469520d778e9d4e7
https://github.com/vatlab/SoS/blob/6b60ed0770916d135e17322e469520d778e9d4e7/src/sos/utils.py#L138-L181
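A few illustrative calls, with the expected return values derived from the branches above:

from sos.utils import short_repr

short_repr(None)                   # 'None'
short_repr(None, noneAsNA=True)    # 'unspecified'
short_repr(list(range(100)))       # '0, 1, ... (100 items)'
short_repr('x' * 200)              # first 60 chars + '...' + last 20 chars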
9,499
vatlab/SoS
src/sos/utils.py
tail_of_file
def tail_of_file(filename, n, ansi2html=False): """Reads a n lines from f with an offset of offset lines. """ avg_line_length = 74 to_read = n with open(filename) as f: while 1: try: f.seek(-(avg_line_length * to_read), 2) except IOError: # woops. apparently file is smaller than what we want # to step back, go to the beginning instead f.seek(0) pos = f.tell() lines = f.read().splitlines() if len(lines) >= to_read or pos == 0: if ansi2html: return convertAnsi2html('\n'.join(lines[-to_read:])) return '\n'.join(lines[-to_read:]) + '\n' avg_line_length *= 1.3
python
def tail_of_file(filename, n, ansi2html=False): """Reads a n lines from f with an offset of offset lines. """ avg_line_length = 74 to_read = n with open(filename) as f: while 1: try: f.seek(-(avg_line_length * to_read), 2) except IOError: # woops. apparently file is smaller than what we want # to step back, go to the beginning instead f.seek(0) pos = f.tell() lines = f.read().splitlines() if len(lines) >= to_read or pos == 0: if ansi2html: return convertAnsi2html('\n'.join(lines[-to_read:])) return '\n'.join(lines[-to_read:]) + '\n' avg_line_length *= 1.3
[ "def", "tail_of_file", "(", "filename", ",", "n", ",", "ansi2html", "=", "False", ")", ":", "avg_line_length", "=", "74", "to_read", "=", "n", "with", "open", "(", "filename", ")", "as", "f", ":", "while", "1", ":", "try", ":", "f", ".", "seek", "(", "-", "(", "avg_line_length", "*", "to_read", ")", ",", "2", ")", "except", "IOError", ":", "# woops. apparently file is smaller than what we want", "# to step back, go to the beginning instead", "f", ".", "seek", "(", "0", ")", "pos", "=", "f", ".", "tell", "(", ")", "lines", "=", "f", ".", "read", "(", ")", ".", "splitlines", "(", ")", "if", "len", "(", "lines", ")", ">=", "to_read", "or", "pos", "==", "0", ":", "if", "ansi2html", ":", "return", "convertAnsi2html", "(", "'\\n'", ".", "join", "(", "lines", "[", "-", "to_read", ":", "]", ")", ")", "return", "'\\n'", ".", "join", "(", "lines", "[", "-", "to_read", ":", "]", ")", "+", "'\\n'", "avg_line_length", "*=", "1.3" ]
Read the last n lines of the file.
[ "Read", "the", "last", "n", "lines", "of", "the", "file", "." ]
6b60ed0770916d135e17322e469520d778e9d4e7
https://github.com/vatlab/SoS/blob/6b60ed0770916d135e17322e469520d778e9d4e7/src/sos/utils.py#L1476-L1495
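A usage sketch (file name hypothetical): the function seeks backwards with a growing estimated offset until at least n lines are available or the start of the file is reached:

from sos.utils import tail_of_file

print(tail_of_file('task.log', 20), end='')                 # last 20 lines, newline-terminated
html_tail = tail_of_file('task.log', 20, ansi2html=True)    # same tail, ANSI codes converted to HTML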