Dataset schema (one record per function below):

  _id               string (lengths 2-7)
  title             string (lengths 1-88)
  partition         string (3 classes)
  text              string (lengths 75-19.8k)
  language          string (1 class)
  meta_information  dict
q24300
_resizer.resize_file_to
train
def resize_file_to(self, in_path, out_path, keep_filename=False):
    """
    Given a filename, resize and save the image per the specification into
    out_path

    :param in_path: path to image file to save. Must be supported by PIL
    :param out_path: path to the directory root for the outputted thumbnails
        to be stored
    :return: None
    """
    if keep_filename:
        filename = path.join(out_path, path.basename(in_path))
    else:
        filename = path.join(out_path, self.get_thumbnail_name(in_path))
    out_path = path.dirname(filename)
    if not path.exists(out_path):
        os.makedirs(out_path)
    if not path.exists(filename):
        try:
            image = Image.open(in_path)
            thumbnail = self.resize(image)
            thumbnail.save(filename)
            logger.info("Generated Thumbnail {0}".format(path.basename(filename)))
        except IOError:
            logger.info("Generating Thumbnail for {0} skipped".format(path.basename(filename)))
python
{ "resource": "" }
q24301
run_graphviz
train
def run_graphviz(program, code, options=[], format='png'):
    """
    Runs graphviz programs and returns image data

    Copied from
    https://github.com/tkf/ipython-hierarchymagic/blob/master/hierarchymagic.py
    """
    import os
    from errno import EPIPE, EINVAL
    from subprocess import Popen, PIPE

    dot_args = [program] + options + ['-T', format]

    if os.name == 'nt':
        # Avoid opening shell window.
        # * https://github.com/tkf/ipython-hierarchymagic/issues/1
        # * http://stackoverflow.com/a/2935727/727827
        p = Popen(dot_args, stdout=PIPE, stdin=PIPE, stderr=PIPE,
                  creationflags=0x08000000)
    else:
        p = Popen(dot_args, stdout=PIPE, stdin=PIPE, stderr=PIPE)

    wentwrong = False
    try:
        # Graphviz may close standard input when an error occurs,
        # resulting in a broken pipe on communicate()
        stdout, stderr = p.communicate(code.encode('utf-8'))
    except (OSError, IOError) as err:
        # EPIPE: broken pipe; EINVAL: invalid argument (Windows)
        if err.errno not in (EPIPE, EINVAL):
            raise
        wentwrong = True

    if wentwrong:
        # in this case, read the standard output and standard error streams
        # directly, to get the error message(s)
        stdout, stderr = p.stdout.read(), p.stderr.read()
        p.wait()

    if p.returncode != 0:
        raise RuntimeError('dot exited with error:\n[stderr]\n{0}'.format(
            stderr.decode('utf-8')))

    return stdout
python
{ "resource": "" }
q24302
graphviz_parser
train
def graphviz_parser(preprocessor, tag, markup):
    """ Simple Graphviz parser """
    # Parse the markup string
    m = DOT_BLOCK_RE.search(markup)
    if m:
        # Get program and DOT code
        code = m.group('code')
        program = m.group('program').strip()

        # Run specified program with our markup
        output = run_graphviz(program, code)

        # Return Base64 encoded image
        return '<span class="graphviz" style="text-align: center;">' \
               '<img src="data:image/png;base64,%s"></span>' % \
               base64.b64encode(output).decode('utf-8')
    else:
        raise ValueError('Error processing input. '
                         'Expected syntax: {0}'.format(SYNTAX))
python
{ "resource": "" }
q24303
inline_markdown_extension
train
def inline_markdown_extension(pelicanobj, config):
    """Instantiates a customized Markdown extension"""
    # Instantiate Markdown extension and append it to the current extensions
    try:
        if isinstance(pelicanobj.settings.get('MD_EXTENSIONS'), list):
            # pelican 3.6.3 and earlier
            pelicanobj.settings['MD_EXTENSIONS'].append(
                PelicanInlineMarkdownExtension(config))
        else:
            pelicanobj.settings['MARKDOWN'].setdefault('extensions', []).append(
                PelicanInlineMarkdownExtension(config))
    except:
        sys.excepthook(*sys.exc_info())
        sys.stderr.write("\nError - the pelican Markdown extension failed to "
                         "configure. Inline Markdown extension is non-functional.\n")
        sys.stderr.flush()
python
{ "resource": "" }
q24304
group_content
train
def group_content(generator, content_type):
    """
    Assembles articles and pages into lists based on each article or page's
    content. These lists are available through the global context passed to
    the template engine.

    When multiple categories are present, splits category names based on
    commas and trims whitespace surrounding a category's name. Thus, commas
    may not appear within a category but they can be used to delimit
    categories and may be surrounded by arbitrary amounts of whitespace.

    For each category, substitutes '_' for all whitespace and '-' characters,
    then creates a list named `SUBSTITUTED_CATEGORY_NAME`_articles or
    `SUBSTITUTED_CATEGORY_NAME`_pages for Articles or Pages, respectively.

    Note that the *original* category name must appear in the
    `CATEGORIES_TO_COLLATE` when using this plugin with category filtering
    enabled.
    """
    category_filter = generator.settings.get('CATEGORIES_TO_COLLATE', None)
    filtering_active = type(category_filter) in (list, tuple, set)

    collations = generator.context.get('collations', defaultdict(list))
    for content in generator.context[content_type]:
        category_list = [c.strip() for c in content.category.name.split(',')]
        for category in category_list:
            if filtering_active and category not in category_filter:
                continue
            category = substitute_category_name(category)
            collations['%s_%s' % (category, content_type)].append(content)
    generator.context['collations'] = collations
python
{ "resource": "" }
q24305
ImportMixin.has_import_permission
train
def has_import_permission(self, request):
    """
    Returns whether a request has import permission.
    """
    IMPORT_PERMISSION_CODE = getattr(
        settings, 'IMPORT_EXPORT_IMPORT_PERMISSION_CODE', None)
    if IMPORT_PERMISSION_CODE is None:
        return True

    opts = self.opts
    codename = get_permission_codename(IMPORT_PERMISSION_CODE, opts)
    return request.user.has_perm("%s.%s" % (opts.app_label, codename))
python
{ "resource": "" }
q24306
ImportMixin.get_import_data_kwargs
train
def get_import_data_kwargs(self, request, *args, **kwargs):
    """
    Prepare kwargs for import_data.
    """
    form = kwargs.get('form')
    if form:
        kwargs.pop('form')
        return kwargs
    return {}
python
{ "resource": "" }
q24307
ImportMixin.import_action
train
def import_action(self, request, *args, **kwargs):
    """
    Perform a dry_run of the import to make sure the import will not result
    in errors. If there were no errors, save the user-uploaded file to a
    local temp file that will be used by 'process_import' for the actual
    import.
    """
    if not self.has_import_permission(request):
        raise PermissionDenied

    context = self.get_import_context_data()

    import_formats = self.get_import_formats()
    form_type = self.get_import_form()
    form_kwargs = self.get_form_kwargs(form_type, *args, **kwargs)
    form = form_type(import_formats,
                     request.POST or None,
                     request.FILES or None,
                     **form_kwargs)

    if request.POST and form.is_valid():
        input_format = import_formats[
            int(form.cleaned_data['input_format'])
        ]()
        import_file = form.cleaned_data['import_file']
        # first always write the uploaded file to disk as it may be a
        # memory file or else based on settings upload handlers
        tmp_storage = self.write_to_tmp_storage(import_file, input_format)

        # then read the file, using the proper format-specific mode
        # warning, big files may exceed memory
        try:
            data = tmp_storage.read(input_format.get_read_mode())
            if not input_format.is_binary() and self.from_encoding:
                data = force_text(data, self.from_encoding)
            dataset = input_format.create_dataset(data)
        except UnicodeDecodeError as e:
            return HttpResponse(_(u"<h1>Imported file has a wrong encoding: %s</h1>" % e))
        except Exception as e:
            return HttpResponse(_(u"<h1>%s encountered while trying to read file: %s</h1>"
                                  % (type(e).__name__, import_file.name)))

        # prepare kwargs for import data, if needed
        res_kwargs = self.get_import_resource_kwargs(request, form=form, *args, **kwargs)
        resource = self.get_import_resource_class()(**res_kwargs)

        # prepare additional kwargs for import_data, if needed
        imp_kwargs = self.get_import_data_kwargs(request, form=form, *args, **kwargs)
        result = resource.import_data(dataset, dry_run=True,
                                      raise_errors=False,
                                      file_name=import_file.name,
                                      user=request.user,
                                      **imp_kwargs)

        context['result'] = result

        if not result.has_errors() and not result.has_validation_errors():
            initial = {
                'import_file_name': tmp_storage.name,
                'original_file_name': import_file.name,
                'input_format': form.cleaned_data['input_format'],
            }
            confirm_form = self.get_confirm_import_form()
            initial = self.get_form_kwargs(form=form, **initial)
            context['confirm_form'] = confirm_form(initial=initial)
    else:
        res_kwargs = self.get_import_resource_kwargs(request, form=form, *args, **kwargs)
        resource = self.get_import_resource_class()(**res_kwargs)

    context.update(self.admin_site.each_context(request))

    context['title'] = _("Import")
    context['form'] = form
    context['opts'] = self.model._meta
    context['fields'] = [f.column_name for f in resource.get_user_visible_fields()]

    request.current_app = self.admin_site.name
    return TemplateResponse(request, [self.import_template_name], context)
python
{ "resource": "" }
q24308
ExportMixin.has_export_permission
train
def has_export_permission(self, request):
    """
    Returns whether a request has export permission.
    """
    EXPORT_PERMISSION_CODE = getattr(
        settings, 'IMPORT_EXPORT_EXPORT_PERMISSION_CODE', None)
    if EXPORT_PERMISSION_CODE is None:
        return True

    opts = self.opts
    codename = get_permission_codename(EXPORT_PERMISSION_CODE, opts)
    return request.user.has_perm("%s.%s" % (opts.app_label, codename))
python
{ "resource": "" }
q24309
ExportMixin.get_export_queryset
train
def get_export_queryset(self, request):
    """
    Returns export queryset.

    Default implementation respects applied search and filters.
    """
    list_display = self.get_list_display(request)
    list_display_links = self.get_list_display_links(request, list_display)
    list_filter = self.get_list_filter(request)
    search_fields = self.get_search_fields(request)
    if self.get_actions(request):
        list_display = ['action_checkbox'] + list(list_display)

    ChangeList = self.get_changelist(request)
    changelist_kwargs = {
        'request': request,
        'model': self.model,
        'list_display': list_display,
        'list_display_links': list_display_links,
        'list_filter': list_filter,
        'date_hierarchy': self.date_hierarchy,
        'search_fields': search_fields,
        'list_select_related': self.list_select_related,
        'list_per_page': self.list_per_page,
        'list_max_show_all': self.list_max_show_all,
        'list_editable': self.list_editable,
        'model_admin': self,
    }
    if django.VERSION >= (2, 1):
        changelist_kwargs['sortable_by'] = self.sortable_by
    cl = ChangeList(**changelist_kwargs)

    return cl.get_queryset(request)
python
{ "resource": "" }
q24310
ExportMixin.get_export_data
train
def get_export_data(self, file_format, queryset, *args, **kwargs):
    """
    Returns file_format representation for given queryset.
    """
    request = kwargs.pop("request")
    if not self.has_export_permission(request):
        raise PermissionDenied

    resource_class = self.get_export_resource_class()
    data = resource_class(**self.get_export_resource_kwargs(request)) \
        .export(queryset, *args, **kwargs)
    export_data = file_format.export_data(data)
    return export_data
python
{ "resource": "" }
q24311
ExportActionMixin.export_admin_action
train
def export_admin_action(self, request, queryset):
    """
    Exports the selected rows using file_format.
    """
    export_format = request.POST.get('file_format')

    if not export_format:
        messages.warning(request, _('You must select an export format.'))
    else:
        formats = self.get_export_formats()
        file_format = formats[int(export_format)]()

        export_data = self.get_export_data(file_format, queryset, request=request)
        content_type = file_format.get_content_type()
        response = HttpResponse(export_data, content_type=content_type)
        response['Content-Disposition'] = 'attachment; filename=%s' % (
            self.get_export_filename(file_format),
        )
        return response
python
{ "resource": "" }
q24312
modelresource_factory
train
def modelresource_factory(model, resource_class=ModelResource):
    """
    Factory for creating ``ModelResource`` class for given Django model.
    """
    attrs = {'model': model}
    Meta = type(str('Meta'), (object,), attrs)

    class_name = model.__name__ + str('Resource')

    class_attrs = {
        'Meta': Meta,
    }

    metaclass = ModelDeclarativeMetaclass
    return metaclass(class_name, (resource_class,), class_attrs)
python
{ "resource": "" }
q24313
Resource.get_field_name
train
def get_field_name(self, field):
    """
    Returns the field name for a given field.
    """
    for field_name, f in self.fields.items():
        if f == field:
            return field_name
    raise AttributeError("Field %s does not exist in %s resource" % (
        field, self.__class__))
python
{ "resource": "" }
q24314
Resource.get_or_init_instance
train
def get_or_init_instance(self, instance_loader, row):
    """
    Either fetches an already existing instance or initializes a new one.
    """
    instance = self.get_instance(instance_loader, row)
    if instance:
        return (instance, False)
    else:
        return (self.init_instance(row), True)
python
{ "resource": "" }
q24315
Resource.save_instance
train
def save_instance(self, instance, using_transactions=True, dry_run=False):
    """
    Takes care of saving the object to the database.

    Keep in mind that this is done by calling ``instance.save()``, so
    objects are not created in bulk!
    """
    self.before_save_instance(instance, using_transactions, dry_run)
    if not using_transactions and dry_run:
        # we don't have transactions and we want to do a dry_run
        pass
    else:
        instance.save()
    self.after_save_instance(instance, using_transactions, dry_run)
python
{ "resource": "" }
q24316
Resource.save_m2m
train
def save_m2m(self, obj, data, using_transactions, dry_run):
    """
    Saves m2m fields.

    Model instances need to have a primary key value before a many-to-many
    relationship can be used.
    """
    if not using_transactions and dry_run:
        # we don't have transactions and we want to do a dry_run
        pass
    else:
        for field in self.get_import_fields():
            if not isinstance(field.widget, widgets.ManyToManyWidget):
                continue
            self.import_field(field, obj, data, True)
python
{ "resource": "" }
q24317
Resource.skip_row
train
def skip_row(self, instance, original):
    """
    Returns ``True`` if ``row`` importing should be skipped.

    Default implementation returns ``False`` unless skip_unchanged == True.
    Override this method to handle skipping rows meeting certain conditions.

    Use ``super`` if you want to preserve default handling while overriding
    ::

        class YourResource(ModelResource):
            def skip_row(self, instance, original):
                # Add code here
                return super(YourResource, self).skip_row(instance, original)

    """
    if not self._meta.skip_unchanged:
        return False
    for field in self.get_import_fields():
        try:
            # For fields that are models.fields.related.ManyRelatedManager
            # we need to compare the results
            if list(field.get_value(instance).all()) != list(field.get_value(original).all()):
                return False
        except AttributeError:
            if field.get_value(instance) != field.get_value(original):
                return False
    return True
python
{ "resource": "" }
q24318
Resource.export
train
def export(self, queryset=None, *args, **kwargs):
    """
    Exports a resource.
    """
    self.before_export(queryset, *args, **kwargs)

    if queryset is None:
        queryset = self.get_queryset()
    headers = self.get_export_headers()
    data = tablib.Dataset(headers=headers)

    if isinstance(queryset, QuerySet):
        # Iterate without the queryset cache, to avoid wasting memory when
        # exporting large datasets.
        iterable = queryset.iterator()
    else:
        iterable = queryset
    for obj in iterable:
        data.append(self.export_resource(obj))

    self.after_export(queryset, data, *args, **kwargs)

    return data
python
{ "resource": "" }
q24319
ModelResource.get_m2m_widget
train
def get_m2m_widget(cls, field):
    """
    Prepare widget for m2m field
    """
    return functools.partial(
        widgets.ManyToManyWidget,
        model=get_related_model(field))
python
{ "resource": "" }
q24320
ModelResource.get_fk_widget
train
def get_fk_widget(cls, field):
    """
    Prepare widget for fk and o2o fields
    """
    return functools.partial(
        widgets.ForeignKeyWidget,
        model=get_related_model(field))
python
{ "resource": "" }
q24321
ModelResource.widget_from_django_field
train
def widget_from_django_field(cls, f, default=widgets.Widget):
    """
    Returns the widget that would likely be associated with each
    Django type.

    Includes mapping of Postgres Array and JSON fields. In the case that
    psycopg2 is not installed, we consume the error and process the field
    regardless.
    """
    result = default
    internal_type = ""
    if callable(getattr(f, "get_internal_type", None)):
        internal_type = f.get_internal_type()

    if internal_type in cls.WIDGETS_MAP:
        result = cls.WIDGETS_MAP[internal_type]
        if isinstance(result, str):
            result = getattr(cls, result)(f)
    else:
        try:
            from django.contrib.postgres.fields import ArrayField, JSONField
        except ImportError:
            # ImportError: No module named psycopg2.extras
            class ArrayField:
                pass

            class JSONField:
                pass

        if isinstance(f, ArrayField):
            return widgets.SimpleArrayWidget
        elif isinstance(f, JSONField):
            return widgets.JSONWidget

    return result
python
{ "resource": "" }
q24322
ModelResource.widget_kwargs_for_field
train
def widget_kwargs_for_field(self, field_name):
    """
    Returns widget kwargs for given field_name.
    """
    if self._meta.widgets:
        return self._meta.widgets.get(field_name, {})
    return {}
python
{ "resource": "" }
q24323
ModelResource.field_from_django_field
train
def field_from_django_field(cls, field_name, django_field, readonly):
    """
    Returns a Resource Field instance for the given Django model field.
    """
    FieldWidget = cls.widget_from_django_field(django_field)
    widget_kwargs = cls.widget_kwargs_for_field(field_name)
    field = cls.DEFAULT_RESOURCE_FIELD(
        attribute=field_name,
        column_name=field_name,
        widget=FieldWidget(**widget_kwargs),
        readonly=readonly,
        default=django_field.default,
    )
    return field
python
{ "resource": "" }
q24324
ModelResource.after_import
train
def after_import(self, dataset, result, using_transactions, dry_run, **kwargs):
    """
    Reset the SQL sequences after new objects are imported
    """
    # Adapted from django's loaddata
    if not dry_run and any(r.import_type == RowResult.IMPORT_TYPE_NEW
                           for r in result.rows):
        connection = connections[DEFAULT_DB_ALIAS]
        sequence_sql = connection.ops.sequence_reset_sql(
            no_style(), [self._meta.model])
        if sequence_sql:
            cursor = connection.cursor()
            try:
                for line in sequence_sql:
                    cursor.execute(line)
            finally:
                cursor.close()
python
{ "resource": "" }
q24325
Field.clean
train
def clean(self, data):
    """
    Translates the value stored in the imported datasource to an
    appropriate Python object and returns it.
    """
    try:
        value = data[self.column_name]
    except KeyError:
        raise KeyError("Column '%s' not found in dataset. Available "
                       "columns are: %s" % (self.column_name, list(data)))

    # If ValueError is raised here, import_obj() will handle it
    value = self.widget.clean(value, row=data)

    if value in self.empty_values and self.default != NOT_PROVIDED:
        if callable(self.default):
            return self.default()
        return self.default

    return value
python
{ "resource": "" }
q24326
Field.get_value
train
def get_value(self, obj):
    """
    Returns the value of the object's attribute.
    """
    if self.attribute is None:
        return None

    attrs = self.attribute.split('__')
    value = obj

    for attr in attrs:
        try:
            value = getattr(value, attr, None)
        except (ValueError, ObjectDoesNotExist):
            # needs to have a primary key value before a many-to-many
            # relationship can be used.
            return None
        if value is None:
            return None

    # RelatedManager and ManyRelatedManager classes are callable in
    # Django >= 1.7 but we don't want to call them
    if callable(value) and not isinstance(value, Manager):
        value = value()
    return value
python
{ "resource": "" }
q24327
Field.export
train
def export(self, obj):
    """
    Returns value from the provided object converted to export
    representation.
    """
    value = self.get_value(obj)
    if value is None:
        return ""
    return self.widget.render(value, obj)
python
{ "resource": "" }
q24328
InvalidRow.field_specific_errors
train
def field_specific_errors(self):
    """Returns a dictionary of field-specific validation errors for this row."""
    return {
        key: value for key, value in self.error_dict.items()
        if key != NON_FIELD_ERRORS
    }
python
{ "resource": "" }
q24329
InvalidRow.error_count
train
def error_count(self):
    """Returns the total number of validation errors for this row."""
    count = 0
    for error_list in self.error_dict.values():
        count += len(error_list)
    return count
python
{ "resource": "" }
q24330
export_action_form_factory
train
def export_action_form_factory(formats):
    """
    Returns an ActionForm subclass containing a ChoiceField populated with
    the given formats.
    """
    class _ExportActionForm(ActionForm):
        """
        Action form with export format ChoiceField.
        """
        file_format = forms.ChoiceField(
            label=_('Format'),
            choices=formats,
            required=False)

    _ExportActionForm.__name__ = str('ExportActionForm')

    return _ExportActionForm
python
{ "resource": "" }
q24331
ForeignKeyWidget.get_queryset
train
def get_queryset(self, value, row, *args, **kwargs):
    """
    Returns a queryset of all objects for this Model.

    Overwrite this method if you want to limit the pool of objects from
    which the related object is retrieved.

    :param value: The field's value in the datasource.
    :param row: The datasource's current row.

    As an example; if you'd like to have ForeignKeyWidget look up a Person
    by their first **and** last name columns, you could subclass the widget
    like so::

        class FullNameForeignKeyWidget(ForeignKeyWidget):
            def get_queryset(self, value, row):
                return self.model.objects.filter(
                    first_name__iexact=row["first_name"],
                    last_name__iexact=row["last_name"]
                )
    """
    return self.model.objects.all()
python
{ "resource": "" }
q24332
get_config_dir
train
def get_config_dir():
    """
    Return tmuxp configuration directory.

    ``TMUXP_CONFIGDIR`` environmental variable has precedence if set. We also
    evaluate XDG default directory from XDG_CONFIG_HOME environmental
    variable if set or its default. Then the old default ~/.tmuxp is
    returned for compatibility.

    Returns
    -------
    str :
        absolute path to tmuxp config directory
    """
    paths = []
    if 'TMUXP_CONFIGDIR' in os.environ:
        paths.append(os.environ['TMUXP_CONFIGDIR'])
    if 'XDG_CONFIG_HOME' in os.environ:
        paths.append(os.environ['XDG_CONFIG_HOME'])
    else:
        paths.append('~/.config/tmuxp/')
    paths.append('~/.tmuxp')

    for path in paths:
        path = os.path.expanduser(path)
        if os.path.isdir(path):
            return path
    # Return last path as default if none of the previous ones matched
    return path
python
{ "resource": "" }
q24333
get_tmuxinator_dir
train
def get_tmuxinator_dir():
    """
    Return tmuxinator configuration directory.

    Checks for ``TMUXINATOR_CONFIG`` environmental variable.

    Returns
    -------
    str :
        absolute path to tmuxinator config directory

    See Also
    --------
    :meth:`tmuxp.config.import_tmuxinator`
    """
    if 'TMUXINATOR_CONFIG' in os.environ:
        return os.path.expanduser(os.environ['TMUXINATOR_CONFIG'])

    return os.path.expanduser('~/.tmuxinator/')
python
{ "resource": "" }
q24334
_validate_choices
train
def _validate_choices(options):
    """
    Callback wrapper for validating click.prompt input.

    Parameters
    ----------
    options : list
        List of allowed choices

    Returns
    -------
    :func:`callable`
        callback function for value_proc in :func:`click.prompt`.

    Raises
    ------
    :class:`click.BadParameter`
    """

    def func(value):
        if value not in options:
            raise click.BadParameter(
                'Possible choices are: {0}.'.format(', '.join(options))
            )
        return value

    return func
python
{ "resource": "" }
q24335
set_layout_hook
train
def set_layout_hook(session, hook_name):
    """Set layout hooks to normalize layout.

    References:

        - tmuxp issue: https://github.com/tmux-python/tmuxp/issues/309
        - tmux issue: https://github.com/tmux/tmux/issues/1106

    tmux 2.6+ requires that the window be viewed with the client before
    select-layout adjustments can take effect.

    To handle this, this function creates a temporary hook for this session
    to iterate through all windows and select the layout.

    In order for layout changes to take effect, a client must at the very
    least be attached to the window (not just the session).

    hook_name is provided to allow this to be set for multiple scenarios,
    such as 'client-attached' (when the user attaches the session). You may
    also want 'after-switch-client' for cases where the user loads tmuxp
    sessions inside tmux, since tmuxp offers to switch for them.

    Also, the hooks are set to immediately unbind after they're invoked,
    via -u.

    Parameters
    ----------
    session : :class:`libtmux.session.Session`
        session to bind hook to
    hook_name : str
        hook name to bind to, e.g. 'client-attached'
    """
    cmd = ['set-hook', '-t', session.id, hook_name]
    hook_cmd = []
    for window in session.windows:
        # unfortunately, select-layout won't work unless we've literally
        # selected the window at least once with the client
        hook_cmd.append('selectw -t {}'.format(window.id))
        # edit: removed -t, or else it won't respect main-pane-w/h
        hook_cmd.append('selectl')
        hook_cmd.append('selectw -p')

    # unset the hook immediately after executing
    hook_cmd.append(
        'set-hook -u -t {target_session} {hook_name}'.format(
            target_session=session.id, hook_name=hook_name
        )
    )

    # join the hook's commands with semicolons
    hook_cmd = '{}'.format('; '.join(hook_cmd))

    # append the hook command
    cmd.append(hook_cmd)

    # create the hook
    session.cmd(*cmd)
python
{ "resource": "" }
q24336
is_pure_name
train
def is_pure_name(path):
    """
    Return True if path is a name and not a file path.

    Parameters
    ----------
    path : str
        Path (can be absolute, relative, etc.)

    Returns
    -------
    bool
        True if path is a name of config in config dir, not file path.
    """
    return (
        not os.path.isabs(path)
        and len(os.path.dirname(path)) == 0
        and not os.path.splitext(path)[1]
        and path != '.'
        and path != ''
    )
python
{ "resource": "" }
q24337
scan_config
train
def scan_config(config, config_dir=None):
    """
    Return the real config path or raise an exception.

    If config is a directory, scan for .tmuxp.{yaml,yml,json} in directory.
    If one or more found, it will warn and pick the first.

    If config is ".", "./" or None, it will scan current directory.

    If config has no path and only a filename, e.g. "myconfig.yaml", it will
    search the config dir.

    If config has no path and only a name with no extension, e.g.
    "myconfig", it will scan for file name with yaml, yml and json. If
    multiple exist, it will warn and pick the first.

    Parameters
    ----------
    config : str
        config file, valid examples:

        - a file name, myconfig.yaml
        - relative path, ../config.yaml or ../project
        - a period, .

    Raises
    ------
    :class:`click.exceptions.FileError`
    """
    if not config_dir:
        config_dir = get_config_dir()
    path = os.path
    exists, join, isabs = path.exists, path.join, path.isabs
    dirname, normpath, splitext = path.dirname, path.normpath, path.splitext
    cwd = os.getcwd()
    is_name = False
    file_error = None

    config = os.path.expanduser(config)
    # if purename, resolve to config dir
    if is_pure_name(config):
        is_name = True
    elif (
        not isabs(config)
        or len(dirname(config)) > 1
        or config == '.'
        or config == ""
        or config == "./"
    ):  # if relative, fill in full path
        config = normpath(join(cwd, config))

    # no extension, scan
    if not splitext(config)[1]:
        if is_name:
            candidates = [
                x
                for x in [
                    '%s%s' % (join(config_dir, config), ext)
                    for ext in ['.yaml', '.yml', '.json']
                ]
                if exists(x)
            ]
            if not len(candidates):
                file_error = (
                    'config not found in config dir (yaml/yml/json) %s '
                    'for name' % (config_dir)
                )
        else:
            candidates = [
                x
                for x in [
                    join(config, ext)
                    for ext in ['.tmuxp.yaml', '.tmuxp.yml', '.tmuxp.json']
                ]
                if exists(x)
            ]

            if len(candidates) > 1:
                click.secho(
                    'Multiple .tmuxp.{yml,yaml,json} configs in %s'
                    % dirname(config),
                    fg="red",
                )
                click.echo(
                    click.wrap_text(
                        'This is undefined behavior, use only one. '
                        'Use file names e.g. myproject.json, coolproject.yaml. '
                        'You can load them by filename.'
                    )
                )
            elif not len(candidates):
                file_error = 'No tmuxp files found in directory'
        if len(candidates):
            config = candidates[0]
    elif not exists(config):
        file_error = 'file not found'

    if file_error:
        raise FileError(file_error, config)

    return config
python
{ "resource": "" }
q24338
load_workspace
train
def load_workspace(
    config_file,
    socket_name=None,
    socket_path=None,
    colors=None,
    detached=False,
    answer_yes=False,
):
    """
    Load a tmux "workspace" session via tmuxp file.

    Parameters
    ----------
    config_file : str
        absolute path to config file
    socket_name : str, optional
        ``tmux -L <socket-name>``
    socket_path : str, optional
        ``tmux -S <socket-path>``
    colors : str, optional
        '-2' Force tmux to support 256 colors
    detached : bool
        Force detached state. default False.
    answer_yes : bool
        Assume yes when given prompt. default False.

    Notes
    -----

    tmuxp will check and load a configuration file. The file will use kaptan
    to load a JSON/YAML into a :py:obj:`dict`. Then :func:`config.expand` and
    :func:`config.trickle` will be used to expand any shorthands, template
    variables, or file paths relative to where the config/script is executed
    from.

    :func:`config.expand` accepts the directory of the config file, so the
    user's configuration can resolve absolute paths relative to where the
    config file is. In other words, if a config file at */var/moo/hi.yaml*
    has *./* in its configs, we want to be sure any file path with *./* is
    relative to */var/moo*, not the user's PWD.

    A :class:`libtmux.Server` object is created. No tmux server is started
    yet, just the object.

    The prepared configuration and the server object is passed into an
    instance of :class:`~tmuxp.workspacebuilder.WorkspaceBuilder`.

    A sanity check against :meth:`libtmux.common.which` is ran. It will
    raise an exception if tmux isn't found.

    If a tmux session under the same name as ``session_name`` in the tmuxp
    configuration exists, tmuxp offers to attach the session. Currently,
    tmuxp does not allow appending a workspace / incremental building on top
    of a current session (pull requests are welcome!).

    :meth:`~tmuxp.workspacebuilder.WorkspaceBuilder.build` will build the
    session in the background via tmux's detached state (``-d``).

    After the session (workspace) is built, unless the user decided to load
    the session in the background via ``tmuxp -d`` (which is in the spirit
    of tmux's ``-d``), we need to prompt the user to attach the session.

    If the user is already inside a tmux client, which we detect via the
    ``TMUX`` environment variable being present, we will prompt the user to
    switch their current client to it.

    If they're outside of a tmux client - in a plain-old PTY - we will
    automatically ``attach``.

    If an exception is raised during the building of the workspace, tmuxp
    will prompt to cleanup (``$ tmux kill-session``) the session on the
    user's behalf. An exception raised during this process means it's not
    easy to predict how broken the session is.

    .. versionchanged:: tmux 2.6+

        In tmux 2.6, the way layouts and proportions work when interfacing
        with tmux in a detached state (outside of a client) changed. Since
        tmuxp builds workspaces in a detached state, the WorkspaceBuilder
        isn't able to rely on functionality requiring awareness of session
        geometry, e.g. ``set-layout``.

        Thankfully, tmux is able to defer commands to run after the user
        performs certain actions, such as loading a client via
        ``attach-session`` or ``switch-client``.

        Upon client switch, ``client-session-changed`` is triggered [1]_.

    References
    ----------
    .. [1] cmd-switch-client.c hook. GitHub repo for tmux.
       https://github.com/tmux/tmux/blob/2.6/cmd-switch-client.c#L132.
       Accessed April 8th, 2018.
    """
    # get the canonical path, eliminating any symlinks
    config_file = os.path.realpath(config_file)

    # kaptan allows us to open a yaml or json file as a dict
    sconfig = kaptan.Kaptan()
    sconfig = sconfig.import_config(config_file).get()
    # shapes configurations relative to config / profile file location
    sconfig = config.expand(sconfig, os.path.dirname(config_file))
    # propagate config inheritance (e.g. session -> window, window -> pane)
    sconfig = config.trickle(sconfig)

    t = Server(  # create tmux server object
        socket_name=socket_name, socket_path=socket_path, colors=colors
    )

    which('tmux')  # raise exception if tmux not found

    try:  # load WorkspaceBuilder object for tmuxp config / tmux server
        builder = WorkspaceBuilder(sconf=sconfig, server=t)
    except exc.EmptyConfigException:
        click.echo('%s is empty or parsed no config data' % config_file,
                   err=True)
        return

    session_name = sconfig['session_name']

    # if the session already exists, prompt the user to attach. tmuxp doesn't
    # support incremental session building or appending (yet, PR's welcome!)
    if builder.session_exists(session_name):
        if not detached and (
            answer_yes
            or click.confirm(
                '%s is already running. Attach?'
                % click.style(session_name, fg='green'),
                default=True,
            )
        ):
            _reattach(builder.session)
        return

    try:
        click.echo(
            click.style('[Loading] ', fg='green')
            + click.style(config_file, fg='blue', bold=True)
        )

        builder.build()  # load tmux session via workspace builder

        if 'TMUX' in os.environ:  # tmuxp ran from inside tmux
            if not detached and (
                answer_yes
                or click.confirm('Already inside TMUX, switch to session?')
            ):
                # unset TMUX, save it, e.g. '/tmp/tmux-1000/default,30668,0'
                tmux_env = os.environ.pop('TMUX')

                if has_gte_version('2.6'):
                    set_layout_hook(builder.session, 'client-session-changed')

                builder.session.switch_client()  # switch client to new session

                os.environ['TMUX'] = tmux_env  # set TMUX back again
                return builder.session
            else:  # session created in the background, from within tmux
                if has_gte_version('2.6'):  # prepare for both cases
                    set_layout_hook(builder.session, 'client-attached')
                    set_layout_hook(builder.session, 'client-session-changed')

                sys.exit('Session created in detached state.')
        else:  # tmuxp ran from outside tmux
            if has_gte_version('2.6'):
                # if attaching for first time
                set_layout_hook(builder.session, 'client-attached')

                # for cases where user switches client for first time
                set_layout_hook(builder.session, 'client-session-changed')

            if not detached:
                builder.session.attach_session()
    except exc.TmuxpException as e:
        import traceback

        click.echo(traceback.format_exc(), err=True)
        click.echo(e, err=True)

        choice = click.prompt(
            'Error loading workspace. (k)ill, (a)ttach, (d)etach?',
            value_proc=_validate_choices(['k', 'a', 'd']),
            default='k',
        )

        if choice == 'k':
            builder.session.kill_session()
            click.echo('Session killed.')
        elif choice == 'a':
            if 'TMUX' in os.environ:
                builder.session.switch_client()
            else:
                builder.session.attach_session()
        else:
            sys.exit()

    return builder.session
python
{ "resource": "" }
q24339
cli
train
def cli(log_level):
    """Manage tmux sessions.

    Pass the "--help" argument to any command to see detailed help.
    See detailed documentation and examples at:
    http://tmuxp.readthedocs.io/en/latest/"""
    try:
        has_minimum_version()
    except TmuxCommandNotFound:
        click.echo('tmux not found. tmuxp requires you install tmux first.')
        sys.exit()
    except exc.TmuxpException as e:
        click.echo(e, err=True)
        sys.exit()
    setup_logger(level=log_level.upper())
python
{ "resource": "" }
q24340
command_freeze
train
def command_freeze(session_name, socket_name, socket_path):
    """Snapshot a session into a config.

    If SESSION_NAME is provided, snapshot that session. Otherwise, use the
    current session."""

    t = Server(socket_name=socket_name, socket_path=socket_path)

    try:
        session = t.find_where({'session_name': session_name})

        if not session:
            raise exc.TmuxpException('Session not found.')
    except exc.TmuxpException as e:
        print(e)
        return

    sconf = freeze(session)
    configparser = kaptan.Kaptan()
    newconfig = config.inline(sconf)
    configparser.import_config(newconfig)
    config_format = click.prompt(
        'Convert to', value_proc=_validate_choices(['yaml', 'json']),
        default='yaml'
    )

    if config_format == 'yaml':
        newconfig = configparser.export(
            'yaml', indent=2, default_flow_style=False, safe=True
        )
    elif config_format == 'json':
        newconfig = configparser.export('json', indent=2)
    else:
        sys.exit('Unknown config format.')

    print(newconfig)
    print(
        '---------------------------------------------------------------'
        '\n'
        'Freeze does its best to snapshot live tmux sessions.\n'
    )
    if click.confirm(
        'The new config *WILL* require adjusting afterwards. Save config?'
    ):
        dest = None
        while not dest:
            save_to = os.path.abspath(
                os.path.join(
                    get_config_dir(),
                    '%s.%s' % (sconf.get('session_name'), config_format),
                )
            )
            dest_prompt = click.prompt(
                'Save to: %s' % save_to,
                value_proc=get_abs_path,
                default=save_to,
                confirmation_prompt=True,
            )
            if os.path.exists(dest_prompt):
                print('%s exists. Pick a new filename.' % dest_prompt)
                continue

            dest = dest_prompt

        dest = os.path.abspath(os.path.relpath(os.path.expanduser(dest)))
        if click.confirm('Save to %s?' % dest):
            destdir = os.path.dirname(dest)
            if not os.path.isdir(destdir):
                os.makedirs(destdir)
            buf = open(dest, 'w')
            buf.write(newconfig)
            buf.close()

            print('Saved to %s.' % dest)
    else:
        print(
            'tmuxp has examples in JSON and YAML format at '
            '<http://tmuxp.readthedocs.io/en/latest/examples.html>\n'
            'View tmuxp docs at <http://tmuxp.readthedocs.io/>.'
        )
        sys.exit()
python
{ "resource": "" }
q24341
command_load
train
def command_load(ctx, config, socket_name, socket_path, answer_yes,
                 detached, colors):
    """Load a tmux workspace from each CONFIG.

    CONFIG is a specifier for a configuration file.

    If CONFIG is a path to a directory, tmuxp will search it for
    ".tmuxp.{yaml,yml,json}".

    If CONFIG has no directory component and only a filename, e.g.
    "myconfig.yaml", tmuxp will search the user's config directory for that
    file.

    If CONFIG has no directory component, and only a name with no extension,
    e.g. "myconfig", tmuxp will search the user's config directory for any
    file with the extension ".yaml", ".yml", or ".json" that matches that
    name.

    If multiple configuration files that match a given CONFIG are found,
    tmuxp will warn and pick the first one found.

    If multiple CONFIGs are provided, workspaces will be created for all of
    them. The last one provided will be attached. The others will be created
    in detached mode.
    """
    util.oh_my_zsh_auto_title()

    tmux_options = {
        'socket_name': socket_name,
        'socket_path': socket_path,
        'answer_yes': answer_yes,
        'colors': colors,
        'detached': detached,
    }

    if not config:
        click.echo("Enter at least one CONFIG")
        click.echo(ctx.get_help(), color=ctx.color)
        ctx.exit()

    if isinstance(config, string_types):
        load_workspace(config, **tmux_options)

    elif isinstance(config, tuple):
        config = list(config)
        # Load each configuration but the last to the background
        for cfg in config[:-1]:
            opt = tmux_options.copy()
            opt.update({'detached': True})
            load_workspace(cfg, **opt)

        # todo: obey the -d in the cli args only if user specifies
        load_workspace(config[-1], **tmux_options)
python
{ "resource": "" }
q24342
command_convert
train
def command_convert(config):
    """Convert a tmuxp config between JSON and YAML."""

    _, ext = os.path.splitext(config)
    if 'json' in ext:
        if click.confirm('convert <%s> to yaml?' % config):
            configparser = kaptan.Kaptan()
            configparser.import_config(config)
            newfile = config.replace(ext, '.yaml')
            newconfig = configparser.export(
                'yaml', indent=2, default_flow_style=False)
            if click.confirm('Save config to %s?' % newfile):
                buf = open(newfile, 'w')
                buf.write(newconfig)
                buf.close()
                print('New config saved to %s' % newfile)
    elif 'yaml' in ext:
        if click.confirm('convert <%s> to json?' % config):
            configparser = kaptan.Kaptan()
            configparser.import_config(config)
            newfile = config.replace(ext, '.json')
            newconfig = configparser.export('json', indent=2)
            print(newconfig)
            if click.confirm('Save config to <%s>?' % newfile):
                buf = open(newfile, 'w')
                buf.write(newconfig)
                buf.close()
                print('New config saved to <%s>.' % newfile)
python
{ "resource": "" }
q24343
WorkspaceBuilder.build
train
def build(self, session=None):
    """
    Build tmux workspace in session.

    Optionally accepts ``session`` to build with only session object.

    Without ``session``, it will use :class:`libtmux.Server` at
    ``self.server`` passed in on initialization to create a new Session
    object.

    Parameters
    ----------
    session : :class:`libtmux.Session`
        session to build workspace in
    """
    if not session:
        if not self.server:
            raise exc.TmuxpException(
                'WorkspaceBuilder.build requires server to be passed '
                + 'on initialization, or pass in session object to here.'
            )

        if self.server.has_session(self.sconf['session_name']):
            self.session = self.server.find_where(
                {'session_name': self.sconf['session_name']}
            )
            raise TmuxSessionExists(
                'Session name %s is already running.'
                % self.sconf['session_name']
            )
        else:
            session = self.server.new_session(
                session_name=self.sconf['session_name']
            )

        assert self.sconf['session_name'] == session.name
        assert len(self.sconf['session_name']) > 0

    self.session = session
    self.server = session.server

    self.server._list_sessions()
    assert self.server.has_session(session.name)
    assert session.id

    assert isinstance(session, Session)

    focus = None

    if 'before_script' in self.sconf:
        try:
            cwd = None

            # we want to run the before_script file cwd'd from the
            # session start directory, if it exists.
            if 'start_directory' in self.sconf:
                cwd = self.sconf['start_directory']
            run_before_script(self.sconf['before_script'], cwd=cwd)
        except Exception as e:
            self.session.kill_session()
            raise e

    if 'options' in self.sconf:
        for option, value in self.sconf['options'].items():
            self.session.set_option(option, value)
    if 'global_options' in self.sconf:
        for option, value in self.sconf['global_options'].items():
            self.session.set_option(option, value, _global=True)
    if 'environment' in self.sconf:
        for option, value in self.sconf['environment'].items():
            self.session.set_environment(option, value)

    for w, wconf in self.iter_create_windows(session):
        assert isinstance(w, Window)

        focus_pane = None
        for p, pconf in self.iter_create_panes(w, wconf):
            assert isinstance(p, Pane)

            if 'layout' in wconf:
                w.select_layout(wconf['layout'])

            if 'focus' in pconf and pconf['focus']:
                focus_pane = p

        if 'focus' in wconf and wconf['focus']:
            focus = w

        self.config_after_window(w, wconf)

        if focus_pane:
            focus_pane.select_pane()

    if focus:
        focus.select_window()
python
{ "resource": "" }
q24344
WorkspaceBuilder.config_after_window
train
def config_after_window(self, w, wconf):
    """Actions to apply to window after window and pane finished.

    When building a tmux session, sometimes it's easier to postpone things
    like setting options until after things are already structurally
    prepared.

    Parameters
    ----------
    w : :class:`libtmux.Window`
        window to create panes for
    wconf : dict
        config section for window
    """
    if 'options_after' in wconf and isinstance(wconf['options_after'], dict):
        for key, val in wconf['options_after'].items():
            w.set_window_option(key, val)
python
{ "resource": "" }
q24345
validate_schema
train
def validate_schema(sconf):
    """
    Return True if config schema is correct.

    Parameters
    ----------
    sconf : dict
        session configuration

    Returns
    -------
    bool
    """
    # verify session_name
    if 'session_name' not in sconf:
        raise exc.ConfigError('config requires "session_name"')

    if 'windows' not in sconf:
        raise exc.ConfigError('config requires list of "windows"')

    for window in sconf['windows']:
        if 'window_name' not in window:
            raise exc.ConfigError('config window is missing "window_name"')

        if 'panes' not in window:
            raise exc.ConfigError(
                'config window %s requires list of panes'
                % window['window_name']
            )

    return True
python
{ "resource": "" }
q24346
is_config_file
train
def is_config_file(filename, extensions=['.yml', '.yaml', '.json']):
    """
    Return True if file has a valid config file type.

    Parameters
    ----------
    filename : str
        filename to check (e.g. ``mysession.json``).
    extensions : str or list
        filetypes to check (e.g. ``['.yaml', '.json']``).

    Returns
    -------
    bool
    """
    extensions = [extensions] if isinstance(extensions, string_types) \
        else extensions
    return any(filename.endswith(e) for e in extensions)
python
{ "resource": "" }
q24347
in_cwd
train
def in_cwd():
    """
    Return list of configs in current working directory.

    If filename is ``.tmuxp.py``, ``.tmuxp.json``, ``.tmuxp.yaml``.

    Returns
    -------
    list
        configs in current working directory
    """
    configs = []

    for filename in os.listdir(os.getcwd()):
        if filename.startswith('.tmuxp') and is_config_file(filename):
            configs.append(filename)

    return configs
python
{ "resource": "" }
q24348
import_tmuxinator
train
def import_tmuxinator(sconf):
    """Return tmuxp config from a `tmuxinator`_ yaml config.

    .. _tmuxinator: https://github.com/aziz/tmuxinator

    Parameters
    ----------
    sconf : dict
        python dict for session configuration.

    Returns
    -------
    dict
    """
    tmuxp_config = {}

    if 'project_name' in sconf:
        tmuxp_config['session_name'] = sconf.pop('project_name')
    elif 'name' in sconf:
        tmuxp_config['session_name'] = sconf.pop('name')
    else:
        tmuxp_config['session_name'] = None

    if 'project_root' in sconf:
        tmuxp_config['start_directory'] = sconf.pop('project_root')
    elif 'root' in sconf:
        tmuxp_config['start_directory'] = sconf.pop('root')

    if 'cli_args' in sconf:
        tmuxp_config['config'] = sconf['cli_args']

        if '-f' in tmuxp_config['config']:
            tmuxp_config['config'] = \
                tmuxp_config['config'].replace('-f', '').strip()
    elif 'tmux_options' in sconf:
        tmuxp_config['config'] = sconf['tmux_options']

        if '-f' in tmuxp_config['config']:
            tmuxp_config['config'] = \
                tmuxp_config['config'].replace('-f', '').strip()

    if 'socket_name' in sconf:
        tmuxp_config['socket_name'] = sconf['socket_name']

    tmuxp_config['windows'] = []

    if 'tabs' in sconf:
        sconf['windows'] = sconf.pop('tabs')

    if 'pre' in sconf and 'pre_window' in sconf:
        tmuxp_config['shell_command'] = sconf['pre']

        if isinstance(sconf['pre'], string_types):
            tmuxp_config['shell_command_before'] = [sconf['pre_window']]
        else:
            tmuxp_config['shell_command_before'] = sconf['pre_window']
    elif 'pre' in sconf:
        if isinstance(sconf['pre'], string_types):
            tmuxp_config['shell_command_before'] = [sconf['pre']]
        else:
            tmuxp_config['shell_command_before'] = sconf['pre']

    if 'rbenv' in sconf:
        if 'shell_command_before' not in tmuxp_config:
            tmuxp_config['shell_command_before'] = []
        tmuxp_config['shell_command_before'].append(
            'rbenv shell %s' % sconf['rbenv'])

    for w in sconf['windows']:
        for k, v in w.items():
            windowdict = {'window_name': k}

            if isinstance(v, string_types) or v is None:
                windowdict['panes'] = [v]
                tmuxp_config['windows'].append(windowdict)
                continue
            elif isinstance(v, list):
                windowdict['panes'] = v
                tmuxp_config['windows'].append(windowdict)
                continue

            if 'pre' in v:
                windowdict['shell_command_before'] = v['pre']
            if 'panes' in v:
                windowdict['panes'] = v['panes']
            if 'root' in v:
                windowdict['start_directory'] = v['root']

            if 'layout' in v:
                windowdict['layout'] = v['layout']
            tmuxp_config['windows'].append(windowdict)
    return tmuxp_config
python
{ "resource": "" }
q24349
import_teamocil
train
def import_teamocil(sconf):
    """Return tmuxp config from a `teamocil`_ yaml config.

    .. _teamocil: https://github.com/remiprev/teamocil

    Parameters
    ----------
    sconf : dict
        python dict for session configuration

    Notes
    -----

    Todos:

    - change 'root' to a cd or start_directory
    - width in pane -> main-pane-width
    - with_env_var
    - clear
    - cmd_separator
    """
    tmuxp_config = {}

    if 'session' in sconf:
        sconf = sconf['session']

    if 'name' in sconf:
        tmuxp_config['session_name'] = sconf['name']
    else:
        tmuxp_config['session_name'] = None

    if 'root' in sconf:
        tmuxp_config['start_directory'] = sconf.pop('root')

    tmuxp_config['windows'] = []

    for w in sconf['windows']:
        windowdict = {'window_name': w['name']}

        if 'clear' in w:
            windowdict['clear'] = w['clear']

        if 'filters' in w:
            if 'before' in w['filters']:
                for b in w['filters']['before']:
                    windowdict['shell_command_before'] = w['filters']['before']
            if 'after' in w['filters']:
                for b in w['filters']['after']:
                    windowdict['shell_command_after'] = w['filters']['after']

        if 'root' in w:
            windowdict['start_directory'] = w.pop('root')

        if 'splits' in w:
            w['panes'] = w.pop('splits')

        if 'panes' in w:
            for p in w['panes']:
                if 'cmd' in p:
                    p['shell_command'] = p.pop('cmd')
                if 'width' in p:
                    # todo support for height/width
                    p.pop('width')
            windowdict['panes'] = w['panes']

        if 'layout' in w:
            windowdict['layout'] = w['layout']
        tmuxp_config['windows'].append(windowdict)

    return tmuxp_config
python
{ "resource": "" }
q24350
oh_my_zsh_auto_title
train
def oh_my_zsh_auto_title():
    """Give warning and offer to fix ``DISABLE_AUTO_TITLE``.

    see: https://github.com/robbyrussell/oh-my-zsh/pull/257
    """
    if 'SHELL' in os.environ and 'zsh' in os.environ.get('SHELL'):
        if os.path.exists(os.path.expanduser('~/.oh-my-zsh')):
            # oh-my-zsh exists
            if (
                'DISABLE_AUTO_TITLE' not in os.environ
                or os.environ.get('DISABLE_AUTO_TITLE') == "false"
            ):
                print(
                    'Please set:\n\n'
                    '\texport DISABLE_AUTO_TITLE=\'true\'\n\n'
                    'in ~/.zshrc or where your zsh profile is stored.\n'
                    'Remember the "export" at the beginning!\n\n'
                    'Then create a new shell or type:\n\n'
                    '\t$ source ~/.zshrc'
                )
python
{ "resource": "" }
q24351
setup
train
def setup():
    """
    Install handlers for Mitogen loggers to redirect them into the Ansible
    display framework. Ansible installs its own logging framework handlers
    when C.DEFAULT_LOG_PATH is set, therefore disable propagation for our
    handlers.
    """
    l_mitogen = logging.getLogger('mitogen')
    l_mitogen_io = logging.getLogger('mitogen.io')
    l_ansible_mitogen = logging.getLogger('ansible_mitogen')

    for logger in l_mitogen, l_mitogen_io, l_ansible_mitogen:
        logger.handlers = [Handler(display.vvv)]
        logger.propagate = False

    if display.verbosity > 2:
        l_ansible_mitogen.setLevel(logging.DEBUG)
        l_mitogen.setLevel(logging.DEBUG)
    else:
        # Mitogen copies the active log level into new children, allowing
        # them to filter tiny messages before they hit the network, and
        # therefore before they wake the IO loop. Explicitly setting INFO
        # saves ~4% running against just the local machine.
        l_mitogen.setLevel(logging.ERROR)
        l_ansible_mitogen.setLevel(logging.ERROR)

    if display.verbosity > 3:
        l_mitogen_io.setLevel(logging.DEBUG)
python
{ "resource": "" }
q24352
Connection.get_default_env
train
def get_default_env(self):
    """
    Vanilla Ansible local commands execute with an environment inherited
    from WorkerProcess; we must emulate that.
    """
    return dict_diff(
        old=ansible_mitogen.process.MuxProcess.original_env,
        new=os.environ,
    )
python
{ "resource": "" }
q24353
_stdlib_paths
train
def _stdlib_paths():
    """
    Return a set of paths from which Python imports the standard library.
    """
    attr_candidates = [
        'prefix',
        'real_prefix',  # virtualenv: only set inside a virtual environment.
        'base_prefix',  # venv: always set, equal to prefix if outside.
    ]
    prefixes = (getattr(sys, a) for a in attr_candidates if hasattr(sys, a))
    version = 'python%s.%s' % sys.version_info[0:2]
    return set(os.path.abspath(os.path.join(p, 'lib', version))
               for p in prefixes)
python
{ "resource": "" }
q24354
get_child_modules
train
def get_child_modules(path):
    """
    Return the suffixes of submodules directly nested beneath the package
    directory at `path`.

    :param str path:
        Path to the module's source code on disk, or some PEP-302-recognized
        equivalent. Usually this is the module's ``__file__`` attribute, but
        is specified explicitly to avoid loading the module.

    :return:
        List of submodule name suffixes.
    """
    it = pkgutil.iter_modules([os.path.dirname(path)])
    return [to_text(name) for _, name, _ in it]
python
{ "resource": "" }
q24355
scan_code_imports
train
def scan_code_imports(co):
    """
    Given a code object `co`, scan its bytecode yielding any ``IMPORT_NAME``
    and associated prior ``LOAD_CONST`` instructions representing an
    `Import` statement or `ImportFrom` statement.

    :return:
        Generator producing `(level, modname, namelist)` tuples, where:

        * `level`: -1 for normal import, 0 for absolute import, and >0 for
          relative import.
        * `modname`: Name of module to import, or from where `namelist`
          names are imported.
        * `namelist`: for `ImportFrom`, the list of names to be imported
          from `modname`.
    """
    opit = iter_opcodes(co)
    opit, opit2, opit3 = itertools.tee(opit, 3)

    try:
        next(opit2)
        next(opit3)
        next(opit3)
    except StopIteration:
        return

    if sys.version_info >= (2, 5):
        for oparg1, oparg2, (op3, arg3) in izip(opit, opit2, opit3):
            if op3 == IMPORT_NAME:
                op2, arg2 = oparg2
                op1, arg1 = oparg1
                if op1 == op2 == LOAD_CONST:
                    yield (co.co_consts[arg1],
                           co.co_names[arg3],
                           co.co_consts[arg2] or ())
    else:
        # Python 2.4 did not yet have 'level', so stack format differs.
        for oparg1, (op2, arg2) in izip(opit, opit2):
            if op2 == IMPORT_NAME:
                op1, arg1 = oparg1
                if op1 == LOAD_CONST:
                    yield (-1, co.co_names[arg2], co.co_consts[arg1] or ())
python
{ "resource": "" }
q24356
ThreadWatcher._reset
train
def _reset(cls):
    """If we have forked since the watch dictionaries were initialized,
    everything they contain is garbage, so clear them."""
    if os.getpid() != cls._cls_pid:
        cls._cls_pid = os.getpid()
        cls._cls_instances_by_target.clear()
        cls._cls_thread_by_target.clear()
python
{ "resource": "" }
q24357
ModuleFinder.add_source_override
train
def add_source_override(self, fullname, path, source, is_pkg):
    """
    Explicitly install a source cache entry, preventing usual lookup
    methods from being used.

    Beware the value of `path` is critical when `is_pkg` is specified,
    since it directs where submodules are searched for.

    :param str fullname:
        Name of the module to override.
    :param str path:
        Module's path as it will appear in the cache.
    :param bytes source:
        Module source code as a bytestring.
    :param bool is_pkg:
        :data:`True` if the module is a package.
    """
    self._found_cache[fullname] = (path, source, is_pkg)
python
{ "resource": "" }
q24358
ModuleFinder.get_module_source
train
def get_module_source(self, fullname):
    """
    Given the name of a loaded module `fullname`, attempt to find its
    source code.

    :returns:
        Tuple of `(module path, source text, is package?)`, or :data:`None`
        if the source cannot be found.
    """
    tup = self._found_cache.get(fullname)
    if tup:
        return tup

    for method in self.get_module_methods:
        tup = method(self, fullname)
        if tup:
            # LOG.debug('%r returned %r', method, tup)
            break
    else:
        tup = None, None, None
        LOG.debug('get_module_source(%r): cannot find source', fullname)

    self._found_cache[fullname] = tup
    return tup
python
{ "resource": "" }
q24359
ModuleFinder.resolve_relpath
train
def resolve_relpath(self, fullname, level):
    """
    Given an ImportFrom AST node, guess the prefix that should be tacked on
    to an alias name to produce a canonical name. `fullname` is the name of
    the module in which the ImportFrom appears.
    """
    mod = sys.modules.get(fullname, None)
    if hasattr(mod, '__path__'):
        fullname += '.__init__'

    if level == 0 or not fullname:
        return ''

    bits = fullname.split('.')
    if len(bits) <= level:
        # This would be an ImportError in real code.
        return ''

    return '.'.join(bits[:-level]) + '.'
python
{ "resource": "" }
q24360
ModuleFinder.find_related_imports
train
def find_related_imports(self, fullname):
    """
    Return a list of non-stdlib modules that are directly imported by
    `fullname`, plus their parents.

    The list is determined by retrieving the source code of `fullname`,
    compiling it, and examining all IMPORT_NAME ops.

    :param fullname: Fully qualified name of an _already imported_ module
        for which source code can be retrieved
    :type fullname: str
    """
    related = self._related_cache.get(fullname)
    if related is not None:
        return related

    modpath, src, _ = self.get_module_source(fullname)
    if src is None:
        return []

    maybe_names = list(self.generate_parent_names(fullname))

    co = compile(src, modpath, 'exec')
    for level, modname, namelist in scan_code_imports(co):
        if level == -1:
            modnames = [modname, '%s.%s' % (fullname, modname)]
        else:
            modnames = [
                '%s%s' % (self.resolve_relpath(fullname, level), modname)
            ]

        maybe_names.extend(modnames)
        maybe_names.extend(
            '%s.%s' % (mname, name)
            for mname in modnames
            for name in namelist
        )

    return self._related_cache.setdefault(fullname, sorted(
        set(
            mitogen.core.to_text(name)
            for name in maybe_names
            if sys.modules.get(name) is not None
            and not is_stdlib_name(name)
            and u'six.moves' not in name  # TODO: crap
        )
    ))
python
{ "resource": "" }
q24361
ModuleFinder.find_related
train
def find_related(self, fullname):
    """
    Return a list of non-stdlib modules that are imported directly or
    indirectly by `fullname`, plus their parents.

    This method is like :py:meth:`find_related_imports`, but also
    recursively searches any modules which are imported by `fullname`.

    :param fullname: Fully qualified name of an _already imported_ module
        for which source code can be retrieved
    :type fullname: str
    """
    stack = [fullname]
    found = set()

    while stack:
        name = stack.pop(0)
        names = self.find_related_imports(name)
        stack.extend(set(names).difference(set(found).union(stack)))
        found.update(names)

    found.discard(fullname)
    return sorted(found)
python
{ "resource": "" }
q24362
Router.get_stats
train
def get_stats(self):
    """
    Return performance data for the module responder.

    :returns:

        Dict containing keys:

        * `get_module_count`: Integer count of
          :data:`mitogen.core.GET_MODULE` messages received.
        * `get_module_secs`: Floating point total seconds spent servicing
          :data:`mitogen.core.GET_MODULE` requests.
        * `good_load_module_count`: Integer count of successful
          :data:`mitogen.core.LOAD_MODULE` messages sent.
        * `good_load_module_size`: Integer total bytes sent in
          :data:`mitogen.core.LOAD_MODULE` message payloads.
        * `bad_load_module_count`: Integer count of negative
          :data:`mitogen.core.LOAD_MODULE` messages sent.
        * `minify_secs`: CPU seconds spent minifying modules marked
          minify-safe.
    """
    return {
        'get_module_count': self.responder.get_module_count,
        'get_module_secs': self.responder.get_module_secs,
        'good_load_module_count': self.responder.good_load_module_count,
        'good_load_module_size': self.responder.good_load_module_size,
        'bad_load_module_count': self.responder.bad_load_module_count,
        'minify_secs': self.responder.minify_secs,
    }
python
{ "resource": "" }
q24363
Corker._cork_one
train
def _cork_one(self, s, obj): """ Construct a socketpair, saving one side of it, and passing the other to `obj` to be written to by one of its threads. """ rsock, wsock = mitogen.parent.create_socketpair(size=4096) mitogen.core.set_cloexec(rsock.fileno()) mitogen.core.set_cloexec(wsock.fileno()) mitogen.core.set_block(wsock) # gevent self._rsocks.append(rsock) obj.defer(self._do_cork, s, wsock)
python
{ "resource": "" }
q24364
Corker.cork
train
def cork(self): """ Arrange for any associated brokers and pools to be paused with no locks held. This will not return until each thread acknowledges it has ceased execution. """ s = mitogen.core.b('CORK') * ((128 // 4) * 1024) self._rsocks = [] # Pools must be paused first, as existing work may require the # participation of a broker in order to complete. for pool in self.pools: if not pool.closed: for x in range(pool.size): self._cork_one(s, pool) for broker in self.brokers: if broker._alive: self._cork_one(s, broker) # Pause until we can detect every thread has entered write(). for rsock in self._rsocks: self._verify_one(rsock)
python
{ "resource": "" }
q24365
get_fullname
train
def get_fullname(module): """ Reconstruct a Module's canonical path by recursing through its parents. """ bits = [str(module.name)] while module.parent: bits.append(str(module.parent.name)) module = module.parent return '.'.join(reversed(bits))
python
{ "resource": "" }
q24366
get_code
train
def get_code(module): """ Compile and return a Module's code object. """ fp = open(module.path) try: return compile(fp.read(), str(module.name), 'exec') finally: fp.close()
python
{ "resource": "" }
q24367
find
train
def find(name, path=(), parent=None): """ Return a Module instance describing the first matching module found on the search path. :param str name: Module name. :param list path: List of directory names to search for the module. :param Module parent: Optional module parent. """ assert isinstance(path, tuple) head, _, tail = name.partition('.') try: tup = imp.find_module(head, list(path)) except ImportError: return parent fp, modpath, (suffix, mode, kind) = tup if fp: fp.close() if parent and modpath == parent.path: # 'from timeout import timeout', where 'timeout' is a function but also # the name of the module being imported. return None if kind == imp.PKG_DIRECTORY: modpath = os.path.join(modpath, '__init__.py') module = Module(head, modpath, kind, parent) # TODO: this code is entirely wrong on Python 3.x, but works well enough # for Ansible. We need a new find_child() that only looks in the package # directory, never falling back to the parent search path. if tail and kind == imp.PKG_DIRECTORY: return find_relative(module, tail, path) return module
python
{ "resource": "" }
q24368
main
train
def main(router): """ Main program entry point. @mitogen.main() is just a helper to handle reliable setup/destruction of Broker, Router and the logging package. """ argv = sys.argv[1:] if not len(argv): print('mitop: Need a list of SSH hosts to connect to.') sys.exit(1) delay = 2.0 select = mitogen.select.Select(oneshot=False) hosts = [] # For each hostname on the command line, create a Host instance, a Mitogen # connection, a Receiver to accept messages from the host, and finally # start child_main() on the host to pump messages into the receiver. for hostname in argv: print('Starting on', hostname) host = Host() host.name = hostname if host.name == 'localhost': host.context = router.local() else: host.context = router.ssh(hostname=host.name) # A receiver wires up a handle (via Router.add_handler()) to an # internal thread-safe queue object, which can be drained through calls # to recv.get(). host.recv = mitogen.core.Receiver(router) host.recv.host = host # But we don't want to receive data from just one receiver, we want to # receive data from many. In this case we can use a Select(). It knows # how to efficiently sleep while waiting for the first message sent to # many receivers. select.add(host.recv) # The inverse of a Receiver is a Sender. Unlike receivers, senders are # serializable, so we can call the .to_sender() helper method to create # one equivalent to our host's receiver, and pass it directly to the # host as a function parameter. sender = host.recv.to_sender() # Finally invoke the function in the remote target. Since child_main() # is an infinite loop, using .call() would block the parent, since # child_main() never returns. Instead use .call_async(), which returns # another Receiver. We also want to wait for results from it -- # although child_main() never returns, if it crashes the exception will # be delivered instead. call_recv = host.context.call_async(child_main, sender, delay) call_recv.host = host # Adding call_recv to the select will cause mitogen.core.CallError to # be thrown by .get() if startup of any context fails, causing halt of # master_main(), and the exception to be printed. select.add(call_recv) hosts.append(host) # Painter just wraps up all the prehistoric ncurses code and keeps it out of # master_main(). painter = Painter(hosts) try: try: master_main(painter, router, select, delay) except KeyboardInterrupt: # Shut down gracefully when the user presses CTRL+C. pass finally: painter.close()
python
{ "resource": "" }
q24369
key_from_dict
train
def key_from_dict(**kwargs): """ Return a unique string representation of a dict as quickly as possible. Used to generate deduplication keys from a request. """ out = [] stack = [kwargs] while stack: obj = stack.pop() if isinstance(obj, dict): stack.extend(sorted(obj.items())) elif isinstance(obj, (list, tuple)): stack.extend(obj) else: out.append(str(obj)) return ''.join(out)
python
{ "resource": "" }
q24370
ContextService.put
train
def put(self, context): """ Release a reference to `context`, making it eligible for recycling once its reference count reaches zero. """ LOG.debug('%r.put(%r)', self, context) self._lock.acquire() try: if self._refs_by_context.get(context, 0) == 0: LOG.warning('%r.put(%r): refcount was 0. shutdown_all called?', self, context) return self._refs_by_context[context] -= 1 finally: self._lock.release()
python
{ "resource": "" }
q24371
ContextService._produce_response
train
def _produce_response(self, key, response): """ Reply to every waiting request matching a configuration key with a response dictionary, deleting the list of waiters when done. :param str key: Result of :meth:`key_from_dict` :param dict response: Response dictionary :returns: Number of waiters that were replied to. """ self._lock.acquire() try: latches = self._latches_by_key.pop(key) count = len(latches) for latch in latches: latch.put(response) finally: self._lock.release() return count
python
{ "resource": "" }
q24372
ContextService._shutdown_unlocked
train
def _shutdown_unlocked(self, context, lru=None, new_context=None): """ Arrange for `context` to be shut down, and optionally add `new_context` to the LRU list while holding the lock. """ LOG.info('%r._shutdown_unlocked(): shutting down %r', self, context) context.shutdown() via = self._via_by_context.get(context) if via: lru = self._lru_by_via.get(via) if lru: if context in lru: lru.remove(context) if new_context: lru.append(new_context) self._forget_context_unlocked(context)
python
{ "resource": "" }
q24373
ContextService.dump
train
def dump(self): """ For testing, return a list of dicts describing every currently connected context. """ return [ { 'context_name': context.name, 'via': getattr(self._via_by_context.get(context), 'name', None), 'refs': self._refs_by_context.get(context), } for context, key in sorted(self._key_by_context.items(), key=lambda c_k: c_k[0].context_id) ]
python
{ "resource": "" }
q24374
ContextService.shutdown_all
train
def shutdown_all(self): """ For testing use, arrange for all connections to be shut down. """ self._lock.acquire() try: for context in list(self._key_by_context): self._shutdown_unlocked(context) finally: self._lock.release()
python
{ "resource": "" }
q24375
ContextService._on_context_disconnect
train
def _on_context_disconnect(self, context): """ Respond to a Context disconnect event by deleting any record of the no-longer-reachable context. This method runs in the Broker thread and must not block. """ self._lock.acquire() try: LOG.info('%r: Forgetting %r due to stream disconnect', self, context) self._forget_context_unlocked(context) finally: self._lock.release()
python
{ "resource": "" }
q24376
ContextService._connect
train
def _connect(self, key, spec, via=None): """ Actual connect implementation. Arranges for the Mitogen connection to be created and enqueues an asynchronous call to start the forked task parent in the remote context. :param key: Deduplication key representing the connection configuration. :param spec: Connection specification. :returns: Dict like:: { 'context': mitogen.core.Context or None, 'via': mitogen.core.Context or None, 'init_child_result': { 'fork_context': mitogen.core.Context, 'home_dir': str or None, }, 'msg': str or None } Where `context` is a reference to the newly constructed context, `init_child_result` is the result of executing :func:`ansible_mitogen.target.init_child` in that context, `msg` is an error message and the remaining fields are :data:`None`, or `msg` is :data:`None` and the remaining fields are set. """ try: method = getattr(self.router, spec['method']) except AttributeError: raise Error('unsupported method: %(transport)s' % spec) context = method(via=via, unidirectional=True, **spec['kwargs']) if via and spec.get('enable_lru'): self._update_lru(context, spec, via) # Forget the context when its disconnect event fires. mitogen.core.listen(context, 'disconnect', lambda: self._on_context_disconnect(context)) self._send_module_forwards(context) init_child_result = context.call( ansible_mitogen.target.init_child, log_level=LOG.getEffectiveLevel(), candidate_temp_dirs=self._get_candidate_temp_dirs(), ) if os.environ.get('MITOGEN_DUMP_THREAD_STACKS'): from mitogen import debug context.call(debug.dump_to_logger) self._key_by_context[context] = key self._refs_by_context[context] = 0 return { 'context': context, 'via': via, 'init_child_result': init_child_result, 'msg': None, }
python
{ "resource": "" }
q24377
ContextService.get
train
def get(self, msg, stack): """ Return a Context referring to an established connection with the given configuration, establishing new connections as necessary. :param list stack: Connection descriptions. Each element is a dict containing 'method' and 'kwargs' keys describing the Router method and arguments. Subsequent elements are proxied via the previous. :returns dict: * context: mitogen.parent.Context or None. * init_child_result: Result of :func:`init_child`. * msg: StreamError exception text or None. * method_name: string failing method name. """ via = None for spec in stack: try: result = self._wait_or_start(spec, via=via).get() if isinstance(result, tuple): # exc_info() reraise(*result) via = result['context'] except mitogen.core.ChannelError: return { 'context': None, 'init_child_result': None, 'method_name': spec['method'], 'msg': self.disconnect_msg, } except mitogen.core.StreamError as e: return { 'context': None, 'init_child_result': None, 'method_name': spec['method'], 'msg': str(e), } return result
python
{ "resource": "" }
q24378
Message._find_global
train
def _find_global(self, module, func): """Return the callable used to unpickle `module.func`, or raise `StreamError` if the pair is not whitelisted.""" if module == __name__: if func == '_unpickle_call_error' or func == 'CallError': return _unpickle_call_error elif func == '_unpickle_sender': return self._unpickle_sender elif func == '_unpickle_context': return self._unpickle_context elif func == 'Blob': return Blob elif func == 'Secret': return Secret elif func == 'Kwargs': return Kwargs elif module == '_codecs' and func == 'encode': return self._unpickle_bytes elif module == '__builtin__' and func == 'bytes': return BytesType raise StreamError('cannot unpickle %r/%r', module, func)
python
{ "resource": "" }
q24379
Message.dead
train
def dead(cls, reason=None, **kwargs): """ Syntax helper to construct a dead message. """ kwargs['data'], _ = UTF8_CODEC.encode(reason or u'') return cls(reply_to=IS_DEAD, **kwargs)
python
{ "resource": "" }
q24380
Sender.send
train
def send(self, data): """ Send `data` to the remote end. """ _vv and IOLOG.debug('%r.send(%r..)', self, repr(data)[:100]) self.context.send(Message.pickled(data, handle=self.dst_handle))
python
{ "resource": "" }
q24381
Receiver.get
train
def get(self, timeout=None, block=True, throw_dead=True): """ Sleep waiting for a message to arrive on this receiver. :param float timeout: If not :data:`None`, specifies a timeout in seconds. :raises mitogen.core.ChannelError: The remote end indicated the channel should be closed, communication with it was lost, or :meth:`close` was called in the local process. :raises mitogen.core.TimeoutError: Timeout was reached. :returns: :class:`Message` that was received. """ _vv and IOLOG.debug('%r.get(timeout=%r, block=%r)', self, timeout, block) try: msg = self._latch.get(timeout=timeout, block=block) except LatchError: raise ChannelError(self.closed_msg) if msg.is_dead and throw_dead: msg._throw_dead() return msg
python
{ "resource": "" }
q24382
Stream.on_transmit
train
def on_transmit(self, broker): """Transmit buffered messages.""" _vv and IOLOG.debug('%r.on_transmit()', self) if self._output_buf: buf = self._output_buf.popleft() written = self.transmit_side.write(buf) if not written: _v and LOG.debug('%r.on_transmit(): disconnection detected', self) self.on_disconnect(broker) return elif written != len(buf): self._output_buf.appendleft(BufferType(buf, written)) _vv and IOLOG.debug('%r.on_transmit() -> len %d', self, written) self._output_buf_len -= written if not self._output_buf: broker._stop_transmit(self)
python
{ "resource": "" }
q24383
Stream.send
train
def send(self, msg): """Send the message `msg` on this stream, and tell the broker we have output. May be called from any thread.""" self._router.broker.defer(self._send, msg)
python
{ "resource": "" }
q24384
Poller.stop_receive
train
def stop_receive(self, fd): """ Stop yielding readability events for `fd`. Redundant calls to :meth:`stop_receive` are silently ignored; this may change in future. """ self._rfds.pop(fd, None) self._update(fd)
python
{ "resource": "" }
q24385
Poller.stop_transmit
train
def stop_transmit(self, fd): """ Stop yielding writability events for `fd`. Redundant calls to :meth:`stop_transmit` are silently ignored; this may change in future. """ self._wfds.pop(fd, None) self._update(fd)
python
{ "resource": "" }
q24386
Poller.poll
train
def poll(self, timeout=None): """ Block the calling thread until one or more FDs are ready for IO. :param float timeout: If not :data:`None`, seconds to wait without an event before returning an empty iterable. :returns: Iterable of `data` elements associated with ready FDs. """ _vv and IOLOG.debug('%r.poll(%r)', self, timeout) self._generation += 1 return self._poll(timeout)
python
{ "resource": "" }
q24387
Latch._on_fork
train
def _on_fork(cls): """ Clean up any files belonging to the parent process after a fork. """ cls._cls_idle_socketpairs = [] while cls._cls_all_sockets: cls._cls_all_sockets.pop().close()
python
{ "resource": "" }
q24388
Latch._get_socketpair
train
def _get_socketpair(self): """ Return an unused socketpair, creating one if none exist. """ try: return self._cls_idle_socketpairs.pop() # pop() must be atomic except IndexError: rsock, wsock = socket.socketpair() set_cloexec(rsock.fileno()) set_cloexec(wsock.fileno()) self._cls_all_sockets.extend((rsock, wsock)) return rsock, wsock
python
{ "resource": "" }
q24389
Latch._make_cookie
train
def _make_cookie(self): """ Return a string encoding the ID of the process, instance and thread. This disambiguates legitimate wake-ups, accidental writes to the FD, and buggy internal FD sharing. """ return struct.pack(self.COOKIE_FMT, self.COOKIE_MAGIC, os.getpid(), id(self), thread.get_ident())
python
{ "resource": "" }
q24390
Latch.get
train
def get(self, timeout=None, block=True): """ Return the next enqueued object, or sleep waiting for one. :param float timeout: If not :data:`None`, specifies a timeout in seconds. :param bool block: If :data:`False`, immediately raise :class:`mitogen.core.TimeoutError` if the latch is empty. :raises mitogen.core.LatchError: :meth:`close` has been called, and the object is no longer valid. :raises mitogen.core.TimeoutError: Timeout was reached. :returns: The de-queued object. """ _vv and IOLOG.debug('%r.get(timeout=%r, block=%r)', self, timeout, block) self._lock.acquire() try: if self.closed: raise LatchError() i = len(self._sleeping) if len(self._queue) > i: _vv and IOLOG.debug('%r.get() -> %r', self, self._queue[i]) return self._queue.pop(i) if not block: raise TimeoutError() rsock, wsock = self._get_socketpair() cookie = self._make_cookie() self._sleeping.append((wsock, cookie)) finally: self._lock.release() poller = self.poller_class() poller.start_receive(rsock.fileno()) try: return self._get_sleep(poller, timeout, block, rsock, wsock, cookie) finally: poller.close()
python
{ "resource": "" }
q24391
Latch.put
train
def put(self, obj=None): """ Enqueue an object, waking the first thread waiting for a result, if one exists. :param obj: Object to enqueue. Defaults to :data:`None` as a convenience when using :class:`Latch` only for synchronization. :raises mitogen.core.LatchError: :meth:`close` has been called, and the object is no longer valid. """ _vv and IOLOG.debug('%r.put(%r)', self, obj) self._lock.acquire() try: if self.closed: raise LatchError() self._queue.append(obj) if self._waking < len(self._sleeping): wsock, cookie = self._sleeping[self._waking] self._waking += 1 _vv and IOLOG.debug('%r.put() -> waking wfd=%r', self, wsock.fileno()) self._wake(wsock, cookie) elif self.notify: self.notify(self) finally: self._lock.release()
python
{ "resource": "" }
q24392
Waker.keep_alive
train
def keep_alive(self): """ Prevent immediate Broker shutdown while deferred functions remain. """ self._lock.acquire() try: return len(self._deferred) finally: self._lock.release()
python
{ "resource": "" }
q24393
Waker._wake
train
def _wake(self): """ Wake the multiplexer by writing a byte. If Broker is midway through teardown, the FD may already be closed, so ignore EBADF. """ try: self.transmit_side.write(b(' ')) except OSError: e = sys.exc_info()[1] if e.args[0] != errno.EBADF: raise
python
{ "resource": "" }
q24394
IoLogger.on_shutdown
train
def on_shutdown(self, broker): """Shut down the write end of the logging socket.""" _v and LOG.debug('%r.on_shutdown()', self) if not IS_WSL: # #333: WSL generates invalid readiness indication on shutdown() self._wsock.shutdown(socket.SHUT_WR) self._wsock.close() self.transmit_side.close()
python
{ "resource": "" }
q24395
Router.del_handler
train
def del_handler(self, handle): """ Remove the handler registered for `handle`. :raises KeyError: The handle wasn't registered. """ _, _, _, respondent = self._handle_map.pop(handle) if respondent: self._handles_by_respondent[respondent].discard(handle)
python
{ "resource": "" }
q24396
Router._async_route
train
def _async_route(self, msg, in_stream=None): """ Arrange for `msg` to be forwarded towards its destination. If its destination is the local context, then arrange for it to be dispatched using the local handlers. This is a lower overhead version of :meth:`route` that may only be called from the :class:`Broker` thread. :param Stream in_stream: If not :data:`None`, the stream the message arrived on. Used for performing source route verification, to ensure sensitive messages such as ``CALL_FUNCTION`` arrive only from trusted contexts. """ _vv and IOLOG.debug('%r._async_route(%r, %r)', self, msg, in_stream) if len(msg.data) > self.max_message_size: self._maybe_send_dead(msg, self.too_large_msg % ( self.max_message_size, )) return # Perform source verification. if in_stream: parent = self._stream_by_id.get(mitogen.parent_id) expect = self._stream_by_id.get(msg.auth_id, parent) if in_stream != expect: LOG.error('%r: bad auth_id: got %r via %r, not %r: %r', self, msg.auth_id, in_stream, expect, msg) return if msg.src_id != msg.auth_id: expect = self._stream_by_id.get(msg.src_id, parent) if in_stream != expect: LOG.error('%r: bad src_id: got %r via %r, not %r: %r', self, msg.src_id, in_stream, expect, msg) return if in_stream.auth_id is not None: msg.auth_id = in_stream.auth_id # Maintain a set of IDs the source ever communicated with. in_stream.egress_ids.add(msg.dst_id) if msg.dst_id == mitogen.context_id: return self._invoke(msg, in_stream) out_stream = self._stream_by_id.get(msg.dst_id) if out_stream is None: out_stream = self._stream_by_id.get(mitogen.parent_id) if out_stream is None: self._maybe_send_dead(msg, self.no_route_msg, msg.dst_id, mitogen.context_id) return if in_stream and self.unidirectional and not \ (in_stream.is_privileged or out_stream.is_privileged): self._maybe_send_dead(msg, self.unidirectional_msg, in_stream.remote_id, out_stream.remote_id) return out_stream._send(msg)
python
{ "resource": "" }
q24397
Broker.shutdown
train
def shutdown(self): """ Request broker gracefully disconnect streams and stop. Safe to call from any thread. """ _v and LOG.debug('%r.shutdown()', self) def _shutdown(): self._alive = False if self._alive and not self._exitted: self.defer(_shutdown)
python
{ "resource": "" }
q24398
filter_debug
train
def filter_debug(stream, it): """ Read line chunks from it, either yielding them directly, or building up and logging individual lines if they look like SSH debug output. This contains the mess of dealing with both line-oriented input, and partial lines such as the password prompt. Yields `(line, partial)` tuples, where `line` is the line, `partial` is :data:`True` if no terminating newline character was present and no more data exists in the read buffer. Consuming code can use this to unreliably detect the presence of an interactive prompt. """ # The `partial` test is unreliable, but is only problematic when verbosity # is enabled: it's possible for a combination of SSH banner, password # prompt, verbose output, timing and OS buffering specifics to create a # situation where an otherwise newline-terminated line appears to not be # terminated, due to a partial read(). If something is broken when # ssh_debug_level>0, this is the first place to look. state = 'start_of_line' buf = b('') for chunk in it: buf += chunk while buf: if state == 'start_of_line': if len(buf) < 8: # short read near buffer limit, block awaiting at least 8 # bytes so we can discern a debug line, or the minimum # interesting token from above or the bootstrap # ('password', 'MITO000\n'). break elif any(buf.startswith(p) for p in DEBUG_PREFIXES): state = 'in_debug' else: state = 'in_plain' elif state == 'in_debug': if b('\n') not in buf: break line, _, buf = bytes_partition(buf, b('\n')) LOG.debug('%s: %s', stream.name, mitogen.core.to_text(line.rstrip())) state = 'start_of_line' elif state == 'in_plain': line, nl, buf = bytes_partition(buf, b('\n')) yield line + nl, not (nl or buf) if nl: state = 'start_of_line'
python
{ "resource": "" }
q24399
run
train
def run(dest, router, args, deadline=None, econtext=None): """ Run the command specified by `args`, with ``PATH`` arranged so that any attempt by the command to execute a remote program over SSH is redirected to execute that program in the context `dest` using Mitogen instead. :param mitogen.core.Context dest: The destination context to execute the SSH command line in. :param mitogen.core.Router router: Router the temporary fakessh context is constructed on. :param list[str] args: Command line arguments for the local program, e.g. ``['rsync', '/tmp', 'remote:/tmp']`` :returns: Exit status of the child process. """ if econtext is not None: mitogen.parent.upgrade_router(econtext) context_id = router.allocate_id() fakessh = mitogen.parent.Context(router, context_id) fakessh.name = u'fakessh.%d' % (context_id,) sock1, sock2 = socket.socketpair() stream = mitogen.core.Stream(router, context_id) stream.name = u'fakessh' stream.accept(sock1.fileno(), sock1.fileno()) router.register(fakessh, stream) # Held in socket buffer until process is booted. fakessh.call_async(_fakessh_main, dest.context_id) tmp_path = tempfile.mkdtemp(prefix='mitogen_fakessh') try: ssh_path = os.path.join(tmp_path, 'ssh') fp = open(ssh_path, 'w') try: fp.write('#!%s\n' % (mitogen.parent.get_sys_executable(),)) fp.write(inspect.getsource(mitogen.core)) fp.write('\n') fp.write('ExternalContext(%r).main()\n' % ( _get_econtext_config(fakessh, sock2), )) finally: fp.close() os.chmod(ssh_path, int('0755', 8)) env = os.environ.copy() env.update({ 'PATH': '%s:%s' % (tmp_path, env.get('PATH', '')), 'ARGV0': mitogen.parent.get_sys_executable(), 'SSH_PATH': ssh_path, }) proc = subprocess.Popen(args, env=env) return proc.wait() finally: shutil.rmtree(tmp_path)
python
{ "resource": "" }