_id
stringlengths
2
7
title
stringlengths
1
88
partition
stringclasses
3 values
text
stringlengths
75
19.8k
language
stringclasses
1 value
meta_information
dict
q5900
array_has_real_numbers
train
def array_has_real_numbers(array):
    """ Returns True if the numpy array holds real numbers.

        Real numbers have a dtype kind of 'i' (signed integer), 'u' (unsigned
        integer) or 'f' (floating-point). All other kinds (boolean, complex,
        datetime, object, string, void) are considered non-real.
    """
    kind = array.dtype.kind
    knownKinds = 'biufcmMOSUV'
    assert kind in knownKinds, "Unexpected array kind: {}".format(kind)
    return kind in 'iuf'
python
{ "resource": "" }
q5901
createPenStyleCti
train
def createPenStyleCti(nodeName, defaultData=0, includeNone=False):
    """ Creates a ChoiceCti filled with the Qt pen styles.

        If includeNone is True, a None option (displayed as an empty string)
        is prepended to the list of styles.
    """
    if includeNone:
        displayValues = [''] + list(PEN_STYLE_DISPLAY_VALUES)
        configValues = [None] + list(PEN_STYLE_CONFIG_VALUES)
    else:
        displayValues = PEN_STYLE_DISPLAY_VALUES
        configValues = PEN_STYLE_CONFIG_VALUES

    return ChoiceCti(nodeName, defaultData,
                     displayValues=displayValues, configValues=configValues)
python
{ "resource": "" }
q5902
createPenWidthCti
train
def createPenWidthCti(nodeName, defaultData=1.0, zeroValueText=None):
    """ Creates a FloatCti with sensible defaults for configuring a QPen width.

        If zeroValueText is given, that text is displayed when 0.0 is selected
        and the minimum value is 0.0; otherwise the minimum value is 0.1.

        Note: a pen width of zero indicates a cosmetic pen, which is always
        drawn one pixel wide regardless of the transformation set on the
        painter. Line widths other than 1 may be slow when anti-aliasing is on.
    """
    minValue = 0.0 if zeroValueText is not None else 0.1
    return FloatCti(nodeName, defaultData=defaultData,
                    specialValueText=zeroValueText, minValue=minValue,
                    maxValue=100, stepSize=0.1, decimals=1)
python
{ "resource": "" }
q5903
ColorCti._enforceDataType
train
def _enforceDataType(self, data):
    """ Converts data to a QColor so this CTI always stores that type.

        Raises ValueError if data is not a valid color specification.
    """
    qColor = QtGui.QColor(data)  # TODO: store a RGB string?
    if qColor.isValid():
        return qColor
    raise ValueError("Invalid color specification: {!r}".format(data))
python
{ "resource": "" }
q5904
ColorCti.createEditor
train
def createEditor(self, delegate, parent, option):
    """ Creates a ColorCtiEditor for editing this item in the config tree.

        For the parameters see the AbstractCti constructor documentation.
    """
    return ColorCtiEditor(self, delegate, parent=parent)
python
{ "resource": "" }
q5905
ColorCtiEditor.getData
train
def getData(self):
    """ Returns the color currently entered in the line editor as a QColor.

        A leading '#' is prepended when missing. If the line editor has a
        validator, the text is validated first; an InvalidInputError is raised
        when the text is not Acceptable.
    """
    txt = self.lineEditor.text()
    if not txt.startswith('#'):
        txt = '#' + txt

    validator = self.lineEditor.validator()
    if validator is not None:
        state, txt, _ = validator.validate(txt, 0)
        if state != QtGui.QValidator.Acceptable:
            raise InvalidInputError("Invalid input: {!r}".format(txt))

    return QtGui.QColor(txt)
python
{ "resource": "" }
q5906
FontCti.data
train
def data(self, data):
    """ Sets the font data of this item.

        Does type conversion to ensure data is always a QFont. Also pushes the
        font's family, point size, weight and italic settings down into the
        corresponding child items (which is the reason this property setter is
        overloaded).
    """
    self._data = self._enforceDataType(data) # Enforce self._data to be a QFont
    # fontFamilyIndex/fontWeightIndex map the font onto an index into the
    # child's config values.
    self.familyCti.data = fontFamilyIndex(self.data, list(self.familyCti.iterConfigValues))
    self.pointSizeCti.data = self.data.pointSize()
    self.weightCti.data = fontWeightIndex(self.data, list(self.weightCti.iterConfigValues))
    self.italicCti.data = self.data.italic()
python
{ "resource": "" }
q5907
FontCti.defaultData
train
def defaultData(self, defaultData):
    """ Sets the default data of this item.

        Does type conversion to ensure the default data is always a QFont.
        Also pushes the defaults down into the corresponding child items.
    """
    self._defaultData = self._enforceDataType(defaultData) # Enforce to be a QFont
    self.familyCti.defaultData = fontFamilyIndex(self.defaultData,
                                                 list(self.familyCti.iterConfigValues))
    self.pointSizeCti.defaultData = self.defaultData.pointSize()
    # Consistency fix: mirror the FontCti.data setter and use fontWeightIndex,
    # so the child stores an index into its config values instead of a raw
    # QFont weight value.
    self.weightCti.defaultData = fontWeightIndex(self.defaultData,
                                                 list(self.weightCti.iterConfigValues))
    self.italicCti.defaultData = self.defaultData.italic()
python
{ "resource": "" }
q5908
FontCti._updateTargetFromNode
train
def _updateTargetFromNode(self):
    """ Applies the font config settings to the target widget's font.

        Builds a font from the config values and calls targetWidget.setFont().
    """
    font = self.data

    family = self.familyCti.configValue
    if not family:
        family = QtGui.QFont().family()  # fall back to the default family
    font.setFamily(family)

    font.setPointSize(self.pointSizeCti.configValue)
    font.setWeight(self.weightCti.configValue)
    font.setItalic(self.italicCti.configValue)

    self._targetWidget.setFont(font)
python
{ "resource": "" }
q5909
FontCti.createEditor
train
def createEditor(self, delegate, parent, option):
    """ Creates a FontCtiEditor for editing this item in the config tree.

        For the parameters see the AbstractCti documentation.
    """
    return FontCtiEditor(self, delegate, parent=parent)
python
{ "resource": "" }
q5910
PenCti.configValue
train
def configValue(self):
    """ Constructs a QPen from the children's config values.

        Returns None when this item's (boolean) data is falsy.
    """
    if not self.data:
        return None

    pen = QtGui.QPen()
    pen.setCosmetic(True)
    pen.setColor(self.colorCti.configValue)

    style = self.styleCti.configValue
    if style is not None:
        pen.setStyle(style)

    pen.setWidthF(self.widthCti.configValue)
    return pen
python
{ "resource": "" }
q5911
PenCti.createPen
train
def createPen(self, altStyle=None, altWidth=None):
    """ Creates a pen from the config values.

        If the None-option is selected in the style combo box the style is
        overridden by altStyle; if the configured width is 0.0 the width is
        overridden by altWidth.
    """
    pen = self.configValue
    if pen is None:
        return pen

    if self.findByNodePath('style').configValue is None and altStyle is not None:
        pen.setStyle(altStyle)

    if self.findByNodePath('width').configValue == 0.0 and altWidth is not None:
        pen.setWidthF(altWidth)

    return pen
python
{ "resource": "" }
q5912
maskedNanPercentile
train
def maskedNanPercentile(maskedArray, percentiles, *args, **kwargs):
    """ Calculates np.nanpercentile on the non-masked values of maskedArray. """
    # https://docs.scipy.org/doc/numpy/reference/maskedarray.generic.html#accessing-the-data
    awm = ArrayWithMask.createFromMaskedArray(maskedArray)
    validData = awm.data[~awm.maskIndex()]

    if len(validData) == 0:
        # np.nanpercentile of an empty list returns only a single NaN;
        # correct this to one NaN per requested percentile.
        result = [np.nan] * len(percentiles)
    else:
        result = np.nanpercentile(validData, percentiles, *args, **kwargs)

    assert len(result) == len(percentiles), \
        "shape mismatch: {} != {}".format(len(result), len(percentiles))

    return result
python
{ "resource": "" }
q5913
ArrayWithMask.mask
train
def mask(self, mask):
    """ Sets the mask values. Must be a numpy array or a boolean scalar."""
    check_class(mask, (np.ndarray, bool, np.bool_))
    # Normalize numpy booleans to the Python bool type; arrays are stored as-is.
    self._mask = bool(mask) if isinstance(mask, (bool, np.bool_)) else mask
python
{ "resource": "" }
q5914
ArrayWithMask.checkIsConsistent
train
def checkIsConsistent(self):
    """ Raises a ConsistencyError if the mask is an array whose shape differs
        from the data shape.
    """
    if is_an_array(self.mask) and self.mask.shape != self.data.shape:
        # Bug fix: the format call previously received a single boolean
        # (the result of the shape comparison) instead of the two shapes
        # the message template expects.
        raise ConsistencyError("Shape mismatch mask={}, data={}"
                               .format(self.mask.shape, self.data.shape))
python
{ "resource": "" }
q5915
ArrayWithMask.createFromMaskedArray
train
def createFromMaskedArray(cls, masked_arr):
    """ Creates an ArrayWithMask from a numpy (masked) array.

        :param masked_arr: a numpy MaskedArray, numpy array or ArrayWithMask
        :return: ArrayWithMask
    """
    if isinstance(masked_arr, ArrayWithMask):
        return masked_arr

    check_class(masked_arr, (np.ndarray, ma.MaskedArray))

    # A MaskedConstant (i.e. masked) is a special case of MaskedArray. It does not seem to
    # have a fill_value, so None is passed to select the default.
    # https://docs.scipy.org/doc/numpy/reference/maskedarray.baseclass.html#numpy.ma.masked
    fill_value = getattr(masked_arr, 'fill_value', None)

    return cls(masked_arr.data, masked_arr.mask, fill_value)
python
{ "resource": "" }
q5916
ArrayWithMask.asMaskedArray
train
def asMaskedArray(self):
    """ Converts this object to a numpy masked array using its data, mask and
        fill value.
    """
    return ma.masked_array(data=self.data, mask=self.mask,
                           fill_value=self.fill_value)
python
{ "resource": "" }
q5917
ArrayWithMask.maskAt
train
def maskAt(self, index):
    """ Returns the mask value at the given index.

        When the mask is a boolean scalar it applies to all array elements,
        so that scalar is returned directly.
    """
    mask = self.mask
    return mask if isinstance(mask, bool) else mask[index]
python
{ "resource": "" }
q5918
ArrayWithMask.maskIndex
train
def maskIndex(self):
    """ Returns a boolean index array that is True where the value is masked.

        Always has the same shape as self.data, even if the mask is a single
        boolean scalar.
    """
    if isinstance(self.mask, bool):
        # Fix: np.bool was deprecated and removed in NumPy >= 1.24;
        # the builtin bool is the correct dtype argument.
        return np.full(self.data.shape, self.mask, dtype=bool)
    else:
        return self.mask
python
{ "resource": "" }
q5919
ArrayWithMask.transpose
train
def transpose(self, *args, **kwargs):
    """ Returns a copy/view with the data and mask transposed separately.

        A scalar (boolean) mask is passed through unchanged.
        :return: ArrayWithMask
    """
    newData = np.transpose(self.data, *args, **kwargs)
    if is_an_array(self.mask):
        newMask = np.transpose(self.mask, *args, **kwargs)
    else:
        newMask = self.mask
    return ArrayWithMask(newData, newMask, self.fill_value)
python
{ "resource": "" }
q5920
RegistryTab.importRegItem
train
def importRegItem(self, regItem):
    """ Imports the class of the given registry item.

        Shows progress in the status label while the import is running and
        notifies the table model when it is done.
    """
    self.statusLabel.setText("Importing {}...".format(regItem.fullName))
    QtWidgets.qApp.processEvents()

    regItem.tryImportClass()
    self.tableView.model().emitDataChanged(regItem)

    self.statusLabel.setText("")
    QtWidgets.qApp.processEvents()
python
{ "resource": "" }
q5921
RegistryTab.tryImportAllPlugins
train
def tryImportAllPlugins(self):
    """ Tries to import every registered plugin class that was not yet tried. """
    for item in self.registeredItems:
        if item.triedImport:
            continue
        self.importRegItem(item)

    logger.debug("Importing finished.")
python
{ "resource": "" }
q5922
RegistryTab.setCurrentRegItem
train
def setCurrentRegItem(self, regItem):
    """ Makes regItem the current item in the table view.

        :param regItem: a ClassRegItem, or None to clear the selection.
    """
    check_class(regItem, ClassRegItem, allow_none=True)
    self.tableView.setCurrentRegItem(regItem)
python
{ "resource": "" }
q5923
RegistryTab.currentItemChanged
train
def currentItemChanged(self, _currentIndex=None, _previousIndex=None):
    """ Updates the description text widget when the user clicks on a selector in the table.

        The _currentIndex and _previousIndex parameters are ignored; the
        current item is fetched from the table view instead.
    """
    self.editor.clear()
    self.editor.setTextColor(QCOLOR_REGULAR)

    regItem = self.getCurrentRegItem()
    if regItem is None:
        return

    # Import on demand so that the description/exception shown below is up to date.
    if self._importOnSelect and regItem.successfullyImported is None:
        self.importRegItem(regItem)

    if regItem.successfullyImported is None:
        # Not yet imported.
        self.editor.setTextColor(QCOLOR_NOT_IMPORTED)
        self.editor.setPlainText('<plugin not yet imported>')
    elif regItem.successfullyImported is False:
        # Import failed: show the exception.
        self.editor.setTextColor(QCOLOR_ERROR)
        self.editor.setPlainText(str(regItem.exception))
    elif regItem.descriptionHtml:
        self.editor.setHtml(regItem.descriptionHtml)
    else:
        self.editor.setPlainText(regItem.docString)
python
{ "resource": "" }
q5924
PluginsDialog.tryImportAllPlugins
train
def tryImportAllPlugins(self):
    """ Imports the underlying plugin classes of all tabs in the tab widget. """
    logger.debug("Importing plugins: {}".format(self))

    for tabNr in range(self.tabWidget.count()):
        self.tabWidget.widget(tabNr).tryImportAllPlugins()
python
{ "resource": "" }
q5925
dimNamesFromDataset
train
def dimNamesFromDataset(h5Dataset):
    """ Constructs the dimension names given a h5py dataset.

        First looks in the dataset's dimension scales to see if it refers to another
        dataset. In that case the referred dataset's name is used. If not, the label of the
        dimension scale is used. Finally, if this is empty, the dimension is numbered.
    """
    dimNames = [] # TODO: cache?
    for dimNr, dimScales in enumerate(h5Dataset.dims):
        if len(dimScales) == 0:
            dimNames.append('Dim{}'.format(dimNr))
        elif len(dimScales) == 1:
            dimScaleLabel, dimScaleDataset = dimScales.items()[0]
            path = dimScaleDataset.name
            if path:
                dimNames.append(os.path.basename(path))
            elif dimScaleLabel: # This could potentially be long so it's our second choice
                dimNames.append(dimScaleLabel)
            else:
                dimNames.append('Dim{}'.format(dimNr))
        else:
            # TODO: multiple scales for this dimension. What to do?
            # Fix: Logger.warn is a deprecated alias; use Logger.warning.
            logger.warning("More than one dimension scale found: {!r}".format(dimScales))
            dimNames.append('Dim{}'.format(dimNr)) # For now, just number them

    return dimNames
python
{ "resource": "" }
q5926
dataSetElementType
train
def dataSetElementType(h5Dataset):
    """ Returns a string describing the element type of the dataset. """
    dtype = h5Dataset.dtype

    if dtype.names:  # structured array
        return '<structured>'

    metadata = dtype.metadata
    if metadata and 'vlen' in metadata:
        vlen_type = metadata['vlen']
        try:
            return "<vlen {}>".format(vlen_type.__name__)  # vlen_type is a type
        except AttributeError:
            return "<vlen {}>".format(vlen_type.name)  # vlen_type is a dtype

    return str(dtype)
python
{ "resource": "" }
q5927
dataSetUnit
train
def dataSetUnit(h5Dataset):
    """ Returns the unit of the h5Dataset by looking in its attributes.

        Searches for one of the keys: 'unit', 'units', 'Unit', 'Units',
        'UNIT', 'UNITS'. Returns the empty string if none of them is found.
        Always returns a string.
    """
    attributes = h5Dataset.attrs
    if not attributes:
        return ''   # a premature optimization :-)

    for key in ('unit', 'units', 'Unit', 'Units', 'UNIT', 'UNITS'):
        if key in attributes:
            # In Python3 the attributes are byte strings, so they must be decoded.
            # This is a bug in h5py, see https://github.com/h5py/h5py/issues/379
            return to_string(attributes[key])

    return ''   # not found
python
{ "resource": "" }
q5928
dataSetMissingValue
train
def dataSetMissingValue(h5Dataset):
    """ Returns the missing-data value of a HDF-5 dataset, or None if absent.

        Looks for one of the attributes: missing_value, MissingValue,
        missingValue, FillValue, _FillValue. HDF-EOS and NetCDF files often
        store the value in a 1-element array; in that case the first element
        is returned.
    """
    attributes = h5Dataset.attrs
    if not attributes:
        return None  # a premature optimization :-)

    for key in ('missing_value', 'MissingValue', 'missingValue', 'FillValue', '_FillValue'):
        if key not in attributes:
            continue
        missingDataValue = attributes[key]
        if is_an_array(missingDataValue) and len(missingDataValue) == 1:
            return missingDataValue[0]  # 1-element array (HDF-EOS / NetCDF)
        return missingDataValue

    return None
python
{ "resource": "" }
q5929
H5pyFieldRti._subArrayShape
train
def _subArrayShape(self): """ Returns the shape of the sub-array An empty tuple is returned for regular fields, which have no sub array. """ if self._h5Dataset.dtype.fields is None: return tuple() # regular field else: fieldName = self.nodeName fieldDtype = self._h5Dataset.dtype.fields[fieldName][0] return fieldDtype.shape
python
{ "resource": "" }
q5930
H5pyFieldRti.unit
train
def unit(self):
    """ Returns the unit of this field by calling dataSetUnit on the underlying dataset.

        If the unit attribute is a sequence with exactly one element per field,
        the element corresponding to self.nodeName is returned.
    """
    unit = dataSetUnit(self._h5Dataset)
    fieldNames = self._h5Dataset.dtype.names

    if hasattr(unit, '__len__') and len(unit) == len(fieldNames):
        return unit[fieldNames.index(self.nodeName)]
    else:
        return unit
python
{ "resource": "" }
q5931
H5pyDatasetRti.iconGlyph
train
def iconGlyph(self):
    """ Returns the icon glyph: a dimension icon for dimension scales,
        an array icon for regular datasets.
    """
    isDimScale = self._h5Dataset.attrs.get('CLASS', None) == b'DIMENSION_SCALE'
    return RtiIconFactory.DIMENSION if isDimScale else RtiIconFactory.ARRAY
python
{ "resource": "" }
q5932
crossPlotAutoRangeMethods
train
def crossPlotAutoRangeMethods(pgImagePlot2d, crossPlot, intialItems=None):
    """ Creates an ordered dict with autorange methods for a PgImagePlot2d inspector.

        :param pgImagePlot2d: the range methods will work on (the sliced array) of this inspector.
        :param crossPlot: if None, the range will be calculated from the entire sliced array,
            if "horizontal" or "vertical" the range will be calculated from the data under the
            horizontal or vertical cross hairs
        :param intialItems: will be passed on to the OrderedDict constructor.
    """
    rangeFunctions = OrderedDict({} if intialItems is None else intialItems)

    # If crossPlot is "horizontal" or "vertical" make functions that determine the range from
    # the data at the cross hair.
    if crossPlot:
        rangeFunctions['cross all data'] = partial(calcPgImagePlot2dDataRange, pgImagePlot2d,
                                                   0.0, crossPlot)
        for percentage in [0.1, 0.2, 0.5, 1, 2, 5, 10, 20]:
            label = "cross discard {}%".format(percentage)
            rangeFunctions[label] = partial(calcPgImagePlot2dDataRange, pgImagePlot2d,
                                            percentage, crossPlot)

    # Always add functions that determine the range from the complete sliced array.
    # Bug fix: the 'image all data' entry was previously (re)assigned inside the
    # loop below on every iteration; it is hoisted here (insertion order is unchanged).
    rangeFunctions['image all data'] = partial(calcPgImagePlot2dDataRange, pgImagePlot2d,
                                               0.0, None)
    for percentage in [0.1, 0.2, 0.5, 1, 2, 5, 10, 20]:
        label = "image discard {}%".format(percentage)
        rangeFunctions[label] = partial(calcPgImagePlot2dDataRange, pgImagePlot2d,
                                        percentage, None)

    return rangeFunctions
python
{ "resource": "" }
q5933
PgImagePlot2dCti.setImagePlotAutoRangeOn
train
def setImagePlotAutoRangeOn(self, axisNumber):
    """ Sets the image plot's auto-range on for the axis with number axisNumber.

        :param axisNumber: 0 (X-axis), 1 (Y-axis), 2 (both X and Y axes).
    """
    setXYAxesAutoRangeOn(self, self.xAxisRangeCti, self.yAxisRangeCti, axisNumber)
python
{ "resource": "" }
q5934
PgImagePlot2dCti.setHorCrossPlotAutoRangeOn
train
def setHorCrossPlotAutoRangeOn(self, axisNumber):
    """ Sets the horizontal cross-hair plot's auto-range on for the axis with number axisNumber.

        :param axisNumber: 0 (X-axis), 1 (Y-axis), 2 (both X and Y axes).
    """
    setXYAxesAutoRangeOn(self, self.xAxisRangeCti, self.horCrossPlotRangeCti, axisNumber)
python
{ "resource": "" }
q5935
PgImagePlot2dCti.setVerCrossPlotAutoRangeOn
train
def setVerCrossPlotAutoRangeOn(self, axisNumber):
    """ Sets the vertical cross-hair plot's auto-range on for the axis with number axisNumber.

        :param axisNumber: 0 (X-axis), 1 (Y-axis), 2 (both X and Y axes).
    """
    setXYAxesAutoRangeOn(self, self.verCrossPlotRangeCti, self.yAxisRangeCti, axisNumber)
python
{ "resource": "" }
q5936
PgImagePlot2d._clearContents
train
def _clearContents(self):
    """ Clears the plot contents when no valid data is available.

        Resets the title, image, axis labels, histogram, cross hairs and
        cross-section plots to an empty/neutral state.
    """
    logger.debug("Clearing inspector contents")
    self.titleLabel.setText('')

    # Don't clear the imagePlotItem, the imageItem is only added in the constructor.
    self.imageItem.clear()
    self.imagePlotItem.setLabel('left', '')
    self.imagePlotItem.setLabel('bottom', '')

    # Set the histogram range and levels to finite values to prevent further errors if this
    # function was called after an exception in self.drawContents
    self.histLutItem.setHistogramRange(0, 100)
    self.histLutItem.setLevels(0, 100)

    self.crossPlotRow, self.crossPlotCol = None, None

    self.probeLabel.setText('')
    self.crossLineHorizontal.setVisible(False)
    self.crossLineVertical.setVisible(False)
    self.crossLineHorShadow.setVisible(False)
    self.crossLineVerShadow.setVisible(False)

    self.horCrossPlotItem.clear()
    self.verCrossPlotItem.clear()
python
{ "resource": "" }
q5937
RtiRegItem.asDict
train
def asDict(self):
    """ Returns a dictionary representation for serialization.

        Extends the base-class dict with this item's file extensions.
    """
    dct = super(RtiRegItem, self).asDict()
    dct['extensions'] = self.extensions
    return dct
python
{ "resource": "" }
q5938
RtiRegistry._registerExtension
train
def _registerExtension(self, extension, rtiRegItem):
    """ Links a file name extension to a repository tree item.

        Logs an info message when an already-registered extension is overridden.
    """
    check_is_a_string(extension)
    check_class(rtiRegItem, RtiRegItem)

    logger.debug(" Registering extension {!r} for {}".format(extension, rtiRegItem))

    # TODO: type checking
    if extension in self._extensionMap:
        logger.info("Overriding extension {!r}: old={}, new={}"
                    .format(extension, self._extensionMap[extension], rtiRegItem))
    self._extensionMap[extension] = rtiRegItem
python
{ "resource": "" }
q5939
RtiRegistry.registerRti
train
def registerRti(self, fullName, fullClassName, extensions=None, pythonPath=''):
    """ Registers a repository tree item (RTI) class in the registry.

        :param fullName: human-readable name of the RTI.
        :param fullClassName: fully-qualified name of the RTI class.
        :param extensions: list of file extensions that open this RTI by
            default; a point is prepended to each extension if missing.
        :param pythonPath: optional path that is added to sys.path on import.
    """
    check_is_a_sequence(extensions)
    extensions = extensions if extensions is not None else []
    extensions = [prepend_point_to_extension(ext) for ext in extensions]

    regRti = RtiRegItem(fullName, fullClassName, extensions, pythonPath=pythonPath)
    self.registerItem(regRti)
python
{ "resource": "" }
q5940
RtiRegistry.getDefaultItems
train
def getDefaultItems(self):
    """ Returns a list with the default plugins in the repo tree item registry.

        Each RtiRegItem links a human-readable name and a fully-qualified
        class name to the file extensions it opens by default.
    """
    return [
        RtiRegItem('HDF-5 file',
                   'argos.repo.rtiplugins.hdf5.H5pyFileRti',
                   extensions=['hdf5', 'h5', 'h5e', 'he5', 'nc']), # hdf extension is for HDF-4

        RtiRegItem('MATLAB file',
                   'argos.repo.rtiplugins.scipyio.MatlabFileRti',
                   extensions=['mat']),

        RtiRegItem('NetCDF file',
                   'argos.repo.rtiplugins.ncdf.NcdfFileRti',
                   #extensions=['nc', 'nc3', 'nc4']),
                   extensions=['nc', 'nc4']),
                   #extensions=[]),

        RtiRegItem('NumPy binary file',
                   'argos.repo.rtiplugins.numpyio.NumpyBinaryFileRti',
                   extensions=['npy']),

        RtiRegItem('NumPy compressed file',
                   'argos.repo.rtiplugins.numpyio.NumpyCompressedFileRti',
                   extensions=['npz']),

        RtiRegItem('NumPy text file',
                   'argos.repo.rtiplugins.numpyio.NumpyTextFileRti',
                   #extensions=['txt', 'text']),
                   extensions=['dat']),

        RtiRegItem('IDL save file',
                   'argos.repo.rtiplugins.scipyio.IdlSaveFileRti',
                   extensions=['sav']),

        RtiRegItem('Pandas CSV file',
                   'argos.repo.rtiplugins.pandasio.PandasCsvFileRti',
                   extensions=['csv']),

        RtiRegItem('Pillow image',
                   'argos.repo.rtiplugins.pillowio.PillowFileRti',
                   extensions=['bmp', 'eps', 'im', 'gif', 'jpg', 'jpeg', 'msp', 'pcx',
                               'png', 'ppm', 'spi', 'tif', 'tiff', 'xbm', 'xv']),

        RtiRegItem('Wav file',
                   'argos.repo.rtiplugins.scipyio.WavFileRti',
                   extensions=['wav'])]
python
{ "resource": "" }
q5941
AboutDialog.addDependencyInfo
train
def addDependencyInfo(self):
    """ Adds version info about the installed dependencies to the editor. """
    logger.debug("Adding dependency info to the AboutDialog")
    self.progressLabel.setText("Retrieving package info...")
    self.editor.clear()

    self._addModuleInfo(mi.PythonModuleInfo())
    self._addModuleInfo(mi.QtModuleInfo())

    for moduleName in ('numpy', 'scipy', 'pandas', 'pyqtgraph'):
        self._addModuleInfo(moduleName)

    self._addModuleInfo(mi.PillowInfo())
    self._addModuleInfo(mi.H5pyModuleInfo())
    self._addModuleInfo(mi.NetCDF4ModuleInfo())

    self.progressLabel.setText("")
    logger.debug("Finished adding dependency info to the AboutDialog")
python
{ "resource": "" }
q5942
FloatCti._enforceDataType
train
def _enforceDataType(self, data): """ Converts to float so that this CTI always stores that type. Replaces infinite with the maximum respresentable float. Raises a ValueError if data is a NaN. """ value = float(data) if math.isnan(value): raise ValueError("FloatCti can't store NaNs") if math.isinf(value): if value > 0: logger.warn("Replacing inf by the largest representable float") value = sys.float_info.max else: logger.warn("Replacing -inf by the smallest representable float") value = -sys.float_info.max return value
python
{ "resource": "" }
q5943
detectRtiFromFileName
train
def detectRtiFromFileName(fileName):
    """ Determines which RepoTreeItem class to use for the given file name.

        Uses a DirectoryRti for directories and an UnknownFileRti if the file
        extension doesn't match one of the registered RTI extensions.

        Returns a (cls, regItem) tuple; both elements can be None:
        - directory:                     (DirectoryRti, None)
        - extension not registered:      (UnknownFileRti, None)
        - registered but import failed:  (None, regItem) with regItem.exception set
        - otherwise:                     (cls, regItem)
    """
    if os.path.isdir(fileName):
        return DirectoryRti, None

    _, extension = os.path.splitext(fileName)
    try:
        rtiRegItem = globalRtiRegistry().getRtiRegItemByExtension(extension)
    except KeyError:
        logger.debug("No file RTI registered for extension: {}".format(extension))
        return UnknownFileRti, None
    else:
        return rtiRegItem.getClass(tryImport=True), rtiRegItem  # cls can be None
python
{ "resource": "" }
q5944
createRtiFromFileName
train
def createRtiFromFileName(fileName):
    """ Determines the type of RepoTreeItem to use given a file name and creates it.

        Uses a DirectoryRti for directories and an UnknownFileRti if the file
        extension doesn't match one of the registered RTI extensions or when
        the plugin cannot be imported.
    """
    cls, rtiRegItem = detectRtiFromFileName(fileName)
    if cls is None:
        # Fix: Logger.warn is a deprecated alias; use Logger.warning.
        logger.warning("Unable to import plugin {}: {}"
                       .format(rtiRegItem.fullName, rtiRegItem.exception))
        rti = UnknownFileRti.createFromFileName(fileName)
        rti.setException(rtiRegItem.exception)
    else:
        rti = cls.createFromFileName(fileName)

    assert rti, "Sanity check failed (createRtiFromFileName). Please report this bug."
    return rti
python
{ "resource": "" }
q5945
DirectoryRti._fetchAllChildren
train
def _fetchAllChildren(self):
    """ Returns child items for all non-hidden entries in this directory.

        Subdirectories are listed first, then regular files.
    """
    entries = [(fn, os.path.join(self._fileName, fn))
               for fn in os.listdir(self._fileName)
               if not fn.startswith('.')]  # skip hidden entries

    # Sub-directories first...
    childItems = [DirectoryRti(fileName=absName, nodeName=name)
                  for name, absName in entries if os.path.isdir(absName)]

    # ...then regular files.
    childItems.extend(createRtiFromFileName(absName)
                      for _name, absName in entries if os.path.isfile(absName))

    return childItems
python
{ "resource": "" }
q5946
CollectorTree.resizeColumnsToContents
train
def resizeColumnsToContents(self, startCol=None, stopCol=None):
    """ Resizes the columns in [startCol, stopCol) to fit their contents.

        Uses the index widget's size hint of row 0 when present, otherwise
        the header's section size hint.
    """
    numCols = self.model().columnCount()
    firstCol = 0 if startCol is None else max(startCol, 0)
    lastCol = numCols if stopCol is None else min(stopCol, numCols)

    header = self.header()
    for col in range(firstCol, lastCol):
        indexWidget = self.indexWidget(self.model().index(0, col))
        if indexWidget:
            contentsWidth = indexWidget.sizeHint().width()
        else:
            contentsWidth = header.sectionSizeHint(col)
        header.resizeSection(col, contentsWidth)
python
{ "resource": "" }
q5947
CollectorSpinBox.sizeHint
train
def sizeHint(self): """ Reimplemented from the C++ Qt source of QAbstractSpinBox.sizeHint, but without truncating to a maximum of 18 characters. """ # The cache is invalid after the prefix, postfix and other properties # have been set. I disabled it because sizeHint isn't called that often. #if self._cachedSizeHint is not None: # return self._cachedSizeHint orgSizeHint = super(CollectorSpinBox, self).sizeHint() self.ensurePolished() d = self fm = QtGui.QFontMetrics(self.fontMetrics()) # This was h = d.edit.sizeHint().height(), but that didn't work. In the end we set the # height to the height calculated from the parent. h = orgSizeHint.height() w = 0 # QLatin1Char seems not to be implemented. # Using regular string literals and hope for the best s = d.prefix() + d.textFromValue(d.minimum()) + d.suffix() + ' ' # We disabled truncating the string here!! #s = s[:18] w = max(w, fm.width(s)) s = d.prefix() + d.textFromValue(d.maximum()) + d.suffix() + ' ' # We disabled truncating the string here!! #s = s[:18] w = max(w, fm.width(s)) if len(d.specialValueText()): s = d.specialValueText() w = max(w, fm.width(s)) w += 2 # cursor blinking space opt = QtWidgets.QStyleOptionSpinBox() self.initStyleOption(opt) hint = QtCore.QSize(w, h) extra = QtCore.QSize(35, 6) opt.rect.setSize(hint + extra) extra += hint - self.style().subControlRect(QtWidgets.QStyle.CC_SpinBox, opt, QtWidgets.QStyle.SC_SpinBoxEditField, self).size() # get closer to final result by repeating the calculation opt.rect.setSize(hint + extra) extra += hint - self.style().subControlRect(QtWidgets.QStyle.CC_SpinBox, opt, QtWidgets.QStyle.SC_SpinBoxEditField, self).size() hint += extra opt.rect = self.rect() result = (self.style().sizeFromContents(QtWidgets.QStyle.CT_SpinBox, opt, hint, self) .expandedTo(QtWidgets.QApplication.globalStrut())) self._cachedSizeHint = result # Use the height ancestor's sizeHint result.setHeight(orgSizeHint.height()) return result
python
{ "resource": "" }
q5948
ConfigItemDelegate.createEditor
train
def createEditor(self, parent, option, index):
    """ Returns the widget used to change data from the model.

        Delegates the actual creation to the config tree item at the index.
        Reimplemented from QStyledItemDelegate.
    """
    logger.debug("ConfigItemDelegate.createEditor, parent: {!r}".format(parent.objectName()))
    assert index.isValid(), "sanity check failed: invalid index"

    cti = index.model().getItem(index)
    return cti.createEditor(self, parent, option)
python
{ "resource": "" }
q5949
ConfigItemDelegate.setEditorData
train
def setEditorData(self, editor, index):
    """ Provides the editor widget with data to manipulate.

        Calls setData on the editor with the value from the model.

        :type editor: QWidget
        :type index: QModelIndex

        Reimplemented from QStyledItemDelegate.
    """
    # Take the config value via the model to be consistent with setModelData.
    editor.setData(index.model().data(index, Qt.EditRole))
python
{ "resource": "" }
q5950
ConfigItemDelegate.setModelData
train
def setModelData(self, editor, model, index):
    """ Gets data from the editor widget and stores it in the model at the item index.

        Does this by calling getEditorValue of the config tree item at the index.

        :type editor: QWidget
        :type model: ConfigTreeModel
        :type index: QModelIndex

        Reimplemented from QStyledItemDelegate.
    """
    try:
        data = editor.getData()
    except InvalidInputError as ex:
        # Fix: Logger.warn is a deprecated alias; use Logger.warning.
        logger.warning(ex)
    else:
        # The value is set via the model so that signals are emitted
        logger.debug("ConfigItemDelegate.setModelData: {}".format(data))
        model.setData(index, data, Qt.EditRole)
python
{ "resource": "" }
q5951
ConfigItemDelegate.updateEditorGeometry
train
def updateEditorGeometry(self, editor, option, index):
    """ Ensures that the editor is displayed correctly with respect to the item view.

        When the item has a check box, the editor is shifted to the right so it
        does not overlap that check box.
    """
    cti = index.model().getItem(index)
    if cti.checkState is None:
        displayRect = option.rect
    else:
        checkBoxRect = widgetSubCheckBoxRect(editor, option)
        offset = checkBoxRect.x() + checkBoxRect.width()
        displayRect = option.rect
        # NOTE(review): displayRect aliases option.rect, so this adjust also
        # mutates option.rect in place — presumably harmless here; verify.
        displayRect.adjust(offset, 0, 0, 0)

    editor.setGeometry(displayRect)
python
{ "resource": "" }
q5952
ConfigTreeView.closeEditor
train
def closeEditor(self, editor, hint):
    """ Finalizes, closes and releases the given editor.

        Note: it would be nicer if this method was part of ConfigItemDelegate since
        createEditor also lives there. However, QAbstractItemView.closeEditor is sometimes
        called directly, without the QAbstractItemDelegate.closeEditor signal being emitted
        (e.g. when the currentItem changes), so we cannot connect that signal to a slot in
        the ConfigItemDelegate.
    """
    self.itemDelegate().finalizeEditor(editor)
    super(ConfigTreeView, self).closeEditor(editor, hint)
python
{ "resource": "" }
q5953
ncVarAttributes
train
def ncVarAttributes(ncVar):
    """ Returns the attribute dictionary of a netCDF variable.

        Returns an empty dict when the attributes cannot be read.
    """
    try:
        return ncVar.__dict__
    except Exception as ex:
        # Due to some internal error netCDF4 may raise an AttributeError or KeyError,
        # depending on its version.
        # Fix: Logger.warn is a deprecated alias; use Logger.warning.
        logger.warning("Unable to read the attributes from {}. Reason: {}"
                       .format(ncVar.name, ex))
        return {}
python
{ "resource": "" }
q5954
ncVarUnit
train
def ncVarUnit(ncVar):
    """ Returns the unit of the ncVar by looking in the attributes.

        It searches the attributes for one of the following keys:
        'unit', 'units', 'Unit', 'Units', 'UNIT', 'UNITS'. If none of these are found,
        the empty string is returned.
    """
    attributes = ncVarAttributes(ncVar)
    if not attributes:
        return ''  # nothing to search

    for key in ('unit', 'units', 'Unit', 'Units', 'UNIT', 'UNITS'):
        if key in attributes:
            # NOTE(review): in Python 3 the attribute value may be a byte string
            # (see https://github.com/h5py/h5py/issues/379); it is returned as-is.
            return attributes[key]
    return ''
python
{ "resource": "" }
q5955
variableMissingValue
train
def variableMissingValue(ncVar):
    """ Returns the missing-data value of a NetCDF variable.

        Looks for one of the following attributes: missing_value, MissingValue,
        missingValue, FillValue, _FillValue. Returns None if none of these are found.
    """
    attributes = ncVarAttributes(ncVar)
    if not attributes:
        return None

    for key in ('missing_value', 'MissingValue', 'missingValue', 'FillValue', '_FillValue'):
        if key in attributes:
            return attributes[key]
    return None
python
{ "resource": "" }
q5956
NcdfFieldRti.unit
train
def unit(self):
    """ Returns the unit attribute of the underlying ncdf variable.

        If the unit has a length (e.g. is a list) with precisely one element per field,
        the unit belonging to this field (self.nodeName) is returned.
    """
    varUnit = ncVarUnit(self._ncVar)
    fieldNames = self._ncVar.dtype.names

    # A per-field unit container has the same length as the number of fields.
    if hasattr(varUnit, '__len__') and len(varUnit) == len(fieldNames):
        return varUnit[fieldNames.index(self.nodeName)]
    else:
        return varUnit
python
{ "resource": "" }
q5957
WavFileRti._openResources
train
def _openResources(self):
    """ Uses scipy.io.wavfile.read to open the underlying wav file.

        Tries memory mapping first and falls back to a regular read on failure.
        Stores the sample data in self._array and the sample rate in
        self.attributes['rate'].
    """
    # NOTE: original docstring claimed numpy.loadtxt was used; scipy.io.wavfile is.
    try:
        rate, data = scipy.io.wavfile.read(self._fileName, mmap=True)
    except Exception as ex:
        logger.warning(ex)
        logger.warning("Unable to read wav with memory mapping. Trying without now.")
        rate, data = scipy.io.wavfile.read(self._fileName, mmap=False)

    self._array = data
    self.attributes['rate'] = rate
python
{ "resource": "" }
q5958
WavFileRti._fetchAllChildren
train
def _fetchAllChildren(self):
    """ Adds a SliceRti per channel (column) as children so they can be inspected easily.

        Returns an empty list if no 2-dimensional array is loaded.
    """
    childItems = []

    # Check for None first: the original expression evaluated self._array.ndim before
    # the `self._array is not None` guard, raising AttributeError when no array is loaded.
    if self._array is not None and self._array.ndim == 2:
        _nRows, nCols = self._array.shape
        for col in range(nCols):
            colItem = SliceRti(self._array[:, col], nodeName="channel-{}".format(col),
                               fileName=self.fileName, iconColor=self.iconColor,
                               attributes=self.attributes)
            childItems.append(colItem)

    return childItems
python
{ "resource": "" }
q5959
middleMouseClickEvent
train
def middleMouseClickEvent(argosPgPlotItem, axisNumber, mouseClickEvent):
    """ Emits sigAxisReset when the middle mouse button is clicked on an axis of the plot item.
    """
    if mouseClickEvent.button() != QtCore.Qt.MiddleButton:
        return
    mouseClickEvent.accept()
    argosPgPlotItem.emitResetAxisSignal(axisNumber)
python
{ "resource": "" }
q5960
ArgosPgPlotItem.close
train
def close(self):
    """ Called before destruction; can be used to clean up resources.

        Named 'close' (rather than e.g. 'finalize') because PlotItem already defines close.
    """
    logger.debug("Finalizing: {}".format(self))
    super(ArgosPgPlotItem, self).close()
python
{ "resource": "" }
q5961
ArgosPgPlotItem.contextMenuEvent
train
def contextMenuEvent(self, event):
    """ Shows the context menu at the cursor position.

        The event-based approach is needed because ArgosPgPlotItem derives from
        QGraphicsWidget, not QWidget, and therefore lacks the
        customContextMenuRequested signal.
    """
    menu = QtWidgets.QMenu()
    for action in self.actions():
        menu.addAction(action)
    menu.exec_(event.screenPos())
python
{ "resource": "" }
q5962
ArgosPgPlotItem.emitResetAxisSignal
train
def emitResetAxisSignal(self, axisNumber):
    """ Emits the sigAxisReset signal with the axisNumber as parameter.

        axisNumber should be 0 for X, 1 for Y, and 2 for both axes.
    """
    assert axisNumber in (VALID_AXES_NUMBERS), \
        "Axis Nr should be one of {}, got {}".format(VALID_AXES_NUMBERS, axisNumber)

    # Hide 'auto-scale (A)' button
    logger.debug("ArgosPgPlotItem.autoBtnClicked, mode:{}".format(self.autoBtn.mode))
    if self.autoBtn.mode == 'auto':
        self.autoBtn.hide()
    else:
        # Does this occur?
        msg = "Unexpected autobutton mode: {}".format(self.autoBtn.mode)
        if DEBUGGING:
            raise ValueError(msg)
        else:
            # logger.warn is a deprecated alias of logger.warning
            logger.warning(msg)

    logger.debug("Emitting sigAxisReset({}) for {!r}".format(axisNumber, self))
    self.sigAxisReset.emit(axisNumber)
python
{ "resource": "" }
q5963
notebook_merge
train
# notebook_merge: builds a 3-way merge notebook. For every entry in the higher-order
# diff of (local vs base) and (remote vs base) it emits a [local, base, remote] row of
# cells whose metadata keys 'state' and 'side' drive the NBDiff merge UI; the flattened
# rows replace the cells of the 'local' notebook, which is returned with the
# notebook-level metadata key nbdiff-type set to 'merge'.
# NOTE(review): code below kept byte-identical (collapsed formatting preserved).
def notebook_merge(local, base, remote, check_modified=False): """Unify three notebooks into a single notebook with merge metadata. The result of this function is a valid notebook that can be loaded by the IPython Notebook front-end. This function adds additional cell metadata that the front-end Javascript uses to render the merge. Parameters ---------- local : dict The local branch's version of the notebook. base : dict The last common ancestor of local and remote. remote : dict The remote branch's version of the notebook. Returns ------- nb : A valid notebook containing merge metadata. """ local_cells = get_cells(local) base_cells = get_cells(base) remote_cells = get_cells(remote) rows = [] current_row = [] empty_cell = lambda: { 'cell_type': 'code', 'language': 'python', 'outputs': [], 'prompt_number': 1, 'text': ['Placeholder'], 'metadata': {'state': 'empty'} } diff_of_diffs = merge(local_cells, base_cells, remote_cells) # For each item in the higher-order diff, create a "row" that # corresponds to a row in the NBDiff interface. A row contains: # | LOCAL | BASE | REMOTE | for item in diff_of_diffs: state = item['state'] cell = copy.deepcopy(diff_result_to_cell(item['value'])) if state == 'deleted': # This change is between base and local branches. # It can be an addition or a deletion. if cell['metadata']['state'] == 'unchanged': # This side doesn't have the change; wait # until we encounter the change to create the row. continue cell['metadata']['side'] = 'local' remote_cell = empty_cell() remote_cell['metadata']['side'] = 'remote' if cell['metadata']['state'] == 'deleted' \ or cell['metadata']['state'] == 'unchanged': base_cell = copy.deepcopy(cell) else: base_cell = empty_cell() base_cell['metadata']['side'] = 'base' # This change is on the right. current_row = [ cell, base_cell, remote_cell, ] elif state == 'added': # This change is between base and remote branches. # It can be an addition or a deletion. 
cell['metadata']['side'] = 'remote' if cell['metadata']['state'] == 'unchanged': # This side doesn't have the change; wait # until we encounter the change to create the row. continue if cell['metadata']['state'] == 'deleted': base_cell = copy.deepcopy(cell) base_cell['metadata']['state'] = 'unchanged' local_cell = copy.deepcopy(cell) local_cell['metadata']['state'] = 'unchanged' else: base_cell = empty_cell() local_cell = empty_cell() base_cell['metadata']['side'] = 'base' local_cell['metadata']['side'] = 'local' current_row = [ local_cell, base_cell, cell, ] elif state == 'unchanged': # The same item occurs between base-local and base-remote. # This happens if both branches made the same change, whether # that is an addition or deletion. If neither branches # changed a given cell, that cell shows up here too. cell1 = copy.deepcopy(cell) cell3 = copy.deepcopy(cell) if cell['metadata']['state'] == 'deleted' \ or cell['metadata']['state'] == 'unchanged': # If the change is a deletion, the cell-to-be-deleted # should in the base as 'unchanged'. The user will # choose to make it deleted. cell2 = copy.deepcopy(cell) cell2['metadata']['state'] = 'unchanged' else: # If the change is an addition, it should not # show in the base; the user must add it to the merged version. cell2 = empty_cell() cell1['metadata']['side'] = 'local' cell2['metadata']['side'] = 'base' cell3['metadata']['side'] = 'remote' current_row = [ cell1, cell2, cell3, ] rows.append(current_row) # Chain all rows together; create a flat array from the nested array. # Use the base notebook's notebook-level metadata (title, version, etc.) result_notebook = local if len(result_notebook['worksheets']) == 0: result_notebook['worksheets'] = [nbformat.new_worksheet()] new_cell_array = list(it.chain.from_iterable(rows)) result_notebook['worksheets'][0]['cells'] = new_cell_array result_notebook['metadata']['nbdiff-type'] = 'merge' return result_notebook
python
{ "resource": "" }
q5964
NotebookParser.parse
train
def parse(self, json_data):
    """ Parse a notebook .ipynb file.

        Parameters
        ----------
        json_data : file
            A file handle for an .ipynb file. The handle is closed after reading.

        Returns
        -------
        nb : An IPython Notebook data structure.
    """
    notebook = current.read(json_data, 'ipynb')
    json_data.close()
    return notebook
python
{ "resource": "" }
q5965
notebook_diff
train
def notebook_diff(nb1, nb2, check_modified=True):
    """ Unify two notebooks into a single notebook with diff metadata.

        The result is a valid notebook loadable by the IPython Notebook front-end;
        extra cell metadata tells the front-end Javascript how to render the diff.

        Parameters
        ----------
        nb1 : dict
            An IPython Notebook to use as the baseline version.
        nb2 : dict
            An IPython Notebook to compare against the baseline.
        check_modified : bool
            Whether or not to detect cell modification.

        Returns
        -------
        nb : A valid notebook containing diff metadata.
    """
    baseline_cells = nb1['worksheets'][0]['cells']
    other_cells = nb2['worksheets'][0]['cells']

    diffed_nb = cells_diff(baseline_cells, other_cells, check_modified=check_modified)
    line_diffs = diff_modified_items(diffed_nb)

    cell_list = []
    for index, item in enumerate(diffed_nb):
        cell = diff_result_to_cell(item)
        if index in line_diffs:
            cell['metadata']['extra-diff-data'] = line_diffs[index]
        cell_list.append(cell)

    # The result reuses nb1's notebook-level structure and metadata.
    nb1['worksheets'][0]['cells'] = cell_list
    nb1['metadata']['nbdiff-type'] = 'diff'
    return nb1
python
{ "resource": "" }
q5966
diff_result_to_cell
train
def diff_result_to_cell(item):
    """ Extract the cell from a diff.diff result entry and tag its metadata.

        The returned cell's metadata 'state' is set to the diff state; for modified
        cells the original cell is additionally stored under metadata 'original'.
    """
    state = item['state']
    if state == 'modified':
        cell = item['modifiedvalue'].data
        cell['metadata']['state'] = state
        cell['metadata']['original'] = item['originalvalue'].data
    else:
        cell = item['value'].data
        cell['metadata']['state'] = state
    return cell
python
{ "resource": "" }
q5967
cells_diff
train
def cells_diff(before_cells, after_cells, check_modified=False):
    """ Diff two arrays of notebook cells.

        Each cell is wrapped in a CellComparator before being handed to diff().
    """
    wrap = lambda cells: [CellComparator(cell, check_modified=check_modified)
                          for cell in cells]
    return diff(wrap(before_cells), wrap(after_cells), check_modified=check_modified)
python
{ "resource": "" }
q5968
words_diff
train
def words_diff(before_words, after_words):
    """ Diff the words in two strings.

        Intended for diffing prose and other text where line breaks carry little
        semantic value.

        Parameters
        ----------
        before_words : str
            A string to be used as the baseline version.
        after_words : str
            A string to be compared against the baseline.

        Returns
        -------
        diff_result : A list of dictionaries containing diff information.
    """
    return diff(before_words.split(), after_words.split())
python
{ "resource": "" }
q5969
lines_diff
train
def lines_diff(before_lines, after_lines, check_modified=False):
    """ Diff the lines in two iterables.

        Parameters
        ----------
        before_lines : iterable
            Iterable containing lines used as the baseline version.
        after_lines : iterable
            Iterable containing lines to be compared against the baseline.

        Returns
        -------
        diff_result : A list of dictionaries containing diff information.
    """
    wrap = lambda lines: [LineComparator(line, check_modified=check_modified)
                          for line in lines]
    return diff(wrap(before_lines), wrap(after_lines), check_modified=check_modified)
python
{ "resource": "" }
q5970
diff
train
def diff(before, after, check_modified=False):
    """ Diff two sequences of comparable objects.

        Returns a list of dictionaries, each holding a value from ``before`` or
        ``after`` and a ``state`` of 'unchanged', 'added', 'deleted', or 'modified'.

        >>> import pprint
        >>> result = diff(['a', 'b', 'c'], ['b', 'c', 'd'])
        >>> pprint.pprint(result)
        [{'state': 'deleted', 'value': 'a'},
         {'state': 'unchanged', 'value': 'b'},
         {'state': 'unchanged', 'value': 'c'},
         {'state': 'added', 'value': 'd'}]

        Parameters
        ----------
        before : iterable
            Values to be used as the baseline version.
        after : iterable
            Values to be compared against the baseline.
        check_modified : bool
            Whether or not to check for modifiedness.

        Returns
        -------
        diff_items : A list of dictionaries containing diff information.
    """
    # The grid is empty if either input is empty, which would violate the
    # assumptions made below; the diff is then fully determined by the
    # non-empty input anyway.
    if len(before) == 0:
        return [{'state': 'added', 'value': v} for v in after]
    if len(after) == 0:
        return [{'state': 'deleted', 'value': v} for v in before]

    grid = create_grid(before, after)
    nrows = len(grid[0])
    ncols = len(grid)

    result = []
    for kind, col, row in diff_points(grid):
        if kind == 'unchanged':
            result.append({'state': kind, 'value': before[col]})
        elif kind == 'deleted':
            assert col < ncols
            result.append({'state': kind, 'value': before[col]})
        elif kind == 'added':
            assert row < nrows
            result.append({'state': kind, 'value': after[row]})
        elif kind == 'modified':
            if check_modified:
                result.append({
                    'state': kind,
                    'originalvalue': before[col],
                    'modifiedvalue': after[row],
                })
            else:
                # Without modification detection a modified item is reported
                # as a deletion followed by an addition.
                result.append({'state': 'deleted', 'value': before[col]})
                result.append({'state': 'added', 'value': after[row]})
        else:
            raise Exception('We should not be here.')
    return result
python
{ "resource": "" }
q5971
run_application
train
# run_application: configures logging (dictConfig for a dict, basicConfig for an int,
# skipped for None), optionally swaps the asyncio event loop policy and the default
# executor, instantiates the root component from a config dict if needed, starts it
# with a timeout, installs a SIGTERM handler (not on Windows), runs the loop until
# interruption, then closes the root context, shuts down async generators, closes the
# loop and the logging system, and exits nonzero on startup failure.
# NOTE(review): code below kept byte-identical (collapsed formatting preserved).
def run_application(component: Union[Component, Dict[str, Any]], *, event_loop_policy: str = None, max_threads: int = None, logging: Union[Dict[str, Any], int, None] = INFO, start_timeout: Union[int, float, None] = 10): """ Configure logging and start the given root component in the default asyncio event loop. Assuming the root component was started successfully, the event loop will continue running until the process is terminated. Initializes the logging system first based on the value of ``logging``: * If the value is a dictionary, it is passed to :func:`logging.config.dictConfig` as argument. * If the value is an integer, it is passed to :func:`logging.basicConfig` as the logging level. * If the value is ``None``, logging setup is skipped entirely. By default, the logging system is initialized using :func:`~logging.basicConfig` using the ``INFO`` logging level. The default executor in the event loop is replaced with a new :class:`~concurrent.futures.ThreadPoolExecutor` where the maximum number of threads is set to the value of ``max_threads`` or, if omitted, the default value of :class:`~concurrent.futures.ThreadPoolExecutor`. 
:param component: the root component (either a component instance or a configuration dictionary where the special ``type`` key is either a component class or a ``module:varname`` reference to one) :param event_loop_policy: entry point name (from the ``asphalt.core.event_loop_policies`` namespace) of an alternate event loop policy (or a module:varname reference to one) :param max_threads: the maximum number of worker threads in the default thread pool executor (the default value depends on the event loop implementation) :param logging: a logging configuration dictionary, :ref:`logging level <python:levels>` or ``None`` :param start_timeout: seconds to wait for the root component (and its subcomponents) to start up before giving up (``None`` = wait forever) """ assert check_argument_types() # Configure the logging system if isinstance(logging, dict): dictConfig(logging) elif isinstance(logging, int): basicConfig(level=logging) # Inform the user whether -O or PYTHONOPTIMIZE was set when Python was launched logger = getLogger(__name__) logger.info('Running in %s mode', 'development' if __debug__ else 'production') # Switch to an alternate event loop policy if one was provided if event_loop_policy: create_policy = policies.resolve(event_loop_policy) policy = create_policy() asyncio.set_event_loop_policy(policy) logger.info('Switched event loop policy to %s', qualified_name(policy)) # Assign a new default executor with the given max worker thread limit if one was provided event_loop = asyncio.get_event_loop() if max_threads is not None: event_loop.set_default_executor(ThreadPoolExecutor(max_threads)) logger.info('Installed a new thread pool executor with max_workers=%d', max_threads) # Instantiate the root component if a dict was given if isinstance(component, dict): component = cast(Component, component_types.create_object(**component)) logger.info('Starting application') context = Context() exception = None # type: Optional[BaseException] exit_code = 0 # Start the root 
component try: coro = asyncio.wait_for(component.start(context), start_timeout, loop=event_loop) event_loop.run_until_complete(coro) except asyncio.TimeoutError as e: exception = e logger.error('Timeout waiting for the root component to start') exit_code = 1 except Exception as e: exception = e logger.exception('Error during application startup') exit_code = 1 else: logger.info('Application started') # Add a signal handler to gracefully deal with SIGTERM try: event_loop.add_signal_handler(signal.SIGTERM, sigterm_handler, logger, event_loop) except NotImplementedError: pass # Windows does not support signals very well # Finally, run the event loop until the process is terminated or Ctrl+C is pressed try: event_loop.run_forever() except KeyboardInterrupt: pass except SystemExit as e: exit_code = e.code # Close the root context logger.info('Stopping application') event_loop.run_until_complete(context.close(exception)) # Shut down leftover async generators (requires Python 3.6+) try: event_loop.run_until_complete(event_loop.shutdown_asyncgens()) except (AttributeError, NotImplementedError): pass # Finally, close the event loop itself event_loop.close() logger.info('Application stopped') # Shut down the logging system shutdown() if exit_code: sys.exit(exit_code)
python
{ "resource": "" }
q5972
executor
train
# executor: decorator that schedules the wrapped callable in a thread-pool executor —
# the event loop's default (None), a given Executor instance, or an Executor resource
# looked up by name via a Context passed as the first or second positional argument.
# When called from a worker thread (no event loop available) the function is invoked
# directly. NOTE(review): code below kept byte-identical (collapsed formatting
# preserved).
def executor(func_or_executor: Union[Executor, str, Callable[..., T_Retval]]) -> \ Union[WrappedCallable, Callable[..., WrappedCallable]]: """ Decorate a function to run in an executor. If no executor (or ``None``) is given, the current event loop's default executor is used. Otherwise, the argument must be a PEP 3148 compliant thread pool executor or the name of an :class:`~concurrent.futures.Executor` instance. If a decorated callable is called in a worker thread, the executor argument is ignored and the wrapped function is called directly. Callables wrapped with this decorator must be used with ``await`` when called in the event loop thread. Example use with the default executor (``None``):: @executor def this_runs_in_threadpool(ctx): return do_something_cpu_intensive() async def request_handler(ctx): result = await this_runs_in_threadpool(ctx) With a named :class:`~concurrent.futures.Executor` resource:: @executor('special_ops') def this_runs_in_threadpool(ctx): return do_something_cpu_intensive() async def request_handler(ctx): result = await this_runs_in_threadpool(ctx) :param func_or_executor: either a callable (when used as a decorator), an executor instance or the name of an :class:`~concurrent.futures.Executor` resource """ def outer(func: Callable[..., T_Retval], executor: Union[Executor, str] = None) -> Callable[..., Awaitable[T_Retval]]: def wrapper(*args, **kwargs): try: loop = get_event_loop() except RuntimeError: # Event loop not available -- we're in a worker thread return func(*args, **kwargs) # Resolve the executor resource name to an Executor instance if isinstance(executor, str): try: ctx = next(obj for obj in args[:2] if isinstance(obj, Context)) except StopIteration: raise RuntimeError('the callable needs to be called with a Context as the ' 'first or second positional argument') _executor = ctx.require_resource(Executor, executor) else: _executor = executor callback = partial(func, *args, **kwargs) return loop.run_in_executor(_executor, 
callback) assert check_argument_types() assert not inspect.iscoroutinefunction(func), \ 'Cannot wrap coroutine functions to be run in an executor' return wraps(func)(wrapper) if isinstance(func_or_executor, (str, Executor)): return partial(outer, executor=func_or_executor) else: return outer(func_or_executor)
python
{ "resource": "" }
q5973
merge_config
train
def merge_config(original: Optional[Dict[str, Any]],
                 overrides: Optional[Dict[str, Any]]) -> Dict[str, Any]:
    """ Return a copy of ``original`` with the values from ``overrides`` applied.

        Works like :meth:`dict.update`, except that when both sides hold a dict for
        the same key, the two dicts are merged recursively instead of replaced. A
        dotted key in ``overrides`` (``foo.bar.baz: value``) is shorthand for nested
        dicts (``foo: {bar: {baz: value}}``).

        :param original: a configuration dictionary (or ``None``)
        :param overrides: a dictionary containing overriding values to the configuration
            (or ``None``)
        :return: the merge result
    """
    assert check_argument_types()
    merged = dict(original) if original else {}
    for key, value in (overrides or {}).items():
        if '.' in key:
            # Expand the dotted-path shorthand into a nested dict.
            key, rest = key.split('.', 1)
            value = {rest: value}

        existing = merged.get(key)
        if isinstance(existing, dict) and isinstance(value, dict):
            merged[key] = merge_config(existing, value)
        else:
            merged[key] = value
    return merged
python
{ "resource": "" }
q5974
PluginContainer.resolve
train
def resolve(self, obj):
    """ Resolve a reference to an entry point or to a variable in a module.

        A ``module:varname`` string is resolved with :func:`resolve_reference`; any
        other string is looked up as a named entry point in this container's
        namespace. Non-string objects are returned unchanged.

        :param obj: an entry point identifier, an object reference or an arbitrary object
        :return: the loaded entry point, resolved object or the unchanged input value
        :raises LookupError: if ``obj`` was a string but the named entry point was not found
    """
    if not isinstance(obj, str):
        return obj
    if ':' in obj:
        return resolve_reference(obj)

    value = self._entrypoints.get(obj)
    if value is None:
        raise LookupError('no such entry point in {}: {}'.format(self.namespace, obj))

    # Load the entry point lazily on first access and cache the loaded object.
    if isinstance(value, EntryPoint):
        value = self._entrypoints[obj] = value.load()

    return value
python
{ "resource": "" }
q5975
PluginContainer.create_object
train
def create_object(self, type: Union[type, str], **constructor_kwargs):
    """ Instantiate a plugin.

        The entry points in this namespace must point to subclasses of the
        ``base_class`` parameter passed to this container.

        :param type: an entry point identifier, a ``module:varname`` reference to a class,
            or an actual class object
        :param constructor_kwargs: keyword arguments passed to the constructor of the
            plugin class
        :return: the plugin instance
    """
    assert check_argument_types()
    assert self.base_class, 'base class has not been defined'

    plugin_class = self.resolve(type)
    if not issubclass(plugin_class, self.base_class):
        raise TypeError('{} is not a subclass of {}'.format(
            qualified_name(plugin_class), qualified_name(self.base_class)))

    return plugin_class(**constructor_kwargs)
python
{ "resource": "" }
q5976
ContainerComponent.add_component
train
def add_component(self, alias: str, type: Union[str, type] = None, **config):
    """ Add a child component.

        Instantiates a component class as specified by ``type`` (if omitted, ``alias``
        doubles as the type). Configuration given here as keyword arguments is merged
        with any external configuration supplied for this alias via the ``components``
        constructor argument, the external configuration taking precedence (see
        :func:`~asphalt.core.util.merge_config`).

        :param alias: a name for the component instance, unique within this container
        :param type: entry point name or :class:`Component` subclass or a
            ``module:varname`` reference to one
        :param config: keyword arguments passed to the component's constructor
    """
    assert check_argument_types()
    if not isinstance(alias, str) or not alias:
        raise TypeError('component_alias must be a nonempty string')
    if alias in self.child_components:
        raise ValueError('there is already a child component named "{}"'.format(alias))

    config['type'] = type or alias

    # Allow the external configuration to override the constructor arguments
    override_config = self.component_configs.get(alias) or {}
    config = merge_config(config, override_config)

    component = component_types.create_object(**config)
    self.child_components[alias] = component
python
{ "resource": "" }
q5977
context_teardown
train
# context_teardown: wraps an async generator function so that the code after its first
# yield runs at context teardown; the exception that ended the context (or None) is
# sent into the generator. Requires a Context among the first two positional
# arguments; plain coroutine functions are adapted via async_generator.
# NOTE(review): code below kept byte-identical (collapsed formatting preserved).
def context_teardown(func: Callable): """ Wrap an async generator function to execute the rest of the function at context teardown. This function returns an async function, which, when called, starts the wrapped async generator. The wrapped async function is run until the first ``yield`` statement (``await async_generator.yield_()`` on Python 3.5). When the context is being torn down, the exception that ended the context, if any, is sent to the generator. For example:: class SomeComponent(Component): @context_teardown async def start(self, ctx: Context): service = SomeService() ctx.add_resource(service) exception = yield service.stop() :param func: an async generator function :return: an async function """ @wraps(func) async def wrapper(*args, **kwargs) -> None: async def teardown_callback(exception: Optional[Exception]): try: await generator.asend(exception) except StopAsyncIteration: pass finally: await generator.aclose() try: ctx = next(arg for arg in args[:2] if isinstance(arg, Context)) except StopIteration: raise RuntimeError('the first positional argument to {}() has to be a Context ' 'instance'.format(callable_name(func))) from None generator = func(*args, **kwargs) try: await generator.asend(None) except StopAsyncIteration: pass except BaseException: await generator.aclose() raise else: ctx.add_teardown_callback(teardown_callback, True) if iscoroutinefunction(func): func = async_generator(func) elif not isasyncgenfunction(func): raise TypeError('{} must be an async generator function'.format(callable_name(func))) return wrapper
python
{ "resource": "" }
q5978
Context.context_chain
train
def context_chain(self) -> List['Context']:
    """Return a list of contexts starting from this one, followed by its parent and so on."""
    chain = []
    node = self  # type: Optional[Context]
    while node is not None:
        chain.append(node)
        node = node.parent
    return chain
python
{ "resource": "" }
q5979
Context.add_teardown_callback
train
def add_teardown_callback(self, callback: Callable, pass_exception: bool = False) -> None:
    """ Add a callback to be called when this context closes.

        Teardown callbacks are intended for resource cleanup and are processed in
        reverse order of addition (the last added callback runs first). A callback may
        return an awaitable, which is awaited on before the next callback is called.

        :param callback: a callable taking either no arguments or the exception that
            ended this context, depending on ``pass_exception``
        :param pass_exception: ``True`` to pass the callback the exception that ended
            this context (or ``None`` if the context ended cleanly)
    """
    assert check_argument_types()
    self._check_closed()
    self._teardown_callbacks.append((callback, pass_exception))
python
{ "resource": "" }
q5980
Context.close
train
async def close(self, exception: BaseException = None) -> None:
    """ Close this context and call any necessary resource teardown callbacks.

        Callbacks run in reverse order of registration; when one returns an awaitable
        it is awaited on before the next callback runs. Every callback is processed
        even if some raise; the collected exceptions are re-raised together as a
        :exc:`~.TeardownError` at the end.

        After this method has been called, resources can no longer be requested or
        published on this context.

        :param exception: the exception, if any, that caused this context to be closed
        :raises .TeardownError: if one or more teardown callbacks raise an exception
    """
    self._check_closed()
    self._closed = True

    exceptions = []
    for callback, pass_exception in reversed(self._teardown_callbacks):
        try:
            retval = callback(exception) if pass_exception else callback()
            if isawaitable(retval):
                await retval
        except Exception as e:
            exceptions.append(e)

    del self._teardown_callbacks
    if exceptions:
        raise TeardownError(exceptions)
python
{ "resource": "" }
q5981
Context.add_resource
train
def add_resource(self, value, name: str = 'default', context_attr: str = None,
                 types: Union[type, Sequence[type]] = ()) -> None:
    """ Add a resource to this context.

        Dispatches a ``resource_added`` event on success.

        :param value: the actual resource value
        :param name: name of this resource (unique among all its registered types within a
            single context)
        :param context_attr: name of the context attribute this resource will be accessible as
        :param types: type(s) to register the resource as (omit to use the type of ``value``)
        :raises asphalt.core.context.ResourceConflict: if the resource conflicts with an
            existing one in any way
    """
    assert check_argument_types()
    self._check_closed()
    if isinstance(types, type):
        types = (types,)
    elif not types:
        types = (type(value),)

    if value is None:
        raise ValueError('"value" must not be None')
    if not resource_name_re.fullmatch(name):
        raise ValueError('"name" must be a nonempty string consisting only of alphanumeric '
                         'characters and underscores')
    if context_attr and getattr_static(self, context_attr, None) is not None:
        raise ResourceConflict('this context already has an attribute {!r}'.format(
            context_attr))
    for resource_type in types:
        if (resource_type, name) in self._resources:
            raise ResourceConflict(
                'this context already contains a resource of type {} using the name {!r}'.
                format(qualified_name(resource_type), name))

    # Register the resource under every requested type.
    resource = ResourceContainer(value, tuple(types), name, context_attr, False)
    for type_ in resource.types:
        self._resources[(type_, name)] = resource

    if context_attr:
        setattr(self, context_attr, value)

    # Notify listeners that a new resource has been made available
    self.resource_added.dispatch(types, name, False)
python
{ "resource": "" }
q5982
Context.add_resource_factory
train
def add_resource_factory(self, factory_callback: factory_callback_type,
                         types: Union[type, Sequence[Type]], name: str = 'default',
                         context_attr: str = None) -> None:
    """
    Add a resource factory to this context.

    This will cause a ``resource_added`` event to be dispatched.

    A resource factory is a callable that generates a "contextual" resource when it is
    requested by either using any of the methods :meth:`get_resource`,
    :meth:`require_resource` or :meth:`request_resource` or its context attribute is
    accessed.

    When a new resource is created in this manner, it is always bound to the context
    through it was requested, regardless of where in the chain the factory itself was
    added to.

    :param factory_callback: a (non-coroutine) callable that takes a context instance as
        argument and returns the created resource object
    :param types: one or more types to register the generated resource as on the target
        context
    :param name: name of the resource that will be created in the target context
    :param context_attr: name of the context attribute the created resource will be
        accessible as
    :raises asphalt.core.context.ResourceConflict: if there is an existing resource
        factory for the given type/name combinations or the given context variable

    """
    assert check_argument_types()
    self._check_closed()
    if not resource_name_re.fullmatch(name):
        raise ValueError('"name" must be a nonempty string consisting only of alphanumeric '
                         'characters and underscores')
    # Factories are invoked synchronously (e.g. via attribute access), so
    # coroutine functions cannot be supported here.
    if iscoroutinefunction(factory_callback):
        raise TypeError('"factory_callback" must not be a coroutine function')
    if not types:
        raise ValueError('"types" must not be empty')

    # Normalize ``types`` into a tuple of types.
    if isinstance(types, type):
        resource_types = (types,)  # type: Tuple[type, ...]
    else:
        resource_types = tuple(types)

    # Check for a conflicting context attribute
    if context_attr in self._resource_factories_by_context_attr:
        raise ResourceConflict(
            'this context already contains a resource factory for the context attribute {!r}'.
            format(context_attr))

    # Check for conflicts with existing resource factories
    for type_ in resource_types:
        if (type_, name) in self._resource_factories:
            raise ResourceConflict('this context already contains a resource factory for the '
                                   'type {}'.format(qualified_name(type_)))

    # Add the resource factory to the appropriate lookup tables
    resource = ResourceContainer(factory_callback, resource_types, name, context_attr, True)
    for type_ in resource_types:
        self._resource_factories[(type_, name)] = resource

    if context_attr:
        self._resource_factories_by_context_attr[context_attr] = resource

    # Notify listeners that a new resource has been made available
    self.resource_added.dispatch(resource_types, name, True)
python
{ "resource": "" }
q5983
Context.get_resources
train
def get_resources(self, type: Type[T_Resource]) -> Set[T_Resource]:
    """
    Retrieve all the resources of the given type in this context and its parents.

    Any matching resource factories are also triggered if necessary.

    :param type: type of the resources to get
    :return: a set of all found resources of the given type
    """
    assert check_argument_types()

    # Collect all the matching resources from this context first.  The dict is
    # keyed by resource name so that entries found in earlier passes shadow
    # same-named entries from later passes (local resources win over factories,
    # which win over parent-context resources).
    resources = {container.name: container.value_or_factory
                 for container in self._resources.values()
                 if not container.is_factory and type in container.types
                 }  # type: Dict[str, T_Resource]

    # Next, find all matching resource factories in the context chain and
    # generate resources from them — but only for names not already claimed.
    resources.update({container.name: container.generate_value(self)
                      for ctx in self.context_chain
                      for container in ctx._resources.values()
                      if container.is_factory and type in container.types and
                      container.name not in resources})

    # Finally, add the plain resource values from the parent contexts
    # (context_chain[1:] skips this context, handled in the first pass).
    resources.update({container.name: container.value_or_factory
                      for ctx in self.context_chain[1:]
                      for container in ctx._resources.values()
                      if not container.is_factory and type in container.types and
                      container.name not in resources})

    return set(resources.values())
python
{ "resource": "" }
q5984
Context.require_resource
train
def require_resource(self, type: Type[T_Resource], name: str = 'default') -> T_Resource:
    """
    Look up a resource in the chain of contexts, failing loudly when it is absent.

    Behaves exactly like :meth:`get_resource` except that a missing resource results
    in a :exc:`~asphalt.core.context.ResourceNotFound` error instead of a ``None``
    return value.

    :param type: type of the requested resource
    :param name: name of the requested resource
    :return: the requested resource
    :raises asphalt.core.context.ResourceNotFound: if a resource of the given type and
        name was not found
    """
    found = self.get_resource(type, name)
    if found is None:
        raise ResourceNotFound(type, name)

    return found
python
{ "resource": "" }
q5985
Context.call_async
train
def call_async(self, func: Callable, *args, **kwargs):
    """
    Call the given callable in the event loop thread.

    This method lets you call asynchronous code from a worker thread.
    Do not use it from within the event loop thread.

    If the callable returns an awaitable, it is resolved before returning to the caller.

    :param func: a regular function or a coroutine function
    :param args: positional arguments to call the callable with
    :param kwargs: keyword arguments to call the callable with
    :return: the return value of the call

    """
    # Delegates to asyncio_extras.call_async, which bridges the calling worker
    # thread and ``self.loop`` (presumably blocking the worker until the result
    # is ready — confirm against the asyncio_extras documentation).
    return asyncio_extras.call_async(self.loop, func, *args, **kwargs)
python
{ "resource": "" }
q5986
Context.call_in_executor
train
def call_in_executor(self, func: Callable, *args, executor: Union[Executor, str] = None,
                     **kwargs) -> Awaitable:
    """
    Call the given callable in an executor.

    :param func: the callable to call
    :param args: positional arguments to call the callable with
    :param executor: either an :class:`~concurrent.futures.Executor` instance, the
        resource name of one or ``None`` to use the event loop's default executor
    :param kwargs: keyword arguments to call the callable with
    :return: an awaitable that resolves to the return value of the call
    """
    assert check_argument_types()
    # A string names an Executor resource published somewhere on this context chain.
    resolved = (self.require_resource(Executor, executor)
                if isinstance(executor, str) else executor)
    return asyncio_extras.call_in_executor(func, *args, executor=resolved, **kwargs)
python
{ "resource": "" }
q5987
stream_events
train
def stream_events(signals: Sequence[Signal], filter: Callable[[T_Event], bool] = None, *,
                  max_queue_size: int = 0) -> AsyncIterator[T_Event]:
    """
    Return an async generator that yields events from the given signals.

    Only events that pass the filter callable (if one has been given) are returned.
    If no filter function was given, all events are yielded from the generator.

    :param signals: the signals to get events from
    :param filter: a callable that takes an event object as an argument and returns
        ``True`` if the event should pass, ``False`` if not
    :param max_queue_size: maximum size of the queue, after which it will start to drop
        events

    """
    @async_generator
    async def streamer():
        # Pull events off the queue until the consumer closes the generator
        # (or it is garbage collected); either way the ``finally`` block
        # disconnects from the signals.
        try:
            while True:
                event = await queue.get()
                if filter is None or filter(event):
                    await yield_(event)
        finally:
            cleanup()

    def cleanup():
        # Disconnect from every signal exactly once; ``queue`` being None
        # doubles as the "already cleaned up" flag.
        nonlocal queue
        if queue is not None:
            for signal in signals:
                signal.disconnect(queue.put_nowait)
            queue = None

    assert check_argument_types()
    queue = Queue(max_queue_size)  # type: Queue[T_Event]
    for signal in signals:
        signal.connect(queue.put_nowait)

    # Wrap the generator in a single-element list and hand it back via pop()
    # so no local reference survives this frame; this lets the reference count
    # drop to 0 when the caller discards the generator, firing the finalizer.
    gen = [streamer()]
    weakref.finalize(gen[0], cleanup)
    return gen.pop()
python
{ "resource": "" }
q5988
Signal.connect
train
def connect(self, callback: Callable[[T_Event], Any]) -> Callable[[T_Event], Any]:
    """
    Register a callback on this signal.

    A given callable is only ever registered once; attempting to connect it again is
    a no-op. Wrap the callable with :func:`functools.partial` if extra arguments need
    to be passed to it.

    :param callback: a callable that will receive an event object as its only argument
    :return: the value of ``callback`` argument
    """
    assert check_argument_types()
    registered = self.listeners
    if registered is None:
        # The listener list is created lazily on the first connect.
        registered = self.listeners = []

    if callback not in registered:
        registered.append(callback)

    return callback
python
{ "resource": "" }
q5989
Signal.disconnect
train
def disconnect(self, callback: Callable) -> None:
    """
    Remove a previously connected callback from this signal.

    The callback will no longer receive events from this signal. Nothing happens if
    the callback is not on the list of listeners (or no listener list exists yet).

    :param callback: the callable to remove
    """
    assert check_argument_types()
    if self.listeners is None:
        return

    try:
        self.listeners.remove(callback)
    except ValueError:
        # Not registered — silently ignore, mirroring connect()'s idempotence.
        pass
python
{ "resource": "" }
q5990
Signal.dispatch_raw
train
def dispatch_raw(self, event: Event) -> Awaitable[bool]:
    """
    Dispatch the given event object to all listeners.

    Creates a new task in which all listener callbacks are called with the given event
    as the only argument. Coroutine callbacks are converted to their own respective
    tasks and waited for concurrently.

    Before the dispatching is done, a snapshot of the listeners is taken and the event
    is only dispatched to those listeners, so adding a listener between the call to
    this method and the actual dispatching will only affect future calls to this
    method.

    :param event: the event object to dispatch
    :returns: an awaitable that completes when all the callbacks have been called (and
        any awaitables waited on) and resolves to ``True`` if there were no exceptions
        raised by the callbacks, ``False`` otherwise

    """
    async def do_dispatch() -> None:
        awaitables = []
        all_successful = True
        for callback in listeners:
            try:
                retval = callback(event)
            except Exception:
                # A failing synchronous listener must not prevent the
                # remaining listeners from being notified.
                logger.exception('Uncaught exception in event listener')
                all_successful = False
            else:
                if isawaitable(retval):
                    awaitables.append(retval)

        # For any callbacks that returned awaitables, wait for their completion and
        # log any exceptions they raised
        if awaitables:
            done, _ = await wait(awaitables, loop=loop)
            for f in done:
                exc = f.exception()
                if exc is not None:
                    all_successful = False
                    logger.error('Uncaught exception in event listener', exc_info=exc)

        # The caller may have cancelled the returned future in the meantime.
        if not future.cancelled():
            future.set_result(all_successful)

    if not isinstance(event, self.event_class):
        raise TypeError('event must be of type {}'.format(qualified_name(self.event_class)))

    loop = get_event_loop()
    future = loop.create_future()
    if self.listeners:
        # Snapshot the listener list so connects/disconnects during dispatch
        # do not affect this round of notifications.
        listeners = list(self.listeners)
        loop.create_task(do_dispatch())
    else:
        # Nothing to notify — resolve immediately with success.
        future.set_result(True)

    return future
python
{ "resource": "" }
q5991
Signal.dispatch
train
def dispatch(self, *args, **kwargs) -> Awaitable[bool]:
    """
    Construct an event and dispatch it to this signal's listeners.

    The positional and keyword arguments are forwarded to the constructor of the
    associated event class; the resulting event object is handed to
    :meth:`dispatch_raw` for the actual delivery.

    :param args: positional arguments to the constructor of the associated event class
    :param kwargs: keyword arguments to the constructor of the associated event class
    :returns: an awaitable that completes when all the callbacks have been called (and
        any awaitables waited on) and resolves to ``True`` if there were no exceptions
        raised by the callbacks, ``False`` otherwise
    """
    topic = cast(str, self.topic)
    event = self.event_class(self.source(), topic, *args, **kwargs)
    return self.dispatch_raw(event)
python
{ "resource": "" }
q5992
stream._escape_sequence
train
def _escape_sequence(self, char): """ Handle characters seen when in an escape sequence. Most non-vt52 commands start with a left-bracket after the escape and then a stream of parameters and a command. """ num = ord(char) if char == "[": self.state = "escape-lb" elif char == "(": self.state = "charset-g0" elif char == ")": self.state = "charset-g1" elif num in self.escape: self.dispatch(self.escape[num]) self.state = "stream" elif self.fail_on_unknown_esc: raise StreamProcessError("Unexpected character '%c' == '0x%02x'" % (char, ord(char)))
python
{ "resource": "" }
q5993
stream._end_escape_sequence
train
def _end_escape_sequence(self, char): """ Handle the end of an escape sequence. The final character in an escape sequence is the command to execute, which corresponds to the event that is dispatched here. """ num = ord(char) if num in self.sequence: self.dispatch(self.sequence[num], *self.params) self.state = "stream" self.current_param = "" self.params = []
python
{ "resource": "" }
q5994
stream._stream
train
def _stream(self, char): """ Process a character when in the default 'stream' state. """ num = ord(char) if num in self.basic: self.dispatch(self.basic[num]) elif num == ctrl.ESC: self.state = "escape" elif num == 0x00: # nulls are just ignored. pass else: self.dispatch("print", char)
python
{ "resource": "" }
q5995
stream.consume
train
def consume(self, char):
    """
    Feed a single character to the handler for the current parser state.
    """
    handlers = {
        "stream": self._stream,
        "escape": self._escape_sequence,
        "escape-lb": self._escape_parameters,
        "mode": self._mode,
        "charset-g0": self._charset_g0,
        "charset-g1": self._charset_g1,
    }
    handler = handlers.get(self.state)
    if handler is not None:
        handler(char)
python
{ "resource": "" }
q5996
stream.process
train
def process(self, chars):
    """
    Consume a string of characters, advancing the parser state as necessary.

    :param chars: an iterable of single characters to feed to :meth:`consume`
        (a plain ``str`` in typical use)
    """
    # Iterate directly instead of repeatedly re-slicing the input
    # (``chars = chars[1:]``), which made the original implementation
    # quadratic in the input length.
    for char in chars:
        self.consume(char)
python
{ "resource": "" }
q5997
stream.dispatch
train
def dispatch(self, event, *args):
    """
    Dispatch an event to every callback registered for it.

    :param event: the event name to dispatch
    :param args: positional arguments passed through to each callback

    If a callback raises an exception it propagates immediately, so any
    remaining callbacks for this event are not invoked.
    """
    for callback in self.listeners.get(event, []):
        # ``callback(*args)`` with an empty tuple is exactly ``callback()``,
        # so the original zero-argument special case was redundant.
        callback(*args)
python
{ "resource": "" }
q5998
screen.attach
train
def attach(self, events):
    """
    Wire this screen up to an event source that processes terminal commands.

    Registers a handler for every event the screen knows how to render, so the
    display updates itself automatically as the source processes data. Passing
    ``None`` is a no-op.
    """
    if events is None:
        return

    handlers = [
        ("print", self._print),
        ("backspace", self._backspace),
        ("tab", self._tab),
        ("linefeed", self._linefeed),
        ("reverse-linefeed", self._reverse_linefeed),
        ("carriage-return", self._carriage_return),
        ("index", self._index),
        ("reverse-index", self._reverse_index),
        ("store-cursor", self._save_cursor),
        ("restore-cursor", self._restore_cursor),
        ("cursor-up", self._cursor_up),
        ("cursor-down", self._cursor_down),
        ("cursor-right", self._cursor_forward),
        ("cursor-left", self._cursor_back),
        ("cursor-move", self._cursor_position),
        ("erase-in-line", self._erase_in_line),
        ("erase-in-display", self._erase_in_display),
        ("delete-characters", self._delete_character),
        ("insert-lines", self._insert_line),
        ("delete-lines", self._delete_line),
        ("select-graphic-rendition", self._select_graphic_rendition),
        ("charset-g0", self._charset_g0),
        ("charset-g1", self._charset_g1),
        ("shift-in", self._shift_in),
        ("shift-out", self._shift_out),
        ("bell", self._bell),
    ]
    for event, handler in handlers:
        events.add_event_listener(event, handler)
python
{ "resource": "" }
q5999
screen.resize
train
def resize(self, shape):
    """
    Resize the screen to ``shape`` == (rows, cols).

    Growing adds blank rows at the bottom and blank columns at the right;
    shrinking clips rows from the top and columns from the right.

    :param shape: the requested (rows, cols) size; both must be positive
    :return: the new (rows, cols) size of the screen
    """
    rows, cols = shape
    # Honestly though, you can't trust anyone these days...
    assert rows > 0 and cols > 0

    # --- First resize the rows. ---
    if self.size[0] < rows:
        # Add blank rows at the bottom, using the *old* width so the column
        # pass below expands/contracts them along with the existing rows.
        extra = rows - self.size[0]
        self.display += [u" " * self.size[1]] * extra
        # BUGFIX: build each attribute row independently.  The original used
        # ``[[default] * w] * extra``, which made every new row share ONE
        # inner list, so writing an attribute in one new row changed them all.
        self.attributes += [[self.default_attributes] * self.size[1]
                            for _ in range(extra)]
    elif self.size[0] > rows:
        # Clip surplus rows from the top of the screen.
        self.display = self.display[-rows:]
        self.attributes = self.attributes[-rows:]

    # --- Next, of course, resize the columns. ---
    if self.size[1] < cols:
        # Pad each row on the right up to the new width.
        pad = cols - self.size[1]
        self.display = [row + u" " * pad for row in self.display]
        self.attributes = [row + [self.default_attributes] * pad
                           for row in self.attributes]
    elif self.size[1] > cols:
        # Keep only the leftmost ``cols`` cells of each row.  (Equivalent to
        # the original's obscure negative slice ``row[:cols - old_width]``.)
        self.display = [row[:cols] for row in self.display]
        self.attributes = [row[:cols] for row in self.attributes]

    self.size = (rows, cols)
    return self.size
python
{ "resource": "" }