text stringlengths 0 828 |
|---|
newInstance.__dict__.update(jsonobject) |
newInstance.uniquePeptides = set(newInstance.uniquePeptides) |
newInstance.sharedPeptides = set(newInstance.sharedPeptides) |
return newInstance" |
1982,"def save(self, path, compress=True): |
""""""Writes the ``.proteins`` and ``.peptides`` entries to the hard disk |
as a ``proteindb`` file. |
.. note:: |
If ``.save()`` is called and no ``proteindb`` file is present in the |
specified path a new file is generated, otherwise the old file is |
replaced. |
:param path: filedirectory to which the ``proteindb`` file is written. |
The output file name is specified by ``self.info['name']`` |
:param compress: bool, True to use zip file compression |
"""""" |
with aux.PartiallySafeReplace() as msr: |
filename = self.info['name'] + '.proteindb' |
filepath = aux.joinpath(path, filename) |
with msr.open(filepath, mode='w+b') as openfile: |
self._writeContainer(openfile, compress=compress)" |
1983,"def _writeContainer(self, filelike, compress=True): |
""""""Writes the ``.proteins`` and ``.peptides`` entries to the |
``proteindb`` format. In addition it also dumps the ``self.info`` entry |
to the zipfile with the filename ``info``. For details see |
:func:`maspy.auxiliary.writeJsonZipfile()` |
:param filelike: path to a file (str) or a file-like object |
:param compress: bool, True to use zip file compression |
"""""" |
aux.writeJsonZipfile(filelike, self.proteins, compress, 'w', 'proteins') |
aux.writeJsonZipfile(filelike, self.peptides, compress, 'a', 'peptides') |
zipcomp = zipfile.ZIP_DEFLATED if compress else zipfile.ZIP_STORED |
with zipfile.ZipFile(filelike, 'a', allowZip64=True) as containerFile: |
infodata = {key: value for key, value in |
viewitems(self.info) if key != 'path' |
} |
containerFile.writestr('info', json.dumps(infodata, zipcomp))" |
1984,"def load(cls, path, name): |
""""""Imports the specified ``proteindb`` file from the hard disk. |
:param path: filedirectory of the ``proteindb`` file |
:param name: filename without the file extension "".proteindb"" |
.. note:: this generates rather large files, which actually take longer |
to import than to newly generate. Maybe saving / loading should be |
limited to the protein database without in silico digestion |
information. |
"""""" |
filepath = aux.joinpath(path, name + '.proteindb') |
with zipfile.ZipFile(filepath, 'r', allowZip64=True) as containerZip: |
#Convert the zipfile data into a str object, necessary since |
#containerZip.read() returns a bytes object. |
proteinsString = io.TextIOWrapper(containerZip.open('proteins'), |
encoding='utf-8' |
).read() |
peptidesString = io.TextIOWrapper(containerZip.open('peptides'), |
encoding='utf-8' |
).read() |
infoString = io.TextIOWrapper(containerZip.open('info'), |
encoding='utf-8' |
).read() |
newInstance = cls() |
newInstance.proteins = json.loads(proteinsString, |
object_hook=ProteinSequence.jsonHook) |
newInstance.peptides = json.loads(peptidesString, |
object_hook=PeptideSequence.jsonHook) |
newInstance.info.update(json.loads(infoString)) |
return newInstance" |
1985,"def _calculateCoverageMasks(proteindb, peptidedb): |
""""""Calcualte the sequence coverage masks for all proteindb elements. |
Private method used by :class:`ProteinDatabase`. |
A coverage mask is a numpy boolean array with the length of the protein |
sequence. Each protein position that has been covered in at least one |
peptide is set to True. Coverage masks are calculated for unique and for |
shared peptides. Peptides are matched to proteins according to positions |
derived by the digestion of the FASTA file. |
Alternatively peptides could also be matched to proteins just by |
sequence as it is done in :func:`pyteomics.parser.coverage`, but this is |
not the case here. |
:param proteindb: a dictionary containing :class:`ProteinSequence` |
entries, for example ``ProteinDatabase.proteins`` |
:param peptidedb: a dictionary containing :class:`PeptideSequence` |
entries, for example ``ProteinDatabase.peptides`` |
Sets two attributes for each ``ProteinSequence`` entry: |
``.coverageMaskUnique`` = coverage mask of unique peptides |
``.coverageMaskShared`` = coverage mask of shared peptides |
"""""" |
for proteinId, proteinEntry in viewitems(proteindb): |
coverageMaskUnique = numpy.zeros(proteinEntry.length(), dtype='bool') |
for peptide in proteinEntry.uniquePeptides: |
startPos, endPos = peptidedb[peptide].proteinPositions[proteinId] |
coverageMaskUnique[startPos-1:endPos] = True |
coverageMaskShared = numpy.zeros(proteinEntry.length(), dtype='bool') |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.