repository_name
stringlengths
7
55
func_path_in_repository
stringlengths
4
223
func_name
stringlengths
1
134
whole_func_string
stringlengths
75
104k
language
stringclasses
1 value
func_code_string
stringlengths
75
104k
func_code_tokens
listlengths
19
28.4k
func_documentation_string
stringlengths
1
46.9k
func_documentation_tokens
listlengths
1
1.97k
split_name
stringclasses
1 value
func_code_url
stringlengths
87
315
swharden/PyOriginTools
PyOriginTools/workbook.py
SHEET.colAdd
def colAdd(self,name="",desc="",unit="",comment="",coltype=0,data=[],pos=None): """ column types: 0: Y 1: Disregard 2: Y Error 3: X 4: Label 5: Z 6: X Error """ if pos is None: pos=len(self.colNames) self.colNames.insert(pos,name) self.colDesc.insert(pos,desc) self.colUnits.insert(pos,unit) self.colComments.insert(pos,comment) self.colTypes.insert(pos,coltype) self.colData.insert(pos,data) return
python
def colAdd(self,name="",desc="",unit="",comment="",coltype=0,data=None,pos=None):
    """Insert a new column (with its metadata) at position ``pos``.

    Column types:
      0: Y, 1: Disregard, 2: Y Error, 3: X, 4: Label, 5: Z, 6: X Error

    *Args:*
        name / desc / unit / comment (str): column metadata strings.
        coltype (int): one of the type codes above (default 0 = Y).
        data (list): the column's values; defaults to a new empty list.
        pos (int): insertion index; defaults to appending after the last column.
    """
    # BUG FIX: a `data=[]` default is evaluated once and shared across every
    # call in Python; use None as the sentinel and build a fresh list per call.
    if data is None:
        data=[]
    if pos is None:
        pos=len(self.colNames)
    self.colNames.insert(pos,name)
    self.colDesc.insert(pos,desc)
    self.colUnits.insert(pos,unit)
    self.colComments.insert(pos,comment)
    self.colTypes.insert(pos,coltype)
    self.colData.insert(pos,data)
    return
[ "def", "colAdd", "(", "self", ",", "name", "=", "\"\"", ",", "desc", "=", "\"\"", ",", "unit", "=", "\"\"", ",", "comment", "=", "\"\"", ",", "coltype", "=", "0", ",", "data", "=", "[", "]", ",", "pos", "=", "None", ")", ":", "if", "pos", "is", "None", ":", "pos", "=", "len", "(", "self", ".", "colNames", ")", "self", ".", "colNames", ".", "insert", "(", "pos", ",", "name", ")", "self", ".", "colDesc", ".", "insert", "(", "pos", ",", "desc", ")", "self", ".", "colUnits", ".", "insert", "(", "pos", ",", "unit", ")", "self", ".", "colComments", ".", "insert", "(", "pos", ",", "comment", ")", "self", ".", "colTypes", ".", "insert", "(", "pos", ",", "coltype", ")", "self", ".", "colData", ".", "insert", "(", "pos", ",", "data", ")", "return" ]
column types: 0: Y 1: Disregard 2: Y Error 3: X 4: Label 5: Z 6: X Error
[ "column", "types", ":", "0", ":", "Y", "1", ":", "Disregard", "2", ":", "Y", "Error", "3", ":", "X", "4", ":", "Label", "5", ":", "Z", "6", ":", "X", "Error" ]
train
https://github.com/swharden/PyOriginTools/blob/536fb8e11234ffdc27e26b1800e0358179ca7d26/PyOriginTools/workbook.py#L49-L68
swharden/PyOriginTools
PyOriginTools/workbook.py
SHEET.colDelete
def colDelete(self,colI=-1): """delete a column at a single index. Negative numbers count from the end.""" # print("DELETING COLUMN: [%d] %s"%(colI,self.colDesc[colI])) self.colNames.pop(colI) self.colDesc.pop(colI) self.colUnits.pop(colI) self.colComments.pop(colI) self.colTypes.pop(colI) self.colData.pop(colI) return
python
def colDelete(self,colI=-1):
    """Remove the column at index ``colI`` from every per-column list.

    Negative indices count from the end (default -1 removes the last column).
    """
    for columnList in (self.colNames, self.colDesc, self.colUnits,
                       self.colComments, self.colTypes, self.colData):
        columnList.pop(colI)
    return
[ "def", "colDelete", "(", "self", ",", "colI", "=", "-", "1", ")", ":", "# print(\"DELETING COLUMN: [%d] %s\"%(colI,self.colDesc[colI]))", "self", ".", "colNames", ".", "pop", "(", "colI", ")", "self", ".", "colDesc", ".", "pop", "(", "colI", ")", "self", ".", "colUnits", ".", "pop", "(", "colI", ")", "self", ".", "colComments", ".", "pop", "(", "colI", ")", "self", ".", "colTypes", ".", "pop", "(", "colI", ")", "self", ".", "colData", ".", "pop", "(", "colI", ")", "return" ]
delete a column at a single index. Negative numbers count from the end.
[ "delete", "a", "column", "at", "a", "single", "index", ".", "Negative", "numbers", "count", "from", "the", "end", "." ]
train
https://github.com/swharden/PyOriginTools/blob/536fb8e11234ffdc27e26b1800e0358179ca7d26/PyOriginTools/workbook.py#L70-L79
swharden/PyOriginTools
PyOriginTools/workbook.py
SHEET.onex
def onex(self): """ delete all X columns except the first one. """ xCols=[i for i in range(self.nCols) if self.colTypes[i]==3] if len(xCols)>1: for colI in xCols[1:][::-1]: self.colDelete(colI)
python
def onex(self):
    """Keep only the first X-type (type 3) column; delete the rest.

    Extra X columns are removed back-to-front so earlier indices stay valid.
    """
    extraXCols = [i for i in range(self.nCols) if self.colTypes[i] == 3][1:]
    for colI in reversed(extraXCols):
        self.colDelete(colI)
[ "def", "onex", "(", "self", ")", ":", "xCols", "=", "[", "i", "for", "i", "in", "range", "(", "self", ".", "nCols", ")", "if", "self", ".", "colTypes", "[", "i", "]", "==", "3", "]", "if", "len", "(", "xCols", ")", ">", "1", ":", "for", "colI", "in", "xCols", "[", "1", ":", "]", "[", ":", ":", "-", "1", "]", ":", "self", ".", "colDelete", "(", "colI", ")" ]
delete all X columns except the first one.
[ "delete", "all", "X", "columns", "except", "the", "first", "one", "." ]
train
https://github.com/swharden/PyOriginTools/blob/536fb8e11234ffdc27e26b1800e0358179ca7d26/PyOriginTools/workbook.py#L81-L88
swharden/PyOriginTools
PyOriginTools/workbook.py
SHEET.alignXY
def alignXY(self): """aligns XY pairs (or XYYY etc) by X value.""" # figure out what data we have and will align to xVals=[] xCols=[x for x in range(self.nCols) if self.colTypes[x]==3] yCols=[x for x in range(self.nCols) if self.colTypes[x]==0] xCols,yCols=np.array(xCols),np.array(yCols) for xCol in xCols: xVals.extend(self.colData[xCol]) #xVals=list(np.round(set(xVals),5)) xVals=list(sorted(list(set(xVals)))) # prepare our new aligned dataset newData=np.empty(len(xVals)*self.nCols) newData[:]=np.nan newData=newData.reshape(len(xVals),self.nCols) oldData=np.round(self.data,5) # do the alignment for xCol in xCols: columnsToShift=[xCol] for col in range(xCol+1,self.nCols): if self.colTypes[col]==0: columnsToShift.append(col) else: break # determine how to move each row for row in range(len(oldData)): oldXvalue=oldData[row,xCol] if oldXvalue in xVals: newRow=xVals.index(oldXvalue) newData[newRow,columnsToShift]=oldData[row,columnsToShift] # commit changes newData[:,0]=xVals self.data=newData self.onex()
python
def alignXY(self): """aligns XY pairs (or XYYY etc) by X value.""" # figure out what data we have and will align to xVals=[] xCols=[x for x in range(self.nCols) if self.colTypes[x]==3] yCols=[x for x in range(self.nCols) if self.colTypes[x]==0] xCols,yCols=np.array(xCols),np.array(yCols) for xCol in xCols: xVals.extend(self.colData[xCol]) #xVals=list(np.round(set(xVals),5)) xVals=list(sorted(list(set(xVals)))) # prepare our new aligned dataset newData=np.empty(len(xVals)*self.nCols) newData[:]=np.nan newData=newData.reshape(len(xVals),self.nCols) oldData=np.round(self.data,5) # do the alignment for xCol in xCols: columnsToShift=[xCol] for col in range(xCol+1,self.nCols): if self.colTypes[col]==0: columnsToShift.append(col) else: break # determine how to move each row for row in range(len(oldData)): oldXvalue=oldData[row,xCol] if oldXvalue in xVals: newRow=xVals.index(oldXvalue) newData[newRow,columnsToShift]=oldData[row,columnsToShift] # commit changes newData[:,0]=xVals self.data=newData self.onex()
[ "def", "alignXY", "(", "self", ")", ":", "# figure out what data we have and will align to", "xVals", "=", "[", "]", "xCols", "=", "[", "x", "for", "x", "in", "range", "(", "self", ".", "nCols", ")", "if", "self", ".", "colTypes", "[", "x", "]", "==", "3", "]", "yCols", "=", "[", "x", "for", "x", "in", "range", "(", "self", ".", "nCols", ")", "if", "self", ".", "colTypes", "[", "x", "]", "==", "0", "]", "xCols", ",", "yCols", "=", "np", ".", "array", "(", "xCols", ")", ",", "np", ".", "array", "(", "yCols", ")", "for", "xCol", "in", "xCols", ":", "xVals", ".", "extend", "(", "self", ".", "colData", "[", "xCol", "]", ")", "#xVals=list(np.round(set(xVals),5))", "xVals", "=", "list", "(", "sorted", "(", "list", "(", "set", "(", "xVals", ")", ")", ")", ")", "# prepare our new aligned dataset", "newData", "=", "np", ".", "empty", "(", "len", "(", "xVals", ")", "*", "self", ".", "nCols", ")", "newData", "[", ":", "]", "=", "np", ".", "nan", "newData", "=", "newData", ".", "reshape", "(", "len", "(", "xVals", ")", ",", "self", ".", "nCols", ")", "oldData", "=", "np", ".", "round", "(", "self", ".", "data", ",", "5", ")", "# do the alignment", "for", "xCol", "in", "xCols", ":", "columnsToShift", "=", "[", "xCol", "]", "for", "col", "in", "range", "(", "xCol", "+", "1", ",", "self", ".", "nCols", ")", ":", "if", "self", ".", "colTypes", "[", "col", "]", "==", "0", ":", "columnsToShift", ".", "append", "(", "col", ")", "else", ":", "break", "# determine how to move each row", "for", "row", "in", "range", "(", "len", "(", "oldData", ")", ")", ":", "oldXvalue", "=", "oldData", "[", "row", ",", "xCol", "]", "if", "oldXvalue", "in", "xVals", ":", "newRow", "=", "xVals", ".", "index", "(", "oldXvalue", ")", "newData", "[", "newRow", ",", "columnsToShift", "]", "=", "oldData", "[", "row", ",", "columnsToShift", "]", "# commit changes", "newData", "[", ":", ",", "0", "]", "=", "xVals", "self", ".", "data", "=", "newData", "self", ".", "onex", "(", ")" ]
aligns XY pairs (or XYYY etc) by X value.
[ "aligns", "XY", "pairs", "(", "or", "XYYY", "etc", ")", "by", "X", "value", "." ]
train
https://github.com/swharden/PyOriginTools/blob/536fb8e11234ffdc27e26b1800e0358179ca7d26/PyOriginTools/workbook.py#L90-L127
swharden/PyOriginTools
PyOriginTools/workbook.py
SHEET.wiggle
def wiggle(self,noiseLevel=.1): """Slightly changes value of every cell in the worksheet. Used for testing.""" noise=(np.random.rand(*self.data.shape))-.5 self.data=self.data+noise*noiseLevel
python
def wiggle(self,noiseLevel=.1):
    """Add uniform random noise (scaled by ``noiseLevel``) to every cell.

    Noise is drawn from [-0.5, 0.5) and multiplied by ``noiseLevel``.
    Used for testing.
    """
    jitter = np.random.rand(*self.data.shape) - .5
    self.data = self.data + jitter * noiseLevel
[ "def", "wiggle", "(", "self", ",", "noiseLevel", "=", ".1", ")", ":", "noise", "=", "(", "np", ".", "random", ".", "rand", "(", "*", "self", ".", "data", ".", "shape", ")", ")", "-", ".5", "self", ".", "data", "=", "self", ".", "data", "+", "noise", "*", "noiseLevel" ]
Slightly changes value of every cell in the worksheet. Used for testing.
[ "Slightly", "changes", "value", "of", "every", "cell", "in", "the", "worksheet", ".", "Used", "for", "testing", "." ]
train
https://github.com/swharden/PyOriginTools/blob/536fb8e11234ffdc27e26b1800e0358179ca7d26/PyOriginTools/workbook.py#L131-L134
swharden/PyOriginTools
PyOriginTools/workbook.py
SHEET.pull
def pull(self,bookName=None,sheetName=None): """pull data into this OR.SHEET from a real book/sheet in Origin""" # tons of validation if bookName is None and self.bookName: bookName=self.bookName if sheetName is None and self.sheetName: sheetName=self.sheetName if bookName is None: bookName=OR.activeBook() if bookName and sheetName is None: sheetName=OR.activeSheet() if not bookName or not sheetName: print("can't figure out where to pull from! [%s]%s"%(bookName,sheetName)) return # finally doing the thing poSheet=OR.getSheet(bookName,sheetName) self.bookName=bookName self.sheetName=sheetName self.desc=poSheet.GetLongName() self.colNames=[poCol.GetName() for poCol in poSheet.Columns()] self.colDesc=[poCol.GetLongName() for poCol in poSheet.Columns()] self.colUnits=[poCol.GetUnits() for poCol in poSheet.Columns()] self.colComments=[poCol.GetComments() for poCol in poSheet.Columns()] self.colTypes=[poCol.GetType() for poCol in poSheet.Columns()] self.colData=[poCol.GetData() for poCol in poSheet.Columns()]
python
def pull(self,bookName=None,sheetName=None): """pull data into this OR.SHEET from a real book/sheet in Origin""" # tons of validation if bookName is None and self.bookName: bookName=self.bookName if sheetName is None and self.sheetName: sheetName=self.sheetName if bookName is None: bookName=OR.activeBook() if bookName and sheetName is None: sheetName=OR.activeSheet() if not bookName or not sheetName: print("can't figure out where to pull from! [%s]%s"%(bookName,sheetName)) return # finally doing the thing poSheet=OR.getSheet(bookName,sheetName) self.bookName=bookName self.sheetName=sheetName self.desc=poSheet.GetLongName() self.colNames=[poCol.GetName() for poCol in poSheet.Columns()] self.colDesc=[poCol.GetLongName() for poCol in poSheet.Columns()] self.colUnits=[poCol.GetUnits() for poCol in poSheet.Columns()] self.colComments=[poCol.GetComments() for poCol in poSheet.Columns()] self.colTypes=[poCol.GetType() for poCol in poSheet.Columns()] self.colData=[poCol.GetData() for poCol in poSheet.Columns()]
[ "def", "pull", "(", "self", ",", "bookName", "=", "None", ",", "sheetName", "=", "None", ")", ":", "# tons of validation", "if", "bookName", "is", "None", "and", "self", ".", "bookName", ":", "bookName", "=", "self", ".", "bookName", "if", "sheetName", "is", "None", "and", "self", ".", "sheetName", ":", "sheetName", "=", "self", ".", "sheetName", "if", "bookName", "is", "None", ":", "bookName", "=", "OR", ".", "activeBook", "(", ")", "if", "bookName", "and", "sheetName", "is", "None", ":", "sheetName", "=", "OR", ".", "activeSheet", "(", ")", "if", "not", "bookName", "or", "not", "sheetName", ":", "print", "(", "\"can't figure out where to pull from! [%s]%s\"", "%", "(", "bookName", ",", "sheetName", ")", ")", "return", "# finally doing the thing", "poSheet", "=", "OR", ".", "getSheet", "(", "bookName", ",", "sheetName", ")", "self", ".", "bookName", "=", "bookName", "self", ".", "sheetName", "=", "sheetName", "self", ".", "desc", "=", "poSheet", ".", "GetLongName", "(", ")", "self", ".", "colNames", "=", "[", "poCol", ".", "GetName", "(", ")", "for", "poCol", "in", "poSheet", ".", "Columns", "(", ")", "]", "self", ".", "colDesc", "=", "[", "poCol", ".", "GetLongName", "(", ")", "for", "poCol", "in", "poSheet", ".", "Columns", "(", ")", "]", "self", ".", "colUnits", "=", "[", "poCol", ".", "GetUnits", "(", ")", "for", "poCol", "in", "poSheet", ".", "Columns", "(", ")", "]", "self", ".", "colComments", "=", "[", "poCol", ".", "GetComments", "(", ")", "for", "poCol", "in", "poSheet", ".", "Columns", "(", ")", "]", "self", ".", "colTypes", "=", "[", "poCol", ".", "GetType", "(", ")", "for", "poCol", "in", "poSheet", ".", "Columns", "(", ")", "]", "self", ".", "colData", "=", "[", "poCol", ".", "GetData", "(", ")", "for", "poCol", "in", "poSheet", ".", "Columns", "(", ")", "]" ]
pull data into this OR.SHEET from a real book/sheet in Origin
[ "pull", "data", "into", "this", "OR", ".", "SHEET", "from", "a", "real", "book", "/", "sheet", "in", "Origin" ]
train
https://github.com/swharden/PyOriginTools/blob/536fb8e11234ffdc27e26b1800e0358179ca7d26/PyOriginTools/workbook.py#L138-L160
swharden/PyOriginTools
PyOriginTools/workbook.py
SHEET.push
def push(self,bookName=None,sheetName=None,overwrite=False): """pull this OR.SHEET into a real book/sheet in Origin""" # tons of validation if bookName: self.bookName=bookName if sheetName: self.sheetName=sheetName if not self.sheetName in OR.sheetNames(bookName): print("can't find [%s]%s!"%(bookName,sheetName)) return # clear out out sheet by deleting EVERY column poSheet=OR.getSheet(bookName,sheetName) # CPyWorksheetPageI if not poSheet: print("WARNING: didn't get posheet",poSheet,bookName,sheetName) for poCol in [x for x in poSheet if x.IsValid()]: poCol.Destroy() # create columns and assign properties to each for i in range(len(self.colNames)): poSheet.InsertCol(i,self.colNames[i]) poSheet.Columns(i).SetName(self.colNames[i]) poSheet.Columns(i).SetLongName(self.colDesc[i]) poSheet.Columns(i).SetUnits(self.colUnits[i]) poSheet.Columns(i).SetComments(self.colComments[i]) poSheet.Columns(i).SetType(self.colTypes[i]) poSheet.Columns(i).SetData(self.colData[i])
python
def push(self,bookName=None,sheetName=None,overwrite=False):
    """Push this SHEET's columns into a real book/sheet in Origin.

    Explicit ``bookName``/``sheetName`` arguments update the stored
    destination; otherwise the previously stored names are used.
    ``overwrite`` is currently unused (reserved).
    """
    # remember any explicitly-given destination
    if bookName:
        self.bookName=bookName
    if sheetName:
        self.sheetName=sheetName
    # BUG FIX: validate and operate on the STORED names, not the raw
    # arguments (which may be None), so push() works when the destination
    # was set earlier (e.g. by pull()) and the error message is accurate.
    if not self.sheetName in OR.sheetNames(self.bookName):
        print("can't find [%s]%s!"%(self.bookName,self.sheetName))
        return

    # clear out the sheet by deleting EVERY column
    poSheet=OR.getSheet(self.bookName,self.sheetName) # CPyWorksheetPageI
    if not poSheet:
        print("WARNING: didn't get posheet",poSheet,self.bookName,self.sheetName)
    for poCol in [x for x in poSheet if x.IsValid()]:
        poCol.Destroy()

    # create columns and assign the stored properties/data to each
    for i in range(len(self.colNames)):
        poSheet.InsertCol(i,self.colNames[i])
        poSheet.Columns(i).SetName(self.colNames[i])
        poSheet.Columns(i).SetLongName(self.colDesc[i])
        poSheet.Columns(i).SetUnits(self.colUnits[i])
        poSheet.Columns(i).SetComments(self.colComments[i])
        poSheet.Columns(i).SetType(self.colTypes[i])
        poSheet.Columns(i).SetData(self.colData[i])
[ "def", "push", "(", "self", ",", "bookName", "=", "None", ",", "sheetName", "=", "None", ",", "overwrite", "=", "False", ")", ":", "# tons of validation", "if", "bookName", ":", "self", ".", "bookName", "=", "bookName", "if", "sheetName", ":", "self", ".", "sheetName", "=", "sheetName", "if", "not", "self", ".", "sheetName", "in", "OR", ".", "sheetNames", "(", "bookName", ")", ":", "print", "(", "\"can't find [%s]%s!\"", "%", "(", "bookName", ",", "sheetName", ")", ")", "return", "# clear out out sheet by deleting EVERY column", "poSheet", "=", "OR", ".", "getSheet", "(", "bookName", ",", "sheetName", ")", "# CPyWorksheetPageI", "if", "not", "poSheet", ":", "print", "(", "\"WARNING: didn't get posheet\"", ",", "poSheet", ",", "bookName", ",", "sheetName", ")", "for", "poCol", "in", "[", "x", "for", "x", "in", "poSheet", "if", "x", ".", "IsValid", "(", ")", "]", ":", "poCol", ".", "Destroy", "(", ")", "# create columns and assign properties to each", "for", "i", "in", "range", "(", "len", "(", "self", ".", "colNames", ")", ")", ":", "poSheet", ".", "InsertCol", "(", "i", ",", "self", ".", "colNames", "[", "i", "]", ")", "poSheet", ".", "Columns", "(", "i", ")", ".", "SetName", "(", "self", ".", "colNames", "[", "i", "]", ")", "poSheet", ".", "Columns", "(", "i", ")", ".", "SetLongName", "(", "self", ".", "colDesc", "[", "i", "]", ")", "poSheet", ".", "Columns", "(", "i", ")", ".", "SetUnits", "(", "self", ".", "colUnits", "[", "i", "]", ")", "poSheet", ".", "Columns", "(", "i", ")", ".", "SetComments", "(", "self", ".", "colComments", "[", "i", "]", ")", "poSheet", ".", "Columns", "(", "i", ")", ".", "SetType", "(", "self", ".", "colTypes", "[", "i", "]", ")", "poSheet", ".", "Columns", "(", "i", ")", ".", "SetData", "(", "self", ".", "colData", "[", "i", "]", ")" ]
pull this OR.SHEET into a real book/sheet in Origin
[ "pull", "this", "OR", ".", "SHEET", "into", "a", "real", "book", "/", "sheet", "in", "Origin" ]
train
https://github.com/swharden/PyOriginTools/blob/536fb8e11234ffdc27e26b1800e0358179ca7d26/PyOriginTools/workbook.py#L162-L186
swharden/PyOriginTools
PyOriginTools/workbook.py
SHEET.nRows
def nRows(self): """returns maximum number of rows based on the longest colData""" if self.nCols: return max([len(x) for x in self.colData]) else: return 0
python
def nRows(self):
    """Return the row count: the length of the longest column's data.

    Returns 0 when the sheet has no columns.
    """
    if not self.nCols:
        return 0
    return max(len(columnValues) for columnValues in self.colData)
[ "def", "nRows", "(", "self", ")", ":", "if", "self", ".", "nCols", ":", "return", "max", "(", "[", "len", "(", "x", ")", "for", "x", "in", "self", ".", "colData", "]", ")", "else", ":", "return", "0" ]
returns maximum number of rows based on the longest colData
[ "returns", "maximum", "number", "of", "rows", "based", "on", "the", "longest", "colData" ]
train
https://github.com/swharden/PyOriginTools/blob/536fb8e11234ffdc27e26b1800e0358179ca7d26/PyOriginTools/workbook.py#L191-L194
swharden/PyOriginTools
PyOriginTools/workbook.py
SHEET.data
def data(self): """return all of colData as a 2D numpy array.""" data=np.empty((self.nRows,self.nCols),dtype=np.float) data[:]=np.nan # make everything nan by default for colNum,colData in enumerate(self.colData): validIs=np.where([np.isreal(v) for v in colData])[0] validData=np.ones(len(colData))*np.nan validData[validIs]=np.array(colData)[validIs] data[:len(colData),colNum]=validData # only fill cells that have data return data
python
def data(self):
    """Return all of colData as a 2D float numpy array (nRows x nCols).

    Cells with no source data — short columns or non-numeric entries —
    are filled with NaN.
    """
    # BUG FIX: the np.float alias was removed in NumPy 1.20+; use the
    # builtin float as the dtype instead.
    arr=np.empty((self.nRows,self.nCols),dtype=float)
    arr[:]=np.nan # make everything nan by default
    for colNum,colData in enumerate(self.colData):
        # indices of entries that are real numbers (strings/None stay NaN)
        validIs=np.where([np.isreal(v) for v in colData])[0]
        validData=np.ones(len(colData))*np.nan
        validData[validIs]=np.array(colData)[validIs]
        arr[:len(colData),colNum]=validData # only fill cells that have data
    return arr
[ "def", "data", "(", "self", ")", ":", "data", "=", "np", ".", "empty", "(", "(", "self", ".", "nRows", ",", "self", ".", "nCols", ")", ",", "dtype", "=", "np", ".", "float", ")", "data", "[", ":", "]", "=", "np", ".", "nan", "# make everything nan by default", "for", "colNum", ",", "colData", "in", "enumerate", "(", "self", ".", "colData", ")", ":", "validIs", "=", "np", ".", "where", "(", "[", "np", ".", "isreal", "(", "v", ")", "for", "v", "in", "colData", "]", ")", "[", "0", "]", "validData", "=", "np", ".", "ones", "(", "len", "(", "colData", ")", ")", "*", "np", ".", "nan", "validData", "[", "validIs", "]", "=", "np", ".", "array", "(", "colData", ")", "[", "validIs", "]", "data", "[", ":", "len", "(", "colData", ")", ",", "colNum", "]", "=", "validData", "# only fill cells that have data", "return", "data" ]
return all of colData as a 2D numpy array.
[ "return", "all", "of", "colData", "as", "a", "2D", "numpy", "array", "." ]
train
https://github.com/swharden/PyOriginTools/blob/536fb8e11234ffdc27e26b1800e0358179ca7d26/PyOriginTools/workbook.py#L204-L214
swharden/PyOriginTools
PyOriginTools/workbook.py
SHEET.data
def data(self,data): """Given a 2D numpy array, fill colData with it.""" assert type(data) is np.ndarray assert data.shape[1] == self.nCols for i in range(self.nCols): self.colData[i]=data[:,i].tolist()
python
def data(self,data):
    """Given a 2D numpy array, fill colData with it (column i -> colData[i]).

    The array's second dimension must equal the current column count.
    """
    assert type(data) is np.ndarray
    assert data.shape[1] == self.nCols
    for colNum in range(self.nCols):
        self.colData[colNum] = data[:, colNum].tolist()
[ "def", "data", "(", "self", ",", "data", ")", ":", "assert", "type", "(", "data", ")", "is", "np", ".", "ndarray", "assert", "data", ".", "shape", "[", "1", "]", "==", "self", ".", "nCols", "for", "i", "in", "range", "(", "self", ".", "nCols", ")", ":", "self", ".", "colData", "[", "i", "]", "=", "data", "[", ":", ",", "i", "]", ".", "tolist", "(", ")" ]
Given a 2D numpy array, fill colData with it.
[ "Given", "a", "2D", "numpy", "array", "fill", "colData", "with", "it", "." ]
train
https://github.com/swharden/PyOriginTools/blob/536fb8e11234ffdc27e26b1800e0358179ca7d26/PyOriginTools/workbook.py#L217-L222
j4321/tkColorPicker
tkcolorpicker/spinbox.py
Spinbox.focusout
def focusout(self, event): """Change style on focus out events.""" bc = self.style.lookup("TEntry", "bordercolor", ("!focus",)) dc = self.style.lookup("TEntry", "darkcolor", ("!focus",)) lc = self.style.lookup("TEntry", "lightcolor", ("!focus",)) self.style.configure("%s.spinbox.TFrame" % self.frame, bordercolor=bc, darkcolor=dc, lightcolor=lc)
python
def focusout(self, event):
    """Restore the unfocused TEntry border colors on the spinbox frame."""
    unfocused = ("!focus",)
    colors = {
        option: self.style.lookup("TEntry", option, unfocused)
        for option in ("bordercolor", "darkcolor", "lightcolor")
    }
    self.style.configure("%s.spinbox.TFrame" % self.frame, **colors)
[ "def", "focusout", "(", "self", ",", "event", ")", ":", "bc", "=", "self", ".", "style", ".", "lookup", "(", "\"TEntry\"", ",", "\"bordercolor\"", ",", "(", "\"!focus\"", ",", ")", ")", "dc", "=", "self", ".", "style", ".", "lookup", "(", "\"TEntry\"", ",", "\"darkcolor\"", ",", "(", "\"!focus\"", ",", ")", ")", "lc", "=", "self", ".", "style", ".", "lookup", "(", "\"TEntry\"", ",", "\"lightcolor\"", ",", "(", "\"!focus\"", ",", ")", ")", "self", ".", "style", ".", "configure", "(", "\"%s.spinbox.TFrame\"", "%", "self", ".", "frame", ",", "bordercolor", "=", "bc", ",", "darkcolor", "=", "dc", ",", "lightcolor", "=", "lc", ")" ]
Change style on focus out events.
[ "Change", "style", "on", "focus", "out", "events", "." ]
train
https://github.com/j4321/tkColorPicker/blob/ee2d583115e0c7ad7f29795763fc6b4ddc4e8c1d/tkcolorpicker/spinbox.py#L99-L105
j4321/tkColorPicker
tkcolorpicker/spinbox.py
Spinbox.focusin
def focusin(self, event): """Change style on focus in events.""" self.old_value = self.get() bc = self.style.lookup("TEntry", "bordercolor", ("focus",)) dc = self.style.lookup("TEntry", "darkcolor", ("focus",)) lc = self.style.lookup("TEntry", "lightcolor", ("focus",)) self.style.configure("%s.spinbox.TFrame" % self.frame, bordercolor=bc, darkcolor=dc, lightcolor=lc)
python
def focusin(self, event):
    """Save the current value and apply focused TEntry border colors."""
    self.old_value = self.get()
    focused = ("focus",)
    colors = {}
    for option in ("bordercolor", "darkcolor", "lightcolor"):
        colors[option] = self.style.lookup("TEntry", option, focused)
    self.style.configure("%s.spinbox.TFrame" % self.frame, **colors)
[ "def", "focusin", "(", "self", ",", "event", ")", ":", "self", ".", "old_value", "=", "self", ".", "get", "(", ")", "bc", "=", "self", ".", "style", ".", "lookup", "(", "\"TEntry\"", ",", "\"bordercolor\"", ",", "(", "\"focus\"", ",", ")", ")", "dc", "=", "self", ".", "style", ".", "lookup", "(", "\"TEntry\"", ",", "\"darkcolor\"", ",", "(", "\"focus\"", ",", ")", ")", "lc", "=", "self", ".", "style", ".", "lookup", "(", "\"TEntry\"", ",", "\"lightcolor\"", ",", "(", "\"focus\"", ",", ")", ")", "self", ".", "style", ".", "configure", "(", "\"%s.spinbox.TFrame\"", "%", "self", ".", "frame", ",", "bordercolor", "=", "bc", ",", "darkcolor", "=", "dc", ",", "lightcolor", "=", "lc", ")" ]
Change style on focus in events.
[ "Change", "style", "on", "focus", "in", "events", "." ]
train
https://github.com/j4321/tkColorPicker/blob/ee2d583115e0c7ad7f29795763fc6b4ddc4e8c1d/tkcolorpicker/spinbox.py#L107-L114
xzased/lvm2py
lvm2py/lvm.py
LVM.open
def open(self): """ Obtains the lvm handle. Usually you would never need to use this method unless you are trying to do operations using the ctypes function wrappers in conversion.py *Raises:* * HandleError """ if not self.handle: try: path = self.system_dir except AttributeError: path = '' self.__handle = lvm_init(path) if not bool(self.__handle): raise HandleError("Failed to initialize LVM handle.")
python
def open(self):
    """Obtain the lvm handle if one is not already held.

    Usually you would never need to use this method unless you are
    trying to do operations using the ctypes function wrappers in
    conversion.py

    *Raises:*

    *  HandleError
    """
    if self.handle:
        return
    # system_dir may not have been set; fall back to the default path
    try:
        path = self.system_dir
    except AttributeError:
        path = ''
    self.__handle = lvm_init(path)
    if not bool(self.__handle):
        raise HandleError("Failed to initialize LVM handle.")
[ "def", "open", "(", "self", ")", ":", "if", "not", "self", ".", "handle", ":", "try", ":", "path", "=", "self", ".", "system_dir", "except", "AttributeError", ":", "path", "=", "''", "self", ".", "__handle", "=", "lvm_init", "(", "path", ")", "if", "not", "bool", "(", "self", ".", "__handle", ")", ":", "raise", "HandleError", "(", "\"Failed to initialize LVM handle.\"", ")" ]
Obtains the lvm handle. Usually you would never need to use this method unless you are trying to do operations using the ctypes function wrappers in conversion.py *Raises:* * HandleError
[ "Obtains", "the", "lvm", "handle", ".", "Usually", "you", "would", "never", "need", "to", "use", "this", "method", "unless", "you", "are", "trying", "to", "do", "operations", "using", "the", "ctypes", "function", "wrappers", "in", "conversion", ".", "py" ]
train
https://github.com/xzased/lvm2py/blob/34ce69304531a474c2fe4a4009ca445a8c103cd6/lvm2py/lvm.py#L57-L73
xzased/lvm2py
lvm2py/lvm.py
LVM.close
def close(self): """ Closes the lvm handle. Usually you would never need to use this method unless you are trying to do operations using the ctypes function wrappers in conversion.py *Raises:* * HandleError """ if self.handle: q = lvm_quit(self.handle) if q != 0: raise HandleError("Failed to close LVM handle.") self.__handle = None
python
def close(self):
    """Close the lvm handle if one is held.

    Usually you would never need to use this method unless you are
    trying to do operations using the ctypes function wrappers in
    conversion.py

    *Raises:*

    *  HandleError
    """
    if not self.handle:
        return
    if lvm_quit(self.handle) != 0:
        raise HandleError("Failed to close LVM handle.")
    self.__handle = None
[ "def", "close", "(", "self", ")", ":", "if", "self", ".", "handle", ":", "q", "=", "lvm_quit", "(", "self", ".", "handle", ")", "if", "q", "!=", "0", ":", "raise", "HandleError", "(", "\"Failed to close LVM handle.\"", ")", "self", ".", "__handle", "=", "None" ]
Closes the lvm handle. Usually you would never need to use this method unless you are trying to do operations using the ctypes function wrappers in conversion.py *Raises:* * HandleError
[ "Closes", "the", "lvm", "handle", ".", "Usually", "you", "would", "never", "need", "to", "use", "this", "method", "unless", "you", "are", "trying", "to", "do", "operations", "using", "the", "ctypes", "function", "wrappers", "in", "conversion", ".", "py" ]
train
https://github.com/xzased/lvm2py/blob/34ce69304531a474c2fe4a4009ca445a8c103cd6/lvm2py/lvm.py#L75-L88
xzased/lvm2py
lvm2py/lvm.py
LVM.get_vg
def get_vg(self, name, mode="r"): """ Returns an instance of VolumeGroup. The name parameter should be an existing volume group. By default, all volume groups are open in "read" mode:: from lvm2py import * lvm = LVM() vg = lvm.get_vg("myvg") To open a volume group with write permissions set the mode parameter to "w":: from lvm2py import * lvm = LVM() vg = lvm.get_vg("myvg", "w") *Args:* * name (str): An existing volume group name. * mode (str): "r" or "w" for read/write respectively. Default is "r". *Raises:* * HandleError """ vg = VolumeGroup(self, name=name, mode=mode) return vg
python
def get_vg(self, name, mode="r"):
    """Return a VolumeGroup instance for an existing volume group.

    By default the group is opened in read mode::

        from lvm2py import *

        lvm = LVM()
        vg = lvm.get_vg("myvg")

    Pass ``mode="w"`` to open it with write permissions.

    *Args:*

    *  name (str): An existing volume group name.
    *  mode (str): "r" or "w" for read/write respectively. Default is "r".

    *Raises:*

    *  HandleError
    """
    return VolumeGroup(self, name=name, mode=mode)
[ "def", "get_vg", "(", "self", ",", "name", ",", "mode", "=", "\"r\"", ")", ":", "vg", "=", "VolumeGroup", "(", "self", ",", "name", "=", "name", ",", "mode", "=", "mode", ")", "return", "vg" ]
Returns an instance of VolumeGroup. The name parameter should be an existing volume group. By default, all volume groups are open in "read" mode:: from lvm2py import * lvm = LVM() vg = lvm.get_vg("myvg") To open a volume group with write permissions set the mode parameter to "w":: from lvm2py import * lvm = LVM() vg = lvm.get_vg("myvg", "w") *Args:* * name (str): An existing volume group name. * mode (str): "r" or "w" for read/write respectively. Default is "r". *Raises:* * HandleError
[ "Returns", "an", "instance", "of", "VolumeGroup", ".", "The", "name", "parameter", "should", "be", "an", "existing", "volume", "group", ".", "By", "default", "all", "volume", "groups", "are", "open", "in", "read", "mode", "::" ]
train
https://github.com/xzased/lvm2py/blob/34ce69304531a474c2fe4a4009ca445a8c103cd6/lvm2py/lvm.py#L130-L157
xzased/lvm2py
lvm2py/lvm.py
LVM.create_vg
def create_vg(self, name, devices): """ Returns a new instance of VolumeGroup with the given name and added physycal volumes (devices):: from lvm2py import * lvm = LVM() vg = lvm.create_vg("myvg", ["/dev/sdb1", "/dev/sdb2"]) *Args:* * name (str): A volume group name. * devices (list): A list of device paths. *Raises:* * HandleError, CommitError, ValueError """ self.open() vgh = lvm_vg_create(self.handle, name) if not bool(vgh): self.close() raise HandleError("Failed to create VG.") for device in devices: if not os.path.exists(device): self._destroy_vg(vgh) raise ValueError("%s does not exist." % device) ext = lvm_vg_extend(vgh, device) if ext != 0: self._destroy_vg(vgh) raise CommitError("Failed to extend Volume Group.") try: self._commit_vg(vgh) except CommitError: self._destroy_vg(vgh) raise CommitError("Failed to add %s to VolumeGroup." % device) self._close_vg(vgh) vg = VolumeGroup(self, name) return vg
python
def create_vg(self, name, devices): """ Returns a new instance of VolumeGroup with the given name and added physycal volumes (devices):: from lvm2py import * lvm = LVM() vg = lvm.create_vg("myvg", ["/dev/sdb1", "/dev/sdb2"]) *Args:* * name (str): A volume group name. * devices (list): A list of device paths. *Raises:* * HandleError, CommitError, ValueError """ self.open() vgh = lvm_vg_create(self.handle, name) if not bool(vgh): self.close() raise HandleError("Failed to create VG.") for device in devices: if not os.path.exists(device): self._destroy_vg(vgh) raise ValueError("%s does not exist." % device) ext = lvm_vg_extend(vgh, device) if ext != 0: self._destroy_vg(vgh) raise CommitError("Failed to extend Volume Group.") try: self._commit_vg(vgh) except CommitError: self._destroy_vg(vgh) raise CommitError("Failed to add %s to VolumeGroup." % device) self._close_vg(vgh) vg = VolumeGroup(self, name) return vg
[ "def", "create_vg", "(", "self", ",", "name", ",", "devices", ")", ":", "self", ".", "open", "(", ")", "vgh", "=", "lvm_vg_create", "(", "self", ".", "handle", ",", "name", ")", "if", "not", "bool", "(", "vgh", ")", ":", "self", ".", "close", "(", ")", "raise", "HandleError", "(", "\"Failed to create VG.\"", ")", "for", "device", "in", "devices", ":", "if", "not", "os", ".", "path", ".", "exists", "(", "device", ")", ":", "self", ".", "_destroy_vg", "(", "vgh", ")", "raise", "ValueError", "(", "\"%s does not exist.\"", "%", "device", ")", "ext", "=", "lvm_vg_extend", "(", "vgh", ",", "device", ")", "if", "ext", "!=", "0", ":", "self", ".", "_destroy_vg", "(", "vgh", ")", "raise", "CommitError", "(", "\"Failed to extend Volume Group.\"", ")", "try", ":", "self", ".", "_commit_vg", "(", "vgh", ")", "except", "CommitError", ":", "self", ".", "_destroy_vg", "(", "vgh", ")", "raise", "CommitError", "(", "\"Failed to add %s to VolumeGroup.\"", "%", "device", ")", "self", ".", "_close_vg", "(", "vgh", ")", "vg", "=", "VolumeGroup", "(", "self", ",", "name", ")", "return", "vg" ]
Returns a new instance of VolumeGroup with the given name and added physycal volumes (devices):: from lvm2py import * lvm = LVM() vg = lvm.create_vg("myvg", ["/dev/sdb1", "/dev/sdb2"]) *Args:* * name (str): A volume group name. * devices (list): A list of device paths. *Raises:* * HandleError, CommitError, ValueError
[ "Returns", "a", "new", "instance", "of", "VolumeGroup", "with", "the", "given", "name", "and", "added", "physycal", "volumes", "(", "devices", ")", "::" ]
train
https://github.com/xzased/lvm2py/blob/34ce69304531a474c2fe4a4009ca445a8c103cd6/lvm2py/lvm.py#L159-L198
xzased/lvm2py
lvm2py/lvm.py
LVM.remove_vg
def remove_vg(self, vg): """ Removes a volume group:: from lvm2py import * lvm = LVM() vg = lvm.get_vg("myvg", "w") lvm.remove_vg(vg) *Args:* * vg (obj): A VolumeGroup instance. *Raises:* * HandleError, CommitError .. note:: The VolumeGroup instance must be in write mode, otherwise CommitError is raised. """ vg.open() rm = lvm_vg_remove(vg.handle) if rm != 0: vg.close() raise CommitError("Failed to remove VG.") com = lvm_vg_write(vg.handle) if com != 0: vg.close() raise CommitError("Failed to commit changes to disk.") vg.close()
python
def remove_vg(self, vg): """ Removes a volume group:: from lvm2py import * lvm = LVM() vg = lvm.get_vg("myvg", "w") lvm.remove_vg(vg) *Args:* * vg (obj): A VolumeGroup instance. *Raises:* * HandleError, CommitError .. note:: The VolumeGroup instance must be in write mode, otherwise CommitError is raised. """ vg.open() rm = lvm_vg_remove(vg.handle) if rm != 0: vg.close() raise CommitError("Failed to remove VG.") com = lvm_vg_write(vg.handle) if com != 0: vg.close() raise CommitError("Failed to commit changes to disk.") vg.close()
[ "def", "remove_vg", "(", "self", ",", "vg", ")", ":", "vg", ".", "open", "(", ")", "rm", "=", "lvm_vg_remove", "(", "vg", ".", "handle", ")", "if", "rm", "!=", "0", ":", "vg", ".", "close", "(", ")", "raise", "CommitError", "(", "\"Failed to remove VG.\"", ")", "com", "=", "lvm_vg_write", "(", "vg", ".", "handle", ")", "if", "com", "!=", "0", ":", "vg", ".", "close", "(", ")", "raise", "CommitError", "(", "\"Failed to commit changes to disk.\"", ")", "vg", ".", "close", "(", ")" ]
Removes a volume group:: from lvm2py import * lvm = LVM() vg = lvm.get_vg("myvg", "w") lvm.remove_vg(vg) *Args:* * vg (obj): A VolumeGroup instance. *Raises:* * HandleError, CommitError .. note:: The VolumeGroup instance must be in write mode, otherwise CommitError is raised.
[ "Removes", "a", "volume", "group", "::" ]
train
https://github.com/xzased/lvm2py/blob/34ce69304531a474c2fe4a4009ca445a8c103cd6/lvm2py/lvm.py#L200-L232
xzased/lvm2py
lvm2py/lvm.py
LVM.vgscan
def vgscan(self): """ Probes the system for volume groups and returns a list of VolumeGroup instances:: from lvm2py import * lvm = LVM() vgs = lvm.vgscan() *Raises:* * HandleError """ vg_list = [] self.open() names = lvm_list_vg_names(self.handle) if not bool(names): return vg_list vgnames = [] vg = dm_list_first(names) while vg: c = cast(vg, POINTER(lvm_str_list)) vgnames.append(c.contents.str) if dm_list_end(names, vg): # end of linked list break vg = dm_list_next(names, vg) self.close() for name in vgnames: vginst = self.get_vg(name) vg_list.append(vginst) return vg_list
python
def vgscan(self): """ Probes the system for volume groups and returns a list of VolumeGroup instances:: from lvm2py import * lvm = LVM() vgs = lvm.vgscan() *Raises:* * HandleError """ vg_list = [] self.open() names = lvm_list_vg_names(self.handle) if not bool(names): return vg_list vgnames = [] vg = dm_list_first(names) while vg: c = cast(vg, POINTER(lvm_str_list)) vgnames.append(c.contents.str) if dm_list_end(names, vg): # end of linked list break vg = dm_list_next(names, vg) self.close() for name in vgnames: vginst = self.get_vg(name) vg_list.append(vginst) return vg_list
[ "def", "vgscan", "(", "self", ")", ":", "vg_list", "=", "[", "]", "self", ".", "open", "(", ")", "names", "=", "lvm_list_vg_names", "(", "self", ".", "handle", ")", "if", "not", "bool", "(", "names", ")", ":", "return", "vg_list", "vgnames", "=", "[", "]", "vg", "=", "dm_list_first", "(", "names", ")", "while", "vg", ":", "c", "=", "cast", "(", "vg", ",", "POINTER", "(", "lvm_str_list", ")", ")", "vgnames", ".", "append", "(", "c", ".", "contents", ".", "str", ")", "if", "dm_list_end", "(", "names", ",", "vg", ")", ":", "# end of linked list", "break", "vg", "=", "dm_list_next", "(", "names", ",", "vg", ")", "self", ".", "close", "(", ")", "for", "name", "in", "vgnames", ":", "vginst", "=", "self", ".", "get_vg", "(", "name", ")", "vg_list", ".", "append", "(", "vginst", ")", "return", "vg_list" ]
Probes the system for volume groups and returns a list of VolumeGroup instances:: from lvm2py import * lvm = LVM() vgs = lvm.vgscan() *Raises:* * HandleError
[ "Probes", "the", "system", "for", "volume", "groups", "and", "returns", "a", "list", "of", "VolumeGroup", "instances", "::" ]
train
https://github.com/xzased/lvm2py/blob/34ce69304531a474c2fe4a4009ca445a8c103cd6/lvm2py/lvm.py#L234-L266
davidmcclure/textplot
textplot/text.py
Text.from_file
def from_file(cls, path): """ Create a text from a file. Args: path (str): The file path. """ with open(path, 'r', errors='replace') as f: return cls(f.read())
python
def from_file(cls, path): """ Create a text from a file. Args: path (str): The file path. """ with open(path, 'r', errors='replace') as f: return cls(f.read())
[ "def", "from_file", "(", "cls", ",", "path", ")", ":", "with", "open", "(", "path", ",", "'r'", ",", "errors", "=", "'replace'", ")", "as", "f", ":", "return", "cls", "(", "f", ".", "read", "(", ")", ")" ]
Create a text from a file. Args: path (str): The file path.
[ "Create", "a", "text", "from", "a", "file", "." ]
train
https://github.com/davidmcclure/textplot/blob/889b949a637d99097ecec44ed4bfee53b1964dee/textplot/text.py#L22-L32
davidmcclure/textplot
textplot/text.py
Text.load_stopwords
def load_stopwords(self, path): """ Load a set of stopwords. Args: path (str): The stopwords file path. """ if path: with open(path) as f: self.stopwords = set(f.read().splitlines()) else: self.stopwords = set( pkgutil .get_data('textplot', 'data/stopwords.txt') .decode('utf8') .splitlines() )
python
def load_stopwords(self, path): """ Load a set of stopwords. Args: path (str): The stopwords file path. """ if path: with open(path) as f: self.stopwords = set(f.read().splitlines()) else: self.stopwords = set( pkgutil .get_data('textplot', 'data/stopwords.txt') .decode('utf8') .splitlines() )
[ "def", "load_stopwords", "(", "self", ",", "path", ")", ":", "if", "path", ":", "with", "open", "(", "path", ")", "as", "f", ":", "self", ".", "stopwords", "=", "set", "(", "f", ".", "read", "(", ")", ".", "splitlines", "(", ")", ")", "else", ":", "self", ".", "stopwords", "=", "set", "(", "pkgutil", ".", "get_data", "(", "'textplot'", ",", "'data/stopwords.txt'", ")", ".", "decode", "(", "'utf8'", ")", ".", "splitlines", "(", ")", ")" ]
Load a set of stopwords. Args: path (str): The stopwords file path.
[ "Load", "a", "set", "of", "stopwords", "." ]
train
https://github.com/davidmcclure/textplot/blob/889b949a637d99097ecec44ed4bfee53b1964dee/textplot/text.py#L50-L69
davidmcclure/textplot
textplot/text.py
Text.tokenize
def tokenize(self): """ Tokenize the text. """ self.tokens = [] self.terms = OrderedDict() # Generate tokens. for token in utils.tokenize(self.text): # Ignore stopwords. if token['unstemmed'] in self.stopwords: self.tokens.append(None) else: # Token: self.tokens.append(token) # Term: offsets = self.terms.setdefault(token['stemmed'], []) offsets.append(token['offset'])
python
def tokenize(self): """ Tokenize the text. """ self.tokens = [] self.terms = OrderedDict() # Generate tokens. for token in utils.tokenize(self.text): # Ignore stopwords. if token['unstemmed'] in self.stopwords: self.tokens.append(None) else: # Token: self.tokens.append(token) # Term: offsets = self.terms.setdefault(token['stemmed'], []) offsets.append(token['offset'])
[ "def", "tokenize", "(", "self", ")", ":", "self", ".", "tokens", "=", "[", "]", "self", ".", "terms", "=", "OrderedDict", "(", ")", "# Generate tokens.", "for", "token", "in", "utils", ".", "tokenize", "(", "self", ".", "text", ")", ":", "# Ignore stopwords.", "if", "token", "[", "'unstemmed'", "]", "in", "self", ".", "stopwords", ":", "self", ".", "tokens", ".", "append", "(", "None", ")", "else", ":", "# Token:", "self", ".", "tokens", ".", "append", "(", "token", ")", "# Term:", "offsets", "=", "self", ".", "terms", ".", "setdefault", "(", "token", "[", "'stemmed'", "]", ",", "[", "]", ")", "offsets", ".", "append", "(", "token", "[", "'offset'", "]", ")" ]
Tokenize the text.
[ "Tokenize", "the", "text", "." ]
train
https://github.com/davidmcclure/textplot/blob/889b949a637d99097ecec44ed4bfee53b1964dee/textplot/text.py#L72-L95
davidmcclure/textplot
textplot/text.py
Text.term_counts
def term_counts(self): """ Returns: OrderedDict: An ordered dictionary of term counts. """ counts = OrderedDict() for term in self.terms: counts[term] = len(self.terms[term]) return utils.sort_dict(counts)
python
def term_counts(self): """ Returns: OrderedDict: An ordered dictionary of term counts. """ counts = OrderedDict() for term in self.terms: counts[term] = len(self.terms[term]) return utils.sort_dict(counts)
[ "def", "term_counts", "(", "self", ")", ":", "counts", "=", "OrderedDict", "(", ")", "for", "term", "in", "self", ".", "terms", ":", "counts", "[", "term", "]", "=", "len", "(", "self", ".", "terms", "[", "term", "]", ")", "return", "utils", ".", "sort_dict", "(", "counts", ")" ]
Returns: OrderedDict: An ordered dictionary of term counts.
[ "Returns", ":", "OrderedDict", ":", "An", "ordered", "dictionary", "of", "term", "counts", "." ]
train
https://github.com/davidmcclure/textplot/blob/889b949a637d99097ecec44ed4bfee53b1964dee/textplot/text.py#L98-L109
davidmcclure/textplot
textplot/text.py
Text.term_count_buckets
def term_count_buckets(self): """ Returns: dict: A dictionary that maps occurrence counts to the terms that appear that many times in the text. """ buckets = {} for term, count in self.term_counts().items(): if count in buckets: buckets[count].append(term) else: buckets[count] = [term] return buckets
python
def term_count_buckets(self): """ Returns: dict: A dictionary that maps occurrence counts to the terms that appear that many times in the text. """ buckets = {} for term, count in self.term_counts().items(): if count in buckets: buckets[count].append(term) else: buckets[count] = [term] return buckets
[ "def", "term_count_buckets", "(", "self", ")", ":", "buckets", "=", "{", "}", "for", "term", ",", "count", "in", "self", ".", "term_counts", "(", ")", ".", "items", "(", ")", ":", "if", "count", "in", "buckets", ":", "buckets", "[", "count", "]", ".", "append", "(", "term", ")", "else", ":", "buckets", "[", "count", "]", "=", "[", "term", "]", "return", "buckets" ]
Returns: dict: A dictionary that maps occurrence counts to the terms that appear that many times in the text.
[ "Returns", ":", "dict", ":", "A", "dictionary", "that", "maps", "occurrence", "counts", "to", "the", "terms", "that", "appear", "that", "many", "times", "in", "the", "text", "." ]
train
https://github.com/davidmcclure/textplot/blob/889b949a637d99097ecec44ed4bfee53b1964dee/textplot/text.py#L112-L125
davidmcclure/textplot
textplot/text.py
Text.most_frequent_terms
def most_frequent_terms(self, depth): """ Get the X most frequent terms in the text, and then probe down to get any other terms that have the same count as the last term. Args: depth (int): The number of terms. Returns: set: The set of frequent terms. """ counts = self.term_counts() # Get the top X terms and the instance count of the last word. top_terms = set(list(counts.keys())[:depth]) end_count = list(counts.values())[:depth][-1] # Merge in all other words with that appear that number of times, so # that we don't truncate the last bucket - eg, half of the words that # appear 5 times, but not the other half. bucket = self.term_count_buckets()[end_count] return top_terms.union(set(bucket))
python
def most_frequent_terms(self, depth): """ Get the X most frequent terms in the text, and then probe down to get any other terms that have the same count as the last term. Args: depth (int): The number of terms. Returns: set: The set of frequent terms. """ counts = self.term_counts() # Get the top X terms and the instance count of the last word. top_terms = set(list(counts.keys())[:depth]) end_count = list(counts.values())[:depth][-1] # Merge in all other words with that appear that number of times, so # that we don't truncate the last bucket - eg, half of the words that # appear 5 times, but not the other half. bucket = self.term_count_buckets()[end_count] return top_terms.union(set(bucket))
[ "def", "most_frequent_terms", "(", "self", ",", "depth", ")", ":", "counts", "=", "self", ".", "term_counts", "(", ")", "# Get the top X terms and the instance count of the last word.", "top_terms", "=", "set", "(", "list", "(", "counts", ".", "keys", "(", ")", ")", "[", ":", "depth", "]", ")", "end_count", "=", "list", "(", "counts", ".", "values", "(", ")", ")", "[", ":", "depth", "]", "[", "-", "1", "]", "# Merge in all other words with that appear that number of times, so", "# that we don't truncate the last bucket - eg, half of the words that", "# appear 5 times, but not the other half.", "bucket", "=", "self", ".", "term_count_buckets", "(", ")", "[", "end_count", "]", "return", "top_terms", ".", "union", "(", "set", "(", "bucket", ")", ")" ]
Get the X most frequent terms in the text, and then probe down to get any other terms that have the same count as the last term. Args: depth (int): The number of terms. Returns: set: The set of frequent terms.
[ "Get", "the", "X", "most", "frequent", "terms", "in", "the", "text", "and", "then", "probe", "down", "to", "get", "any", "other", "terms", "that", "have", "the", "same", "count", "as", "the", "last", "term", "." ]
train
https://github.com/davidmcclure/textplot/blob/889b949a637d99097ecec44ed4bfee53b1964dee/textplot/text.py#L128-L152
davidmcclure/textplot
textplot/text.py
Text.unstem
def unstem(self, term): """ Given a stemmed term, get the most common unstemmed variant. Args: term (str): A stemmed term. Returns: str: The unstemmed token. """ originals = [] for i in self.terms[term]: originals.append(self.tokens[i]['unstemmed']) mode = Counter(originals).most_common(1) return mode[0][0]
python
def unstem(self, term): """ Given a stemmed term, get the most common unstemmed variant. Args: term (str): A stemmed term. Returns: str: The unstemmed token. """ originals = [] for i in self.terms[term]: originals.append(self.tokens[i]['unstemmed']) mode = Counter(originals).most_common(1) return mode[0][0]
[ "def", "unstem", "(", "self", ",", "term", ")", ":", "originals", "=", "[", "]", "for", "i", "in", "self", ".", "terms", "[", "term", "]", ":", "originals", ".", "append", "(", "self", ".", "tokens", "[", "i", "]", "[", "'unstemmed'", "]", ")", "mode", "=", "Counter", "(", "originals", ")", ".", "most_common", "(", "1", ")", "return", "mode", "[", "0", "]", "[", "0", "]" ]
Given a stemmed term, get the most common unstemmed variant. Args: term (str): A stemmed term. Returns: str: The unstemmed token.
[ "Given", "a", "stemmed", "term", "get", "the", "most", "common", "unstemmed", "variant", "." ]
train
https://github.com/davidmcclure/textplot/blob/889b949a637d99097ecec44ed4bfee53b1964dee/textplot/text.py#L155-L172
davidmcclure/textplot
textplot/text.py
Text.kde
def kde(self, term, bandwidth=2000, samples=1000, kernel='gaussian'): """ Estimate the kernel density of the instances of term in the text. Args: term (str): A stemmed term. bandwidth (int): The kernel bandwidth. samples (int): The number of evenly-spaced sample points. kernel (str): The kernel function. Returns: np.array: The density estimate. """ # Get the offsets of the term instances. terms = np.array(self.terms[term])[:, np.newaxis] # Fit the density estimator on the terms. kde = KernelDensity(kernel=kernel, bandwidth=bandwidth).fit(terms) # Score an evely-spaced array of samples. x_axis = np.linspace(0, len(self.tokens), samples)[:, np.newaxis] scores = kde.score_samples(x_axis) # Scale the scores to integrate to 1. return np.exp(scores) * (len(self.tokens) / samples)
python
def kde(self, term, bandwidth=2000, samples=1000, kernel='gaussian'): """ Estimate the kernel density of the instances of term in the text. Args: term (str): A stemmed term. bandwidth (int): The kernel bandwidth. samples (int): The number of evenly-spaced sample points. kernel (str): The kernel function. Returns: np.array: The density estimate. """ # Get the offsets of the term instances. terms = np.array(self.terms[term])[:, np.newaxis] # Fit the density estimator on the terms. kde = KernelDensity(kernel=kernel, bandwidth=bandwidth).fit(terms) # Score an evely-spaced array of samples. x_axis = np.linspace(0, len(self.tokens), samples)[:, np.newaxis] scores = kde.score_samples(x_axis) # Scale the scores to integrate to 1. return np.exp(scores) * (len(self.tokens) / samples)
[ "def", "kde", "(", "self", ",", "term", ",", "bandwidth", "=", "2000", ",", "samples", "=", "1000", ",", "kernel", "=", "'gaussian'", ")", ":", "# Get the offsets of the term instances.", "terms", "=", "np", ".", "array", "(", "self", ".", "terms", "[", "term", "]", ")", "[", ":", ",", "np", ".", "newaxis", "]", "# Fit the density estimator on the terms.", "kde", "=", "KernelDensity", "(", "kernel", "=", "kernel", ",", "bandwidth", "=", "bandwidth", ")", ".", "fit", "(", "terms", ")", "# Score an evely-spaced array of samples.", "x_axis", "=", "np", ".", "linspace", "(", "0", ",", "len", "(", "self", ".", "tokens", ")", ",", "samples", ")", "[", ":", ",", "np", ".", "newaxis", "]", "scores", "=", "kde", ".", "score_samples", "(", "x_axis", ")", "# Scale the scores to integrate to 1.", "return", "np", ".", "exp", "(", "scores", ")", "*", "(", "len", "(", "self", ".", "tokens", ")", "/", "samples", ")" ]
Estimate the kernel density of the instances of term in the text. Args: term (str): A stemmed term. bandwidth (int): The kernel bandwidth. samples (int): The number of evenly-spaced sample points. kernel (str): The kernel function. Returns: np.array: The density estimate.
[ "Estimate", "the", "kernel", "density", "of", "the", "instances", "of", "term", "in", "the", "text", "." ]
train
https://github.com/davidmcclure/textplot/blob/889b949a637d99097ecec44ed4bfee53b1964dee/textplot/text.py#L176-L202
davidmcclure/textplot
textplot/text.py
Text.score_intersect
def score_intersect(self, term1, term2, **kwargs): """ Compute the geometric area of the overlap between the kernel density estimates of two terms. Args: term1 (str) term2 (str) Returns: float """ t1_kde = self.kde(term1, **kwargs) t2_kde = self.kde(term2, **kwargs) # Integrate the overlap. overlap = np.minimum(t1_kde, t2_kde) return np.trapz(overlap)
python
def score_intersect(self, term1, term2, **kwargs): """ Compute the geometric area of the overlap between the kernel density estimates of two terms. Args: term1 (str) term2 (str) Returns: float """ t1_kde = self.kde(term1, **kwargs) t2_kde = self.kde(term2, **kwargs) # Integrate the overlap. overlap = np.minimum(t1_kde, t2_kde) return np.trapz(overlap)
[ "def", "score_intersect", "(", "self", ",", "term1", ",", "term2", ",", "*", "*", "kwargs", ")", ":", "t1_kde", "=", "self", ".", "kde", "(", "term1", ",", "*", "*", "kwargs", ")", "t2_kde", "=", "self", ".", "kde", "(", "term2", ",", "*", "*", "kwargs", ")", "# Integrate the overlap.", "overlap", "=", "np", ".", "minimum", "(", "t1_kde", ",", "t2_kde", ")", "return", "np", ".", "trapz", "(", "overlap", ")" ]
Compute the geometric area of the overlap between the kernel density estimates of two terms. Args: term1 (str) term2 (str) Returns: float
[ "Compute", "the", "geometric", "area", "of", "the", "overlap", "between", "the", "kernel", "density", "estimates", "of", "two", "terms", "." ]
train
https://github.com/davidmcclure/textplot/blob/889b949a637d99097ecec44ed4bfee53b1964dee/textplot/text.py#L205-L223
davidmcclure/textplot
textplot/text.py
Text.score_cosine
def score_cosine(self, term1, term2, **kwargs): """ Compute a weighting score based on the cosine distance between the kernel density estimates of two terms. Args: term1 (str) term2 (str) Returns: float """ t1_kde = self.kde(term1, **kwargs) t2_kde = self.kde(term2, **kwargs) return 1-distance.cosine(t1_kde, t2_kde)
python
def score_cosine(self, term1, term2, **kwargs): """ Compute a weighting score based on the cosine distance between the kernel density estimates of two terms. Args: term1 (str) term2 (str) Returns: float """ t1_kde = self.kde(term1, **kwargs) t2_kde = self.kde(term2, **kwargs) return 1-distance.cosine(t1_kde, t2_kde)
[ "def", "score_cosine", "(", "self", ",", "term1", ",", "term2", ",", "*", "*", "kwargs", ")", ":", "t1_kde", "=", "self", ".", "kde", "(", "term1", ",", "*", "*", "kwargs", ")", "t2_kde", "=", "self", ".", "kde", "(", "term2", ",", "*", "*", "kwargs", ")", "return", "1", "-", "distance", ".", "cosine", "(", "t1_kde", ",", "t2_kde", ")" ]
Compute a weighting score based on the cosine distance between the kernel density estimates of two terms. Args: term1 (str) term2 (str) Returns: float
[ "Compute", "a", "weighting", "score", "based", "on", "the", "cosine", "distance", "between", "the", "kernel", "density", "estimates", "of", "two", "terms", "." ]
train
https://github.com/davidmcclure/textplot/blob/889b949a637d99097ecec44ed4bfee53b1964dee/textplot/text.py#L226-L242
davidmcclure/textplot
textplot/text.py
Text.score_braycurtis
def score_braycurtis(self, term1, term2, **kwargs): """ Compute a weighting score based on the "City Block" distance between the kernel density estimates of two terms. Args: term1 (str) term2 (str) Returns: float """ t1_kde = self.kde(term1, **kwargs) t2_kde = self.kde(term2, **kwargs) return 1-distance.braycurtis(t1_kde, t2_kde)
python
def score_braycurtis(self, term1, term2, **kwargs): """ Compute a weighting score based on the "City Block" distance between the kernel density estimates of two terms. Args: term1 (str) term2 (str) Returns: float """ t1_kde = self.kde(term1, **kwargs) t2_kde = self.kde(term2, **kwargs) return 1-distance.braycurtis(t1_kde, t2_kde)
[ "def", "score_braycurtis", "(", "self", ",", "term1", ",", "term2", ",", "*", "*", "kwargs", ")", ":", "t1_kde", "=", "self", ".", "kde", "(", "term1", ",", "*", "*", "kwargs", ")", "t2_kde", "=", "self", ".", "kde", "(", "term2", ",", "*", "*", "kwargs", ")", "return", "1", "-", "distance", ".", "braycurtis", "(", "t1_kde", ",", "t2_kde", ")" ]
Compute a weighting score based on the "City Block" distance between the kernel density estimates of two terms. Args: term1 (str) term2 (str) Returns: float
[ "Compute", "a", "weighting", "score", "based", "on", "the", "City", "Block", "distance", "between", "the", "kernel", "density", "estimates", "of", "two", "terms", "." ]
train
https://github.com/davidmcclure/textplot/blob/889b949a637d99097ecec44ed4bfee53b1964dee/textplot/text.py#L245-L261
davidmcclure/textplot
textplot/text.py
Text.plot_term_kdes
def plot_term_kdes(self, words, **kwargs): """ Plot kernel density estimates for multiple words. Args: words (list): A list of unstemmed terms. """ stem = PorterStemmer().stem for word in words: kde = self.kde(stem(word), **kwargs) plt.plot(kde) plt.show()
python
def plot_term_kdes(self, words, **kwargs): """ Plot kernel density estimates for multiple words. Args: words (list): A list of unstemmed terms. """ stem = PorterStemmer().stem for word in words: kde = self.kde(stem(word), **kwargs) plt.plot(kde) plt.show()
[ "def", "plot_term_kdes", "(", "self", ",", "words", ",", "*", "*", "kwargs", ")", ":", "stem", "=", "PorterStemmer", "(", ")", ".", "stem", "for", "word", "in", "words", ":", "kde", "=", "self", ".", "kde", "(", "stem", "(", "word", ")", ",", "*", "*", "kwargs", ")", "plt", ".", "plot", "(", "kde", ")", "plt", ".", "show", "(", ")" ]
Plot kernel density estimates for multiple words. Args: words (list): A list of unstemmed terms.
[ "Plot", "kernel", "density", "estimates", "for", "multiple", "words", "." ]
train
https://github.com/davidmcclure/textplot/blob/889b949a637d99097ecec44ed4bfee53b1964dee/textplot/text.py#L264-L279
stephrdev/django-mongoforms
mongoforms/fields.py
MongoFormFieldGenerator.generate
def generate(self, field_name, field): """Tries to lookup a matching formfield generator (lowercase field-classname) and raises a NotImplementedError of no generator can be found. """ if hasattr(self, 'generate_%s' % field.__class__.__name__.lower()): generator = getattr( self, 'generate_%s' % field.__class__.__name__.lower()) return generator( field_name, field, (field.verbose_name or field_name).capitalize()) else: raise NotImplementedError('%s is not supported by MongoForm' % \ field.__class__.__name__)
python
def generate(self, field_name, field): """Tries to lookup a matching formfield generator (lowercase field-classname) and raises a NotImplementedError of no generator can be found. """ if hasattr(self, 'generate_%s' % field.__class__.__name__.lower()): generator = getattr( self, 'generate_%s' % field.__class__.__name__.lower()) return generator( field_name, field, (field.verbose_name or field_name).capitalize()) else: raise NotImplementedError('%s is not supported by MongoForm' % \ field.__class__.__name__)
[ "def", "generate", "(", "self", ",", "field_name", ",", "field", ")", ":", "if", "hasattr", "(", "self", ",", "'generate_%s'", "%", "field", ".", "__class__", ".", "__name__", ".", "lower", "(", ")", ")", ":", "generator", "=", "getattr", "(", "self", ",", "'generate_%s'", "%", "field", ".", "__class__", ".", "__name__", ".", "lower", "(", ")", ")", "return", "generator", "(", "field_name", ",", "field", ",", "(", "field", ".", "verbose_name", "or", "field_name", ")", ".", "capitalize", "(", ")", ")", "else", ":", "raise", "NotImplementedError", "(", "'%s is not supported by MongoForm'", "%", "field", ".", "__class__", ".", "__name__", ")" ]
Tries to lookup a matching formfield generator (lowercase field-classname) and raises a NotImplementedError of no generator can be found.
[ "Tries", "to", "lookup", "a", "matching", "formfield", "generator", "(", "lowercase", "field", "-", "classname", ")", "and", "raises", "a", "NotImplementedError", "of", "no", "generator", "can", "be", "found", "." ]
train
https://github.com/stephrdev/django-mongoforms/blob/6fa46824c438555c703f293d682ca92710938985/mongoforms/fields.py#L49-L65
twisted/axiom
axiom/sequence.py
List._fixIndex
def _fixIndex(self, index, truncate=False): """ @param truncate: If true, negative indices which go past the beginning of the list will be evaluated as zero. For example:: >>> L = List([1,2,3,4,5]) >>> len(L) 5 >>> L._fixIndex(-9, truncate=True) 0 """ assert not isinstance(index, slice), 'slices are not supported (yet)' if index < 0: index += self.length if index < 0: if not truncate: raise IndexError('stored List index out of range') else: index = 0 return index
python
def _fixIndex(self, index, truncate=False): """ @param truncate: If true, negative indices which go past the beginning of the list will be evaluated as zero. For example:: >>> L = List([1,2,3,4,5]) >>> len(L) 5 >>> L._fixIndex(-9, truncate=True) 0 """ assert not isinstance(index, slice), 'slices are not supported (yet)' if index < 0: index += self.length if index < 0: if not truncate: raise IndexError('stored List index out of range') else: index = 0 return index
[ "def", "_fixIndex", "(", "self", ",", "index", ",", "truncate", "=", "False", ")", ":", "assert", "not", "isinstance", "(", "index", ",", "slice", ")", ",", "'slices are not supported (yet)'", "if", "index", "<", "0", ":", "index", "+=", "self", ".", "length", "if", "index", "<", "0", ":", "if", "not", "truncate", ":", "raise", "IndexError", "(", "'stored List index out of range'", ")", "else", ":", "index", "=", "0", "return", "index" ]
@param truncate: If true, negative indices which go past the beginning of the list will be evaluated as zero. For example:: >>> L = List([1,2,3,4,5]) >>> len(L) 5 >>> L._fixIndex(-9, truncate=True) 0
[ "@param", "truncate", ":", "If", "true", "negative", "indices", "which", "go", "past", "the", "beginning", "of", "the", "list", "will", "be", "evaluated", "as", "zero", ".", "For", "example", "::" ]
train
https://github.com/twisted/axiom/blob/7de70bc8fe1bb81f9c2339fba8daec9eb2e92b68/axiom/sequence.py#L40-L60
twisted/axiom
axiom/queryutil.py
overlapping
def overlapping(startAttribute, # X endAttribute, # Y startValue, # A endValue, # B ): """ Return an L{axiom.iaxiom.IComparison} (an object that can be passed as the 'comparison' argument to Store.query/.sum/.count) which will constrain a query against 2 attributes for ranges which overlap with the given arguments. For a database with Items of class O which represent values in this configuration:: X Y (a) (b) |-------------------| (c) (d) |--------| (e) (f) |--------| (g) (h) |---| (i) (j) |------| (k) (l) |-------------------------------------| (a) (l) |-----------------------------| (c) (b) |------------------------| (c) (a) |----| (b) (l) |---------| The query:: myStore.query( O, findOverlapping(O.X, O.Y, a, b)) Will return a generator of Items of class O which represent segments a-b, c-d, e-f, k-l, a-l, c-b, c-a and b-l, but NOT segments g-h or i-j. (NOTE: If you want to pass attributes of different classes for startAttribute and endAttribute, read the implementation of this method to discover the additional join clauses required. This may be eliminated some day so for now, consider this method undefined over multiple classes.) In the database where this query is run, for an item N, all values of N.startAttribute must be less than N.endAttribute. startValue must be less than endValue. """ assert startValue <= endValue return OR( AND(startAttribute >= startValue, startAttribute <= endValue), AND(endAttribute >= startValue, endAttribute <= endValue), AND(startAttribute <= startValue, endAttribute >= endValue) )
python
def overlapping(startAttribute, # X endAttribute, # Y startValue, # A endValue, # B ): """ Return an L{axiom.iaxiom.IComparison} (an object that can be passed as the 'comparison' argument to Store.query/.sum/.count) which will constrain a query against 2 attributes for ranges which overlap with the given arguments. For a database with Items of class O which represent values in this configuration:: X Y (a) (b) |-------------------| (c) (d) |--------| (e) (f) |--------| (g) (h) |---| (i) (j) |------| (k) (l) |-------------------------------------| (a) (l) |-----------------------------| (c) (b) |------------------------| (c) (a) |----| (b) (l) |---------| The query:: myStore.query( O, findOverlapping(O.X, O.Y, a, b)) Will return a generator of Items of class O which represent segments a-b, c-d, e-f, k-l, a-l, c-b, c-a and b-l, but NOT segments g-h or i-j. (NOTE: If you want to pass attributes of different classes for startAttribute and endAttribute, read the implementation of this method to discover the additional join clauses required. This may be eliminated some day so for now, consider this method undefined over multiple classes.) In the database where this query is run, for an item N, all values of N.startAttribute must be less than N.endAttribute. startValue must be less than endValue. """ assert startValue <= endValue return OR( AND(startAttribute >= startValue, startAttribute <= endValue), AND(endAttribute >= startValue, endAttribute <= endValue), AND(startAttribute <= startValue, endAttribute >= endValue) )
[ "def", "overlapping", "(", "startAttribute", ",", "# X", "endAttribute", ",", "# Y", "startValue", ",", "# A", "endValue", ",", "# B", ")", ":", "assert", "startValue", "<=", "endValue", "return", "OR", "(", "AND", "(", "startAttribute", ">=", "startValue", ",", "startAttribute", "<=", "endValue", ")", ",", "AND", "(", "endAttribute", ">=", "startValue", ",", "endAttribute", "<=", "endValue", ")", ",", "AND", "(", "startAttribute", "<=", "startValue", ",", "endAttribute", ">=", "endValue", ")", ")" ]
Return an L{axiom.iaxiom.IComparison} (an object that can be passed as the 'comparison' argument to Store.query/.sum/.count) which will constrain a query against 2 attributes for ranges which overlap with the given arguments. For a database with Items of class O which represent values in this configuration:: X Y (a) (b) |-------------------| (c) (d) |--------| (e) (f) |--------| (g) (h) |---| (i) (j) |------| (k) (l) |-------------------------------------| (a) (l) |-----------------------------| (c) (b) |------------------------| (c) (a) |----| (b) (l) |---------| The query:: myStore.query( O, findOverlapping(O.X, O.Y, a, b)) Will return a generator of Items of class O which represent segments a-b, c-d, e-f, k-l, a-l, c-b, c-a and b-l, but NOT segments g-h or i-j. (NOTE: If you want to pass attributes of different classes for startAttribute and endAttribute, read the implementation of this method to discover the additional join clauses required. This may be eliminated some day so for now, consider this method undefined over multiple classes.) In the database where this query is run, for an item N, all values of N.startAttribute must be less than N.endAttribute. startValue must be less than endValue.
[ "Return", "an", "L", "{", "axiom", ".", "iaxiom", ".", "IComparison", "}", "(", "an", "object", "that", "can", "be", "passed", "as", "the", "comparison", "argument", "to", "Store", ".", "query", "/", ".", "sum", "/", ".", "count", ")", "which", "will", "constrain", "a", "query", "against", "2", "attributes", "for", "ranges", "which", "overlap", "with", "the", "given", "arguments", "." ]
train
https://github.com/twisted/axiom/blob/7de70bc8fe1bb81f9c2339fba8daec9eb2e92b68/axiom/queryutil.py#L21-L88
twisted/axiom
axiom/queryutil.py
_tupleCompare
def _tupleCompare(tuple1, ineq, tuple2, eq=lambda a,b: (a==b), ander=AND, orer=OR): """ Compare two 'in-database tuples'. Useful when sorting by a compound key and slicing into the middle of that query. """ orholder = [] for limit in range(len(tuple1)): eqconstraint = [ eq(elem1, elem2) for elem1, elem2 in zip(tuple1, tuple2)[:limit]] ineqconstraint = ineq(tuple1[limit], tuple2[limit]) orholder.append(ander(*(eqconstraint + [ineqconstraint]))) return orer(*orholder)
python
def _tupleCompare(tuple1, ineq, tuple2, eq=lambda a,b: (a==b), ander=AND, orer=OR): """ Compare two 'in-database tuples'. Useful when sorting by a compound key and slicing into the middle of that query. """ orholder = [] for limit in range(len(tuple1)): eqconstraint = [ eq(elem1, elem2) for elem1, elem2 in zip(tuple1, tuple2)[:limit]] ineqconstraint = ineq(tuple1[limit], tuple2[limit]) orholder.append(ander(*(eqconstraint + [ineqconstraint]))) return orer(*orholder)
[ "def", "_tupleCompare", "(", "tuple1", ",", "ineq", ",", "tuple2", ",", "eq", "=", "lambda", "a", ",", "b", ":", "(", "a", "==", "b", ")", ",", "ander", "=", "AND", ",", "orer", "=", "OR", ")", ":", "orholder", "=", "[", "]", "for", "limit", "in", "range", "(", "len", "(", "tuple1", ")", ")", ":", "eqconstraint", "=", "[", "eq", "(", "elem1", ",", "elem2", ")", "for", "elem1", ",", "elem2", "in", "zip", "(", "tuple1", ",", "tuple2", ")", "[", ":", "limit", "]", "]", "ineqconstraint", "=", "ineq", "(", "tuple1", "[", "limit", "]", ",", "tuple2", "[", "limit", "]", ")", "orholder", ".", "append", "(", "ander", "(", "*", "(", "eqconstraint", "+", "[", "ineqconstraint", "]", ")", ")", ")", "return", "orer", "(", "*", "orholder", ")" ]
Compare two 'in-database tuples'. Useful when sorting by a compound key and slicing into the middle of that query.
[ "Compare", "two", "in", "-", "database", "tuples", ".", "Useful", "when", "sorting", "by", "a", "compound", "key", "and", "slicing", "into", "the", "middle", "of", "that", "query", "." ]
train
https://github.com/twisted/axiom/blob/7de70bc8fe1bb81f9c2339fba8daec9eb2e92b68/axiom/queryutil.py#L90-L105
ubc/ubcpi
ubcpi/ubcpi.py
truncate_rationale
def truncate_rationale(rationale, max_length=MAX_RATIONALE_SIZE_IN_EVENT): """ Truncates the rationale for analytics event emission if necessary Args: rationale (string): the string value of the rationale max_length (int): the max length for truncation Returns: truncated_value (string): the possibly truncated version of the rationale was_truncated (bool): returns true if the rationale is truncated """ if isinstance(rationale, basestring) and max_length is not None and len(rationale) > max_length: return rationale[0:max_length], True else: return rationale, False
python
def truncate_rationale(rationale, max_length=MAX_RATIONALE_SIZE_IN_EVENT): """ Truncates the rationale for analytics event emission if necessary Args: rationale (string): the string value of the rationale max_length (int): the max length for truncation Returns: truncated_value (string): the possibly truncated version of the rationale was_truncated (bool): returns true if the rationale is truncated """ if isinstance(rationale, basestring) and max_length is not None and len(rationale) > max_length: return rationale[0:max_length], True else: return rationale, False
[ "def", "truncate_rationale", "(", "rationale", ",", "max_length", "=", "MAX_RATIONALE_SIZE_IN_EVENT", ")", ":", "if", "isinstance", "(", "rationale", ",", "basestring", ")", "and", "max_length", "is", "not", "None", "and", "len", "(", "rationale", ")", ">", "max_length", ":", "return", "rationale", "[", "0", ":", "max_length", "]", ",", "True", "else", ":", "return", "rationale", ",", "False" ]
Truncates the rationale for analytics event emission if necessary Args: rationale (string): the string value of the rationale max_length (int): the max length for truncation Returns: truncated_value (string): the possibly truncated version of the rationale was_truncated (bool): returns true if the rationale is truncated
[ "Truncates", "the", "rationale", "for", "analytics", "event", "emission", "if", "necessary" ]
train
https://github.com/ubc/ubcpi/blob/7b6de03f93f3a4a8af4b92dfde7c69eeaf21f46e/ubcpi/ubcpi.py#L33-L49
ubc/ubcpi
ubcpi/ubcpi.py
validate_options
def validate_options(options): """ Validate the options that course author set up and return errors in a dict if there is any """ errors = [] if int(options['rationale_size']['min']) < 1: errors.append(_('Minimum Characters')) if int(options['rationale_size']['max']) < 0 or int(options['rationale_size']['max']) > MAX_RATIONALE_SIZE: errors.append(_('Maximum Characters')) if not any(error in [_('Minimum Characters'), _('Maximum Characters')] for error in errors) \ and int(options['rationale_size']['max']) <= int(options['rationale_size']['min']): errors += [_('Minimum Characters'), _('Maximum Characters')] try: if options['algo']['num_responses'] != '#' and int(options['algo']['num_responses']) < 0: errors.append(_('Number of Responses')) except ValueError: errors.append(_('Not an Integer')) if not errors: return None else: return {'options_error': _('Invalid Option(s): ') + ', '.join(errors)}
python
def validate_options(options): """ Validate the options that course author set up and return errors in a dict if there is any """ errors = [] if int(options['rationale_size']['min']) < 1: errors.append(_('Minimum Characters')) if int(options['rationale_size']['max']) < 0 or int(options['rationale_size']['max']) > MAX_RATIONALE_SIZE: errors.append(_('Maximum Characters')) if not any(error in [_('Minimum Characters'), _('Maximum Characters')] for error in errors) \ and int(options['rationale_size']['max']) <= int(options['rationale_size']['min']): errors += [_('Minimum Characters'), _('Maximum Characters')] try: if options['algo']['num_responses'] != '#' and int(options['algo']['num_responses']) < 0: errors.append(_('Number of Responses')) except ValueError: errors.append(_('Not an Integer')) if not errors: return None else: return {'options_error': _('Invalid Option(s): ') + ', '.join(errors)}
[ "def", "validate_options", "(", "options", ")", ":", "errors", "=", "[", "]", "if", "int", "(", "options", "[", "'rationale_size'", "]", "[", "'min'", "]", ")", "<", "1", ":", "errors", ".", "append", "(", "_", "(", "'Minimum Characters'", ")", ")", "if", "int", "(", "options", "[", "'rationale_size'", "]", "[", "'max'", "]", ")", "<", "0", "or", "int", "(", "options", "[", "'rationale_size'", "]", "[", "'max'", "]", ")", ">", "MAX_RATIONALE_SIZE", ":", "errors", ".", "append", "(", "_", "(", "'Maximum Characters'", ")", ")", "if", "not", "any", "(", "error", "in", "[", "_", "(", "'Minimum Characters'", ")", ",", "_", "(", "'Maximum Characters'", ")", "]", "for", "error", "in", "errors", ")", "and", "int", "(", "options", "[", "'rationale_size'", "]", "[", "'max'", "]", ")", "<=", "int", "(", "options", "[", "'rationale_size'", "]", "[", "'min'", "]", ")", ":", "errors", "+=", "[", "_", "(", "'Minimum Characters'", ")", ",", "_", "(", "'Maximum Characters'", ")", "]", "try", ":", "if", "options", "[", "'algo'", "]", "[", "'num_responses'", "]", "!=", "'#'", "and", "int", "(", "options", "[", "'algo'", "]", "[", "'num_responses'", "]", ")", "<", "0", ":", "errors", ".", "append", "(", "_", "(", "'Number of Responses'", ")", ")", "except", "ValueError", ":", "errors", ".", "append", "(", "_", "(", "'Not an Integer'", ")", ")", "if", "not", "errors", ":", "return", "None", "else", ":", "return", "{", "'options_error'", ":", "_", "(", "'Invalid Option(s): '", ")", "+", "', '", ".", "join", "(", "errors", ")", "}" ]
Validate the options that course author set up and return errors in a dict if there is any
[ "Validate", "the", "options", "that", "course", "author", "set", "up", "and", "return", "errors", "in", "a", "dict", "if", "there", "is", "any" ]
train
https://github.com/ubc/ubcpi/blob/7b6de03f93f3a4a8af4b92dfde7c69eeaf21f46e/ubcpi/ubcpi.py#L52-L74
ubc/ubcpi
ubcpi/ubcpi.py
MissingDataFetcherMixin.get_student_item_dict
def get_student_item_dict(self, anonymous_user_id=None): """Create a student_item_dict from our surrounding context. See also: submissions.api for details. Args: anonymous_user_id(str): A unique anonymous_user_id for (user, course) pair. Returns: (dict): The student item associated with this XBlock instance. This includes the student id, item id, and course id. """ item_id = self._serialize_opaque_key(self.scope_ids.usage_id) # This is not the real way course_ids should work, but this is a # temporary expediency for LMS integration if hasattr(self, "xmodule_runtime"): course_id = self.get_course_id() # pylint:disable=E1101 if anonymous_user_id: student_id = anonymous_user_id else: student_id = self.xmodule_runtime.anonymous_student_id # pylint:disable=E1101 else: course_id = "edX/Enchantment_101/April_1" if self.scope_ids.user_id is None: student_id = '' else: student_id = unicode(self.scope_ids.user_id) student_item_dict = dict( student_id=student_id, item_id=item_id, course_id=course_id, item_type='ubcpi' ) return student_item_dict
python
def get_student_item_dict(self, anonymous_user_id=None): """Create a student_item_dict from our surrounding context. See also: submissions.api for details. Args: anonymous_user_id(str): A unique anonymous_user_id for (user, course) pair. Returns: (dict): The student item associated with this XBlock instance. This includes the student id, item id, and course id. """ item_id = self._serialize_opaque_key(self.scope_ids.usage_id) # This is not the real way course_ids should work, but this is a # temporary expediency for LMS integration if hasattr(self, "xmodule_runtime"): course_id = self.get_course_id() # pylint:disable=E1101 if anonymous_user_id: student_id = anonymous_user_id else: student_id = self.xmodule_runtime.anonymous_student_id # pylint:disable=E1101 else: course_id = "edX/Enchantment_101/April_1" if self.scope_ids.user_id is None: student_id = '' else: student_id = unicode(self.scope_ids.user_id) student_item_dict = dict( student_id=student_id, item_id=item_id, course_id=course_id, item_type='ubcpi' ) return student_item_dict
[ "def", "get_student_item_dict", "(", "self", ",", "anonymous_user_id", "=", "None", ")", ":", "item_id", "=", "self", ".", "_serialize_opaque_key", "(", "self", ".", "scope_ids", ".", "usage_id", ")", "# This is not the real way course_ids should work, but this is a", "# temporary expediency for LMS integration", "if", "hasattr", "(", "self", ",", "\"xmodule_runtime\"", ")", ":", "course_id", "=", "self", ".", "get_course_id", "(", ")", "# pylint:disable=E1101", "if", "anonymous_user_id", ":", "student_id", "=", "anonymous_user_id", "else", ":", "student_id", "=", "self", ".", "xmodule_runtime", ".", "anonymous_student_id", "# pylint:disable=E1101", "else", ":", "course_id", "=", "\"edX/Enchantment_101/April_1\"", "if", "self", ".", "scope_ids", ".", "user_id", "is", "None", ":", "student_id", "=", "''", "else", ":", "student_id", "=", "unicode", "(", "self", ".", "scope_ids", ".", "user_id", ")", "student_item_dict", "=", "dict", "(", "student_id", "=", "student_id", ",", "item_id", "=", "item_id", ",", "course_id", "=", "course_id", ",", "item_type", "=", "'ubcpi'", ")", "return", "student_item_dict" ]
Create a student_item_dict from our surrounding context. See also: submissions.api for details. Args: anonymous_user_id(str): A unique anonymous_user_id for (user, course) pair. Returns: (dict): The student item associated with this XBlock instance. This includes the student id, item id, and course id.
[ "Create", "a", "student_item_dict", "from", "our", "surrounding", "context", "." ]
train
https://github.com/ubc/ubcpi/blob/7b6de03f93f3a4a8af4b92dfde7c69eeaf21f46e/ubcpi/ubcpi.py#L87-L123
ubc/ubcpi
ubcpi/persistence.py
get_answers_for_student
def get_answers_for_student(student_item): """ Retrieve answers from backend for a student and question Args: student_item (dict): The location of the problem this submission is associated with, as defined by a course, student, and item. Returns: Answers: answers for the student """ submissions = sub_api.get_submissions(student_item) if not submissions: return Answers() latest_submission = submissions[0] latest_answer_item = latest_submission.get('answer', {}) return Answers(latest_answer_item.get(ANSWER_LIST_KEY, []))
python
def get_answers_for_student(student_item): """ Retrieve answers from backend for a student and question Args: student_item (dict): The location of the problem this submission is associated with, as defined by a course, student, and item. Returns: Answers: answers for the student """ submissions = sub_api.get_submissions(student_item) if not submissions: return Answers() latest_submission = submissions[0] latest_answer_item = latest_submission.get('answer', {}) return Answers(latest_answer_item.get(ANSWER_LIST_KEY, []))
[ "def", "get_answers_for_student", "(", "student_item", ")", ":", "submissions", "=", "sub_api", ".", "get_submissions", "(", "student_item", ")", "if", "not", "submissions", ":", "return", "Answers", "(", ")", "latest_submission", "=", "submissions", "[", "0", "]", "latest_answer_item", "=", "latest_submission", ".", "get", "(", "'answer'", ",", "{", "}", ")", "return", "Answers", "(", "latest_answer_item", ".", "get", "(", "ANSWER_LIST_KEY", ",", "[", "]", ")", ")" ]
Retrieve answers from backend for a student and question Args: student_item (dict): The location of the problem this submission is associated with, as defined by a course, student, and item. Returns: Answers: answers for the student
[ "Retrieve", "answers", "from", "backend", "for", "a", "student", "and", "question" ]
train
https://github.com/ubc/ubcpi/blob/7b6de03f93f3a4a8af4b92dfde7c69eeaf21f46e/ubcpi/persistence.py#L25-L42
ubc/ubcpi
ubcpi/persistence.py
add_answer_for_student
def add_answer_for_student(student_item, vote, rationale): """ Add an answer for a student to the backend Args: student_item (dict): The location of the problem this submission is associated with, as defined by a course, student, and item. vote (int): the option that student voted for rationale (str): the reason why the student vote for the option """ answers = get_answers_for_student(student_item) answers.add_answer(vote, rationale) sub_api.create_submission(student_item, { ANSWER_LIST_KEY: answers.get_answers_as_list() })
python
def add_answer_for_student(student_item, vote, rationale): """ Add an answer for a student to the backend Args: student_item (dict): The location of the problem this submission is associated with, as defined by a course, student, and item. vote (int): the option that student voted for rationale (str): the reason why the student vote for the option """ answers = get_answers_for_student(student_item) answers.add_answer(vote, rationale) sub_api.create_submission(student_item, { ANSWER_LIST_KEY: answers.get_answers_as_list() })
[ "def", "add_answer_for_student", "(", "student_item", ",", "vote", ",", "rationale", ")", ":", "answers", "=", "get_answers_for_student", "(", "student_item", ")", "answers", ".", "add_answer", "(", "vote", ",", "rationale", ")", "sub_api", ".", "create_submission", "(", "student_item", ",", "{", "ANSWER_LIST_KEY", ":", "answers", ".", "get_answers_as_list", "(", ")", "}", ")" ]
Add an answer for a student to the backend Args: student_item (dict): The location of the problem this submission is associated with, as defined by a course, student, and item. vote (int): the option that student voted for rationale (str): the reason why the student vote for the option
[ "Add", "an", "answer", "for", "a", "student", "to", "the", "backend" ]
train
https://github.com/ubc/ubcpi/blob/7b6de03f93f3a4a8af4b92dfde7c69eeaf21f46e/ubcpi/persistence.py#L45-L60
ubc/ubcpi
ubcpi/persistence.py
Answers._safe_get
def _safe_get(self, revision, key): """ Get an answer data (vote or rationale) by revision Args: revision (int): the revision number for student answer, could be 0 (original) or 1 (revised) key (str); key for retrieve answer data, could be VOTE_KEY or RATIONALE_KEY Returns: the answer data or None if revision doesn't exists """ if self.has_revision(revision): return self.raw_answers[revision].get(key) else: return None
python
def _safe_get(self, revision, key): """ Get an answer data (vote or rationale) by revision Args: revision (int): the revision number for student answer, could be 0 (original) or 1 (revised) key (str); key for retrieve answer data, could be VOTE_KEY or RATIONALE_KEY Returns: the answer data or None if revision doesn't exists """ if self.has_revision(revision): return self.raw_answers[revision].get(key) else: return None
[ "def", "_safe_get", "(", "self", ",", "revision", ",", "key", ")", ":", "if", "self", ".", "has_revision", "(", "revision", ")", ":", "return", "self", ".", "raw_answers", "[", "revision", "]", ".", "get", "(", "key", ")", "else", ":", "return", "None" ]
Get an answer data (vote or rationale) by revision Args: revision (int): the revision number for student answer, could be 0 (original) or 1 (revised) key (str); key for retrieve answer data, could be VOTE_KEY or RATIONALE_KEY Returns: the answer data or None if revision doesn't exists
[ "Get", "an", "answer", "data", "(", "vote", "or", "rationale", ")", "by", "revision" ]
train
https://github.com/ubc/ubcpi/blob/7b6de03f93f3a4a8af4b92dfde7c69eeaf21f46e/ubcpi/persistence.py#L77-L93
ubc/ubcpi
ubcpi/persistence.py
Answers.add_answer
def add_answer(self, vote, rationale): """ Add an answer Args: vote (int): the option that student voted for rationale (str): the reason why the student vote for the option """ self.raw_answers.append({ VOTE_KEY: vote, RATIONALE_KEY: rationale, })
python
def add_answer(self, vote, rationale): """ Add an answer Args: vote (int): the option that student voted for rationale (str): the reason why the student vote for the option """ self.raw_answers.append({ VOTE_KEY: vote, RATIONALE_KEY: rationale, })
[ "def", "add_answer", "(", "self", ",", "vote", ",", "rationale", ")", ":", "self", ".", "raw_answers", ".", "append", "(", "{", "VOTE_KEY", ":", "vote", ",", "RATIONALE_KEY", ":", "rationale", ",", "}", ")" ]
Add an answer Args: vote (int): the option that student voted for rationale (str): the reason why the student vote for the option
[ "Add", "an", "answer" ]
train
https://github.com/ubc/ubcpi/blob/7b6de03f93f3a4a8af4b92dfde7c69eeaf21f46e/ubcpi/persistence.py#L134-L145
swharden/PyOriginTools
documentation/PyOrigin-examples/examples.py
exceptionToString
def exceptionToString(e,silent=False): """when you "except Exception as e", give me the e and I'll give you a string.""" exc_type, exc_obj, exc_tb = sys.exc_info() s=("\n"+"="*50+"\n") s+="EXCEPTION THROWN UNEXPECTEDLY\n" s+=" FILE: %s\n"%os.path.split(exc_tb.tb_frame.f_code.co_filename)[1] s+=" LINE: %s\n"%exc_tb.tb_lineno s+=" TYPE: %s\n"%exc_type s+='-'*50+'\n' s+=traceback.format_exc() s=s.strip()+'\n'+"="*50+"\n" if not silent: print(s) return s
python
def exceptionToString(e,silent=False): """when you "except Exception as e", give me the e and I'll give you a string.""" exc_type, exc_obj, exc_tb = sys.exc_info() s=("\n"+"="*50+"\n") s+="EXCEPTION THROWN UNEXPECTEDLY\n" s+=" FILE: %s\n"%os.path.split(exc_tb.tb_frame.f_code.co_filename)[1] s+=" LINE: %s\n"%exc_tb.tb_lineno s+=" TYPE: %s\n"%exc_type s+='-'*50+'\n' s+=traceback.format_exc() s=s.strip()+'\n'+"="*50+"\n" if not silent: print(s) return s
[ "def", "exceptionToString", "(", "e", ",", "silent", "=", "False", ")", ":", "exc_type", ",", "exc_obj", ",", "exc_tb", "=", "sys", ".", "exc_info", "(", ")", "s", "=", "(", "\"\\n\"", "+", "\"=\"", "*", "50", "+", "\"\\n\"", ")", "s", "+=", "\"EXCEPTION THROWN UNEXPECTEDLY\\n\"", "s", "+=", "\" FILE: %s\\n\"", "%", "os", ".", "path", ".", "split", "(", "exc_tb", ".", "tb_frame", ".", "f_code", ".", "co_filename", ")", "[", "1", "]", "s", "+=", "\" LINE: %s\\n\"", "%", "exc_tb", ".", "tb_lineno", "s", "+=", "\" TYPE: %s\\n\"", "%", "exc_type", "s", "+=", "'-'", "*", "50", "+", "'\\n'", "s", "+=", "traceback", ".", "format_exc", "(", ")", "s", "=", "s", ".", "strip", "(", ")", "+", "'\\n'", "+", "\"=\"", "*", "50", "+", "\"\\n\"", "if", "not", "silent", ":", "print", "(", "s", ")", "return", "s" ]
when you "except Exception as e", give me the e and I'll give you a string.
[ "when", "you", "except", "Exception", "as", "e", "give", "me", "the", "e", "and", "I", "ll", "give", "you", "a", "string", "." ]
train
https://github.com/swharden/PyOriginTools/blob/536fb8e11234ffdc27e26b1800e0358179ca7d26/documentation/PyOrigin-examples/examples.py#L38-L51
twisted/axiom
axiom/substore.py
SubStore.createNew
def createNew(cls, store, pathSegments): """ Create a new SubStore, allocating a new file space for it. """ if isinstance(pathSegments, basestring): raise ValueError( 'Received %r instead of a sequence' % (pathSegments,)) if store.dbdir is None: self = cls(store=store, storepath=None) else: storepath = store.newDirectory(*pathSegments) self = cls(store=store, storepath=storepath) self.open() self.close() return self
python
def createNew(cls, store, pathSegments): """ Create a new SubStore, allocating a new file space for it. """ if isinstance(pathSegments, basestring): raise ValueError( 'Received %r instead of a sequence' % (pathSegments,)) if store.dbdir is None: self = cls(store=store, storepath=None) else: storepath = store.newDirectory(*pathSegments) self = cls(store=store, storepath=storepath) self.open() self.close() return self
[ "def", "createNew", "(", "cls", ",", "store", ",", "pathSegments", ")", ":", "if", "isinstance", "(", "pathSegments", ",", "basestring", ")", ":", "raise", "ValueError", "(", "'Received %r instead of a sequence'", "%", "(", "pathSegments", ",", ")", ")", "if", "store", ".", "dbdir", "is", "None", ":", "self", "=", "cls", "(", "store", "=", "store", ",", "storepath", "=", "None", ")", "else", ":", "storepath", "=", "store", ".", "newDirectory", "(", "*", "pathSegments", ")", "self", "=", "cls", "(", "store", "=", "store", ",", "storepath", "=", "storepath", ")", "self", ".", "open", "(", ")", "self", ".", "close", "(", ")", "return", "self" ]
Create a new SubStore, allocating a new file space for it.
[ "Create", "a", "new", "SubStore", "allocating", "a", "new", "file", "space", "for", "it", "." ]
train
https://github.com/twisted/axiom/blob/7de70bc8fe1bb81f9c2339fba8daec9eb2e92b68/axiom/substore.py#L25-L39
twisted/axiom
axiom/substore.py
SubStore.createStore
def createStore(self, debug, journalMode=None): """ Create the actual Store this Substore represents. """ if self.storepath is None: self.store._memorySubstores.append(self) # don't fall out of cache if self.store.filesdir is None: filesdir = None else: filesdir = (self.store.filesdir.child("_substore_files") .child(str(self.storeID)) .path) return Store(parent=self.store, filesdir=filesdir, idInParent=self.storeID, debug=debug, journalMode=journalMode) else: return Store(self.storepath.path, parent=self.store, idInParent=self.storeID, debug=debug, journalMode=journalMode)
python
def createStore(self, debug, journalMode=None): """ Create the actual Store this Substore represents. """ if self.storepath is None: self.store._memorySubstores.append(self) # don't fall out of cache if self.store.filesdir is None: filesdir = None else: filesdir = (self.store.filesdir.child("_substore_files") .child(str(self.storeID)) .path) return Store(parent=self.store, filesdir=filesdir, idInParent=self.storeID, debug=debug, journalMode=journalMode) else: return Store(self.storepath.path, parent=self.store, idInParent=self.storeID, debug=debug, journalMode=journalMode)
[ "def", "createStore", "(", "self", ",", "debug", ",", "journalMode", "=", "None", ")", ":", "if", "self", ".", "storepath", "is", "None", ":", "self", ".", "store", ".", "_memorySubstores", ".", "append", "(", "self", ")", "# don't fall out of cache", "if", "self", ".", "store", ".", "filesdir", "is", "None", ":", "filesdir", "=", "None", "else", ":", "filesdir", "=", "(", "self", ".", "store", ".", "filesdir", ".", "child", "(", "\"_substore_files\"", ")", ".", "child", "(", "str", "(", "self", ".", "storeID", ")", ")", ".", "path", ")", "return", "Store", "(", "parent", "=", "self", ".", "store", ",", "filesdir", "=", "filesdir", ",", "idInParent", "=", "self", ".", "storeID", ",", "debug", "=", "debug", ",", "journalMode", "=", "journalMode", ")", "else", ":", "return", "Store", "(", "self", ".", "storepath", ".", "path", ",", "parent", "=", "self", ".", "store", ",", "idInParent", "=", "self", ".", "storeID", ",", "debug", "=", "debug", ",", "journalMode", "=", "journalMode", ")" ]
Create the actual Store this Substore represents.
[ "Create", "the", "actual", "Store", "this", "Substore", "represents", "." ]
train
https://github.com/twisted/axiom/blob/7de70bc8fe1bb81f9c2339fba8daec9eb2e92b68/axiom/substore.py#L62-L84
twisted/axiom
axiom/tags.py
upgradeCatalog1to2
def upgradeCatalog1to2(oldCatalog): """ Create _TagName instances which version 2 of Catalog automatically creates for use in determining the tagNames result, but which version 1 of Catalog did not create. """ newCatalog = oldCatalog.upgradeVersion('tag_catalog', 1, 2, tagCount=oldCatalog.tagCount) tags = newCatalog.store.query(Tag, Tag.catalog == newCatalog) tagNames = tags.getColumn("name").distinct() for t in tagNames: _TagName(store=newCatalog.store, catalog=newCatalog, name=t) return newCatalog
python
def upgradeCatalog1to2(oldCatalog): """ Create _TagName instances which version 2 of Catalog automatically creates for use in determining the tagNames result, but which version 1 of Catalog did not create. """ newCatalog = oldCatalog.upgradeVersion('tag_catalog', 1, 2, tagCount=oldCatalog.tagCount) tags = newCatalog.store.query(Tag, Tag.catalog == newCatalog) tagNames = tags.getColumn("name").distinct() for t in tagNames: _TagName(store=newCatalog.store, catalog=newCatalog, name=t) return newCatalog
[ "def", "upgradeCatalog1to2", "(", "oldCatalog", ")", ":", "newCatalog", "=", "oldCatalog", ".", "upgradeVersion", "(", "'tag_catalog'", ",", "1", ",", "2", ",", "tagCount", "=", "oldCatalog", ".", "tagCount", ")", "tags", "=", "newCatalog", ".", "store", ".", "query", "(", "Tag", ",", "Tag", ".", "catalog", "==", "newCatalog", ")", "tagNames", "=", "tags", ".", "getColumn", "(", "\"name\"", ")", ".", "distinct", "(", ")", "for", "t", "in", "tagNames", ":", "_TagName", "(", "store", "=", "newCatalog", ".", "store", ",", "catalog", "=", "newCatalog", ",", "name", "=", "t", ")", "return", "newCatalog" ]
Create _TagName instances which version 2 of Catalog automatically creates for use in determining the tagNames result, but which version 1 of Catalog did not create.
[ "Create", "_TagName", "instances", "which", "version", "2", "of", "Catalog", "automatically", "creates", "for", "use", "in", "determining", "the", "tagNames", "result", "but", "which", "version", "1", "of", "Catalog", "did", "not", "create", "." ]
train
https://github.com/twisted/axiom/blob/7de70bc8fe1bb81f9c2339fba8daec9eb2e92b68/axiom/tags.py#L110-L122
twisted/axiom
axiom/tags.py
Catalog.tagNames
def tagNames(self): """ Return an iterator of unicode strings - the unique tag names which have been applied objects in this catalog. """ return self.store.query(_TagName, _TagName.catalog == self).getColumn("name")
python
def tagNames(self): """ Return an iterator of unicode strings - the unique tag names which have been applied objects in this catalog. """ return self.store.query(_TagName, _TagName.catalog == self).getColumn("name")
[ "def", "tagNames", "(", "self", ")", ":", "return", "self", ".", "store", ".", "query", "(", "_TagName", ",", "_TagName", ".", "catalog", "==", "self", ")", ".", "getColumn", "(", "\"name\"", ")" ]
Return an iterator of unicode strings - the unique tag names which have been applied objects in this catalog.
[ "Return", "an", "iterator", "of", "unicode", "strings", "-", "the", "unique", "tag", "names", "which", "have", "been", "applied", "objects", "in", "this", "catalog", "." ]
train
https://github.com/twisted/axiom/blob/7de70bc8fe1bb81f9c2339fba8daec9eb2e92b68/axiom/tags.py#L83-L88
twisted/axiom
axiom/tags.py
Catalog.tagsOf
def tagsOf(self, obj): """ Return an iterator of unicode strings - the tag names which apply to the given object. """ return self.store.query( Tag, AND(Tag.catalog == self, Tag.object == obj)).getColumn("name")
python
def tagsOf(self, obj): """ Return an iterator of unicode strings - the tag names which apply to the given object. """ return self.store.query( Tag, AND(Tag.catalog == self, Tag.object == obj)).getColumn("name")
[ "def", "tagsOf", "(", "self", ",", "obj", ")", ":", "return", "self", ".", "store", ".", "query", "(", "Tag", ",", "AND", "(", "Tag", ".", "catalog", "==", "self", ",", "Tag", ".", "object", "==", "obj", ")", ")", ".", "getColumn", "(", "\"name\"", ")" ]
Return an iterator of unicode strings - the tag names which apply to the given object.
[ "Return", "an", "iterator", "of", "unicode", "strings", "-", "the", "tag", "names", "which", "apply", "to", "the", "given", "object", "." ]
train
https://github.com/twisted/axiom/blob/7de70bc8fe1bb81f9c2339fba8daec9eb2e92b68/axiom/tags.py#L91-L99
twisted/axiom
axiom/attributes.py
SQLAttribute.loaded
def loaded(self, oself, dbval): """ This method is invoked when the item is loaded from the database, and when a transaction is reverted which restores this attribute's value. @param oself: an instance of an item which has this attribute. @param dbval: the underlying database value which was retrieved. """ setattr(oself, self.dbunderlying, dbval) delattr(oself, self.underlying)
python
def loaded(self, oself, dbval): """ This method is invoked when the item is loaded from the database, and when a transaction is reverted which restores this attribute's value. @param oself: an instance of an item which has this attribute. @param dbval: the underlying database value which was retrieved. """ setattr(oself, self.dbunderlying, dbval) delattr(oself, self.underlying)
[ "def", "loaded", "(", "self", ",", "oself", ",", "dbval", ")", ":", "setattr", "(", "oself", ",", "self", ".", "dbunderlying", ",", "dbval", ")", "delattr", "(", "oself", ",", "self", ".", "underlying", ")" ]
This method is invoked when the item is loaded from the database, and when a transaction is reverted which restores this attribute's value. @param oself: an instance of an item which has this attribute. @param dbval: the underlying database value which was retrieved.
[ "This", "method", "is", "invoked", "when", "the", "item", "is", "loaded", "from", "the", "database", "and", "when", "a", "transaction", "is", "reverted", "which", "restores", "this", "attribute", "s", "value", "." ]
train
https://github.com/twisted/axiom/blob/7de70bc8fe1bb81f9c2339fba8daec9eb2e92b68/axiom/attributes.py#L459-L469
twisted/axiom
axiom/attributes.py
SQLAttribute._convertPyval
def _convertPyval(self, oself, pyval): """ Convert a Python value to a value suitable for inserting into the database. @param oself: The object on which this descriptor is an attribute. @param pyval: The value to be converted. @return: A value legal for this column in the database. """ # convert to dbval later, I guess? if pyval is None and not self.allowNone: raise TypeError("attribute [%s.%s = %s()] must not be None" % ( self.classname, self.attrname, self.__class__.__name__)) return self.infilter(pyval, oself, oself.store)
python
def _convertPyval(self, oself, pyval): """ Convert a Python value to a value suitable for inserting into the database. @param oself: The object on which this descriptor is an attribute. @param pyval: The value to be converted. @return: A value legal for this column in the database. """ # convert to dbval later, I guess? if pyval is None and not self.allowNone: raise TypeError("attribute [%s.%s = %s()] must not be None" % ( self.classname, self.attrname, self.__class__.__name__)) return self.infilter(pyval, oself, oself.store)
[ "def", "_convertPyval", "(", "self", ",", "oself", ",", "pyval", ")", ":", "# convert to dbval later, I guess?", "if", "pyval", "is", "None", "and", "not", "self", ".", "allowNone", ":", "raise", "TypeError", "(", "\"attribute [%s.%s = %s()] must not be None\"", "%", "(", "self", ".", "classname", ",", "self", ".", "attrname", ",", "self", ".", "__class__", ".", "__name__", ")", ")", "return", "self", ".", "infilter", "(", "pyval", ",", "oself", ",", "oself", ".", "store", ")" ]
Convert a Python value to a value suitable for inserting into the database. @param oself: The object on which this descriptor is an attribute. @param pyval: The value to be converted. @return: A value legal for this column in the database.
[ "Convert", "a", "Python", "value", "to", "a", "value", "suitable", "for", "inserting", "into", "the", "database", "." ]
train
https://github.com/twisted/axiom/blob/7de70bc8fe1bb81f9c2339fba8daec9eb2e92b68/axiom/attributes.py#L473-L487
twisted/axiom
axiom/attributes.py
SequenceComparison._queryContainer
def _queryContainer(self, store): """ Generate and cache the subselect SQL and its arguments. Return the subselect SQL. """ if self._subselectSQL is None: sql, args = self.container._sqlAndArgs('SELECT', self.container._queryTarget) self._subselectSQL, self._subselectArgs = sql, args return self._subselectSQL
python
def _queryContainer(self, store): """ Generate and cache the subselect SQL and its arguments. Return the subselect SQL. """ if self._subselectSQL is None: sql, args = self.container._sqlAndArgs('SELECT', self.container._queryTarget) self._subselectSQL, self._subselectArgs = sql, args return self._subselectSQL
[ "def", "_queryContainer", "(", "self", ",", "store", ")", ":", "if", "self", ".", "_subselectSQL", "is", "None", ":", "sql", ",", "args", "=", "self", ".", "container", ".", "_sqlAndArgs", "(", "'SELECT'", ",", "self", ".", "container", ".", "_queryTarget", ")", "self", ".", "_subselectSQL", ",", "self", ".", "_subselectArgs", "=", "sql", ",", "args", "return", "self", ".", "_subselectSQL" ]
Generate and cache the subselect SQL and its arguments. Return the subselect SQL.
[ "Generate", "and", "cache", "the", "subselect", "SQL", "and", "its", "arguments", ".", "Return", "the", "subselect", "SQL", "." ]
train
https://github.com/twisted/axiom/blob/7de70bc8fe1bb81f9c2339fba8daec9eb2e92b68/axiom/attributes.py#L729-L738
twisted/axiom
axiom/attributes.py
SequenceComparison._sequenceContainer
def _sequenceContainer(self, store): """ Smash whatever we got into a list and save the result in case we are executed multiple times. This keeps us from tripping up over generators and the like. """ if self._sequence is None: self._sequence = list(self.container) self._clause = ', '.join(['?'] * len(self._sequence)) return self._clause
python
def _sequenceContainer(self, store): """ Smash whatever we got into a list and save the result in case we are executed multiple times. This keeps us from tripping up over generators and the like. """ if self._sequence is None: self._sequence = list(self.container) self._clause = ', '.join(['?'] * len(self._sequence)) return self._clause
[ "def", "_sequenceContainer", "(", "self", ",", "store", ")", ":", "if", "self", ".", "_sequence", "is", "None", ":", "self", ".", "_sequence", "=", "list", "(", "self", ".", "container", ")", "self", ".", "_clause", "=", "', '", ".", "join", "(", "[", "'?'", "]", "*", "len", "(", "self", ".", "_sequence", ")", ")", "return", "self", ".", "_clause" ]
Smash whatever we got into a list and save the result in case we are executed multiple times. This keeps us from tripping up over generators and the like.
[ "Smash", "whatever", "we", "got", "into", "a", "list", "and", "save", "the", "result", "in", "case", "we", "are", "executed", "multiple", "times", ".", "This", "keeps", "us", "from", "tripping", "up", "over", "generators", "and", "the", "like", "." ]
train
https://github.com/twisted/axiom/blob/7de70bc8fe1bb81f9c2339fba8daec9eb2e92b68/axiom/attributes.py#L751-L760
twisted/axiom
axiom/attributes.py
SequenceComparison._sequenceArgs
def _sequenceArgs(self, store): """ Filter each element of the data using the attribute type being tested for containment and hand back the resulting list. """ self._sequenceContainer(store) # Force _sequence to be valid return [self.attribute.infilter(pyval, None, store) for pyval in self._sequence]
python
def _sequenceArgs(self, store): """ Filter each element of the data using the attribute type being tested for containment and hand back the resulting list. """ self._sequenceContainer(store) # Force _sequence to be valid return [self.attribute.infilter(pyval, None, store) for pyval in self._sequence]
[ "def", "_sequenceArgs", "(", "self", ",", "store", ")", ":", "self", ".", "_sequenceContainer", "(", "store", ")", "# Force _sequence to be valid", "return", "[", "self", ".", "attribute", ".", "infilter", "(", "pyval", ",", "None", ",", "store", ")", "for", "pyval", "in", "self", ".", "_sequence", "]" ]
Filter each element of the data using the attribute type being tested for containment and hand back the resulting list.
[ "Filter", "each", "element", "of", "the", "data", "using", "the", "attribute", "type", "being", "tested", "for", "containment", "and", "hand", "back", "the", "resulting", "list", "." ]
train
https://github.com/twisted/axiom/blob/7de70bc8fe1bb81f9c2339fba8daec9eb2e92b68/axiom/attributes.py#L763-L769
twisted/axiom
axiom/attributes.py
path.prepareInsert
def prepareInsert(self, oself, store): """ Prepare for insertion into the database by making the dbunderlying attribute of the item a relative pathname with respect to the store rather than an absolute pathname. """ if self.relative: fspath = self.__get__(oself) oself.__dirty__[self.attrname] = self, self.infilter(fspath, oself, store)
python
def prepareInsert(self, oself, store): """ Prepare for insertion into the database by making the dbunderlying attribute of the item a relative pathname with respect to the store rather than an absolute pathname. """ if self.relative: fspath = self.__get__(oself) oself.__dirty__[self.attrname] = self, self.infilter(fspath, oself, store)
[ "def", "prepareInsert", "(", "self", ",", "oself", ",", "store", ")", ":", "if", "self", ".", "relative", ":", "fspath", "=", "self", ".", "__get__", "(", "oself", ")", "oself", ".", "__dirty__", "[", "self", ".", "attrname", "]", "=", "self", ",", "self", ".", "infilter", "(", "fspath", ",", "oself", ",", "store", ")" ]
Prepare for insertion into the database by making the dbunderlying attribute of the item a relative pathname with respect to the store rather than an absolute pathname.
[ "Prepare", "for", "insertion", "into", "the", "database", "by", "making", "the", "dbunderlying", "attribute", "of", "the", "item", "a", "relative", "pathname", "with", "respect", "to", "the", "store", "rather", "than", "an", "absolute", "pathname", "." ]
train
https://github.com/twisted/axiom/blob/7de70bc8fe1bb81f9c2339fba8daec9eb2e92b68/axiom/attributes.py#L1021-L1029
upgrad/django-deletes
djangodeletes/softdeletes/models.py
SoftDeletable.restore
def restore(self, time=None): """ Undeletes the object. Returns True if undeleted, False if it was already not deleted """ if self.deleted: time = time if time else self.deleted_at if time == self.deleted_at: self.deleted = False self.save() return True else: return False return False
python
def restore(self, time=None): """ Undeletes the object. Returns True if undeleted, False if it was already not deleted """ if self.deleted: time = time if time else self.deleted_at if time == self.deleted_at: self.deleted = False self.save() return True else: return False return False
[ "def", "restore", "(", "self", ",", "time", "=", "None", ")", ":", "if", "self", ".", "deleted", ":", "time", "=", "time", "if", "time", "else", "self", ".", "deleted_at", "if", "time", "==", "self", ".", "deleted_at", ":", "self", ".", "deleted", "=", "False", "self", ".", "save", "(", ")", "return", "True", "else", ":", "return", "False", "return", "False" ]
Undeletes the object. Returns True if undeleted, False if it was already not deleted
[ "Undeletes", "the", "object", ".", "Returns", "True", "if", "undeleted", "False", "if", "it", "was", "already", "not", "deleted" ]
train
https://github.com/upgrad/django-deletes/blob/05cebc3323840badc67b926ec1ba2640d6cd12be/djangodeletes/softdeletes/models.py#L97-L109
upgrad/django-deletes
djangodeletes/softdeletes/models.py
SoftDeletable.full_restore
def full_restore(self, using=None): using = using or router.db_for_write(self.__class__, instance=self) restore_counter = Counter() if self.deleted: time = self.deleted_at else: return restore_counter self.collector = models.deletion.Collector(using=using) self.collector.collect([self]) for model, instances in self.collector.data.items(): instances_to_delete = sorted(instances, key=attrgetter("pk")) self.sort() for qs in self.collector.fast_deletes: # TODO make sure the queryset delete has been made a soft delete for qs_instance in qs: restore_counter.update([qs_instance._meta.model_name]) qs_instance.restore(time=time) for model, instances in self.collector.data.items(): for instance in instances: restore_counter.update([instance._meta.model_name]) instance.restore(time=time) return sum(restore_counter.values()), dict(restore_counter) """ Restores itself, as well as objects that might have been deleted along with it if cascade is the deletion strategy """ self.collector = models.deletion.Collector(using=using) self.collector.collect([self], keep_parents=keep_parents)
python
def full_restore(self, using=None): using = using or router.db_for_write(self.__class__, instance=self) restore_counter = Counter() if self.deleted: time = self.deleted_at else: return restore_counter self.collector = models.deletion.Collector(using=using) self.collector.collect([self]) for model, instances in self.collector.data.items(): instances_to_delete = sorted(instances, key=attrgetter("pk")) self.sort() for qs in self.collector.fast_deletes: # TODO make sure the queryset delete has been made a soft delete for qs_instance in qs: restore_counter.update([qs_instance._meta.model_name]) qs_instance.restore(time=time) for model, instances in self.collector.data.items(): for instance in instances: restore_counter.update([instance._meta.model_name]) instance.restore(time=time) return sum(restore_counter.values()), dict(restore_counter) """ Restores itself, as well as objects that might have been deleted along with it if cascade is the deletion strategy """ self.collector = models.deletion.Collector(using=using) self.collector.collect([self], keep_parents=keep_parents)
[ "def", "full_restore", "(", "self", ",", "using", "=", "None", ")", ":", "using", "=", "using", "or", "router", ".", "db_for_write", "(", "self", ".", "__class__", ",", "instance", "=", "self", ")", "restore_counter", "=", "Counter", "(", ")", "if", "self", ".", "deleted", ":", "time", "=", "self", ".", "deleted_at", "else", ":", "return", "restore_counter", "self", ".", "collector", "=", "models", ".", "deletion", ".", "Collector", "(", "using", "=", "using", ")", "self", ".", "collector", ".", "collect", "(", "[", "self", "]", ")", "for", "model", ",", "instances", "in", "self", ".", "collector", ".", "data", ".", "items", "(", ")", ":", "instances_to_delete", "=", "sorted", "(", "instances", ",", "key", "=", "attrgetter", "(", "\"pk\"", ")", ")", "self", ".", "sort", "(", ")", "for", "qs", "in", "self", ".", "collector", ".", "fast_deletes", ":", "# TODO make sure the queryset delete has been made a soft delete", "for", "qs_instance", "in", "qs", ":", "restore_counter", ".", "update", "(", "[", "qs_instance", ".", "_meta", ".", "model_name", "]", ")", "qs_instance", ".", "restore", "(", "time", "=", "time", ")", "for", "model", ",", "instances", "in", "self", ".", "collector", ".", "data", ".", "items", "(", ")", ":", "for", "instance", "in", "instances", ":", "restore_counter", ".", "update", "(", "[", "instance", ".", "_meta", ".", "model_name", "]", ")", "instance", ".", "restore", "(", "time", "=", "time", ")", "return", "sum", "(", "restore_counter", ".", "values", "(", ")", ")", ",", "dict", "(", "restore_counter", ")", "self", ".", "collector", "=", "models", ".", "deletion", ".", "Collector", "(", "using", "=", "using", ")", "self", ".", "collector", ".", "collect", "(", "[", "self", "]", ",", "keep_parents", "=", "keep_parents", ")" ]
Restores itself, as well as objects that might have been deleted along with it if cascade is the deletion strategy
[ "Restores", "itself", "as", "well", "as", "objects", "that", "might", "have", "been", "deleted", "along", "with", "it", "if", "cascade", "is", "the", "deletion", "strategy" ]
train
https://github.com/upgrad/django-deletes/blob/05cebc3323840badc67b926ec1ba2640d6cd12be/djangodeletes/softdeletes/models.py#L111-L144
skymill/automated-ebs-snapshots
automated_ebs_snapshots/connection_manager.py
connect_to_ec2
def connect_to_ec2(region='us-east-1', access_key=None, secret_key=None): """ Connect to AWS ec2 :type region: str :param region: AWS region to connect to :type access_key: str :param access_key: AWS access key id :type secret_key: str :param secret_key: AWS secret access key :returns: boto.ec2.connection.EC2Connection -- EC2 connection """ if access_key: # Connect using supplied credentials logger.info('Connecting to AWS EC2 in {}'.format(region)) connection = ec2.connect_to_region( region, aws_access_key_id=access_key, aws_secret_access_key=secret_key) else: # Fetch instance metadata metadata = get_instance_metadata(timeout=1, num_retries=1) if metadata: try: region = metadata['placement']['availability-zone'][:-1] except KeyError: pass # Connect using env vars or boto credentials logger.info('Connecting to AWS EC2 in {}'.format(region)) connection = ec2.connect_to_region(region) if not connection: logger.error('An error occurred when connecting to EC2') sys.exit(1) return connection
python
def connect_to_ec2(region='us-east-1', access_key=None, secret_key=None): """ Connect to AWS ec2 :type region: str :param region: AWS region to connect to :type access_key: str :param access_key: AWS access key id :type secret_key: str :param secret_key: AWS secret access key :returns: boto.ec2.connection.EC2Connection -- EC2 connection """ if access_key: # Connect using supplied credentials logger.info('Connecting to AWS EC2 in {}'.format(region)) connection = ec2.connect_to_region( region, aws_access_key_id=access_key, aws_secret_access_key=secret_key) else: # Fetch instance metadata metadata = get_instance_metadata(timeout=1, num_retries=1) if metadata: try: region = metadata['placement']['availability-zone'][:-1] except KeyError: pass # Connect using env vars or boto credentials logger.info('Connecting to AWS EC2 in {}'.format(region)) connection = ec2.connect_to_region(region) if not connection: logger.error('An error occurred when connecting to EC2') sys.exit(1) return connection
[ "def", "connect_to_ec2", "(", "region", "=", "'us-east-1'", ",", "access_key", "=", "None", ",", "secret_key", "=", "None", ")", ":", "if", "access_key", ":", "# Connect using supplied credentials", "logger", ".", "info", "(", "'Connecting to AWS EC2 in {}'", ".", "format", "(", "region", ")", ")", "connection", "=", "ec2", ".", "connect_to_region", "(", "region", ",", "aws_access_key_id", "=", "access_key", ",", "aws_secret_access_key", "=", "secret_key", ")", "else", ":", "# Fetch instance metadata", "metadata", "=", "get_instance_metadata", "(", "timeout", "=", "1", ",", "num_retries", "=", "1", ")", "if", "metadata", ":", "try", ":", "region", "=", "metadata", "[", "'placement'", "]", "[", "'availability-zone'", "]", "[", ":", "-", "1", "]", "except", "KeyError", ":", "pass", "# Connect using env vars or boto credentials", "logger", ".", "info", "(", "'Connecting to AWS EC2 in {}'", ".", "format", "(", "region", ")", ")", "connection", "=", "ec2", ".", "connect_to_region", "(", "region", ")", "if", "not", "connection", ":", "logger", ".", "error", "(", "'An error occurred when connecting to EC2'", ")", "sys", ".", "exit", "(", "1", ")", "return", "connection" ]
Connect to AWS ec2 :type region: str :param region: AWS region to connect to :type access_key: str :param access_key: AWS access key id :type secret_key: str :param secret_key: AWS secret access key :returns: boto.ec2.connection.EC2Connection -- EC2 connection
[ "Connect", "to", "AWS", "ec2" ]
train
https://github.com/skymill/automated-ebs-snapshots/blob/9595bc49d458f6ffb93430722757d2284e878fab/automated_ebs_snapshots/connection_manager.py#L11-L47
dmirecki/pyMorfologik
pymorfologik/parsing.py
DictParser.parse
def parse(self, output): """ Find stems for a given text. """ output = self._get_lines_with_stems(output) words = self._make_unique(output) return self._parse_for_simple_stems(words)
python
def parse(self, output): """ Find stems for a given text. """ output = self._get_lines_with_stems(output) words = self._make_unique(output) return self._parse_for_simple_stems(words)
[ "def", "parse", "(", "self", ",", "output", ")", ":", "output", "=", "self", ".", "_get_lines_with_stems", "(", "output", ")", "words", "=", "self", ".", "_make_unique", "(", "output", ")", "return", "self", ".", "_parse_for_simple_stems", "(", "words", ")" ]
Find stems for a given text.
[ "Find", "stems", "for", "a", "given", "text", "." ]
train
https://github.com/dmirecki/pyMorfologik/blob/e4d93a82e8b4c7a108f01e0456fbeb8024df0259/pymorfologik/parsing.py#L47-L53
OCHA-DAP/hdx-python-api
src/hdx/hdx_locations.py
Locations.validlocations
def validlocations(configuration=None): # type: () -> List[Dict] """ Read valid locations from HDX Args: configuration (Optional[Configuration]): HDX configuration. Defaults to global configuration. Returns: List[Dict]: A list of valid locations """ if Locations._validlocations is None: if configuration is None: configuration = Configuration.read() Locations._validlocations = configuration.call_remoteckan('group_list', {'all_fields': True}) return Locations._validlocations
python
def validlocations(configuration=None): # type: () -> List[Dict] """ Read valid locations from HDX Args: configuration (Optional[Configuration]): HDX configuration. Defaults to global configuration. Returns: List[Dict]: A list of valid locations """ if Locations._validlocations is None: if configuration is None: configuration = Configuration.read() Locations._validlocations = configuration.call_remoteckan('group_list', {'all_fields': True}) return Locations._validlocations
[ "def", "validlocations", "(", "configuration", "=", "None", ")", ":", "# type: () -> List[Dict]", "if", "Locations", ".", "_validlocations", "is", "None", ":", "if", "configuration", "is", "None", ":", "configuration", "=", "Configuration", ".", "read", "(", ")", "Locations", ".", "_validlocations", "=", "configuration", ".", "call_remoteckan", "(", "'group_list'", ",", "{", "'all_fields'", ":", "True", "}", ")", "return", "Locations", ".", "_validlocations" ]
Read valid locations from HDX Args: configuration (Optional[Configuration]): HDX configuration. Defaults to global configuration. Returns: List[Dict]: A list of valid locations
[ "Read", "valid", "locations", "from", "HDX" ]
train
https://github.com/OCHA-DAP/hdx-python-api/blob/212440f54f73805826a16db77dbcb6033b18a313/src/hdx/hdx_locations.py#L14-L29
OCHA-DAP/hdx-python-api
src/hdx/hdx_locations.py
Locations.get_location_from_HDX_code
def get_location_from_HDX_code(code, locations=None, configuration=None): # type: (str, Optional[List[Dict]], Optional[Configuration]) -> Optional[str] """Get location from HDX location code Args: code (str): code for which to get location name locations (Optional[List[Dict]]): Valid locations list. Defaults to list downloaded from HDX. configuration (Optional[Configuration]): HDX configuration. Defaults to global configuration. Returns: Optional[str]: location name """ if locations is None: locations = Locations.validlocations(configuration) for locdict in locations: if code.upper() == locdict['name'].upper(): return locdict['title'] return None
python
def get_location_from_HDX_code(code, locations=None, configuration=None): # type: (str, Optional[List[Dict]], Optional[Configuration]) -> Optional[str] """Get location from HDX location code Args: code (str): code for which to get location name locations (Optional[List[Dict]]): Valid locations list. Defaults to list downloaded from HDX. configuration (Optional[Configuration]): HDX configuration. Defaults to global configuration. Returns: Optional[str]: location name """ if locations is None: locations = Locations.validlocations(configuration) for locdict in locations: if code.upper() == locdict['name'].upper(): return locdict['title'] return None
[ "def", "get_location_from_HDX_code", "(", "code", ",", "locations", "=", "None", ",", "configuration", "=", "None", ")", ":", "# type: (str, Optional[List[Dict]], Optional[Configuration]) -> Optional[str]", "if", "locations", "is", "None", ":", "locations", "=", "Locations", ".", "validlocations", "(", "configuration", ")", "for", "locdict", "in", "locations", ":", "if", "code", ".", "upper", "(", ")", "==", "locdict", "[", "'name'", "]", ".", "upper", "(", ")", ":", "return", "locdict", "[", "'title'", "]", "return", "None" ]
Get location from HDX location code Args: code (str): code for which to get location name locations (Optional[List[Dict]]): Valid locations list. Defaults to list downloaded from HDX. configuration (Optional[Configuration]): HDX configuration. Defaults to global configuration. Returns: Optional[str]: location name
[ "Get", "location", "from", "HDX", "location", "code" ]
train
https://github.com/OCHA-DAP/hdx-python-api/blob/212440f54f73805826a16db77dbcb6033b18a313/src/hdx/hdx_locations.py#L46-L63
OCHA-DAP/hdx-python-api
src/hdx/hdx_locations.py
Locations.get_HDX_code_from_location
def get_HDX_code_from_location(location, locations=None, configuration=None): # type: (str, Optional[List[Dict]], Optional[Configuration]) -> Optional[str] """Get HDX code for location Args: location (str): Location for which to get HDX code locations (Optional[List[Dict]]): Valid locations list. Defaults to list downloaded from HDX. configuration (Optional[Configuration]): HDX configuration. Defaults to global configuration. Returns: Optional[str]: HDX code or None """ if locations is None: locations = Locations.validlocations(configuration) locationupper = location.upper() for locdict in locations: locationcode = locdict['name'].upper() if locationupper == locationcode: return locationcode for locdict in locations: if locationupper == locdict['title'].upper(): return locdict['name'].upper() return None
python
def get_HDX_code_from_location(location, locations=None, configuration=None): # type: (str, Optional[List[Dict]], Optional[Configuration]) -> Optional[str] """Get HDX code for location Args: location (str): Location for which to get HDX code locations (Optional[List[Dict]]): Valid locations list. Defaults to list downloaded from HDX. configuration (Optional[Configuration]): HDX configuration. Defaults to global configuration. Returns: Optional[str]: HDX code or None """ if locations is None: locations = Locations.validlocations(configuration) locationupper = location.upper() for locdict in locations: locationcode = locdict['name'].upper() if locationupper == locationcode: return locationcode for locdict in locations: if locationupper == locdict['title'].upper(): return locdict['name'].upper() return None
[ "def", "get_HDX_code_from_location", "(", "location", ",", "locations", "=", "None", ",", "configuration", "=", "None", ")", ":", "# type: (str, Optional[List[Dict]], Optional[Configuration]) -> Optional[str]", "if", "locations", "is", "None", ":", "locations", "=", "Locations", ".", "validlocations", "(", "configuration", ")", "locationupper", "=", "location", ".", "upper", "(", ")", "for", "locdict", "in", "locations", ":", "locationcode", "=", "locdict", "[", "'name'", "]", ".", "upper", "(", ")", "if", "locationupper", "==", "locationcode", ":", "return", "locationcode", "for", "locdict", "in", "locations", ":", "if", "locationupper", "==", "locdict", "[", "'title'", "]", ".", "upper", "(", ")", ":", "return", "locdict", "[", "'name'", "]", ".", "upper", "(", ")", "return", "None" ]
Get HDX code for location Args: location (str): Location for which to get HDX code locations (Optional[List[Dict]]): Valid locations list. Defaults to list downloaded from HDX. configuration (Optional[Configuration]): HDX configuration. Defaults to global configuration. Returns: Optional[str]: HDX code or None
[ "Get", "HDX", "code", "for", "location" ]
train
https://github.com/OCHA-DAP/hdx-python-api/blob/212440f54f73805826a16db77dbcb6033b18a313/src/hdx/hdx_locations.py#L66-L89
OCHA-DAP/hdx-python-api
src/hdx/hdx_locations.py
Locations.get_HDX_code_from_location_partial
def get_HDX_code_from_location_partial(location, locations=None, configuration=None): # type: (str, Optional[List[Dict]], Optional[Configuration]) -> Tuple[Optional[str], bool] """Get HDX code for location Args: location (str): Location for which to get HDX code locations (Optional[List[Dict]]): Valid locations list. Defaults to list downloaded from HDX. configuration (Optional[Configuration]): HDX configuration. Defaults to global configuration. Returns: Tuple[Optional[str], bool]: HDX code and if the match is exact or (None, False) for no match """ hdx_code = Locations.get_HDX_code_from_location(location, locations, configuration) if hdx_code is not None: return hdx_code, True if locations is None: locations = Locations.validlocations(configuration) locationupper = location.upper() for locdict in locations: locationname = locdict['title'].upper() if locationupper in locationname or locationname in locationupper: return locdict['name'].upper(), False return None, False
python
def get_HDX_code_from_location_partial(location, locations=None, configuration=None): # type: (str, Optional[List[Dict]], Optional[Configuration]) -> Tuple[Optional[str], bool] """Get HDX code for location Args: location (str): Location for which to get HDX code locations (Optional[List[Dict]]): Valid locations list. Defaults to list downloaded from HDX. configuration (Optional[Configuration]): HDX configuration. Defaults to global configuration. Returns: Tuple[Optional[str], bool]: HDX code and if the match is exact or (None, False) for no match """ hdx_code = Locations.get_HDX_code_from_location(location, locations, configuration) if hdx_code is not None: return hdx_code, True if locations is None: locations = Locations.validlocations(configuration) locationupper = location.upper() for locdict in locations: locationname = locdict['title'].upper() if locationupper in locationname or locationname in locationupper: return locdict['name'].upper(), False return None, False
[ "def", "get_HDX_code_from_location_partial", "(", "location", ",", "locations", "=", "None", ",", "configuration", "=", "None", ")", ":", "# type: (str, Optional[List[Dict]], Optional[Configuration]) -> Tuple[Optional[str], bool]", "hdx_code", "=", "Locations", ".", "get_HDX_code_from_location", "(", "location", ",", "locations", ",", "configuration", ")", "if", "hdx_code", "is", "not", "None", ":", "return", "hdx_code", ",", "True", "if", "locations", "is", "None", ":", "locations", "=", "Locations", ".", "validlocations", "(", "configuration", ")", "locationupper", "=", "location", ".", "upper", "(", ")", "for", "locdict", "in", "locations", ":", "locationname", "=", "locdict", "[", "'title'", "]", ".", "upper", "(", ")", "if", "locationupper", "in", "locationname", "or", "locationname", "in", "locationupper", ":", "return", "locdict", "[", "'name'", "]", ".", "upper", "(", ")", ",", "False", "return", "None", ",", "False" ]
Get HDX code for location Args: location (str): Location for which to get HDX code locations (Optional[List[Dict]]): Valid locations list. Defaults to list downloaded from HDX. configuration (Optional[Configuration]): HDX configuration. Defaults to global configuration. Returns: Tuple[Optional[str], bool]: HDX code and if the match is exact or (None, False) for no match
[ "Get", "HDX", "code", "for", "location" ]
train
https://github.com/OCHA-DAP/hdx-python-api/blob/212440f54f73805826a16db77dbcb6033b18a313/src/hdx/hdx_locations.py#L92-L117
libyal/dtfabric
scripts/validate-definitions.py
Main
def Main(): """The main program function. Returns: bool: True if successful or False if not. """ argument_parser = argparse.ArgumentParser( description='Validates dtFabric format definitions.') argument_parser.add_argument( 'source', nargs='?', action='store', metavar='PATH', default=None, help=( 'path of the file or directory containing the dtFabric format ' 'definitions.')) options = argument_parser.parse_args() if not options.source: print('Source value is missing.') print('') argument_parser.print_help() print('') return False if not os.path.exists(options.source): print('No such file: {0:s}'.format(options.source)) print('') return False logging.basicConfig( level=logging.INFO, format='[%(levelname)s] %(message)s') source_is_directory = os.path.isdir(options.source) validator = DefinitionsValidator() if source_is_directory: source_description = os.path.join(options.source, '*.yaml') else: source_description = options.source print('Validating dtFabric definitions in: {0:s}'.format(source_description)) if source_is_directory: result = validator.CheckDirectory(options.source) else: result = validator.CheckFile(options.source) if not result: print('FAILURE') else: print('SUCCESS') return result
python
def Main(): """The main program function. Returns: bool: True if successful or False if not. """ argument_parser = argparse.ArgumentParser( description='Validates dtFabric format definitions.') argument_parser.add_argument( 'source', nargs='?', action='store', metavar='PATH', default=None, help=( 'path of the file or directory containing the dtFabric format ' 'definitions.')) options = argument_parser.parse_args() if not options.source: print('Source value is missing.') print('') argument_parser.print_help() print('') return False if not os.path.exists(options.source): print('No such file: {0:s}'.format(options.source)) print('') return False logging.basicConfig( level=logging.INFO, format='[%(levelname)s] %(message)s') source_is_directory = os.path.isdir(options.source) validator = DefinitionsValidator() if source_is_directory: source_description = os.path.join(options.source, '*.yaml') else: source_description = options.source print('Validating dtFabric definitions in: {0:s}'.format(source_description)) if source_is_directory: result = validator.CheckDirectory(options.source) else: result = validator.CheckFile(options.source) if not result: print('FAILURE') else: print('SUCCESS') return result
[ "def", "Main", "(", ")", ":", "argument_parser", "=", "argparse", ".", "ArgumentParser", "(", "description", "=", "'Validates dtFabric format definitions.'", ")", "argument_parser", ".", "add_argument", "(", "'source'", ",", "nargs", "=", "'?'", ",", "action", "=", "'store'", ",", "metavar", "=", "'PATH'", ",", "default", "=", "None", ",", "help", "=", "(", "'path of the file or directory containing the dtFabric format '", "'definitions.'", ")", ")", "options", "=", "argument_parser", ".", "parse_args", "(", ")", "if", "not", "options", ".", "source", ":", "print", "(", "'Source value is missing.'", ")", "print", "(", "''", ")", "argument_parser", ".", "print_help", "(", ")", "print", "(", "''", ")", "return", "False", "if", "not", "os", ".", "path", ".", "exists", "(", "options", ".", "source", ")", ":", "print", "(", "'No such file: {0:s}'", ".", "format", "(", "options", ".", "source", ")", ")", "print", "(", "''", ")", "return", "False", "logging", ".", "basicConfig", "(", "level", "=", "logging", ".", "INFO", ",", "format", "=", "'[%(levelname)s] %(message)s'", ")", "source_is_directory", "=", "os", ".", "path", ".", "isdir", "(", "options", ".", "source", ")", "validator", "=", "DefinitionsValidator", "(", ")", "if", "source_is_directory", ":", "source_description", "=", "os", ".", "path", ".", "join", "(", "options", ".", "source", ",", "'*.yaml'", ")", "else", ":", "source_description", "=", "options", ".", "source", "print", "(", "'Validating dtFabric definitions in: {0:s}'", ".", "format", "(", "source_description", ")", ")", "if", "source_is_directory", ":", "result", "=", "validator", ".", "CheckDirectory", "(", "options", ".", "source", ")", "else", ":", "result", "=", "validator", ".", "CheckFile", "(", "options", ".", "source", ")", "if", "not", "result", ":", "print", "(", "'FAILURE'", ")", "else", ":", "print", "(", "'SUCCESS'", ")", "return", "result" ]
The main program function. Returns: bool: True if successful or False if not.
[ "The", "main", "program", "function", "." ]
train
https://github.com/libyal/dtfabric/blob/0d2b5719fa257f6e5c661a406737ebcf8c8db266/scripts/validate-definitions.py#L77-L130
libyal/dtfabric
scripts/validate-definitions.py
DefinitionsValidator.CheckDirectory
def CheckDirectory(self, path, extension='yaml'): """Validates definition files in a directory. Args: path (str): path of the definition file. extension (Optional[str]): extension of the filenames to read. Returns: bool: True if the directory contains valid definitions. """ result = True if extension: glob_spec = os.path.join(path, '*.{0:s}'.format(extension)) else: glob_spec = os.path.join(path, '*') for definition_file in sorted(glob.glob(glob_spec)): if not self.CheckFile(definition_file): result = False return result
python
def CheckDirectory(self, path, extension='yaml'): """Validates definition files in a directory. Args: path (str): path of the definition file. extension (Optional[str]): extension of the filenames to read. Returns: bool: True if the directory contains valid definitions. """ result = True if extension: glob_spec = os.path.join(path, '*.{0:s}'.format(extension)) else: glob_spec = os.path.join(path, '*') for definition_file in sorted(glob.glob(glob_spec)): if not self.CheckFile(definition_file): result = False return result
[ "def", "CheckDirectory", "(", "self", ",", "path", ",", "extension", "=", "'yaml'", ")", ":", "result", "=", "True", "if", "extension", ":", "glob_spec", "=", "os", ".", "path", ".", "join", "(", "path", ",", "'*.{0:s}'", ".", "format", "(", "extension", ")", ")", "else", ":", "glob_spec", "=", "os", ".", "path", ".", "join", "(", "path", ",", "'*'", ")", "for", "definition_file", "in", "sorted", "(", "glob", ".", "glob", "(", "glob_spec", ")", ")", ":", "if", "not", "self", ".", "CheckFile", "(", "definition_file", ")", ":", "result", "=", "False", "return", "result" ]
Validates definition files in a directory. Args: path (str): path of the definition file. extension (Optional[str]): extension of the filenames to read. Returns: bool: True if the directory contains valid definitions.
[ "Validates", "definition", "files", "in", "a", "directory", "." ]
train
https://github.com/libyal/dtfabric/blob/0d2b5719fa257f6e5c661a406737ebcf8c8db266/scripts/validate-definitions.py#L22-L43
libyal/dtfabric
scripts/validate-definitions.py
DefinitionsValidator.CheckFile
def CheckFile(self, path): """Validates the definition in a file. Args: path (str): path of the definition file. Returns: bool: True if the file contains valid definitions. """ print('Checking: {0:s}'.format(path)) definitions_registry = registry.DataTypeDefinitionsRegistry() definitions_reader = reader.YAMLDataTypeDefinitionsFileReader() result = False try: definitions_reader.ReadFile(definitions_registry, path) result = True except KeyError as exception: logging.warning(( 'Unable to register data type definition in file: {0:s} with ' 'error: {1:s}').format(path, exception)) except errors.FormatError as exception: logging.warning( 'Unable to validate file: {0:s} with error: {1:s}'.format( path, exception)) return result
python
def CheckFile(self, path): """Validates the definition in a file. Args: path (str): path of the definition file. Returns: bool: True if the file contains valid definitions. """ print('Checking: {0:s}'.format(path)) definitions_registry = registry.DataTypeDefinitionsRegistry() definitions_reader = reader.YAMLDataTypeDefinitionsFileReader() result = False try: definitions_reader.ReadFile(definitions_registry, path) result = True except KeyError as exception: logging.warning(( 'Unable to register data type definition in file: {0:s} with ' 'error: {1:s}').format(path, exception)) except errors.FormatError as exception: logging.warning( 'Unable to validate file: {0:s} with error: {1:s}'.format( path, exception)) return result
[ "def", "CheckFile", "(", "self", ",", "path", ")", ":", "print", "(", "'Checking: {0:s}'", ".", "format", "(", "path", ")", ")", "definitions_registry", "=", "registry", ".", "DataTypeDefinitionsRegistry", "(", ")", "definitions_reader", "=", "reader", ".", "YAMLDataTypeDefinitionsFileReader", "(", ")", "result", "=", "False", "try", ":", "definitions_reader", ".", "ReadFile", "(", "definitions_registry", ",", "path", ")", "result", "=", "True", "except", "KeyError", "as", "exception", ":", "logging", ".", "warning", "(", "(", "'Unable to register data type definition in file: {0:s} with '", "'error: {1:s}'", ")", ".", "format", "(", "path", ",", "exception", ")", ")", "except", "errors", ".", "FormatError", "as", "exception", ":", "logging", ".", "warning", "(", "'Unable to validate file: {0:s} with error: {1:s}'", ".", "format", "(", "path", ",", "exception", ")", ")", "return", "result" ]
Validates the definition in a file. Args: path (str): path of the definition file. Returns: bool: True if the file contains valid definitions.
[ "Validates", "the", "definition", "in", "a", "file", "." ]
train
https://github.com/libyal/dtfabric/blob/0d2b5719fa257f6e5c661a406737ebcf8c8db266/scripts/validate-definitions.py#L45-L74
dlanger/inlinestyler
inlinestyler/converter.py
Conversion.styleattribute
def styleattribute(self, element): """ returns css.CSSStyleDeclaration of inline styles, for html: @style """ css_text = element.get('style') if css_text: return cssutils.css.CSSStyleDeclaration(cssText=css_text) else: return None
python
def styleattribute(self, element): """ returns css.CSSStyleDeclaration of inline styles, for html: @style """ css_text = element.get('style') if css_text: return cssutils.css.CSSStyleDeclaration(cssText=css_text) else: return None
[ "def", "styleattribute", "(", "self", ",", "element", ")", ":", "css_text", "=", "element", ".", "get", "(", "'style'", ")", "if", "css_text", ":", "return", "cssutils", ".", "css", ".", "CSSStyleDeclaration", "(", "cssText", "=", "css_text", ")", "else", ":", "return", "None" ]
returns css.CSSStyleDeclaration of inline styles, for html: @style
[ "returns", "css", ".", "CSSStyleDeclaration", "of", "inline", "styles", "for", "html", ":" ]
train
https://github.com/dlanger/inlinestyler/blob/335c4fbab892f0ed67466a6beaea6a91f395ad12/inlinestyler/converter.py#L66-L74
skymill/automated-ebs-snapshots
automated_ebs_snapshots/config_file_parser.py
get_configuration
def get_configuration(filename): """ Read configuration file :type filename: str :param filename: Path to the configuration file """ logger.debug('Reading configuration from {}'.format(filename)) conf = SafeConfigParser() conf.read(filename) if not conf: logger.error('Configuration file {} not found'.format(filename)) sys.exit(1) if not conf.has_section('general'): logger.error('Missing [general] section in the configuration file') sys.exit(1) try: config = { 'access-key-id': conf.get('general', 'access-key-id'), 'secret-access-key': conf.get('general', 'secret-access-key'), 'region': conf.get('general', 'region'), } except NoOptionError as err: logger.error('Error in config file: {}'.format(err)) sys.exit(1) return config
python
def get_configuration(filename): """ Read configuration file :type filename: str :param filename: Path to the configuration file """ logger.debug('Reading configuration from {}'.format(filename)) conf = SafeConfigParser() conf.read(filename) if not conf: logger.error('Configuration file {} not found'.format(filename)) sys.exit(1) if not conf.has_section('general'): logger.error('Missing [general] section in the configuration file') sys.exit(1) try: config = { 'access-key-id': conf.get('general', 'access-key-id'), 'secret-access-key': conf.get('general', 'secret-access-key'), 'region': conf.get('general', 'region'), } except NoOptionError as err: logger.error('Error in config file: {}'.format(err)) sys.exit(1) return config
[ "def", "get_configuration", "(", "filename", ")", ":", "logger", ".", "debug", "(", "'Reading configuration from {}'", ".", "format", "(", "filename", ")", ")", "conf", "=", "SafeConfigParser", "(", ")", "conf", ".", "read", "(", "filename", ")", "if", "not", "conf", ":", "logger", ".", "error", "(", "'Configuration file {} not found'", ".", "format", "(", "filename", ")", ")", "sys", ".", "exit", "(", "1", ")", "if", "not", "conf", ".", "has_section", "(", "'general'", ")", ":", "logger", ".", "error", "(", "'Missing [general] section in the configuration file'", ")", "sys", ".", "exit", "(", "1", ")", "try", ":", "config", "=", "{", "'access-key-id'", ":", "conf", ".", "get", "(", "'general'", ",", "'access-key-id'", ")", ",", "'secret-access-key'", ":", "conf", ".", "get", "(", "'general'", ",", "'secret-access-key'", ")", ",", "'region'", ":", "conf", ".", "get", "(", "'general'", ",", "'region'", ")", ",", "}", "except", "NoOptionError", "as", "err", ":", "logger", ".", "error", "(", "'Error in config file: {}'", ".", "format", "(", "err", ")", ")", "sys", ".", "exit", "(", "1", ")", "return", "config" ]
Read configuration file :type filename: str :param filename: Path to the configuration file
[ "Read", "configuration", "file" ]
train
https://github.com/skymill/automated-ebs-snapshots/blob/9595bc49d458f6ffb93430722757d2284e878fab/automated_ebs_snapshots/config_file_parser.py#L9-L37
dlanger/inlinestyler
inlinestyler/utils.py
inline_css
def inline_css(html_message, encoding='unicode'): """ Inlines all CSS in an HTML string Given an HTML document with CSS declared in the HEAD, inlines it into the applicable elements. Used primarily in the preparation of styled emails. Arguments: html_message -- a string of HTML, including CSS """ document = etree.HTML(html_message) converter = Conversion() converter.perform(document, html_message, '', encoding=encoding) return converter.convertedHTML
python
def inline_css(html_message, encoding='unicode'): """ Inlines all CSS in an HTML string Given an HTML document with CSS declared in the HEAD, inlines it into the applicable elements. Used primarily in the preparation of styled emails. Arguments: html_message -- a string of HTML, including CSS """ document = etree.HTML(html_message) converter = Conversion() converter.perform(document, html_message, '', encoding=encoding) return converter.convertedHTML
[ "def", "inline_css", "(", "html_message", ",", "encoding", "=", "'unicode'", ")", ":", "document", "=", "etree", ".", "HTML", "(", "html_message", ")", "converter", "=", "Conversion", "(", ")", "converter", ".", "perform", "(", "document", ",", "html_message", ",", "''", ",", "encoding", "=", "encoding", ")", "return", "converter", ".", "convertedHTML" ]
Inlines all CSS in an HTML string Given an HTML document with CSS declared in the HEAD, inlines it into the applicable elements. Used primarily in the preparation of styled emails. Arguments: html_message -- a string of HTML, including CSS
[ "Inlines", "all", "CSS", "in", "an", "HTML", "string" ]
train
https://github.com/dlanger/inlinestyler/blob/335c4fbab892f0ed67466a6beaea6a91f395ad12/inlinestyler/utils.py#L4-L18
libyal/dtfabric
dtfabric/runtime/data_maps.py
StorageDataTypeMap._CheckByteStreamSize
def _CheckByteStreamSize(self, byte_stream, byte_offset, data_type_size): """Checks if the byte stream is large enough for the data type. Args: byte_stream (bytes): byte stream. byte_offset (int): offset into the byte stream where to start. data_type_size (int): data type size. Raises: ByteStreamTooSmallError: if the byte stream is too small. MappingError: if the size of the byte stream cannot be determined. """ try: byte_stream_size = len(byte_stream) except Exception as exception: raise errors.MappingError(exception) if byte_stream_size - byte_offset < data_type_size: raise errors.ByteStreamTooSmallError( 'Byte stream too small requested: {0:d} available: {1:d}'.format( data_type_size, byte_stream_size))
python
def _CheckByteStreamSize(self, byte_stream, byte_offset, data_type_size): """Checks if the byte stream is large enough for the data type. Args: byte_stream (bytes): byte stream. byte_offset (int): offset into the byte stream where to start. data_type_size (int): data type size. Raises: ByteStreamTooSmallError: if the byte stream is too small. MappingError: if the size of the byte stream cannot be determined. """ try: byte_stream_size = len(byte_stream) except Exception as exception: raise errors.MappingError(exception) if byte_stream_size - byte_offset < data_type_size: raise errors.ByteStreamTooSmallError( 'Byte stream too small requested: {0:d} available: {1:d}'.format( data_type_size, byte_stream_size))
[ "def", "_CheckByteStreamSize", "(", "self", ",", "byte_stream", ",", "byte_offset", ",", "data_type_size", ")", ":", "try", ":", "byte_stream_size", "=", "len", "(", "byte_stream", ")", "except", "Exception", "as", "exception", ":", "raise", "errors", ".", "MappingError", "(", "exception", ")", "if", "byte_stream_size", "-", "byte_offset", "<", "data_type_size", ":", "raise", "errors", ".", "ByteStreamTooSmallError", "(", "'Byte stream too small requested: {0:d} available: {1:d}'", ".", "format", "(", "data_type_size", ",", "byte_stream_size", ")", ")" ]
Checks if the byte stream is large enough for the data type. Args: byte_stream (bytes): byte stream. byte_offset (int): offset into the byte stream where to start. data_type_size (int): data type size. Raises: ByteStreamTooSmallError: if the byte stream is too small. MappingError: if the size of the byte stream cannot be determined.
[ "Checks", "if", "the", "byte", "stream", "is", "large", "enough", "for", "the", "data", "type", "." ]
train
https://github.com/libyal/dtfabric/blob/0d2b5719fa257f6e5c661a406737ebcf8c8db266/dtfabric/runtime/data_maps.py#L149-L170
libyal/dtfabric
dtfabric/runtime/data_maps.py
StorageDataTypeMap._GetByteStreamOperation
def _GetByteStreamOperation(self): """Retrieves the byte stream operation. Returns: ByteStreamOperation: byte stream operation or None if unable to determine. """ byte_order_string = self.GetStructByteOrderString() format_string = self.GetStructFormatString() # pylint: disable=assignment-from-none if not format_string: return None format_string = ''.join([byte_order_string, format_string]) return byte_operations.StructOperation(format_string)
python
def _GetByteStreamOperation(self): """Retrieves the byte stream operation. Returns: ByteStreamOperation: byte stream operation or None if unable to determine. """ byte_order_string = self.GetStructByteOrderString() format_string = self.GetStructFormatString() # pylint: disable=assignment-from-none if not format_string: return None format_string = ''.join([byte_order_string, format_string]) return byte_operations.StructOperation(format_string)
[ "def", "_GetByteStreamOperation", "(", "self", ")", ":", "byte_order_string", "=", "self", ".", "GetStructByteOrderString", "(", ")", "format_string", "=", "self", ".", "GetStructFormatString", "(", ")", "# pylint: disable=assignment-from-none", "if", "not", "format_string", ":", "return", "None", "format_string", "=", "''", ".", "join", "(", "[", "byte_order_string", ",", "format_string", "]", ")", "return", "byte_operations", ".", "StructOperation", "(", "format_string", ")" ]
Retrieves the byte stream operation. Returns: ByteStreamOperation: byte stream operation or None if unable to determine.
[ "Retrieves", "the", "byte", "stream", "operation", "." ]
train
https://github.com/libyal/dtfabric/blob/0d2b5719fa257f6e5c661a406737ebcf8c8db266/dtfabric/runtime/data_maps.py#L172-L184
libyal/dtfabric
dtfabric/runtime/data_maps.py
StorageDataTypeMap.GetStructByteOrderString
def GetStructByteOrderString(self): """Retrieves the Python struct format string. Returns: str: format string as used by Python struct or None if format string cannot be determined. """ if not self._data_type_definition: return None return self._BYTE_ORDER_STRINGS.get( self._data_type_definition.byte_order, None)
python
def GetStructByteOrderString(self): """Retrieves the Python struct format string. Returns: str: format string as used by Python struct or None if format string cannot be determined. """ if not self._data_type_definition: return None return self._BYTE_ORDER_STRINGS.get( self._data_type_definition.byte_order, None)
[ "def", "GetStructByteOrderString", "(", "self", ")", ":", "if", "not", "self", ".", "_data_type_definition", ":", "return", "None", "return", "self", ".", "_BYTE_ORDER_STRINGS", ".", "get", "(", "self", ".", "_data_type_definition", ".", "byte_order", ",", "None", ")" ]
Retrieves the Python struct format string. Returns: str: format string as used by Python struct or None if format string cannot be determined.
[ "Retrieves", "the", "Python", "struct", "format", "string", "." ]
train
https://github.com/libyal/dtfabric/blob/0d2b5719fa257f6e5c661a406737ebcf8c8db266/dtfabric/runtime/data_maps.py#L186-L197
libyal/dtfabric
dtfabric/runtime/data_maps.py
PrimitiveDataTypeMap.FoldByteStream
def FoldByteStream(self, mapped_value, **unused_kwargs): """Folds the data type into a byte stream. Args: mapped_value (object): mapped value. Returns: bytes: byte stream. Raises: FoldingError: if the data type definition cannot be folded into the byte stream. """ try: value = self.FoldValue(mapped_value) return self._operation.WriteTo(tuple([value])) except Exception as exception: error_string = ( 'Unable to write: {0:s} to byte stream with error: {1!s}').format( self._data_type_definition.name, exception) raise errors.FoldingError(error_string)
python
def FoldByteStream(self, mapped_value, **unused_kwargs): """Folds the data type into a byte stream. Args: mapped_value (object): mapped value. Returns: bytes: byte stream. Raises: FoldingError: if the data type definition cannot be folded into the byte stream. """ try: value = self.FoldValue(mapped_value) return self._operation.WriteTo(tuple([value])) except Exception as exception: error_string = ( 'Unable to write: {0:s} to byte stream with error: {1!s}').format( self._data_type_definition.name, exception) raise errors.FoldingError(error_string)
[ "def", "FoldByteStream", "(", "self", ",", "mapped_value", ",", "*", "*", "unused_kwargs", ")", ":", "try", ":", "value", "=", "self", ".", "FoldValue", "(", "mapped_value", ")", "return", "self", ".", "_operation", ".", "WriteTo", "(", "tuple", "(", "[", "value", "]", ")", ")", "except", "Exception", "as", "exception", ":", "error_string", "=", "(", "'Unable to write: {0:s} to byte stream with error: {1!s}'", ")", ".", "format", "(", "self", ".", "_data_type_definition", ".", "name", ",", "exception", ")", "raise", "errors", ".", "FoldingError", "(", "error_string", ")" ]
Folds the data type into a byte stream. Args: mapped_value (object): mapped value. Returns: bytes: byte stream. Raises: FoldingError: if the data type definition cannot be folded into the byte stream.
[ "Folds", "the", "data", "type", "into", "a", "byte", "stream", "." ]
train
https://github.com/libyal/dtfabric/blob/0d2b5719fa257f6e5c661a406737ebcf8c8db266/dtfabric/runtime/data_maps.py#L253-L274
libyal/dtfabric
dtfabric/runtime/data_maps.py
PrimitiveDataTypeMap.MapByteStream
def MapByteStream( self, byte_stream, byte_offset=0, context=None, **unused_kwargs): """Maps the data type on a byte stream. Args: byte_stream (bytes): byte stream. byte_offset (Optional[int]): offset into the byte stream where to start. context (Optional[DataTypeMapContext]): data type map context. Returns: object: mapped value. Raises: MappingError: if the data type definition cannot be mapped on the byte stream. """ data_type_size = self._data_type_definition.GetByteSize() self._CheckByteStreamSize(byte_stream, byte_offset, data_type_size) try: struct_tuple = self._operation.ReadFrom(byte_stream[byte_offset:]) mapped_value = self.MapValue(*struct_tuple) except Exception as exception: error_string = ( 'Unable to read: {0:s} from byte stream at offset: {1:d} ' 'with error: {2!s}').format( self._data_type_definition.name, byte_offset, exception) raise errors.MappingError(error_string) if context: context.byte_size = data_type_size return mapped_value
python
def MapByteStream( self, byte_stream, byte_offset=0, context=None, **unused_kwargs): """Maps the data type on a byte stream. Args: byte_stream (bytes): byte stream. byte_offset (Optional[int]): offset into the byte stream where to start. context (Optional[DataTypeMapContext]): data type map context. Returns: object: mapped value. Raises: MappingError: if the data type definition cannot be mapped on the byte stream. """ data_type_size = self._data_type_definition.GetByteSize() self._CheckByteStreamSize(byte_stream, byte_offset, data_type_size) try: struct_tuple = self._operation.ReadFrom(byte_stream[byte_offset:]) mapped_value = self.MapValue(*struct_tuple) except Exception as exception: error_string = ( 'Unable to read: {0:s} from byte stream at offset: {1:d} ' 'with error: {2!s}').format( self._data_type_definition.name, byte_offset, exception) raise errors.MappingError(error_string) if context: context.byte_size = data_type_size return mapped_value
[ "def", "MapByteStream", "(", "self", ",", "byte_stream", ",", "byte_offset", "=", "0", ",", "context", "=", "None", ",", "*", "*", "unused_kwargs", ")", ":", "data_type_size", "=", "self", ".", "_data_type_definition", ".", "GetByteSize", "(", ")", "self", ".", "_CheckByteStreamSize", "(", "byte_stream", ",", "byte_offset", ",", "data_type_size", ")", "try", ":", "struct_tuple", "=", "self", ".", "_operation", ".", "ReadFrom", "(", "byte_stream", "[", "byte_offset", ":", "]", ")", "mapped_value", "=", "self", ".", "MapValue", "(", "*", "struct_tuple", ")", "except", "Exception", "as", "exception", ":", "error_string", "=", "(", "'Unable to read: {0:s} from byte stream at offset: {1:d} '", "'with error: {2!s}'", ")", ".", "format", "(", "self", ".", "_data_type_definition", ".", "name", ",", "byte_offset", ",", "exception", ")", "raise", "errors", ".", "MappingError", "(", "error_string", ")", "if", "context", ":", "context", ".", "byte_size", "=", "data_type_size", "return", "mapped_value" ]
Maps the data type on a byte stream. Args: byte_stream (bytes): byte stream. byte_offset (Optional[int]): offset into the byte stream where to start. context (Optional[DataTypeMapContext]): data type map context. Returns: object: mapped value. Raises: MappingError: if the data type definition cannot be mapped on the byte stream.
[ "Maps", "the", "data", "type", "on", "a", "byte", "stream", "." ]
train
https://github.com/libyal/dtfabric/blob/0d2b5719fa257f6e5c661a406737ebcf8c8db266/dtfabric/runtime/data_maps.py#L290-L323
libyal/dtfabric
dtfabric/runtime/data_maps.py
BooleanMap.FoldValue
def FoldValue(self, value): """Folds the data type into a value. Args: value (object): value. Returns: object: folded value. Raises: ValueError: if the data type definition cannot be folded into the value. """ if value is False and self._data_type_definition.false_value is not None: return self._data_type_definition.false_value if value is True and self._data_type_definition.true_value is not None: return self._data_type_definition.true_value raise ValueError('No matching True and False values')
python
def FoldValue(self, value): """Folds the data type into a value. Args: value (object): value. Returns: object: folded value. Raises: ValueError: if the data type definition cannot be folded into the value. """ if value is False and self._data_type_definition.false_value is not None: return self._data_type_definition.false_value if value is True and self._data_type_definition.true_value is not None: return self._data_type_definition.true_value raise ValueError('No matching True and False values')
[ "def", "FoldValue", "(", "self", ",", "value", ")", ":", "if", "value", "is", "False", "and", "self", ".", "_data_type_definition", ".", "false_value", "is", "not", "None", ":", "return", "self", ".", "_data_type_definition", ".", "false_value", "if", "value", "is", "True", "and", "self", ".", "_data_type_definition", ".", "true_value", "is", "not", "None", ":", "return", "self", ".", "_data_type_definition", ".", "true_value", "raise", "ValueError", "(", "'No matching True and False values'", ")" ]
Folds the data type into a value. Args: value (object): value. Returns: object: folded value. Raises: ValueError: if the data type definition cannot be folded into the value.
[ "Folds", "the", "data", "type", "into", "a", "value", "." ]
train
https://github.com/libyal/dtfabric/blob/0d2b5719fa257f6e5c661a406737ebcf8c8db266/dtfabric/runtime/data_maps.py#L378-L396
libyal/dtfabric
dtfabric/runtime/data_maps.py
IntegerMap.GetStructFormatString
def GetStructFormatString(self): """Retrieves the Python struct format string. Returns: str: format string as used by Python struct or None if format string cannot be determined. """ if self._data_type_definition.format == definitions.FORMAT_UNSIGNED: return self._FORMAT_STRINGS_UNSIGNED.get( self._data_type_definition.size, None) return self._FORMAT_STRINGS_SIGNED.get( self._data_type_definition.size, None)
python
def GetStructFormatString(self): """Retrieves the Python struct format string. Returns: str: format string as used by Python struct or None if format string cannot be determined. """ if self._data_type_definition.format == definitions.FORMAT_UNSIGNED: return self._FORMAT_STRINGS_UNSIGNED.get( self._data_type_definition.size, None) return self._FORMAT_STRINGS_SIGNED.get( self._data_type_definition.size, None)
[ "def", "GetStructFormatString", "(", "self", ")", ":", "if", "self", ".", "_data_type_definition", ".", "format", "==", "definitions", ".", "FORMAT_UNSIGNED", ":", "return", "self", ".", "_FORMAT_STRINGS_UNSIGNED", ".", "get", "(", "self", ".", "_data_type_definition", ".", "size", ",", "None", ")", "return", "self", ".", "_FORMAT_STRINGS_SIGNED", ".", "get", "(", "self", ".", "_data_type_definition", ".", "size", ",", "None", ")" ]
Retrieves the Python struct format string. Returns: str: format string as used by Python struct or None if format string cannot be determined.
[ "Retrieves", "the", "Python", "struct", "format", "string", "." ]
train
https://github.com/libyal/dtfabric/blob/0d2b5719fa257f6e5c661a406737ebcf8c8db266/dtfabric/runtime/data_maps.py#L515-L527
libyal/dtfabric
dtfabric/runtime/data_maps.py
UUIDMap.FoldByteStream
def FoldByteStream(self, mapped_value, **unused_kwargs): """Folds the data type into a byte stream. Args: mapped_value (object): mapped value. Returns: bytes: byte stream. Raises: FoldingError: if the data type definition cannot be folded into the byte stream. """ value = None try: if self._byte_order == definitions.BYTE_ORDER_BIG_ENDIAN: value = mapped_value.bytes elif self._byte_order == definitions.BYTE_ORDER_LITTLE_ENDIAN: value = mapped_value.bytes_le except Exception as exception: error_string = ( 'Unable to write: {0:s} to byte stream with error: {1!s}').format( self._data_type_definition.name, exception) raise errors.FoldingError(error_string) return value
python
def FoldByteStream(self, mapped_value, **unused_kwargs): """Folds the data type into a byte stream. Args: mapped_value (object): mapped value. Returns: bytes: byte stream. Raises: FoldingError: if the data type definition cannot be folded into the byte stream. """ value = None try: if self._byte_order == definitions.BYTE_ORDER_BIG_ENDIAN: value = mapped_value.bytes elif self._byte_order == definitions.BYTE_ORDER_LITTLE_ENDIAN: value = mapped_value.bytes_le except Exception as exception: error_string = ( 'Unable to write: {0:s} to byte stream with error: {1!s}').format( self._data_type_definition.name, exception) raise errors.FoldingError(error_string) return value
[ "def", "FoldByteStream", "(", "self", ",", "mapped_value", ",", "*", "*", "unused_kwargs", ")", ":", "value", "=", "None", "try", ":", "if", "self", ".", "_byte_order", "==", "definitions", ".", "BYTE_ORDER_BIG_ENDIAN", ":", "value", "=", "mapped_value", ".", "bytes", "elif", "self", ".", "_byte_order", "==", "definitions", ".", "BYTE_ORDER_LITTLE_ENDIAN", ":", "value", "=", "mapped_value", ".", "bytes_le", "except", "Exception", "as", "exception", ":", "error_string", "=", "(", "'Unable to write: {0:s} to byte stream with error: {1!s}'", ")", ".", "format", "(", "self", ".", "_data_type_definition", ".", "name", ",", "exception", ")", "raise", "errors", ".", "FoldingError", "(", "error_string", ")", "return", "value" ]
Folds the data type into a byte stream. Args: mapped_value (object): mapped value. Returns: bytes: byte stream. Raises: FoldingError: if the data type definition cannot be folded into the byte stream.
[ "Folds", "the", "data", "type", "into", "a", "byte", "stream", "." ]
train
https://github.com/libyal/dtfabric/blob/0d2b5719fa257f6e5c661a406737ebcf8c8db266/dtfabric/runtime/data_maps.py#L544-L571
libyal/dtfabric
dtfabric/runtime/data_maps.py
UUIDMap.MapByteStream
def MapByteStream( self, byte_stream, byte_offset=0, context=None, **unused_kwargs): """Maps the data type on a byte stream. Args: byte_stream (bytes): byte stream. byte_offset (Optional[int]): offset into the byte stream where to start. context (Optional[DataTypeMapContext]): data type map context. Returns: uuid.UUID: mapped value. Raises: MappingError: if the data type definition cannot be mapped on the byte stream. """ data_type_size = self._data_type_definition.GetByteSize() self._CheckByteStreamSize(byte_stream, byte_offset, data_type_size) try: if self._byte_order == definitions.BYTE_ORDER_BIG_ENDIAN: mapped_value = uuid.UUID( bytes=byte_stream[byte_offset:byte_offset + 16]) elif self._byte_order == definitions.BYTE_ORDER_LITTLE_ENDIAN: mapped_value = uuid.UUID( bytes_le=byte_stream[byte_offset:byte_offset + 16]) except Exception as exception: error_string = ( 'Unable to read: {0:s} from byte stream at offset: {1:d} ' 'with error: {2!s}').format( self._data_type_definition.name, byte_offset, exception) raise errors.MappingError(error_string) if context: context.byte_size = data_type_size return mapped_value
python
def MapByteStream( self, byte_stream, byte_offset=0, context=None, **unused_kwargs): """Maps the data type on a byte stream. Args: byte_stream (bytes): byte stream. byte_offset (Optional[int]): offset into the byte stream where to start. context (Optional[DataTypeMapContext]): data type map context. Returns: uuid.UUID: mapped value. Raises: MappingError: if the data type definition cannot be mapped on the byte stream. """ data_type_size = self._data_type_definition.GetByteSize() self._CheckByteStreamSize(byte_stream, byte_offset, data_type_size) try: if self._byte_order == definitions.BYTE_ORDER_BIG_ENDIAN: mapped_value = uuid.UUID( bytes=byte_stream[byte_offset:byte_offset + 16]) elif self._byte_order == definitions.BYTE_ORDER_LITTLE_ENDIAN: mapped_value = uuid.UUID( bytes_le=byte_stream[byte_offset:byte_offset + 16]) except Exception as exception: error_string = ( 'Unable to read: {0:s} from byte stream at offset: {1:d} ' 'with error: {2!s}').format( self._data_type_definition.name, byte_offset, exception) raise errors.MappingError(error_string) if context: context.byte_size = data_type_size return mapped_value
[ "def", "MapByteStream", "(", "self", ",", "byte_stream", ",", "byte_offset", "=", "0", ",", "context", "=", "None", ",", "*", "*", "unused_kwargs", ")", ":", "data_type_size", "=", "self", ".", "_data_type_definition", ".", "GetByteSize", "(", ")", "self", ".", "_CheckByteStreamSize", "(", "byte_stream", ",", "byte_offset", ",", "data_type_size", ")", "try", ":", "if", "self", ".", "_byte_order", "==", "definitions", ".", "BYTE_ORDER_BIG_ENDIAN", ":", "mapped_value", "=", "uuid", ".", "UUID", "(", "bytes", "=", "byte_stream", "[", "byte_offset", ":", "byte_offset", "+", "16", "]", ")", "elif", "self", ".", "_byte_order", "==", "definitions", ".", "BYTE_ORDER_LITTLE_ENDIAN", ":", "mapped_value", "=", "uuid", ".", "UUID", "(", "bytes_le", "=", "byte_stream", "[", "byte_offset", ":", "byte_offset", "+", "16", "]", ")", "except", "Exception", "as", "exception", ":", "error_string", "=", "(", "'Unable to read: {0:s} from byte stream at offset: {1:d} '", "'with error: {2!s}'", ")", ".", "format", "(", "self", ".", "_data_type_definition", ".", "name", ",", "byte_offset", ",", "exception", ")", "raise", "errors", ".", "MappingError", "(", "error_string", ")", "if", "context", ":", "context", ".", "byte_size", "=", "data_type_size", "return", "mapped_value" ]
Maps the data type on a byte stream. Args: byte_stream (bytes): byte stream. byte_offset (Optional[int]): offset into the byte stream where to start. context (Optional[DataTypeMapContext]): data type map context. Returns: uuid.UUID: mapped value. Raises: MappingError: if the data type definition cannot be mapped on the byte stream.
[ "Maps", "the", "data", "type", "on", "a", "byte", "stream", "." ]
train
https://github.com/libyal/dtfabric/blob/0d2b5719fa257f6e5c661a406737ebcf8c8db266/dtfabric/runtime/data_maps.py#L573-L610
libyal/dtfabric
dtfabric/runtime/data_maps.py
ElementSequenceDataTypeMap._CalculateElementsDataSize
def _CalculateElementsDataSize(self, context): """Calculates the elements data size. Args: context (Optional[DataTypeMapContext]): data type map context, used to determine the size hint. Returns: int: the elements data size or None if not available. """ elements_data_size = None if self._HasElementsDataSize(): elements_data_size = self._EvaluateElementsDataSize(context) elif self._HasNumberOfElements(): element_byte_size = self._element_data_type_definition.GetByteSize() if element_byte_size is not None: number_of_elements = self._EvaluateNumberOfElements(context) elements_data_size = number_of_elements * element_byte_size return elements_data_size
python
def _CalculateElementsDataSize(self, context): """Calculates the elements data size. Args: context (Optional[DataTypeMapContext]): data type map context, used to determine the size hint. Returns: int: the elements data size or None if not available. """ elements_data_size = None if self._HasElementsDataSize(): elements_data_size = self._EvaluateElementsDataSize(context) elif self._HasNumberOfElements(): element_byte_size = self._element_data_type_definition.GetByteSize() if element_byte_size is not None: number_of_elements = self._EvaluateNumberOfElements(context) elements_data_size = number_of_elements * element_byte_size return elements_data_size
[ "def", "_CalculateElementsDataSize", "(", "self", ",", "context", ")", ":", "elements_data_size", "=", "None", "if", "self", ".", "_HasElementsDataSize", "(", ")", ":", "elements_data_size", "=", "self", ".", "_EvaluateElementsDataSize", "(", "context", ")", "elif", "self", ".", "_HasNumberOfElements", "(", ")", ":", "element_byte_size", "=", "self", ".", "_element_data_type_definition", ".", "GetByteSize", "(", ")", "if", "element_byte_size", "is", "not", "None", ":", "number_of_elements", "=", "self", ".", "_EvaluateNumberOfElements", "(", "context", ")", "elements_data_size", "=", "number_of_elements", "*", "element_byte_size", "return", "elements_data_size" ]
Calculates the elements data size. Args: context (Optional[DataTypeMapContext]): data type map context, used to determine the size hint. Returns: int: the elements data size or None if not available.
[ "Calculates", "the", "elements", "data", "size", "." ]
train
https://github.com/libyal/dtfabric/blob/0d2b5719fa257f6e5c661a406737ebcf8c8db266/dtfabric/runtime/data_maps.py#L636-L657
libyal/dtfabric
dtfabric/runtime/data_maps.py
ElementSequenceDataTypeMap._EvaluateElementsDataSize
def _EvaluateElementsDataSize(self, context): """Evaluates elements data size. Args: context (DataTypeMapContext): data type map context. Returns: int: elements data size. Raises: MappingError: if the elements data size cannot be determined. """ elements_data_size = None if self._data_type_definition.elements_data_size: elements_data_size = self._data_type_definition.elements_data_size elif self._data_type_definition.elements_data_size_expression: expression = self._data_type_definition.elements_data_size_expression namespace = {} if context and context.values: namespace.update(context.values) # Make sure __builtins__ contains an empty dictionary. namespace['__builtins__'] = {} try: elements_data_size = eval(expression, namespace) # pylint: disable=eval-used except Exception as exception: raise errors.MappingError( 'Unable to determine elements data size with error: {0!s}'.format( exception)) if elements_data_size is None or elements_data_size < 0: raise errors.MappingError( 'Invalid elements data size: {0!s}'.format(elements_data_size)) return elements_data_size
python
def _EvaluateElementsDataSize(self, context): """Evaluates elements data size. Args: context (DataTypeMapContext): data type map context. Returns: int: elements data size. Raises: MappingError: if the elements data size cannot be determined. """ elements_data_size = None if self._data_type_definition.elements_data_size: elements_data_size = self._data_type_definition.elements_data_size elif self._data_type_definition.elements_data_size_expression: expression = self._data_type_definition.elements_data_size_expression namespace = {} if context and context.values: namespace.update(context.values) # Make sure __builtins__ contains an empty dictionary. namespace['__builtins__'] = {} try: elements_data_size = eval(expression, namespace) # pylint: disable=eval-used except Exception as exception: raise errors.MappingError( 'Unable to determine elements data size with error: {0!s}'.format( exception)) if elements_data_size is None or elements_data_size < 0: raise errors.MappingError( 'Invalid elements data size: {0!s}'.format(elements_data_size)) return elements_data_size
[ "def", "_EvaluateElementsDataSize", "(", "self", ",", "context", ")", ":", "elements_data_size", "=", "None", "if", "self", ".", "_data_type_definition", ".", "elements_data_size", ":", "elements_data_size", "=", "self", ".", "_data_type_definition", ".", "elements_data_size", "elif", "self", ".", "_data_type_definition", ".", "elements_data_size_expression", ":", "expression", "=", "self", ".", "_data_type_definition", ".", "elements_data_size_expression", "namespace", "=", "{", "}", "if", "context", "and", "context", ".", "values", ":", "namespace", ".", "update", "(", "context", ".", "values", ")", "# Make sure __builtins__ contains an empty dictionary.", "namespace", "[", "'__builtins__'", "]", "=", "{", "}", "try", ":", "elements_data_size", "=", "eval", "(", "expression", ",", "namespace", ")", "# pylint: disable=eval-used", "except", "Exception", "as", "exception", ":", "raise", "errors", ".", "MappingError", "(", "'Unable to determine elements data size with error: {0!s}'", ".", "format", "(", "exception", ")", ")", "if", "elements_data_size", "is", "None", "or", "elements_data_size", "<", "0", ":", "raise", "errors", ".", "MappingError", "(", "'Invalid elements data size: {0!s}'", ".", "format", "(", "elements_data_size", ")", ")", "return", "elements_data_size" ]
Evaluates elements data size. Args: context (DataTypeMapContext): data type map context. Returns: int: elements data size. Raises: MappingError: if the elements data size cannot be determined.
[ "Evaluates", "elements", "data", "size", "." ]
train
https://github.com/libyal/dtfabric/blob/0d2b5719fa257f6e5c661a406737ebcf8c8db266/dtfabric/runtime/data_maps.py#L659-L694
libyal/dtfabric
dtfabric/runtime/data_maps.py
ElementSequenceDataTypeMap._EvaluateNumberOfElements
def _EvaluateNumberOfElements(self, context): """Evaluates number of elements. Args: context (DataTypeMapContext): data type map context. Returns: int: number of elements. Raises: MappingError: if the number of elements cannot be determined. """ number_of_elements = None if self._data_type_definition.number_of_elements: number_of_elements = self._data_type_definition.number_of_elements elif self._data_type_definition.number_of_elements_expression: expression = self._data_type_definition.number_of_elements_expression namespace = {} if context and context.values: namespace.update(context.values) # Make sure __builtins__ contains an empty dictionary. namespace['__builtins__'] = {} try: number_of_elements = eval(expression, namespace) # pylint: disable=eval-used except Exception as exception: raise errors.MappingError( 'Unable to determine number of elements with error: {0!s}'.format( exception)) if number_of_elements is None or number_of_elements < 0: raise errors.MappingError( 'Invalid number of elements: {0!s}'.format(number_of_elements)) return number_of_elements
python
def _EvaluateNumberOfElements(self, context): """Evaluates number of elements. Args: context (DataTypeMapContext): data type map context. Returns: int: number of elements. Raises: MappingError: if the number of elements cannot be determined. """ number_of_elements = None if self._data_type_definition.number_of_elements: number_of_elements = self._data_type_definition.number_of_elements elif self._data_type_definition.number_of_elements_expression: expression = self._data_type_definition.number_of_elements_expression namespace = {} if context and context.values: namespace.update(context.values) # Make sure __builtins__ contains an empty dictionary. namespace['__builtins__'] = {} try: number_of_elements = eval(expression, namespace) # pylint: disable=eval-used except Exception as exception: raise errors.MappingError( 'Unable to determine number of elements with error: {0!s}'.format( exception)) if number_of_elements is None or number_of_elements < 0: raise errors.MappingError( 'Invalid number of elements: {0!s}'.format(number_of_elements)) return number_of_elements
[ "def", "_EvaluateNumberOfElements", "(", "self", ",", "context", ")", ":", "number_of_elements", "=", "None", "if", "self", ".", "_data_type_definition", ".", "number_of_elements", ":", "number_of_elements", "=", "self", ".", "_data_type_definition", ".", "number_of_elements", "elif", "self", ".", "_data_type_definition", ".", "number_of_elements_expression", ":", "expression", "=", "self", ".", "_data_type_definition", ".", "number_of_elements_expression", "namespace", "=", "{", "}", "if", "context", "and", "context", ".", "values", ":", "namespace", ".", "update", "(", "context", ".", "values", ")", "# Make sure __builtins__ contains an empty dictionary.", "namespace", "[", "'__builtins__'", "]", "=", "{", "}", "try", ":", "number_of_elements", "=", "eval", "(", "expression", ",", "namespace", ")", "# pylint: disable=eval-used", "except", "Exception", "as", "exception", ":", "raise", "errors", ".", "MappingError", "(", "'Unable to determine number of elements with error: {0!s}'", ".", "format", "(", "exception", ")", ")", "if", "number_of_elements", "is", "None", "or", "number_of_elements", "<", "0", ":", "raise", "errors", ".", "MappingError", "(", "'Invalid number of elements: {0!s}'", ".", "format", "(", "number_of_elements", ")", ")", "return", "number_of_elements" ]
Evaluates number of elements. Args: context (DataTypeMapContext): data type map context. Returns: int: number of elements. Raises: MappingError: if the number of elements cannot be determined.
[ "Evaluates", "number", "of", "elements", "." ]
train
https://github.com/libyal/dtfabric/blob/0d2b5719fa257f6e5c661a406737ebcf8c8db266/dtfabric/runtime/data_maps.py#L696-L731
libyal/dtfabric
dtfabric/runtime/data_maps.py
ElementSequenceDataTypeMap._GetElementDataTypeDefinition
def _GetElementDataTypeDefinition(self, data_type_definition): """Retrieves the element data type definition. Args: data_type_definition (DataTypeDefinition): data type definition. Returns: DataTypeDefinition: element data type definition. Raises: FormatError: if the element data type cannot be determined from the data type definition. """ if not data_type_definition: raise errors.FormatError('Missing data type definition') element_data_type_definition = getattr( data_type_definition, 'element_data_type_definition', None) if not element_data_type_definition: raise errors.FormatError( 'Invalid data type definition missing element') return element_data_type_definition
python
def _GetElementDataTypeDefinition(self, data_type_definition): """Retrieves the element data type definition. Args: data_type_definition (DataTypeDefinition): data type definition. Returns: DataTypeDefinition: element data type definition. Raises: FormatError: if the element data type cannot be determined from the data type definition. """ if not data_type_definition: raise errors.FormatError('Missing data type definition') element_data_type_definition = getattr( data_type_definition, 'element_data_type_definition', None) if not element_data_type_definition: raise errors.FormatError( 'Invalid data type definition missing element') return element_data_type_definition
[ "def", "_GetElementDataTypeDefinition", "(", "self", ",", "data_type_definition", ")", ":", "if", "not", "data_type_definition", ":", "raise", "errors", ".", "FormatError", "(", "'Missing data type definition'", ")", "element_data_type_definition", "=", "getattr", "(", "data_type_definition", ",", "'element_data_type_definition'", ",", "None", ")", "if", "not", "element_data_type_definition", ":", "raise", "errors", ".", "FormatError", "(", "'Invalid data type definition missing element'", ")", "return", "element_data_type_definition" ]
Retrieves the element data type definition. Args: data_type_definition (DataTypeDefinition): data type definition. Returns: DataTypeDefinition: element data type definition. Raises: FormatError: if the element data type cannot be determined from the data type definition.
[ "Retrieves", "the", "element", "data", "type", "definition", "." ]
train
https://github.com/libyal/dtfabric/blob/0d2b5719fa257f6e5c661a406737ebcf8c8db266/dtfabric/runtime/data_maps.py#L733-L755
libyal/dtfabric
dtfabric/runtime/data_maps.py
ElementSequenceDataTypeMap.GetSizeHint
def GetSizeHint(self, context=None, **unused_kwargs): """Retrieves a hint about the size. Args: context (Optional[DataTypeMapContext]): data type map context, used to determine the size hint. Returns: int: hint of the number of bytes needed from the byte stream or None. """ context_state = getattr(context, 'state', {}) elements_data_size = self.GetByteSize() if elements_data_size: return elements_data_size try: elements_data_size = self._CalculateElementsDataSize(context) except errors.MappingError: pass if elements_data_size is None and self._HasElementsTerminator(): size_hints = context_state.get('size_hints', {}) size_hint = size_hints.get(self._data_type_definition.name, None) elements_data_size = 0 if size_hint: elements_data_size = size_hint.byte_size if not size_hint or not size_hint.is_complete: elements_data_size += self._element_data_type_definition.GetByteSize() return elements_data_size
python
def GetSizeHint(self, context=None, **unused_kwargs): """Retrieves a hint about the size. Args: context (Optional[DataTypeMapContext]): data type map context, used to determine the size hint. Returns: int: hint of the number of bytes needed from the byte stream or None. """ context_state = getattr(context, 'state', {}) elements_data_size = self.GetByteSize() if elements_data_size: return elements_data_size try: elements_data_size = self._CalculateElementsDataSize(context) except errors.MappingError: pass if elements_data_size is None and self._HasElementsTerminator(): size_hints = context_state.get('size_hints', {}) size_hint = size_hints.get(self._data_type_definition.name, None) elements_data_size = 0 if size_hint: elements_data_size = size_hint.byte_size if not size_hint or not size_hint.is_complete: elements_data_size += self._element_data_type_definition.GetByteSize() return elements_data_size
[ "def", "GetSizeHint", "(", "self", ",", "context", "=", "None", ",", "*", "*", "unused_kwargs", ")", ":", "context_state", "=", "getattr", "(", "context", ",", "'state'", ",", "{", "}", ")", "elements_data_size", "=", "self", ".", "GetByteSize", "(", ")", "if", "elements_data_size", ":", "return", "elements_data_size", "try", ":", "elements_data_size", "=", "self", ".", "_CalculateElementsDataSize", "(", "context", ")", "except", "errors", ".", "MappingError", ":", "pass", "if", "elements_data_size", "is", "None", "and", "self", ".", "_HasElementsTerminator", "(", ")", ":", "size_hints", "=", "context_state", ".", "get", "(", "'size_hints'", ",", "{", "}", ")", "size_hint", "=", "size_hints", ".", "get", "(", "self", ".", "_data_type_definition", ".", "name", ",", "None", ")", "elements_data_size", "=", "0", "if", "size_hint", ":", "elements_data_size", "=", "size_hint", ".", "byte_size", "if", "not", "size_hint", "or", "not", "size_hint", ".", "is_complete", ":", "elements_data_size", "+=", "self", ".", "_element_data_type_definition", ".", "GetByteSize", "(", ")", "return", "elements_data_size" ]
Retrieves a hint about the size. Args: context (Optional[DataTypeMapContext]): data type map context, used to determine the size hint. Returns: int: hint of the number of bytes needed from the byte stream or None.
[ "Retrieves", "a", "hint", "about", "the", "size", "." ]
train
https://github.com/libyal/dtfabric/blob/0d2b5719fa257f6e5c661a406737ebcf8c8db266/dtfabric/runtime/data_maps.py#L800-L833
libyal/dtfabric
dtfabric/runtime/data_maps.py
SequenceMap._CompositeMapByteStream
def _CompositeMapByteStream( self, byte_stream, byte_offset=0, context=None, **unused_kwargs): """Maps a sequence of composite data types on a byte stream. Args: byte_stream (bytes): byte stream. byte_offset (Optional[int]): offset into the byte stream where to start. context (Optional[DataTypeMapContext]): data type map context. Returns: tuple[object, ...]: mapped values. Raises: ByteStreamTooSmallError: if the byte stream is too small. MappingError: if the data type definition cannot be mapped on the byte stream. """ elements_data_size = None elements_terminator = None number_of_elements = None if self._HasElementsDataSize(): elements_data_size = self._EvaluateElementsDataSize(context) element_byte_size = self._element_data_type_definition.GetByteSize() if element_byte_size is not None: number_of_elements, _ = divmod(elements_data_size, element_byte_size) else: elements_terminator = ( self._element_data_type_definition.elements_terminator) elif self._HasElementsTerminator(): elements_terminator = self._data_type_definition.elements_terminator elif self._HasNumberOfElements(): number_of_elements = self._EvaluateNumberOfElements(context) if elements_terminator is None and number_of_elements is None: raise errors.MappingError( 'Unable to determine element terminator or number of elements') context_state = getattr(context, 'state', {}) elements_data_offset = context_state.get('elements_data_offset', 0) element_index = context_state.get('element_index', 0) element_value = None mapped_values = context_state.get('mapped_values', []) size_hints = context_state.get('size_hints', {}) subcontext = context_state.get('context', None) if not subcontext: subcontext = DataTypeMapContext() try: while byte_stream[byte_offset:]: if (number_of_elements is not None and element_index == number_of_elements): break if (elements_data_size is not None and elements_data_offset >= elements_data_size): break element_value = self._element_data_type_map.MapByteStream( byte_stream, 
byte_offset=byte_offset, context=subcontext) byte_offset += subcontext.byte_size elements_data_offset += subcontext.byte_size element_index += 1 mapped_values.append(element_value) if (elements_terminator is not None and element_value == elements_terminator): break except errors.ByteStreamTooSmallError as exception: context_state['context'] = subcontext context_state['elements_data_offset'] = elements_data_offset context_state['element_index'] = element_index context_state['mapped_values'] = mapped_values raise errors.ByteStreamTooSmallError(exception) except Exception as exception: raise errors.MappingError(exception) if number_of_elements is not None and element_index != number_of_elements: context_state['context'] = subcontext context_state['elements_data_offset'] = elements_data_offset context_state['element_index'] = element_index context_state['mapped_values'] = mapped_values error_string = ( 'Unable to read: {0:s} from byte stream at offset: {1:d} ' 'with error: missing element: {2:d}').format( self._data_type_definition.name, byte_offset, element_index - 1) raise errors.ByteStreamTooSmallError(error_string) if (elements_terminator is not None and element_value != elements_terminator and ( elements_data_size is None or elements_data_offset < elements_data_size)): byte_stream_size = len(byte_stream) size_hints[self._data_type_definition.name] = DataTypeMapSizeHint( byte_stream_size - byte_offset) context_state['context'] = subcontext context_state['elements_data_offset'] = elements_data_offset context_state['element_index'] = element_index context_state['mapped_values'] = mapped_values context_state['size_hints'] = size_hints error_string = ( 'Unable to read: {0:s} from byte stream at offset: {1:d} ' 'with error: unable to find elements terminator').format( self._data_type_definition.name, byte_offset) raise errors.ByteStreamTooSmallError(error_string) if context: context.byte_size = elements_data_offset context.state = {} return tuple(mapped_values)
python
def _CompositeMapByteStream( self, byte_stream, byte_offset=0, context=None, **unused_kwargs): """Maps a sequence of composite data types on a byte stream. Args: byte_stream (bytes): byte stream. byte_offset (Optional[int]): offset into the byte stream where to start. context (Optional[DataTypeMapContext]): data type map context. Returns: tuple[object, ...]: mapped values. Raises: ByteStreamTooSmallError: if the byte stream is too small. MappingError: if the data type definition cannot be mapped on the byte stream. """ elements_data_size = None elements_terminator = None number_of_elements = None if self._HasElementsDataSize(): elements_data_size = self._EvaluateElementsDataSize(context) element_byte_size = self._element_data_type_definition.GetByteSize() if element_byte_size is not None: number_of_elements, _ = divmod(elements_data_size, element_byte_size) else: elements_terminator = ( self._element_data_type_definition.elements_terminator) elif self._HasElementsTerminator(): elements_terminator = self._data_type_definition.elements_terminator elif self._HasNumberOfElements(): number_of_elements = self._EvaluateNumberOfElements(context) if elements_terminator is None and number_of_elements is None: raise errors.MappingError( 'Unable to determine element terminator or number of elements') context_state = getattr(context, 'state', {}) elements_data_offset = context_state.get('elements_data_offset', 0) element_index = context_state.get('element_index', 0) element_value = None mapped_values = context_state.get('mapped_values', []) size_hints = context_state.get('size_hints', {}) subcontext = context_state.get('context', None) if not subcontext: subcontext = DataTypeMapContext() try: while byte_stream[byte_offset:]: if (number_of_elements is not None and element_index == number_of_elements): break if (elements_data_size is not None and elements_data_offset >= elements_data_size): break element_value = self._element_data_type_map.MapByteStream( byte_stream, 
byte_offset=byte_offset, context=subcontext) byte_offset += subcontext.byte_size elements_data_offset += subcontext.byte_size element_index += 1 mapped_values.append(element_value) if (elements_terminator is not None and element_value == elements_terminator): break except errors.ByteStreamTooSmallError as exception: context_state['context'] = subcontext context_state['elements_data_offset'] = elements_data_offset context_state['element_index'] = element_index context_state['mapped_values'] = mapped_values raise errors.ByteStreamTooSmallError(exception) except Exception as exception: raise errors.MappingError(exception) if number_of_elements is not None and element_index != number_of_elements: context_state['context'] = subcontext context_state['elements_data_offset'] = elements_data_offset context_state['element_index'] = element_index context_state['mapped_values'] = mapped_values error_string = ( 'Unable to read: {0:s} from byte stream at offset: {1:d} ' 'with error: missing element: {2:d}').format( self._data_type_definition.name, byte_offset, element_index - 1) raise errors.ByteStreamTooSmallError(error_string) if (elements_terminator is not None and element_value != elements_terminator and ( elements_data_size is None or elements_data_offset < elements_data_size)): byte_stream_size = len(byte_stream) size_hints[self._data_type_definition.name] = DataTypeMapSizeHint( byte_stream_size - byte_offset) context_state['context'] = subcontext context_state['elements_data_offset'] = elements_data_offset context_state['element_index'] = element_index context_state['mapped_values'] = mapped_values context_state['size_hints'] = size_hints error_string = ( 'Unable to read: {0:s} from byte stream at offset: {1:d} ' 'with error: unable to find elements terminator').format( self._data_type_definition.name, byte_offset) raise errors.ByteStreamTooSmallError(error_string) if context: context.byte_size = elements_data_offset context.state = {} return tuple(mapped_values)
[ "def", "_CompositeMapByteStream", "(", "self", ",", "byte_stream", ",", "byte_offset", "=", "0", ",", "context", "=", "None", ",", "*", "*", "unused_kwargs", ")", ":", "elements_data_size", "=", "None", "elements_terminator", "=", "None", "number_of_elements", "=", "None", "if", "self", ".", "_HasElementsDataSize", "(", ")", ":", "elements_data_size", "=", "self", ".", "_EvaluateElementsDataSize", "(", "context", ")", "element_byte_size", "=", "self", ".", "_element_data_type_definition", ".", "GetByteSize", "(", ")", "if", "element_byte_size", "is", "not", "None", ":", "number_of_elements", ",", "_", "=", "divmod", "(", "elements_data_size", ",", "element_byte_size", ")", "else", ":", "elements_terminator", "=", "(", "self", ".", "_element_data_type_definition", ".", "elements_terminator", ")", "elif", "self", ".", "_HasElementsTerminator", "(", ")", ":", "elements_terminator", "=", "self", ".", "_data_type_definition", ".", "elements_terminator", "elif", "self", ".", "_HasNumberOfElements", "(", ")", ":", "number_of_elements", "=", "self", ".", "_EvaluateNumberOfElements", "(", "context", ")", "if", "elements_terminator", "is", "None", "and", "number_of_elements", "is", "None", ":", "raise", "errors", ".", "MappingError", "(", "'Unable to determine element terminator or number of elements'", ")", "context_state", "=", "getattr", "(", "context", ",", "'state'", ",", "{", "}", ")", "elements_data_offset", "=", "context_state", ".", "get", "(", "'elements_data_offset'", ",", "0", ")", "element_index", "=", "context_state", ".", "get", "(", "'element_index'", ",", "0", ")", "element_value", "=", "None", "mapped_values", "=", "context_state", ".", "get", "(", "'mapped_values'", ",", "[", "]", ")", "size_hints", "=", "context_state", ".", "get", "(", "'size_hints'", ",", "{", "}", ")", "subcontext", "=", "context_state", ".", "get", "(", "'context'", ",", "None", ")", "if", "not", "subcontext", ":", "subcontext", "=", "DataTypeMapContext", "(", ")", "try", ":", 
"while", "byte_stream", "[", "byte_offset", ":", "]", ":", "if", "(", "number_of_elements", "is", "not", "None", "and", "element_index", "==", "number_of_elements", ")", ":", "break", "if", "(", "elements_data_size", "is", "not", "None", "and", "elements_data_offset", ">=", "elements_data_size", ")", ":", "break", "element_value", "=", "self", ".", "_element_data_type_map", ".", "MapByteStream", "(", "byte_stream", ",", "byte_offset", "=", "byte_offset", ",", "context", "=", "subcontext", ")", "byte_offset", "+=", "subcontext", ".", "byte_size", "elements_data_offset", "+=", "subcontext", ".", "byte_size", "element_index", "+=", "1", "mapped_values", ".", "append", "(", "element_value", ")", "if", "(", "elements_terminator", "is", "not", "None", "and", "element_value", "==", "elements_terminator", ")", ":", "break", "except", "errors", ".", "ByteStreamTooSmallError", "as", "exception", ":", "context_state", "[", "'context'", "]", "=", "subcontext", "context_state", "[", "'elements_data_offset'", "]", "=", "elements_data_offset", "context_state", "[", "'element_index'", "]", "=", "element_index", "context_state", "[", "'mapped_values'", "]", "=", "mapped_values", "raise", "errors", ".", "ByteStreamTooSmallError", "(", "exception", ")", "except", "Exception", "as", "exception", ":", "raise", "errors", ".", "MappingError", "(", "exception", ")", "if", "number_of_elements", "is", "not", "None", "and", "element_index", "!=", "number_of_elements", ":", "context_state", "[", "'context'", "]", "=", "subcontext", "context_state", "[", "'elements_data_offset'", "]", "=", "elements_data_offset", "context_state", "[", "'element_index'", "]", "=", "element_index", "context_state", "[", "'mapped_values'", "]", "=", "mapped_values", "error_string", "=", "(", "'Unable to read: {0:s} from byte stream at offset: {1:d} '", "'with error: missing element: {2:d}'", ")", ".", "format", "(", "self", ".", "_data_type_definition", ".", "name", ",", "byte_offset", ",", "element_index", "-", 
"1", ")", "raise", "errors", ".", "ByteStreamTooSmallError", "(", "error_string", ")", "if", "(", "elements_terminator", "is", "not", "None", "and", "element_value", "!=", "elements_terminator", "and", "(", "elements_data_size", "is", "None", "or", "elements_data_offset", "<", "elements_data_size", ")", ")", ":", "byte_stream_size", "=", "len", "(", "byte_stream", ")", "size_hints", "[", "self", ".", "_data_type_definition", ".", "name", "]", "=", "DataTypeMapSizeHint", "(", "byte_stream_size", "-", "byte_offset", ")", "context_state", "[", "'context'", "]", "=", "subcontext", "context_state", "[", "'elements_data_offset'", "]", "=", "elements_data_offset", "context_state", "[", "'element_index'", "]", "=", "element_index", "context_state", "[", "'mapped_values'", "]", "=", "mapped_values", "context_state", "[", "'size_hints'", "]", "=", "size_hints", "error_string", "=", "(", "'Unable to read: {0:s} from byte stream at offset: {1:d} '", "'with error: unable to find elements terminator'", ")", ".", "format", "(", "self", ".", "_data_type_definition", ".", "name", ",", "byte_offset", ")", "raise", "errors", ".", "ByteStreamTooSmallError", "(", "error_string", ")", "if", "context", ":", "context", ".", "byte_size", "=", "elements_data_offset", "context", ".", "state", "=", "{", "}", "return", "tuple", "(", "mapped_values", ")" ]
Maps a sequence of composite data types on a byte stream. Args: byte_stream (bytes): byte stream. byte_offset (Optional[int]): offset into the byte stream where to start. context (Optional[DataTypeMapContext]): data type map context. Returns: tuple[object, ...]: mapped values. Raises: ByteStreamTooSmallError: if the byte stream is too small. MappingError: if the data type definition cannot be mapped on the byte stream.
[ "Maps", "a", "sequence", "of", "composite", "data", "types", "on", "a", "byte", "stream", "." ]
train
https://github.com/libyal/dtfabric/blob/0d2b5719fa257f6e5c661a406737ebcf8c8db266/dtfabric/runtime/data_maps.py#L909-L1031
libyal/dtfabric
dtfabric/runtime/data_maps.py
SequenceMap._LinearFoldByteStream
def _LinearFoldByteStream(self, mapped_value, **unused_kwargs): """Folds the data type into a byte stream. Args: mapped_value (object): mapped value. Returns: bytes: byte stream. Raises: FoldingError: if the data type definition cannot be folded into the byte stream. """ try: return self._operation.WriteTo(mapped_value) except Exception as exception: error_string = ( 'Unable to write: {0:s} to byte stream with error: {1!s}').format( self._data_type_definition.name, exception) raise errors.FoldingError(error_string)
python
def _LinearFoldByteStream(self, mapped_value, **unused_kwargs): """Folds the data type into a byte stream. Args: mapped_value (object): mapped value. Returns: bytes: byte stream. Raises: FoldingError: if the data type definition cannot be folded into the byte stream. """ try: return self._operation.WriteTo(mapped_value) except Exception as exception: error_string = ( 'Unable to write: {0:s} to byte stream with error: {1!s}').format( self._data_type_definition.name, exception) raise errors.FoldingError(error_string)
[ "def", "_LinearFoldByteStream", "(", "self", ",", "mapped_value", ",", "*", "*", "unused_kwargs", ")", ":", "try", ":", "return", "self", ".", "_operation", ".", "WriteTo", "(", "mapped_value", ")", "except", "Exception", "as", "exception", ":", "error_string", "=", "(", "'Unable to write: {0:s} to byte stream with error: {1!s}'", ")", ".", "format", "(", "self", ".", "_data_type_definition", ".", "name", ",", "exception", ")", "raise", "errors", ".", "FoldingError", "(", "error_string", ")" ]
Folds the data type into a byte stream. Args: mapped_value (object): mapped value. Returns: bytes: byte stream. Raises: FoldingError: if the data type definition cannot be folded into the byte stream.
[ "Folds", "the", "data", "type", "into", "a", "byte", "stream", "." ]
train
https://github.com/libyal/dtfabric/blob/0d2b5719fa257f6e5c661a406737ebcf8c8db266/dtfabric/runtime/data_maps.py#L1033-L1053
libyal/dtfabric
dtfabric/runtime/data_maps.py
SequenceMap._LinearMapByteStream
def _LinearMapByteStream( self, byte_stream, byte_offset=0, context=None, **unused_kwargs): """Maps a data type sequence on a byte stream. Args: byte_stream (bytes): byte stream. byte_offset (Optional[int]): offset into the byte stream where to start. context (Optional[DataTypeMapContext]): data type map context. Returns: tuple[object, ...]: mapped values. Raises: MappingError: if the data type definition cannot be mapped on the byte stream. """ elements_data_size = self._data_type_definition.GetByteSize() self._CheckByteStreamSize(byte_stream, byte_offset, elements_data_size) try: struct_tuple = self._operation.ReadFrom(byte_stream[byte_offset:]) mapped_values = map(self._element_data_type_map.MapValue, struct_tuple) except Exception as exception: error_string = ( 'Unable to read: {0:s} from byte stream at offset: {1:d} ' 'with error: {2!s}').format( self._data_type_definition.name, byte_offset, exception) raise errors.MappingError(error_string) if context: context.byte_size = elements_data_size return tuple(mapped_values)
python
def _LinearMapByteStream( self, byte_stream, byte_offset=0, context=None, **unused_kwargs): """Maps a data type sequence on a byte stream. Args: byte_stream (bytes): byte stream. byte_offset (Optional[int]): offset into the byte stream where to start. context (Optional[DataTypeMapContext]): data type map context. Returns: tuple[object, ...]: mapped values. Raises: MappingError: if the data type definition cannot be mapped on the byte stream. """ elements_data_size = self._data_type_definition.GetByteSize() self._CheckByteStreamSize(byte_stream, byte_offset, elements_data_size) try: struct_tuple = self._operation.ReadFrom(byte_stream[byte_offset:]) mapped_values = map(self._element_data_type_map.MapValue, struct_tuple) except Exception as exception: error_string = ( 'Unable to read: {0:s} from byte stream at offset: {1:d} ' 'with error: {2!s}').format( self._data_type_definition.name, byte_offset, exception) raise errors.MappingError(error_string) if context: context.byte_size = elements_data_size return tuple(mapped_values)
[ "def", "_LinearMapByteStream", "(", "self", ",", "byte_stream", ",", "byte_offset", "=", "0", ",", "context", "=", "None", ",", "*", "*", "unused_kwargs", ")", ":", "elements_data_size", "=", "self", ".", "_data_type_definition", ".", "GetByteSize", "(", ")", "self", ".", "_CheckByteStreamSize", "(", "byte_stream", ",", "byte_offset", ",", "elements_data_size", ")", "try", ":", "struct_tuple", "=", "self", ".", "_operation", ".", "ReadFrom", "(", "byte_stream", "[", "byte_offset", ":", "]", ")", "mapped_values", "=", "map", "(", "self", ".", "_element_data_type_map", ".", "MapValue", ",", "struct_tuple", ")", "except", "Exception", "as", "exception", ":", "error_string", "=", "(", "'Unable to read: {0:s} from byte stream at offset: {1:d} '", "'with error: {2!s}'", ")", ".", "format", "(", "self", ".", "_data_type_definition", ".", "name", ",", "byte_offset", ",", "exception", ")", "raise", "errors", ".", "MappingError", "(", "error_string", ")", "if", "context", ":", "context", ".", "byte_size", "=", "elements_data_size", "return", "tuple", "(", "mapped_values", ")" ]
Maps a data type sequence on a byte stream. Args: byte_stream (bytes): byte stream. byte_offset (Optional[int]): offset into the byte stream where to start. context (Optional[DataTypeMapContext]): data type map context. Returns: tuple[object, ...]: mapped values. Raises: MappingError: if the data type definition cannot be mapped on the byte stream.
[ "Maps", "a", "data", "type", "sequence", "on", "a", "byte", "stream", "." ]
train
https://github.com/libyal/dtfabric/blob/0d2b5719fa257f6e5c661a406737ebcf8c8db266/dtfabric/runtime/data_maps.py#L1055-L1088
libyal/dtfabric
dtfabric/runtime/data_maps.py
SequenceMap.GetStructFormatString
def GetStructFormatString(self): """Retrieves the Python struct format string. Returns: str: format string as used by Python struct or None if format string cannot be determined. """ if not self._element_data_type_map: return None number_of_elements = None if self._data_type_definition.elements_data_size: element_byte_size = self._element_data_type_definition.GetByteSize() if element_byte_size is None: return None number_of_elements, _ = divmod( self._data_type_definition.elements_data_size, element_byte_size) elif self._data_type_definition.number_of_elements: number_of_elements = self._data_type_definition.number_of_elements format_string = self._element_data_type_map.GetStructFormatString() if not number_of_elements or not format_string: return None return '{0:d}{1:s}'.format(number_of_elements, format_string)
python
def GetStructFormatString(self): """Retrieves the Python struct format string. Returns: str: format string as used by Python struct or None if format string cannot be determined. """ if not self._element_data_type_map: return None number_of_elements = None if self._data_type_definition.elements_data_size: element_byte_size = self._element_data_type_definition.GetByteSize() if element_byte_size is None: return None number_of_elements, _ = divmod( self._data_type_definition.elements_data_size, element_byte_size) elif self._data_type_definition.number_of_elements: number_of_elements = self._data_type_definition.number_of_elements format_string = self._element_data_type_map.GetStructFormatString() if not number_of_elements or not format_string: return None return '{0:d}{1:s}'.format(number_of_elements, format_string)
[ "def", "GetStructFormatString", "(", "self", ")", ":", "if", "not", "self", ".", "_element_data_type_map", ":", "return", "None", "number_of_elements", "=", "None", "if", "self", ".", "_data_type_definition", ".", "elements_data_size", ":", "element_byte_size", "=", "self", ".", "_element_data_type_definition", ".", "GetByteSize", "(", ")", "if", "element_byte_size", "is", "None", ":", "return", "None", "number_of_elements", ",", "_", "=", "divmod", "(", "self", ".", "_data_type_definition", ".", "elements_data_size", ",", "element_byte_size", ")", "elif", "self", ".", "_data_type_definition", ".", "number_of_elements", ":", "number_of_elements", "=", "self", ".", "_data_type_definition", ".", "number_of_elements", "format_string", "=", "self", ".", "_element_data_type_map", ".", "GetStructFormatString", "(", ")", "if", "not", "number_of_elements", "or", "not", "format_string", ":", "return", "None", "return", "'{0:d}{1:s}'", ".", "format", "(", "number_of_elements", ",", "format_string", ")" ]
Retrieves the Python struct format string. Returns: str: format string as used by Python struct or None if format string cannot be determined.
[ "Retrieves", "the", "Python", "struct", "format", "string", "." ]
train
https://github.com/libyal/dtfabric/blob/0d2b5719fa257f6e5c661a406737ebcf8c8db266/dtfabric/runtime/data_maps.py#L1105-L1131
libyal/dtfabric
dtfabric/runtime/data_maps.py
StreamMap.FoldByteStream
def FoldByteStream(self, mapped_value, context=None, **unused_kwargs): """Folds the data type into a byte stream. Args: mapped_value (object): mapped value. context (Optional[DataTypeMapContext]): data type map context. Returns: bytes: byte stream. Raises: FoldingError: if the data type definition cannot be folded into the byte stream. """ elements_data_size = self._CalculateElementsDataSize(context) if elements_data_size is not None: if elements_data_size != len(mapped_value): raise errors.FoldingError( 'Mismatch between elements data size and mapped value size') elif not self._HasElementsTerminator(): raise errors.FoldingError('Unable to determine elements data size') else: elements_terminator = self._data_type_definition.elements_terminator elements_terminator_size = len(elements_terminator) if mapped_value[-elements_terminator_size:] != elements_terminator: mapped_value = b''.join([mapped_value, elements_terminator]) return mapped_value
python
def FoldByteStream(self, mapped_value, context=None, **unused_kwargs): """Folds the data type into a byte stream. Args: mapped_value (object): mapped value. context (Optional[DataTypeMapContext]): data type map context. Returns: bytes: byte stream. Raises: FoldingError: if the data type definition cannot be folded into the byte stream. """ elements_data_size = self._CalculateElementsDataSize(context) if elements_data_size is not None: if elements_data_size != len(mapped_value): raise errors.FoldingError( 'Mismatch between elements data size and mapped value size') elif not self._HasElementsTerminator(): raise errors.FoldingError('Unable to determine elements data size') else: elements_terminator = self._data_type_definition.elements_terminator elements_terminator_size = len(elements_terminator) if mapped_value[-elements_terminator_size:] != elements_terminator: mapped_value = b''.join([mapped_value, elements_terminator]) return mapped_value
[ "def", "FoldByteStream", "(", "self", ",", "mapped_value", ",", "context", "=", "None", ",", "*", "*", "unused_kwargs", ")", ":", "elements_data_size", "=", "self", ".", "_CalculateElementsDataSize", "(", "context", ")", "if", "elements_data_size", "is", "not", "None", ":", "if", "elements_data_size", "!=", "len", "(", "mapped_value", ")", ":", "raise", "errors", ".", "FoldingError", "(", "'Mismatch between elements data size and mapped value size'", ")", "elif", "not", "self", ".", "_HasElementsTerminator", "(", ")", ":", "raise", "errors", ".", "FoldingError", "(", "'Unable to determine elements data size'", ")", "else", ":", "elements_terminator", "=", "self", ".", "_data_type_definition", ".", "elements_terminator", "elements_terminator_size", "=", "len", "(", "elements_terminator", ")", "if", "mapped_value", "[", "-", "elements_terminator_size", ":", "]", "!=", "elements_terminator", ":", "mapped_value", "=", "b''", ".", "join", "(", "[", "mapped_value", ",", "elements_terminator", "]", ")", "return", "mapped_value" ]
Folds the data type into a byte stream. Args: mapped_value (object): mapped value. context (Optional[DataTypeMapContext]): data type map context. Returns: bytes: byte stream. Raises: FoldingError: if the data type definition cannot be folded into the byte stream.
[ "Folds", "the", "data", "type", "into", "a", "byte", "stream", "." ]
train
https://github.com/libyal/dtfabric/blob/0d2b5719fa257f6e5c661a406737ebcf8c8db266/dtfabric/runtime/data_maps.py#L1175-L1204
libyal/dtfabric
dtfabric/runtime/data_maps.py
StreamMap.MapByteStream
def MapByteStream( self, byte_stream, byte_offset=0, context=None, **unused_kwargs): """Maps the data type on a byte stream. Args: byte_stream (bytes): byte stream. byte_offset (Optional[int]): offset into the byte stream where to start. context (Optional[DataTypeMapContext]): data type map context. Returns: tuple[object, ...]: mapped values. Raises: MappingError: if the data type definition cannot be mapped on the byte stream. """ context_state = getattr(context, 'state', {}) size_hints = context_state.get('size_hints', {}) elements_data_size = self._CalculateElementsDataSize(context) if elements_data_size is not None: self._CheckByteStreamSize(byte_stream, byte_offset, elements_data_size) elif not self._HasElementsTerminator(): raise errors.MappingError( 'Unable to determine elements data size and missing elements ' 'terminator') else: byte_stream_size = len(byte_stream) element_byte_size = self._element_data_type_definition.GetByteSize() elements_data_offset = byte_offset next_elements_data_offset = elements_data_offset + element_byte_size elements_terminator = self._data_type_definition.elements_terminator element_value = byte_stream[ elements_data_offset:next_elements_data_offset] while byte_stream[elements_data_offset:]: elements_data_offset = next_elements_data_offset if element_value == elements_terminator: elements_data_size = elements_data_offset - byte_offset break next_elements_data_offset += element_byte_size element_value = byte_stream[ elements_data_offset:next_elements_data_offset] if element_value != elements_terminator: size_hints[self._data_type_definition.name] = DataTypeMapSizeHint( byte_stream_size - byte_offset) context_state['size_hints'] = size_hints error_string = ( 'Unable to read: {0:s} from byte stream at offset: {1:d} ' 'with error: unable to find elements terminator').format( self._data_type_definition.name, byte_offset) raise errors.ByteStreamTooSmallError(error_string) if context: context.byte_size = elements_data_size 
size_hints[self._data_type_definition.name] = DataTypeMapSizeHint( elements_data_size, is_complete=True) context_state['size_hints'] = size_hints return byte_stream[byte_offset:byte_offset + elements_data_size]
python
def MapByteStream( self, byte_stream, byte_offset=0, context=None, **unused_kwargs): """Maps the data type on a byte stream. Args: byte_stream (bytes): byte stream. byte_offset (Optional[int]): offset into the byte stream where to start. context (Optional[DataTypeMapContext]): data type map context. Returns: tuple[object, ...]: mapped values. Raises: MappingError: if the data type definition cannot be mapped on the byte stream. """ context_state = getattr(context, 'state', {}) size_hints = context_state.get('size_hints', {}) elements_data_size = self._CalculateElementsDataSize(context) if elements_data_size is not None: self._CheckByteStreamSize(byte_stream, byte_offset, elements_data_size) elif not self._HasElementsTerminator(): raise errors.MappingError( 'Unable to determine elements data size and missing elements ' 'terminator') else: byte_stream_size = len(byte_stream) element_byte_size = self._element_data_type_definition.GetByteSize() elements_data_offset = byte_offset next_elements_data_offset = elements_data_offset + element_byte_size elements_terminator = self._data_type_definition.elements_terminator element_value = byte_stream[ elements_data_offset:next_elements_data_offset] while byte_stream[elements_data_offset:]: elements_data_offset = next_elements_data_offset if element_value == elements_terminator: elements_data_size = elements_data_offset - byte_offset break next_elements_data_offset += element_byte_size element_value = byte_stream[ elements_data_offset:next_elements_data_offset] if element_value != elements_terminator: size_hints[self._data_type_definition.name] = DataTypeMapSizeHint( byte_stream_size - byte_offset) context_state['size_hints'] = size_hints error_string = ( 'Unable to read: {0:s} from byte stream at offset: {1:d} ' 'with error: unable to find elements terminator').format( self._data_type_definition.name, byte_offset) raise errors.ByteStreamTooSmallError(error_string) if context: context.byte_size = elements_data_size 
size_hints[self._data_type_definition.name] = DataTypeMapSizeHint( elements_data_size, is_complete=True) context_state['size_hints'] = size_hints return byte_stream[byte_offset:byte_offset + elements_data_size]
[ "def", "MapByteStream", "(", "self", ",", "byte_stream", ",", "byte_offset", "=", "0", ",", "context", "=", "None", ",", "*", "*", "unused_kwargs", ")", ":", "context_state", "=", "getattr", "(", "context", ",", "'state'", ",", "{", "}", ")", "size_hints", "=", "context_state", ".", "get", "(", "'size_hints'", ",", "{", "}", ")", "elements_data_size", "=", "self", ".", "_CalculateElementsDataSize", "(", "context", ")", "if", "elements_data_size", "is", "not", "None", ":", "self", ".", "_CheckByteStreamSize", "(", "byte_stream", ",", "byte_offset", ",", "elements_data_size", ")", "elif", "not", "self", ".", "_HasElementsTerminator", "(", ")", ":", "raise", "errors", ".", "MappingError", "(", "'Unable to determine elements data size and missing elements '", "'terminator'", ")", "else", ":", "byte_stream_size", "=", "len", "(", "byte_stream", ")", "element_byte_size", "=", "self", ".", "_element_data_type_definition", ".", "GetByteSize", "(", ")", "elements_data_offset", "=", "byte_offset", "next_elements_data_offset", "=", "elements_data_offset", "+", "element_byte_size", "elements_terminator", "=", "self", ".", "_data_type_definition", ".", "elements_terminator", "element_value", "=", "byte_stream", "[", "elements_data_offset", ":", "next_elements_data_offset", "]", "while", "byte_stream", "[", "elements_data_offset", ":", "]", ":", "elements_data_offset", "=", "next_elements_data_offset", "if", "element_value", "==", "elements_terminator", ":", "elements_data_size", "=", "elements_data_offset", "-", "byte_offset", "break", "next_elements_data_offset", "+=", "element_byte_size", "element_value", "=", "byte_stream", "[", "elements_data_offset", ":", "next_elements_data_offset", "]", "if", "element_value", "!=", "elements_terminator", ":", "size_hints", "[", "self", ".", "_data_type_definition", ".", "name", "]", "=", "DataTypeMapSizeHint", "(", "byte_stream_size", "-", "byte_offset", ")", "context_state", "[", "'size_hints'", "]", "=", "size_hints", "error_string", 
"=", "(", "'Unable to read: {0:s} from byte stream at offset: {1:d} '", "'with error: unable to find elements terminator'", ")", ".", "format", "(", "self", ".", "_data_type_definition", ".", "name", ",", "byte_offset", ")", "raise", "errors", ".", "ByteStreamTooSmallError", "(", "error_string", ")", "if", "context", ":", "context", ".", "byte_size", "=", "elements_data_size", "size_hints", "[", "self", ".", "_data_type_definition", ".", "name", "]", "=", "DataTypeMapSizeHint", "(", "elements_data_size", ",", "is_complete", "=", "True", ")", "context_state", "[", "'size_hints'", "]", "=", "size_hints", "return", "byte_stream", "[", "byte_offset", ":", "byte_offset", "+", "elements_data_size", "]" ]
Maps the data type on a byte stream. Args: byte_stream (bytes): byte stream. byte_offset (Optional[int]): offset into the byte stream where to start. context (Optional[DataTypeMapContext]): data type map context. Returns: tuple[object, ...]: mapped values. Raises: MappingError: if the data type definition cannot be mapped on the byte stream.
[ "Maps", "the", "data", "type", "on", "a", "byte", "stream", "." ]
train
https://github.com/libyal/dtfabric/blob/0d2b5719fa257f6e5c661a406737ebcf8c8db266/dtfabric/runtime/data_maps.py#L1219-L1289
libyal/dtfabric
dtfabric/runtime/data_maps.py
PaddingMap.MapByteStream
def MapByteStream(self, byte_stream, byte_offset=0, **unused_kwargs): """Maps the data type on a byte stream. Args: byte_stream (bytes): byte stream. byte_offset (Optional[int]): offset into the byte stream where to start. Returns: object: mapped value. Raises: MappingError: if the data type definition cannot be mapped on the byte stream. """ return byte_stream[byte_offset:byte_offset + self.byte_size]
python
def MapByteStream(self, byte_stream, byte_offset=0, **unused_kwargs): """Maps the data type on a byte stream. Args: byte_stream (bytes): byte stream. byte_offset (Optional[int]): offset into the byte stream where to start. Returns: object: mapped value. Raises: MappingError: if the data type definition cannot be mapped on the byte stream. """ return byte_stream[byte_offset:byte_offset + self.byte_size]
[ "def", "MapByteStream", "(", "self", ",", "byte_stream", ",", "byte_offset", "=", "0", ",", "*", "*", "unused_kwargs", ")", ":", "return", "byte_stream", "[", "byte_offset", ":", "byte_offset", "+", "self", ".", "byte_size", "]" ]
Maps the data type on a byte stream. Args: byte_stream (bytes): byte stream. byte_offset (Optional[int]): offset into the byte stream where to start. Returns: object: mapped value. Raises: MappingError: if the data type definition cannot be mapped on the byte stream.
[ "Maps", "the", "data", "type", "on", "a", "byte", "stream", "." ]
train
https://github.com/libyal/dtfabric/blob/0d2b5719fa257f6e5c661a406737ebcf8c8db266/dtfabric/runtime/data_maps.py#L1355-L1369
libyal/dtfabric
dtfabric/runtime/data_maps.py
StringMap.FoldByteStream
def FoldByteStream(self, mapped_value, **kwargs): """Folds the data type into a byte stream. Args: mapped_value (object): mapped value. Returns: bytes: byte stream. Raises: FoldingError: if the data type definition cannot be folded into the byte stream. """ try: byte_stream = mapped_value.encode(self._data_type_definition.encoding) except Exception as exception: error_string = ( 'Unable to write: {0:s} to byte stream with error: {1!s}').format( self._data_type_definition.name, exception) raise errors.MappingError(error_string) return super(StringMap, self).FoldByteStream(byte_stream, **kwargs)
python
def FoldByteStream(self, mapped_value, **kwargs): """Folds the data type into a byte stream. Args: mapped_value (object): mapped value. Returns: bytes: byte stream. Raises: FoldingError: if the data type definition cannot be folded into the byte stream. """ try: byte_stream = mapped_value.encode(self._data_type_definition.encoding) except Exception as exception: error_string = ( 'Unable to write: {0:s} to byte stream with error: {1!s}').format( self._data_type_definition.name, exception) raise errors.MappingError(error_string) return super(StringMap, self).FoldByteStream(byte_stream, **kwargs)
[ "def", "FoldByteStream", "(", "self", ",", "mapped_value", ",", "*", "*", "kwargs", ")", ":", "try", ":", "byte_stream", "=", "mapped_value", ".", "encode", "(", "self", ".", "_data_type_definition", ".", "encoding", ")", "except", "Exception", "as", "exception", ":", "error_string", "=", "(", "'Unable to write: {0:s} to byte stream with error: {1!s}'", ")", ".", "format", "(", "self", ".", "_data_type_definition", ".", "name", ",", "exception", ")", "raise", "errors", ".", "MappingError", "(", "error_string", ")", "return", "super", "(", "StringMap", ",", "self", ")", ".", "FoldByteStream", "(", "byte_stream", ",", "*", "*", "kwargs", ")" ]
Folds the data type into a byte stream. Args: mapped_value (object): mapped value. Returns: bytes: byte stream. Raises: FoldingError: if the data type definition cannot be folded into the byte stream.
[ "Folds", "the", "data", "type", "into", "a", "byte", "stream", "." ]
train
https://github.com/libyal/dtfabric/blob/0d2b5719fa257f6e5c661a406737ebcf8c8db266/dtfabric/runtime/data_maps.py#L1391-L1413
libyal/dtfabric
dtfabric/runtime/data_maps.py
StringMap.MapByteStream
def MapByteStream(self, byte_stream, byte_offset=0, **kwargs): """Maps the data type on a byte stream. Args: byte_stream (bytes): byte stream. byte_offset (Optional[int]): offset into the byte stream where to start. Returns: str: mapped values. Raises: MappingError: if the data type definition cannot be mapped on the byte stream. """ byte_stream = super(StringMap, self).MapByteStream( byte_stream, byte_offset=byte_offset, **kwargs) if self._HasElementsTerminator(): # Remove the elements terminator and any trailing data from # the byte stream. elements_terminator = self._data_type_definition.elements_terminator elements_terminator_size = len(elements_terminator) byte_offset = 0 byte_stream_size = len(byte_stream) while byte_offset < byte_stream_size: end_offset = byte_offset + elements_terminator_size if byte_stream[byte_offset:end_offset] == elements_terminator: break byte_offset += elements_terminator_size byte_stream = byte_stream[:byte_offset] try: return byte_stream.decode(self._data_type_definition.encoding) except Exception as exception: error_string = ( 'Unable to read: {0:s} from byte stream at offset: {1:d} ' 'with error: {2!s}').format( self._data_type_definition.name, byte_offset, exception) raise errors.MappingError(error_string)
python
def MapByteStream(self, byte_stream, byte_offset=0, **kwargs): """Maps the data type on a byte stream. Args: byte_stream (bytes): byte stream. byte_offset (Optional[int]): offset into the byte stream where to start. Returns: str: mapped values. Raises: MappingError: if the data type definition cannot be mapped on the byte stream. """ byte_stream = super(StringMap, self).MapByteStream( byte_stream, byte_offset=byte_offset, **kwargs) if self._HasElementsTerminator(): # Remove the elements terminator and any trailing data from # the byte stream. elements_terminator = self._data_type_definition.elements_terminator elements_terminator_size = len(elements_terminator) byte_offset = 0 byte_stream_size = len(byte_stream) while byte_offset < byte_stream_size: end_offset = byte_offset + elements_terminator_size if byte_stream[byte_offset:end_offset] == elements_terminator: break byte_offset += elements_terminator_size byte_stream = byte_stream[:byte_offset] try: return byte_stream.decode(self._data_type_definition.encoding) except Exception as exception: error_string = ( 'Unable to read: {0:s} from byte stream at offset: {1:d} ' 'with error: {2!s}').format( self._data_type_definition.name, byte_offset, exception) raise errors.MappingError(error_string)
[ "def", "MapByteStream", "(", "self", ",", "byte_stream", ",", "byte_offset", "=", "0", ",", "*", "*", "kwargs", ")", ":", "byte_stream", "=", "super", "(", "StringMap", ",", "self", ")", ".", "MapByteStream", "(", "byte_stream", ",", "byte_offset", "=", "byte_offset", ",", "*", "*", "kwargs", ")", "if", "self", ".", "_HasElementsTerminator", "(", ")", ":", "# Remove the elements terminator and any trailing data from", "# the byte stream.", "elements_terminator", "=", "self", ".", "_data_type_definition", ".", "elements_terminator", "elements_terminator_size", "=", "len", "(", "elements_terminator", ")", "byte_offset", "=", "0", "byte_stream_size", "=", "len", "(", "byte_stream", ")", "while", "byte_offset", "<", "byte_stream_size", ":", "end_offset", "=", "byte_offset", "+", "elements_terminator_size", "if", "byte_stream", "[", "byte_offset", ":", "end_offset", "]", "==", "elements_terminator", ":", "break", "byte_offset", "+=", "elements_terminator_size", "byte_stream", "=", "byte_stream", "[", ":", "byte_offset", "]", "try", ":", "return", "byte_stream", ".", "decode", "(", "self", ".", "_data_type_definition", ".", "encoding", ")", "except", "Exception", "as", "exception", ":", "error_string", "=", "(", "'Unable to read: {0:s} from byte stream at offset: {1:d} '", "'with error: {2!s}'", ")", ".", "format", "(", "self", ".", "_data_type_definition", ".", "name", ",", "byte_offset", ",", "exception", ")", "raise", "errors", ".", "MappingError", "(", "error_string", ")" ]
Maps the data type on a byte stream. Args: byte_stream (bytes): byte stream. byte_offset (Optional[int]): offset into the byte stream where to start. Returns: str: mapped values. Raises: MappingError: if the data type definition cannot be mapped on the byte stream.
[ "Maps", "the", "data", "type", "on", "a", "byte", "stream", "." ]
train
https://github.com/libyal/dtfabric/blob/0d2b5719fa257f6e5c661a406737ebcf8c8db266/dtfabric/runtime/data_maps.py#L1415-L1458
libyal/dtfabric
dtfabric/runtime/data_maps.py
StructureMap._CheckCompositeMap
def _CheckCompositeMap(self, data_type_definition): """Determines if the data type definition needs a composite map. Args: data_type_definition (DataTypeDefinition): structure data type definition. Returns: bool: True if a composite map is needed, False otherwise. Raises: FormatError: if a composite map is needed cannot be determined from the data type definition. """ if not data_type_definition: raise errors.FormatError('Missing data type definition') members = getattr(data_type_definition, 'members', None) if not members: raise errors.FormatError('Invalid data type definition missing members') is_composite_map = False last_member_byte_order = data_type_definition.byte_order for member_definition in members: if member_definition.IsComposite(): is_composite_map = True break # TODO: check for padding type # TODO: determine if padding type can be defined as linear if (last_member_byte_order != definitions.BYTE_ORDER_NATIVE and member_definition.byte_order != definitions.BYTE_ORDER_NATIVE and last_member_byte_order != member_definition.byte_order): is_composite_map = True break last_member_byte_order = member_definition.byte_order return is_composite_map
python
def _CheckCompositeMap(self, data_type_definition): """Determines if the data type definition needs a composite map. Args: data_type_definition (DataTypeDefinition): structure data type definition. Returns: bool: True if a composite map is needed, False otherwise. Raises: FormatError: if a composite map is needed cannot be determined from the data type definition. """ if not data_type_definition: raise errors.FormatError('Missing data type definition') members = getattr(data_type_definition, 'members', None) if not members: raise errors.FormatError('Invalid data type definition missing members') is_composite_map = False last_member_byte_order = data_type_definition.byte_order for member_definition in members: if member_definition.IsComposite(): is_composite_map = True break # TODO: check for padding type # TODO: determine if padding type can be defined as linear if (last_member_byte_order != definitions.BYTE_ORDER_NATIVE and member_definition.byte_order != definitions.BYTE_ORDER_NATIVE and last_member_byte_order != member_definition.byte_order): is_composite_map = True break last_member_byte_order = member_definition.byte_order return is_composite_map
[ "def", "_CheckCompositeMap", "(", "self", ",", "data_type_definition", ")", ":", "if", "not", "data_type_definition", ":", "raise", "errors", ".", "FormatError", "(", "'Missing data type definition'", ")", "members", "=", "getattr", "(", "data_type_definition", ",", "'members'", ",", "None", ")", "if", "not", "members", ":", "raise", "errors", ".", "FormatError", "(", "'Invalid data type definition missing members'", ")", "is_composite_map", "=", "False", "last_member_byte_order", "=", "data_type_definition", ".", "byte_order", "for", "member_definition", "in", "members", ":", "if", "member_definition", ".", "IsComposite", "(", ")", ":", "is_composite_map", "=", "True", "break", "# TODO: check for padding type", "# TODO: determine if padding type can be defined as linear", "if", "(", "last_member_byte_order", "!=", "definitions", ".", "BYTE_ORDER_NATIVE", "and", "member_definition", ".", "byte_order", "!=", "definitions", ".", "BYTE_ORDER_NATIVE", "and", "last_member_byte_order", "!=", "member_definition", ".", "byte_order", ")", ":", "is_composite_map", "=", "True", "break", "last_member_byte_order", "=", "member_definition", ".", "byte_order", "return", "is_composite_map" ]
Determines if the data type definition needs a composite map. Args: data_type_definition (DataTypeDefinition): structure data type definition. Returns: bool: True if a composite map is needed, False otherwise. Raises: FormatError: if a composite map is needed cannot be determined from the data type definition.
[ "Determines", "if", "the", "data", "type", "definition", "needs", "a", "composite", "map", "." ]
train
https://github.com/libyal/dtfabric/blob/0d2b5719fa257f6e5c661a406737ebcf8c8db266/dtfabric/runtime/data_maps.py#L1498-L1536
libyal/dtfabric
dtfabric/runtime/data_maps.py
StructureMap._CompositeFoldByteStream
def _CompositeFoldByteStream( self, mapped_value, context=None, **unused_kwargs): """Folds the data type into a byte stream. Args: mapped_value (object): mapped value. context (Optional[DataTypeMapContext]): data type map context. Returns: bytes: byte stream. Raises: FoldingError: if the data type definition cannot be folded into the byte stream. """ context_state = getattr(context, 'state', {}) attribute_index = context_state.get('attribute_index', 0) subcontext = context_state.get('context', None) if not subcontext: subcontext = DataTypeMapContext(values={ type(mapped_value).__name__: mapped_value}) data_attributes = [] for attribute_index in range(attribute_index, self._number_of_attributes): attribute_name = self._attribute_names[attribute_index] data_type_map = self._data_type_maps[attribute_index] member_value = getattr(mapped_value, attribute_name, None) if data_type_map is None or member_value is None: continue member_data = data_type_map.FoldByteStream( member_value, context=subcontext) if member_data is None: return None data_attributes.append(member_data) if context: context.state = {} return b''.join(data_attributes)
python
def _CompositeFoldByteStream( self, mapped_value, context=None, **unused_kwargs): """Folds the data type into a byte stream. Args: mapped_value (object): mapped value. context (Optional[DataTypeMapContext]): data type map context. Returns: bytes: byte stream. Raises: FoldingError: if the data type definition cannot be folded into the byte stream. """ context_state = getattr(context, 'state', {}) attribute_index = context_state.get('attribute_index', 0) subcontext = context_state.get('context', None) if not subcontext: subcontext = DataTypeMapContext(values={ type(mapped_value).__name__: mapped_value}) data_attributes = [] for attribute_index in range(attribute_index, self._number_of_attributes): attribute_name = self._attribute_names[attribute_index] data_type_map = self._data_type_maps[attribute_index] member_value = getattr(mapped_value, attribute_name, None) if data_type_map is None or member_value is None: continue member_data = data_type_map.FoldByteStream( member_value, context=subcontext) if member_data is None: return None data_attributes.append(member_data) if context: context.state = {} return b''.join(data_attributes)
[ "def", "_CompositeFoldByteStream", "(", "self", ",", "mapped_value", ",", "context", "=", "None", ",", "*", "*", "unused_kwargs", ")", ":", "context_state", "=", "getattr", "(", "context", ",", "'state'", ",", "{", "}", ")", "attribute_index", "=", "context_state", ".", "get", "(", "'attribute_index'", ",", "0", ")", "subcontext", "=", "context_state", ".", "get", "(", "'context'", ",", "None", ")", "if", "not", "subcontext", ":", "subcontext", "=", "DataTypeMapContext", "(", "values", "=", "{", "type", "(", "mapped_value", ")", ".", "__name__", ":", "mapped_value", "}", ")", "data_attributes", "=", "[", "]", "for", "attribute_index", "in", "range", "(", "attribute_index", ",", "self", ".", "_number_of_attributes", ")", ":", "attribute_name", "=", "self", ".", "_attribute_names", "[", "attribute_index", "]", "data_type_map", "=", "self", ".", "_data_type_maps", "[", "attribute_index", "]", "member_value", "=", "getattr", "(", "mapped_value", ",", "attribute_name", ",", "None", ")", "if", "data_type_map", "is", "None", "or", "member_value", "is", "None", ":", "continue", "member_data", "=", "data_type_map", ".", "FoldByteStream", "(", "member_value", ",", "context", "=", "subcontext", ")", "if", "member_data", "is", "None", ":", "return", "None", "data_attributes", ".", "append", "(", "member_data", ")", "if", "context", ":", "context", ".", "state", "=", "{", "}", "return", "b''", ".", "join", "(", "data_attributes", ")" ]
Folds the data type into a byte stream. Args: mapped_value (object): mapped value. context (Optional[DataTypeMapContext]): data type map context. Returns: bytes: byte stream. Raises: FoldingError: if the data type definition cannot be folded into the byte stream.
[ "Folds", "the", "data", "type", "into", "a", "byte", "stream", "." ]
train
https://github.com/libyal/dtfabric/blob/0d2b5719fa257f6e5c661a406737ebcf8c8db266/dtfabric/runtime/data_maps.py#L1538-L1582
libyal/dtfabric
dtfabric/runtime/data_maps.py
StructureMap._CompositeMapByteStream
def _CompositeMapByteStream( self, byte_stream, byte_offset=0, context=None, **unused_kwargs): """Maps a sequence of composite data types on a byte stream. Args: byte_stream (bytes): byte stream. byte_offset (Optional[int]): offset into the byte stream where to start. context (Optional[DataTypeMapContext]): data type map context. Returns: object: mapped value. Raises: MappingError: if the data type definition cannot be mapped on the byte stream. """ context_state = getattr(context, 'state', {}) attribute_index = context_state.get('attribute_index', 0) mapped_values = context_state.get('mapped_values', None) subcontext = context_state.get('context', None) if not mapped_values: mapped_values = self._structure_values_class() if not subcontext: subcontext = DataTypeMapContext(values={ type(mapped_values).__name__: mapped_values}) members_data_size = 0 for attribute_index in range(attribute_index, self._number_of_attributes): attribute_name = self._attribute_names[attribute_index] data_type_map = self._data_type_maps[attribute_index] member_definition = self._data_type_definition.members[attribute_index] condition = getattr(member_definition, 'condition', None) if condition: namespace = dict(subcontext.values) # Make sure __builtins__ contains an empty dictionary. 
namespace['__builtins__'] = {} try: condition_result = eval(condition, namespace) # pylint: disable=eval-used except Exception as exception: raise errors.MappingError( 'Unable to evaluate condition with error: {0!s}'.format( exception)) if not isinstance(condition_result, bool): raise errors.MappingError( 'Condition does not result in a boolean value') if not condition_result: continue if isinstance(member_definition, data_types.PaddingDefinition): _, byte_size = divmod( members_data_size, member_definition.alignment_size) if byte_size > 0: byte_size = member_definition.alignment_size - byte_size data_type_map.byte_size = byte_size try: value = data_type_map.MapByteStream( byte_stream, byte_offset=byte_offset, context=subcontext) setattr(mapped_values, attribute_name, value) except errors.ByteStreamTooSmallError as exception: context_state['attribute_index'] = attribute_index context_state['context'] = subcontext context_state['mapped_values'] = mapped_values raise errors.ByteStreamTooSmallError(exception) except Exception as exception: raise errors.MappingError(exception) supported_values = getattr(member_definition, 'values', None) if supported_values and value not in supported_values: raise errors.MappingError( 'Value: {0!s} not in supported values: {1:s}'.format( value, ', '.join([ '{0!s}'.format(value) for value in supported_values]))) byte_offset += subcontext.byte_size members_data_size += subcontext.byte_size if attribute_index != (self._number_of_attributes - 1): context_state['attribute_index'] = attribute_index context_state['context'] = subcontext context_state['mapped_values'] = mapped_values error_string = ( 'Unable to read: {0:s} from byte stream at offset: {1:d} ' 'with error: missing attribute: {2:d}').format( self._data_type_definition.name, byte_offset, attribute_index) raise errors.ByteStreamTooSmallError(error_string) if context: context.byte_size = members_data_size context.state = {} return mapped_values
python
def _CompositeMapByteStream( self, byte_stream, byte_offset=0, context=None, **unused_kwargs): """Maps a sequence of composite data types on a byte stream. Args: byte_stream (bytes): byte stream. byte_offset (Optional[int]): offset into the byte stream where to start. context (Optional[DataTypeMapContext]): data type map context. Returns: object: mapped value. Raises: MappingError: if the data type definition cannot be mapped on the byte stream. """ context_state = getattr(context, 'state', {}) attribute_index = context_state.get('attribute_index', 0) mapped_values = context_state.get('mapped_values', None) subcontext = context_state.get('context', None) if not mapped_values: mapped_values = self._structure_values_class() if not subcontext: subcontext = DataTypeMapContext(values={ type(mapped_values).__name__: mapped_values}) members_data_size = 0 for attribute_index in range(attribute_index, self._number_of_attributes): attribute_name = self._attribute_names[attribute_index] data_type_map = self._data_type_maps[attribute_index] member_definition = self._data_type_definition.members[attribute_index] condition = getattr(member_definition, 'condition', None) if condition: namespace = dict(subcontext.values) # Make sure __builtins__ contains an empty dictionary. 
namespace['__builtins__'] = {} try: condition_result = eval(condition, namespace) # pylint: disable=eval-used except Exception as exception: raise errors.MappingError( 'Unable to evaluate condition with error: {0!s}'.format( exception)) if not isinstance(condition_result, bool): raise errors.MappingError( 'Condition does not result in a boolean value') if not condition_result: continue if isinstance(member_definition, data_types.PaddingDefinition): _, byte_size = divmod( members_data_size, member_definition.alignment_size) if byte_size > 0: byte_size = member_definition.alignment_size - byte_size data_type_map.byte_size = byte_size try: value = data_type_map.MapByteStream( byte_stream, byte_offset=byte_offset, context=subcontext) setattr(mapped_values, attribute_name, value) except errors.ByteStreamTooSmallError as exception: context_state['attribute_index'] = attribute_index context_state['context'] = subcontext context_state['mapped_values'] = mapped_values raise errors.ByteStreamTooSmallError(exception) except Exception as exception: raise errors.MappingError(exception) supported_values = getattr(member_definition, 'values', None) if supported_values and value not in supported_values: raise errors.MappingError( 'Value: {0!s} not in supported values: {1:s}'.format( value, ', '.join([ '{0!s}'.format(value) for value in supported_values]))) byte_offset += subcontext.byte_size members_data_size += subcontext.byte_size if attribute_index != (self._number_of_attributes - 1): context_state['attribute_index'] = attribute_index context_state['context'] = subcontext context_state['mapped_values'] = mapped_values error_string = ( 'Unable to read: {0:s} from byte stream at offset: {1:d} ' 'with error: missing attribute: {2:d}').format( self._data_type_definition.name, byte_offset, attribute_index) raise errors.ByteStreamTooSmallError(error_string) if context: context.byte_size = members_data_size context.state = {} return mapped_values
[ "def", "_CompositeMapByteStream", "(", "self", ",", "byte_stream", ",", "byte_offset", "=", "0", ",", "context", "=", "None", ",", "*", "*", "unused_kwargs", ")", ":", "context_state", "=", "getattr", "(", "context", ",", "'state'", ",", "{", "}", ")", "attribute_index", "=", "context_state", ".", "get", "(", "'attribute_index'", ",", "0", ")", "mapped_values", "=", "context_state", ".", "get", "(", "'mapped_values'", ",", "None", ")", "subcontext", "=", "context_state", ".", "get", "(", "'context'", ",", "None", ")", "if", "not", "mapped_values", ":", "mapped_values", "=", "self", ".", "_structure_values_class", "(", ")", "if", "not", "subcontext", ":", "subcontext", "=", "DataTypeMapContext", "(", "values", "=", "{", "type", "(", "mapped_values", ")", ".", "__name__", ":", "mapped_values", "}", ")", "members_data_size", "=", "0", "for", "attribute_index", "in", "range", "(", "attribute_index", ",", "self", ".", "_number_of_attributes", ")", ":", "attribute_name", "=", "self", ".", "_attribute_names", "[", "attribute_index", "]", "data_type_map", "=", "self", ".", "_data_type_maps", "[", "attribute_index", "]", "member_definition", "=", "self", ".", "_data_type_definition", ".", "members", "[", "attribute_index", "]", "condition", "=", "getattr", "(", "member_definition", ",", "'condition'", ",", "None", ")", "if", "condition", ":", "namespace", "=", "dict", "(", "subcontext", ".", "values", ")", "# Make sure __builtins__ contains an empty dictionary.", "namespace", "[", "'__builtins__'", "]", "=", "{", "}", "try", ":", "condition_result", "=", "eval", "(", "condition", ",", "namespace", ")", "# pylint: disable=eval-used", "except", "Exception", "as", "exception", ":", "raise", "errors", ".", "MappingError", "(", "'Unable to evaluate condition with error: {0!s}'", ".", "format", "(", "exception", ")", ")", "if", "not", "isinstance", "(", "condition_result", ",", "bool", ")", ":", "raise", "errors", ".", "MappingError", "(", "'Condition does not result in a 
boolean value'", ")", "if", "not", "condition_result", ":", "continue", "if", "isinstance", "(", "member_definition", ",", "data_types", ".", "PaddingDefinition", ")", ":", "_", ",", "byte_size", "=", "divmod", "(", "members_data_size", ",", "member_definition", ".", "alignment_size", ")", "if", "byte_size", ">", "0", ":", "byte_size", "=", "member_definition", ".", "alignment_size", "-", "byte_size", "data_type_map", ".", "byte_size", "=", "byte_size", "try", ":", "value", "=", "data_type_map", ".", "MapByteStream", "(", "byte_stream", ",", "byte_offset", "=", "byte_offset", ",", "context", "=", "subcontext", ")", "setattr", "(", "mapped_values", ",", "attribute_name", ",", "value", ")", "except", "errors", ".", "ByteStreamTooSmallError", "as", "exception", ":", "context_state", "[", "'attribute_index'", "]", "=", "attribute_index", "context_state", "[", "'context'", "]", "=", "subcontext", "context_state", "[", "'mapped_values'", "]", "=", "mapped_values", "raise", "errors", ".", "ByteStreamTooSmallError", "(", "exception", ")", "except", "Exception", "as", "exception", ":", "raise", "errors", ".", "MappingError", "(", "exception", ")", "supported_values", "=", "getattr", "(", "member_definition", ",", "'values'", ",", "None", ")", "if", "supported_values", "and", "value", "not", "in", "supported_values", ":", "raise", "errors", ".", "MappingError", "(", "'Value: {0!s} not in supported values: {1:s}'", ".", "format", "(", "value", ",", "', '", ".", "join", "(", "[", "'{0!s}'", ".", "format", "(", "value", ")", "for", "value", "in", "supported_values", "]", ")", ")", ")", "byte_offset", "+=", "subcontext", ".", "byte_size", "members_data_size", "+=", "subcontext", ".", "byte_size", "if", "attribute_index", "!=", "(", "self", ".", "_number_of_attributes", "-", "1", ")", ":", "context_state", "[", "'attribute_index'", "]", "=", "attribute_index", "context_state", "[", "'context'", "]", "=", "subcontext", "context_state", "[", "'mapped_values'", "]", "=", 
"mapped_values", "error_string", "=", "(", "'Unable to read: {0:s} from byte stream at offset: {1:d} '", "'with error: missing attribute: {2:d}'", ")", ".", "format", "(", "self", ".", "_data_type_definition", ".", "name", ",", "byte_offset", ",", "attribute_index", ")", "raise", "errors", ".", "ByteStreamTooSmallError", "(", "error_string", ")", "if", "context", ":", "context", ".", "byte_size", "=", "members_data_size", "context", ".", "state", "=", "{", "}", "return", "mapped_values" ]
Maps a sequence of composite data types on a byte stream. Args: byte_stream (bytes): byte stream. byte_offset (Optional[int]): offset into the byte stream where to start. context (Optional[DataTypeMapContext]): data type map context. Returns: object: mapped value. Raises: MappingError: if the data type definition cannot be mapped on the byte stream.
[ "Maps", "a", "sequence", "of", "composite", "data", "types", "on", "a", "byte", "stream", "." ]
train
https://github.com/libyal/dtfabric/blob/0d2b5719fa257f6e5c661a406737ebcf8c8db266/dtfabric/runtime/data_maps.py#L1584-L1686
libyal/dtfabric
dtfabric/runtime/data_maps.py
StructureMap._GetAttributeNames
def _GetAttributeNames(self, data_type_definition): """Determines the attribute (or field) names of the members. Args: data_type_definition (DataTypeDefinition): data type definition. Returns: list[str]: attribute names. Raises: FormatError: if the attribute names cannot be determined from the data type definition. """ if not data_type_definition: raise errors.FormatError('Missing data type definition') attribute_names = [] for member_definition in data_type_definition.members: attribute_names.append(member_definition.name) return attribute_names
python
def _GetAttributeNames(self, data_type_definition): """Determines the attribute (or field) names of the members. Args: data_type_definition (DataTypeDefinition): data type definition. Returns: list[str]: attribute names. Raises: FormatError: if the attribute names cannot be determined from the data type definition. """ if not data_type_definition: raise errors.FormatError('Missing data type definition') attribute_names = [] for member_definition in data_type_definition.members: attribute_names.append(member_definition.name) return attribute_names
[ "def", "_GetAttributeNames", "(", "self", ",", "data_type_definition", ")", ":", "if", "not", "data_type_definition", ":", "raise", "errors", ".", "FormatError", "(", "'Missing data type definition'", ")", "attribute_names", "=", "[", "]", "for", "member_definition", "in", "data_type_definition", ".", "members", ":", "attribute_names", ".", "append", "(", "member_definition", ".", "name", ")", "return", "attribute_names" ]
Determines the attribute (or field) names of the members. Args: data_type_definition (DataTypeDefinition): data type definition. Returns: list[str]: attribute names. Raises: FormatError: if the attribute names cannot be determined from the data type definition.
[ "Determines", "the", "attribute", "(", "or", "field", ")", "names", "of", "the", "members", "." ]
train
https://github.com/libyal/dtfabric/blob/0d2b5719fa257f6e5c661a406737ebcf8c8db266/dtfabric/runtime/data_maps.py#L1688-L1708
libyal/dtfabric
dtfabric/runtime/data_maps.py
StructureMap._GetMemberDataTypeMaps
def _GetMemberDataTypeMaps(self, data_type_definition, data_type_map_cache): """Retrieves the member data type maps. Args: data_type_definition (DataTypeDefinition): data type definition. data_type_map_cache (dict[str, DataTypeMap]): cached data type maps. Returns: list[DataTypeMap]: member data type maps. Raises: FormatError: if the data type maps cannot be determined from the data type definition. """ if not data_type_definition: raise errors.FormatError('Missing data type definition') members = getattr(data_type_definition, 'members', None) if not members: raise errors.FormatError('Invalid data type definition missing members') data_type_maps = [] members_data_size = 0 for member_definition in members: if isinstance(member_definition, data_types.MemberDataTypeDefinition): member_definition = member_definition.member_data_type_definition if (data_type_definition.byte_order != definitions.BYTE_ORDER_NATIVE and member_definition.byte_order == definitions.BYTE_ORDER_NATIVE): # Make a copy of the data type definition where byte-order can be # safely changed. 
member_definition = copy.copy(member_definition) member_definition.name = '_{0:s}_{1:s}'.format( data_type_definition.name, member_definition.name) member_definition.byte_order = data_type_definition.byte_order if member_definition.name not in data_type_map_cache: data_type_map = DataTypeMapFactory.CreateDataTypeMapByType( member_definition) data_type_map_cache[member_definition.name] = data_type_map data_type_map = data_type_map_cache[member_definition.name] if members_data_size is not None: if not isinstance(member_definition, data_types.PaddingDefinition): byte_size = member_definition.GetByteSize() else: _, byte_size = divmod( members_data_size, member_definition.alignment_size) if byte_size > 0: byte_size = member_definition.alignment_size - byte_size data_type_map.byte_size = byte_size if byte_size is None: members_data_size = None else: members_data_size += byte_size data_type_maps.append(data_type_map) return data_type_maps
python
def _GetMemberDataTypeMaps(self, data_type_definition, data_type_map_cache): """Retrieves the member data type maps. Args: data_type_definition (DataTypeDefinition): data type definition. data_type_map_cache (dict[str, DataTypeMap]): cached data type maps. Returns: list[DataTypeMap]: member data type maps. Raises: FormatError: if the data type maps cannot be determined from the data type definition. """ if not data_type_definition: raise errors.FormatError('Missing data type definition') members = getattr(data_type_definition, 'members', None) if not members: raise errors.FormatError('Invalid data type definition missing members') data_type_maps = [] members_data_size = 0 for member_definition in members: if isinstance(member_definition, data_types.MemberDataTypeDefinition): member_definition = member_definition.member_data_type_definition if (data_type_definition.byte_order != definitions.BYTE_ORDER_NATIVE and member_definition.byte_order == definitions.BYTE_ORDER_NATIVE): # Make a copy of the data type definition where byte-order can be # safely changed. 
member_definition = copy.copy(member_definition) member_definition.name = '_{0:s}_{1:s}'.format( data_type_definition.name, member_definition.name) member_definition.byte_order = data_type_definition.byte_order if member_definition.name not in data_type_map_cache: data_type_map = DataTypeMapFactory.CreateDataTypeMapByType( member_definition) data_type_map_cache[member_definition.name] = data_type_map data_type_map = data_type_map_cache[member_definition.name] if members_data_size is not None: if not isinstance(member_definition, data_types.PaddingDefinition): byte_size = member_definition.GetByteSize() else: _, byte_size = divmod( members_data_size, member_definition.alignment_size) if byte_size > 0: byte_size = member_definition.alignment_size - byte_size data_type_map.byte_size = byte_size if byte_size is None: members_data_size = None else: members_data_size += byte_size data_type_maps.append(data_type_map) return data_type_maps
[ "def", "_GetMemberDataTypeMaps", "(", "self", ",", "data_type_definition", ",", "data_type_map_cache", ")", ":", "if", "not", "data_type_definition", ":", "raise", "errors", ".", "FormatError", "(", "'Missing data type definition'", ")", "members", "=", "getattr", "(", "data_type_definition", ",", "'members'", ",", "None", ")", "if", "not", "members", ":", "raise", "errors", ".", "FormatError", "(", "'Invalid data type definition missing members'", ")", "data_type_maps", "=", "[", "]", "members_data_size", "=", "0", "for", "member_definition", "in", "members", ":", "if", "isinstance", "(", "member_definition", ",", "data_types", ".", "MemberDataTypeDefinition", ")", ":", "member_definition", "=", "member_definition", ".", "member_data_type_definition", "if", "(", "data_type_definition", ".", "byte_order", "!=", "definitions", ".", "BYTE_ORDER_NATIVE", "and", "member_definition", ".", "byte_order", "==", "definitions", ".", "BYTE_ORDER_NATIVE", ")", ":", "# Make a copy of the data type definition where byte-order can be", "# safely changed.", "member_definition", "=", "copy", ".", "copy", "(", "member_definition", ")", "member_definition", ".", "name", "=", "'_{0:s}_{1:s}'", ".", "format", "(", "data_type_definition", ".", "name", ",", "member_definition", ".", "name", ")", "member_definition", ".", "byte_order", "=", "data_type_definition", ".", "byte_order", "if", "member_definition", ".", "name", "not", "in", "data_type_map_cache", ":", "data_type_map", "=", "DataTypeMapFactory", ".", "CreateDataTypeMapByType", "(", "member_definition", ")", "data_type_map_cache", "[", "member_definition", ".", "name", "]", "=", "data_type_map", "data_type_map", "=", "data_type_map_cache", "[", "member_definition", ".", "name", "]", "if", "members_data_size", "is", "not", "None", ":", "if", "not", "isinstance", "(", "member_definition", ",", "data_types", ".", "PaddingDefinition", ")", ":", "byte_size", "=", "member_definition", ".", "GetByteSize", "(", ")", "else", ":", "_", 
",", "byte_size", "=", "divmod", "(", "members_data_size", ",", "member_definition", ".", "alignment_size", ")", "if", "byte_size", ">", "0", ":", "byte_size", "=", "member_definition", ".", "alignment_size", "-", "byte_size", "data_type_map", ".", "byte_size", "=", "byte_size", "if", "byte_size", "is", "None", ":", "members_data_size", "=", "None", "else", ":", "members_data_size", "+=", "byte_size", "data_type_maps", ".", "append", "(", "data_type_map", ")", "return", "data_type_maps" ]
Retrieves the member data type maps. Args: data_type_definition (DataTypeDefinition): data type definition. data_type_map_cache (dict[str, DataTypeMap]): cached data type maps. Returns: list[DataTypeMap]: member data type maps. Raises: FormatError: if the data type maps cannot be determined from the data type definition.
[ "Retrieves", "the", "member", "data", "type", "maps", "." ]
train
https://github.com/libyal/dtfabric/blob/0d2b5719fa257f6e5c661a406737ebcf8c8db266/dtfabric/runtime/data_maps.py#L1710-L1771
libyal/dtfabric
dtfabric/runtime/data_maps.py
StructureMap._LinearFoldByteStream
def _LinearFoldByteStream(self, mapped_value, **unused_kwargs): """Folds the data type into a byte stream. Args: mapped_value (object): mapped value. Returns: bytes: byte stream. Raises: FoldingError: if the data type definition cannot be folded into the byte stream. """ try: attribute_values = [ getattr(mapped_value, attribute_name, None) for attribute_name in self._attribute_names] attribute_values = [ value for value in attribute_values if value is not None] return self._operation.WriteTo(tuple(attribute_values)) except Exception as exception: error_string = ( 'Unable to write: {0:s} to byte stream with error: {1!s}').format( self._data_type_definition.name, exception) raise errors.FoldingError(error_string)
python
def _LinearFoldByteStream(self, mapped_value, **unused_kwargs): """Folds the data type into a byte stream. Args: mapped_value (object): mapped value. Returns: bytes: byte stream. Raises: FoldingError: if the data type definition cannot be folded into the byte stream. """ try: attribute_values = [ getattr(mapped_value, attribute_name, None) for attribute_name in self._attribute_names] attribute_values = [ value for value in attribute_values if value is not None] return self._operation.WriteTo(tuple(attribute_values)) except Exception as exception: error_string = ( 'Unable to write: {0:s} to byte stream with error: {1!s}').format( self._data_type_definition.name, exception) raise errors.FoldingError(error_string)
[ "def", "_LinearFoldByteStream", "(", "self", ",", "mapped_value", ",", "*", "*", "unused_kwargs", ")", ":", "try", ":", "attribute_values", "=", "[", "getattr", "(", "mapped_value", ",", "attribute_name", ",", "None", ")", "for", "attribute_name", "in", "self", ".", "_attribute_names", "]", "attribute_values", "=", "[", "value", "for", "value", "in", "attribute_values", "if", "value", "is", "not", "None", "]", "return", "self", ".", "_operation", ".", "WriteTo", "(", "tuple", "(", "attribute_values", ")", ")", "except", "Exception", "as", "exception", ":", "error_string", "=", "(", "'Unable to write: {0:s} to byte stream with error: {1!s}'", ")", ".", "format", "(", "self", ".", "_data_type_definition", ".", "name", ",", "exception", ")", "raise", "errors", ".", "FoldingError", "(", "error_string", ")" ]
Folds the data type into a byte stream. Args: mapped_value (object): mapped value. Returns: bytes: byte stream. Raises: FoldingError: if the data type definition cannot be folded into the byte stream.
[ "Folds", "the", "data", "type", "into", "a", "byte", "stream", "." ]
train
https://github.com/libyal/dtfabric/blob/0d2b5719fa257f6e5c661a406737ebcf8c8db266/dtfabric/runtime/data_maps.py#L1773-L1798
libyal/dtfabric
dtfabric/runtime/data_maps.py
StructureMap._LinearMapByteStream
def _LinearMapByteStream( self, byte_stream, byte_offset=0, context=None, **unused_kwargs): """Maps a data type sequence on a byte stream. Args: byte_stream (bytes): byte stream. byte_offset (Optional[int]): offset into the byte stream where to start. context (Optional[DataTypeMapContext]): data type map context. Returns: object: mapped value. Raises: MappingError: if the data type definition cannot be mapped on the byte stream. """ members_data_size = self._data_type_definition.GetByteSize() self._CheckByteStreamSize(byte_stream, byte_offset, members_data_size) try: struct_tuple = self._operation.ReadFrom(byte_stream[byte_offset:]) struct_values = [] for attribute_index, value in enumerate(struct_tuple): data_type_map = self._data_type_maps[attribute_index] member_definition = self._data_type_definition.members[attribute_index] value = data_type_map.MapValue(value) supported_values = getattr(member_definition, 'values', None) if supported_values and value not in supported_values: raise errors.MappingError( 'Value: {0!s} not in supported values: {1:s}'.format( value, ', '.join([ '{0!s}'.format(value) for value in supported_values]))) struct_values.append(value) mapped_value = self._structure_values_class(*struct_values) except Exception as exception: error_string = ( 'Unable to read: {0:s} from byte stream at offset: {1:d} ' 'with error: {2!s}').format( self._data_type_definition.name, byte_offset, exception) raise errors.MappingError(error_string) if context: context.byte_size = members_data_size return mapped_value
python
def _LinearMapByteStream( self, byte_stream, byte_offset=0, context=None, **unused_kwargs): """Maps a data type sequence on a byte stream. Args: byte_stream (bytes): byte stream. byte_offset (Optional[int]): offset into the byte stream where to start. context (Optional[DataTypeMapContext]): data type map context. Returns: object: mapped value. Raises: MappingError: if the data type definition cannot be mapped on the byte stream. """ members_data_size = self._data_type_definition.GetByteSize() self._CheckByteStreamSize(byte_stream, byte_offset, members_data_size) try: struct_tuple = self._operation.ReadFrom(byte_stream[byte_offset:]) struct_values = [] for attribute_index, value in enumerate(struct_tuple): data_type_map = self._data_type_maps[attribute_index] member_definition = self._data_type_definition.members[attribute_index] value = data_type_map.MapValue(value) supported_values = getattr(member_definition, 'values', None) if supported_values and value not in supported_values: raise errors.MappingError( 'Value: {0!s} not in supported values: {1:s}'.format( value, ', '.join([ '{0!s}'.format(value) for value in supported_values]))) struct_values.append(value) mapped_value = self._structure_values_class(*struct_values) except Exception as exception: error_string = ( 'Unable to read: {0:s} from byte stream at offset: {1:d} ' 'with error: {2!s}').format( self._data_type_definition.name, byte_offset, exception) raise errors.MappingError(error_string) if context: context.byte_size = members_data_size return mapped_value
[ "def", "_LinearMapByteStream", "(", "self", ",", "byte_stream", ",", "byte_offset", "=", "0", ",", "context", "=", "None", ",", "*", "*", "unused_kwargs", ")", ":", "members_data_size", "=", "self", ".", "_data_type_definition", ".", "GetByteSize", "(", ")", "self", ".", "_CheckByteStreamSize", "(", "byte_stream", ",", "byte_offset", ",", "members_data_size", ")", "try", ":", "struct_tuple", "=", "self", ".", "_operation", ".", "ReadFrom", "(", "byte_stream", "[", "byte_offset", ":", "]", ")", "struct_values", "=", "[", "]", "for", "attribute_index", ",", "value", "in", "enumerate", "(", "struct_tuple", ")", ":", "data_type_map", "=", "self", ".", "_data_type_maps", "[", "attribute_index", "]", "member_definition", "=", "self", ".", "_data_type_definition", ".", "members", "[", "attribute_index", "]", "value", "=", "data_type_map", ".", "MapValue", "(", "value", ")", "supported_values", "=", "getattr", "(", "member_definition", ",", "'values'", ",", "None", ")", "if", "supported_values", "and", "value", "not", "in", "supported_values", ":", "raise", "errors", ".", "MappingError", "(", "'Value: {0!s} not in supported values: {1:s}'", ".", "format", "(", "value", ",", "', '", ".", "join", "(", "[", "'{0!s}'", ".", "format", "(", "value", ")", "for", "value", "in", "supported_values", "]", ")", ")", ")", "struct_values", ".", "append", "(", "value", ")", "mapped_value", "=", "self", ".", "_structure_values_class", "(", "*", "struct_values", ")", "except", "Exception", "as", "exception", ":", "error_string", "=", "(", "'Unable to read: {0:s} from byte stream at offset: {1:d} '", "'with error: {2!s}'", ")", ".", "format", "(", "self", ".", "_data_type_definition", ".", "name", ",", "byte_offset", ",", "exception", ")", "raise", "errors", ".", "MappingError", "(", "error_string", ")", "if", "context", ":", "context", ".", "byte_size", "=", "members_data_size", "return", "mapped_value" ]
Maps a data type sequence on a byte stream. Args: byte_stream (bytes): byte stream. byte_offset (Optional[int]): offset into the byte stream where to start. context (Optional[DataTypeMapContext]): data type map context. Returns: object: mapped value. Raises: MappingError: if the data type definition cannot be mapped on the byte stream.
[ "Maps", "a", "data", "type", "sequence", "on", "a", "byte", "stream", "." ]
train
https://github.com/libyal/dtfabric/blob/0d2b5719fa257f6e5c661a406737ebcf8c8db266/dtfabric/runtime/data_maps.py#L1800-L1849
libyal/dtfabric
dtfabric/runtime/data_maps.py
StructureMap.GetSizeHint
def GetSizeHint(self, context=None, **unused_kwargs): """Retrieves a hint about the size. Args: context (Optional[DataTypeMapContext]): data type map context, used to determine the size hint. Returns: int: hint of the number of bytes needed from the byte stream or None. """ context_state = getattr(context, 'state', {}) subcontext = context_state.get('context', None) if not subcontext: mapped_values = context_state.get('mapped_values', None) subcontext = DataTypeMapContext(values={ type(mapped_values).__name__: mapped_values}) size_hint = 0 for data_type_map in self._data_type_maps: data_type_size = data_type_map.GetSizeHint(context=subcontext) if data_type_size is None: break size_hint += data_type_size return size_hint
python
def GetSizeHint(self, context=None, **unused_kwargs): """Retrieves a hint about the size. Args: context (Optional[DataTypeMapContext]): data type map context, used to determine the size hint. Returns: int: hint of the number of bytes needed from the byte stream or None. """ context_state = getattr(context, 'state', {}) subcontext = context_state.get('context', None) if not subcontext: mapped_values = context_state.get('mapped_values', None) subcontext = DataTypeMapContext(values={ type(mapped_values).__name__: mapped_values}) size_hint = 0 for data_type_map in self._data_type_maps: data_type_size = data_type_map.GetSizeHint(context=subcontext) if data_type_size is None: break size_hint += data_type_size return size_hint
[ "def", "GetSizeHint", "(", "self", ",", "context", "=", "None", ",", "*", "*", "unused_kwargs", ")", ":", "context_state", "=", "getattr", "(", "context", ",", "'state'", ",", "{", "}", ")", "subcontext", "=", "context_state", ".", "get", "(", "'context'", ",", "None", ")", "if", "not", "subcontext", ":", "mapped_values", "=", "context_state", ".", "get", "(", "'mapped_values'", ",", "None", ")", "subcontext", "=", "DataTypeMapContext", "(", "values", "=", "{", "type", "(", "mapped_values", ")", ".", "__name__", ":", "mapped_values", "}", ")", "size_hint", "=", "0", "for", "data_type_map", "in", "self", ".", "_data_type_maps", ":", "data_type_size", "=", "data_type_map", ".", "GetSizeHint", "(", "context", "=", "subcontext", ")", "if", "data_type_size", "is", "None", ":", "break", "size_hint", "+=", "data_type_size", "return", "size_hint" ]
Retrieves a hint about the size. Args: context (Optional[DataTypeMapContext]): data type map context, used to determine the size hint. Returns: int: hint of the number of bytes needed from the byte stream or None.
[ "Retrieves", "a", "hint", "about", "the", "size", "." ]
train
https://github.com/libyal/dtfabric/blob/0d2b5719fa257f6e5c661a406737ebcf8c8db266/dtfabric/runtime/data_maps.py#L1874-L1900
libyal/dtfabric
dtfabric/runtime/data_maps.py
StructureMap.GetStructFormatString
def GetStructFormatString(self): """Retrieves the Python struct format string. Returns: str: format string as used by Python struct or None if format string cannot be determined. """ if self._format_string is None and self._data_type_maps: format_strings = [] for member_data_type_map in self._data_type_maps: if member_data_type_map is None: return None member_format_string = member_data_type_map.GetStructFormatString() if member_format_string is None: return None format_strings.append(member_format_string) self._format_string = ''.join(format_strings) return self._format_string
python
def GetStructFormatString(self): """Retrieves the Python struct format string. Returns: str: format string as used by Python struct or None if format string cannot be determined. """ if self._format_string is None and self._data_type_maps: format_strings = [] for member_data_type_map in self._data_type_maps: if member_data_type_map is None: return None member_format_string = member_data_type_map.GetStructFormatString() if member_format_string is None: return None format_strings.append(member_format_string) self._format_string = ''.join(format_strings) return self._format_string
[ "def", "GetStructFormatString", "(", "self", ")", ":", "if", "self", ".", "_format_string", "is", "None", "and", "self", ".", "_data_type_maps", ":", "format_strings", "=", "[", "]", "for", "member_data_type_map", "in", "self", ".", "_data_type_maps", ":", "if", "member_data_type_map", "is", "None", ":", "return", "None", "member_format_string", "=", "member_data_type_map", ".", "GetStructFormatString", "(", ")", "if", "member_format_string", "is", "None", ":", "return", "None", "format_strings", ".", "append", "(", "member_format_string", ")", "self", ".", "_format_string", "=", "''", ".", "join", "(", "format_strings", ")", "return", "self", ".", "_format_string" ]
Retrieves the Python struct format string. Returns: str: format string as used by Python struct or None if format string cannot be determined.
[ "Retrieves", "the", "Python", "struct", "format", "string", "." ]
train
https://github.com/libyal/dtfabric/blob/0d2b5719fa257f6e5c661a406737ebcf8c8db266/dtfabric/runtime/data_maps.py#L1902-L1923
libyal/dtfabric
dtfabric/runtime/data_maps.py
SemanticDataTypeMap.FoldByteStream
def FoldByteStream(self, mapped_value, **unused_kwargs): # pylint: disable=redundant-returns-doc """Folds the data type into a byte stream. Args: mapped_value (object): mapped value. Returns: bytes: byte stream. Raises: FoldingError: if the data type definition cannot be folded into the byte stream. """ raise errors.FoldingError( 'Unable to fold {0:s} data type into byte stream'.format( self._data_type_definition.TYPE_INDICATOR))
python
def FoldByteStream(self, mapped_value, **unused_kwargs): # pylint: disable=redundant-returns-doc """Folds the data type into a byte stream. Args: mapped_value (object): mapped value. Returns: bytes: byte stream. Raises: FoldingError: if the data type definition cannot be folded into the byte stream. """ raise errors.FoldingError( 'Unable to fold {0:s} data type into byte stream'.format( self._data_type_definition.TYPE_INDICATOR))
[ "def", "FoldByteStream", "(", "self", ",", "mapped_value", ",", "*", "*", "unused_kwargs", ")", ":", "# pylint: disable=redundant-returns-doc", "raise", "errors", ".", "FoldingError", "(", "'Unable to fold {0:s} data type into byte stream'", ".", "format", "(", "self", ".", "_data_type_definition", ".", "TYPE_INDICATOR", ")", ")" ]
Folds the data type into a byte stream. Args: mapped_value (object): mapped value. Returns: bytes: byte stream. Raises: FoldingError: if the data type definition cannot be folded into the byte stream.
[ "Folds", "the", "data", "type", "into", "a", "byte", "stream", "." ]
train
https://github.com/libyal/dtfabric/blob/0d2b5719fa257f6e5c661a406737ebcf8c8db266/dtfabric/runtime/data_maps.py#L1944-L1959