Column schema (types and value statistics as shown in the preview):

| column | type | stats |
|---|---|---|
| partition | string | 3 classes |
| func_name | string | 1 to 134 characters |
| docstring | string | 1 to 46.9k characters |
| path | string | 4 to 223 characters |
| original_string | string | 75 to 104k characters |
| code | string | 75 to 104k characters |
| docstring_tokens | list | 1 to 1.97k items |
| repo | string | 7 to 55 characters |
| language | string | 1 class |
| url | string | 87 to 315 characters |
| code_tokens | list | 19 to 28.4k items |
| sha | string | 40 characters |
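
A minimal sketch of consuming records with this schema, assuming the split is stored as JSON Lines; the file name is hypothetical and only the field names come from the schema above:

```python
import json

# Iterate one record at a time; field names are taken from the schema table.
with open("test.jsonl") as handle:  # hypothetical file name / storage format
    for line in handle:
        row = json.loads(line)
        print(row["partition"], row["repo"], row["func_name"])
        print(row["url"])   # permalink pinned to row["sha"]
        print(row["code"])  # full function source
        break
```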
---

**func_name:** `Filesystem.parent` · **partition:** test · **language:** python
**repo:** `4degrees/riffle` · **path:** `source/riffle/model.py` · **sha:** `e5a0d908df8c93ff1ee7abdda8875fd1667df53d`
**url:** https://github.com/4degrees/riffle/blob/e5a0d908df8c93ff1ee7abdda8875fd1667df53d/source/riffle/model.py#L381-L394

**docstring:** Return parent of *index*.

**code** (the `original_string` cell is identical):

```python
def parent(self, index):
    '''Return parent of *index*.'''
    if not index.isValid():
        return QModelIndex()

    item = index.internalPointer()
    if not item:
        return QModelIndex()

    parent = item.parent
    if not parent or parent == self.root:
        return QModelIndex()

    return self.createIndex(parent.row, 0, parent)
```

**docstring_tokens:** `["Return", "parent", "of", "*", "index", "*", "."]`
**code_tokens:** `["def", "parent", "(", "self", ",", "index", ")", ":", "if", "not", "index", ".", "isValid", "(", ")", ":", "return", "QModelIndex", "(", ")", "item", "=", "index", ".", "internalPointer", "(", ")", "if", "not", "item", ":", "return", "QModelIndex", "(", ")", "parent", "=", "item", ".", "parent", "if", "not", "parent", "or", "parent", "==", "self", ".", "root", ":", "return", "QModelIndex", "(", ")", "return", "self", ".", "createIndex", "(", "parent", ".", "row", ",", "0", ",", "parent", ")"]`
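
In each record, `docstring_tokens` and `code_tokens` are word-level splits of the `docstring` and `code` cells (notably, `code_tokens` omits the docstring literal while keeping `#` comments). A rough standard-library approximation of the code tokenization; the exact upstream tokenizer is not specified here, so treat this as an assumption:

```python
import io
import tokenize

def approx_code_tokens(source):
    # Drop layout-only tokens; keep names, operators, literals and comments.
    # Assumption: the dataset's tokenizer also strips docstring literals,
    # which this simple sketch does not attempt.
    skip = {tokenize.NEWLINE, tokenize.NL, tokenize.INDENT,
            tokenize.DEDENT, tokenize.ENDMARKER}
    return [tok.string
            for tok in tokenize.generate_tokens(io.StringIO(source).readline)
            if tok.type not in skip]

print(approx_code_tokens("def f(x):\n    return x + 1\n"))
# ['def', 'f', '(', 'x', ')', ':', 'return', 'x', '+', '1']
```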
---

**func_name:** `Filesystem.data` · **partition:** test · **language:** python
**repo:** `4degrees/riffle` · **path:** `source/riffle/model.py` · **sha:** `e5a0d908df8c93ff1ee7abdda8875fd1667df53d`
**url:** https://github.com/4degrees/riffle/blob/e5a0d908df8c93ff1ee7abdda8875fd1667df53d/source/riffle/model.py#L404-L438

**docstring:** Return data for *index* according to *role*.

**code** (the `original_string` cell is identical):

```python
def data(self, index, role):
    '''Return data for *index* according to *role*.'''
    if not index.isValid():
        return None

    column = index.column()
    item = index.internalPointer()

    if role == self.ITEM_ROLE:
        return item

    elif role == Qt.DisplayRole:
        if column == 0:
            return item.name
        elif column == 1:
            if item.size:
                return item.size
        elif column == 2:
            return item.type
        elif column == 3:
            if item.modified is not None:
                return item.modified.strftime('%c')

    elif role == Qt.DecorationRole:
        if column == 0:
            return self.iconFactory.icon(item)

    elif role == Qt.TextAlignmentRole:
        if column == 1:
            return Qt.AlignRight
        else:
            return Qt.AlignLeft

    return None
```

**docstring_tokens:** `["Return", "data", "for", "*", "index", "*", "according", "to", "*", "role", "*", "."]`
**code_tokens:** `["def", "data", "(", "self", ",", "index", ",", "role", ")", ":", "if", "not", "index", ".", "isValid", "(", ")", ":", "return", "None", "column", "=", "index", ".", "column", "(", ")", "item", "=", "index", ".", "internalPointer", "(", ")", "if", "role", "==", "self", ".", "ITEM_ROLE", ":", "return", "item", "elif", "role", "==", "Qt", ".", "DisplayRole", ":", "if", "column", "==", "0", ":", "return", "item", ".", "name", "elif", "column", "==", "1", ":", "if", "item", ".", "size", ":", "return", "item", ".", "size", "elif", "column", "==", "2", ":", "return", "item", ".", "type", "elif", "column", "==", "3", ":", "if", "item", ".", "modified", "is", "not", "None", ":", "return", "item", ".", "modified", ".", "strftime", "(", "'%c'", ")", "elif", "role", "==", "Qt", ".", "DecorationRole", ":", "if", "column", "==", "0", ":", "return", "self", ".", "iconFactory", ".", "icon", "(", "item", ")", "elif", "role", "==", "Qt", ".", "TextAlignmentRole", ":", "if", "column", "==", "1", ":", "return", "Qt", ".", "AlignRight", "else", ":", "return", "Qt", ".", "AlignLeft", "return", "None"]`
---

**func_name:** `Filesystem.headerData` · **partition:** test · **language:** python
**repo:** `4degrees/riffle` · **path:** `source/riffle/model.py` · **sha:** `e5a0d908df8c93ff1ee7abdda8875fd1667df53d`
**url:** https://github.com/4degrees/riffle/blob/e5a0d908df8c93ff1ee7abdda8875fd1667df53d/source/riffle/model.py#L440-L448

**docstring:** Return label for *section* according to *orientation* and *role*.

**code** (the `original_string` cell is identical):

```python
def headerData(self, section, orientation, role):
    '''Return label for *section* according to *orientation* and *role*.'''
    if orientation == Qt.Horizontal:
        if section < len(self.columns):
            column = self.columns[section]
            if role == Qt.DisplayRole:
                return column

    return None
```

**docstring_tokens:** `["Return", "label", "for", "*", "section", "*", "according", "to", "*", "orientation", "*", "and", "*", "role", "*", "."]`
**code_tokens:** `["def", "headerData", "(", "self", ",", "section", ",", "orientation", ",", "role", ")", ":", "if", "orientation", "==", "Qt", ".", "Horizontal", ":", "if", "section", "<", "len", "(", "self", ".", "columns", ")", ":", "column", "=", "self", ".", "columns", "[", "section", "]", "if", "role", "==", "Qt", ".", "DisplayRole", ":", "return", "column", "return", "None"]`
---

**func_name:** `Filesystem.hasChildren` · **partition:** test · **language:** python
**repo:** `4degrees/riffle` · **path:** `source/riffle/model.py` · **sha:** `e5a0d908df8c93ff1ee7abdda8875fd1667df53d`
**url:** https://github.com/4degrees/riffle/blob/e5a0d908df8c93ff1ee7abdda8875fd1667df53d/source/riffle/model.py#L450-L463

**docstring:**
> Return if *index* has children.
> Optimised to avoid loading children at this stage.

**code** (the `original_string` cell is identical):

```python
def hasChildren(self, index):
    '''Return if *index* has children.

    Optimised to avoid loading children at this stage.

    '''
    if not index.isValid():
        item = self.root
    else:
        item = index.internalPointer()

    if not item:
        return False

    return item.mayHaveChildren()
```

**docstring_tokens:** `["Return", "if", "*", "index", "*", "has", "children", "."]`
**code_tokens:** `["def", "hasChildren", "(", "self", ",", "index", ")", ":", "if", "not", "index", ".", "isValid", "(", ")", ":", "item", "=", "self", ".", "root", "else", ":", "item", "=", "index", ".", "internalPointer", "(", ")", "if", "not", "item", ":", "return", "False", "return", "item", ".", "mayHaveChildren", "(", ")"]`
---

**func_name:** `Filesystem.canFetchMore` · **partition:** test · **language:** python
**repo:** `4degrees/riffle` · **path:** `source/riffle/model.py` · **sha:** `e5a0d908df8c93ff1ee7abdda8875fd1667df53d`
**url:** https://github.com/4degrees/riffle/blob/e5a0d908df8c93ff1ee7abdda8875fd1667df53d/source/riffle/model.py#L465-L472

**docstring:** Return if more data available for *index*.

**code** (the `original_string` cell is identical):

```python
def canFetchMore(self, index):
    '''Return if more data available for *index*.'''
    if not index.isValid():
        item = self.root
    else:
        item = index.internalPointer()

    return item.canFetchMore()
```

**docstring_tokens:** `["Return", "if", "more", "data", "available", "for", "*", "index", "*", "."]`
**code_tokens:** `["def", "canFetchMore", "(", "self", ",", "index", ")", ":", "if", "not", "index", ".", "isValid", "(", ")", ":", "item", "=", "self", ".", "root", "else", ":", "item", "=", "index", ".", "internalPointer", "(", ")", "return", "item", ".", "canFetchMore", "(", ")"]`
---

**func_name:** `Filesystem.fetchMore` · **partition:** test · **language:** python
**repo:** `4degrees/riffle` · **path:** `source/riffle/model.py` · **sha:** `e5a0d908df8c93ff1ee7abdda8875fd1667df53d`
**url:** https://github.com/4degrees/riffle/blob/e5a0d908df8c93ff1ee7abdda8875fd1667df53d/source/riffle/model.py#L474-L489

**docstring:** Fetch additional data under *index*.

**code** (the `original_string` cell is identical):

```python
def fetchMore(self, index):
    '''Fetch additional data under *index*.'''
    if not index.isValid():
        item = self.root
    else:
        item = index.internalPointer()

    if item.canFetchMore():
        startIndex = len(item.children)
        additionalChildren = item.fetchChildren()
        endIndex = startIndex + len(additionalChildren) - 1
        if endIndex >= startIndex:
            self.beginInsertRows(index, startIndex, endIndex)
            for newChild in additionalChildren:
                item.addChild(newChild)
            self.endInsertRows()
```

**docstring_tokens:** `["Fetch", "additional", "data", "under", "*", "index", "*", "."]`
**code_tokens:** `["def", "fetchMore", "(", "self", ",", "index", ")", ":", "if", "not", "index", ".", "isValid", "(", ")", ":", "item", "=", "self", ".", "root", "else", ":", "item", "=", "index", ".", "internalPointer", "(", ")", "if", "item", ".", "canFetchMore", "(", ")", ":", "startIndex", "=", "len", "(", "item", ".", "children", ")", "additionalChildren", "=", "item", ".", "fetchChildren", "(", ")", "endIndex", "=", "startIndex", "+", "len", "(", "additionalChildren", ")", "-", "1", "if", "endIndex", ">=", "startIndex", ":", "self", ".", "beginInsertRows", "(", "index", ",", "startIndex", ",", "endIndex", ")", "for", "newChild", "in", "additionalChildren", ":", "item", ".", "addChild", "(", "newChild", ")", "self", ".", "endInsertRows", "(", ")"]`
---

**func_name:** `FilesystemSortProxy.lessThan` · **partition:** test · **language:** python
**repo:** `4degrees/riffle` · **path:** `source/riffle/model.py` · **sha:** `e5a0d908df8c93ff1ee7abdda8875fd1667df53d`
**url:** https://github.com/4degrees/riffle/blob/e5a0d908df8c93ff1ee7abdda8875fd1667df53d/source/riffle/model.py#L501-L516

**docstring:** Return ordering of *left* vs *right*.

**code** (the `original_string` cell is identical):

```python
def lessThan(self, left, right):
    '''Return ordering of *left* vs *right*.'''
    sourceModel = self.sourceModel()
    if sourceModel:
        leftItem = sourceModel.item(left)
        rightItem = sourceModel.item(right)

        if (isinstance(leftItem, Directory)
                and not isinstance(rightItem, Directory)):
            return self.sortOrder() == Qt.AscendingOrder

        elif (not isinstance(leftItem, Directory)
                and isinstance(rightItem, Directory)):
            return self.sortOrder() == Qt.DescendingOrder

    return super(FilesystemSortProxy, self).lessThan(left, right)
```

**docstring_tokens:** `["Return", "ordering", "of", "*", "left", "*", "vs", "*", "right", "*", "."]`
**code_tokens:** `["def", "lessThan", "(", "self", ",", "left", ",", "right", ")", ":", "sourceModel", "=", "self", ".", "sourceModel", "(", ")", "if", "sourceModel", ":", "leftItem", "=", "sourceModel", ".", "item", "(", "left", ")", "rightItem", "=", "sourceModel", ".", "item", "(", "right", ")", "if", "(", "isinstance", "(", "leftItem", ",", "Directory", ")", "and", "not", "isinstance", "(", "rightItem", ",", "Directory", ")", ")", ":", "return", "self", ".", "sortOrder", "(", ")", "==", "Qt", ".", "AscendingOrder", "elif", "(", "not", "isinstance", "(", "leftItem", ",", "Directory", ")", "and", "isinstance", "(", "rightItem", ",", "Directory", ")", ")", ":", "return", "self", ".", "sortOrder", "(", ")", "==", "Qt", ".", "DescendingOrder", "return", "super", "(", "FilesystemSortProxy", ",", "self", ")", ".", "lessThan", "(", "left", ",", "right", ")"]`
---

**func_name:** `FilesystemSortProxy.pathIndex` · **partition:** test · **language:** python
**repo:** `4degrees/riffle` · **path:** `source/riffle/model.py` · **sha:** `e5a0d908df8c93ff1ee7abdda8875fd1667df53d`
**url:** https://github.com/4degrees/riffle/blob/e5a0d908df8c93ff1ee7abdda8875fd1667df53d/source/riffle/model.py#L536-L542

**docstring:** Return index of item with *path*.

**code** (the `original_string` cell is identical):

```python
def pathIndex(self, path):
    '''Return index of item with *path*.'''
    sourceModel = self.sourceModel()
    if not sourceModel:
        return QModelIndex()

    return self.mapFromSource(sourceModel.pathIndex(path))
```

**docstring_tokens:** `["Return", "index", "of", "item", "with", "*", "path", "*", "."]`
**code_tokens:** `["def", "pathIndex", "(", "self", ",", "path", ")", ":", "sourceModel", "=", "self", ".", "sourceModel", "(", ")", "if", "not", "sourceModel", ":", "return", "QModelIndex", "(", ")", "return", "self", ".", "mapFromSource", "(", "sourceModel", ".", "pathIndex", "(", "path", ")", ")"]`
---

**func_name:** `FilesystemSortProxy.item` · **partition:** test · **language:** python
**repo:** `4degrees/riffle` · **path:** `source/riffle/model.py` · **sha:** `e5a0d908df8c93ff1ee7abdda8875fd1667df53d`
**url:** https://github.com/4degrees/riffle/blob/e5a0d908df8c93ff1ee7abdda8875fd1667df53d/source/riffle/model.py#L544-L551

**docstring:** Return item at *index*.

**code** (the `original_string` cell is identical):

```python
def item(self, index):
    '''Return item at *index*.'''
    sourceModel = self.sourceModel()
    if not sourceModel:
        return None

    return sourceModel.item(self.mapToSource(index))
```

**docstring_tokens:** `["Return", "item", "at", "*", "index", "*", "."]`
**code_tokens:** `["def", "item", "(", "self", ",", "index", ")", ":", "sourceModel", "=", "self", ".", "sourceModel", "(", ")", "if", "not", "sourceModel", ":", "return", "None", "return", "sourceModel", ".", "item", "(", "self", ".", "mapToSource", "(", "index", ")", ")"]`
---

**func_name:** `FilesystemSortProxy.icon` · **partition:** test · **language:** python
**repo:** `4degrees/riffle` · **path:** `source/riffle/model.py` · **sha:** `e5a0d908df8c93ff1ee7abdda8875fd1667df53d`
**url:** https://github.com/4degrees/riffle/blob/e5a0d908df8c93ff1ee7abdda8875fd1667df53d/source/riffle/model.py#L553-L559

**docstring:** Return icon for index.

**code** (the `original_string` cell is identical):

```python
def icon(self, index):
    '''Return icon for index.'''
    sourceModel = self.sourceModel()
    if not sourceModel:
        return None

    return sourceModel.icon(self.mapToSource(index))
```

**docstring_tokens:** `["Return", "icon", "for", "index", "."]`
**code_tokens:** `["def", "icon", "(", "self", ",", "index", ")", ":", "sourceModel", "=", "self", ".", "sourceModel", "(", ")", "if", "not", "sourceModel", ":", "return", "None", "return", "sourceModel", ".", "icon", "(", "self", ".", "mapToSource", "(", "index", ")", ")"]`
---

**func_name:** `FilesystemSortProxy.hasChildren` · **partition:** test · **language:** python
**repo:** `4degrees/riffle` · **path:** `source/riffle/model.py` · **sha:** `e5a0d908df8c93ff1ee7abdda8875fd1667df53d`
**url:** https://github.com/4degrees/riffle/blob/e5a0d908df8c93ff1ee7abdda8875fd1667df53d/source/riffle/model.py#L561-L568

**docstring:** Return if *index* has children.

**code** (the `original_string` cell is identical):

```python
def hasChildren(self, index):
    '''Return if *index* has children.'''
    sourceModel = self.sourceModel()
    if not sourceModel:
        return False

    return sourceModel.hasChildren(self.mapToSource(index))
```

**docstring_tokens:** `["Return", "if", "*", "index", "*", "has", "children", "."]`
**code_tokens:** `["def", "hasChildren", "(", "self", ",", "index", ")", ":", "sourceModel", "=", "self", ".", "sourceModel", "(", ")", "if", "not", "sourceModel", ":", "return", "False", "return", "sourceModel", ".", "hasChildren", "(", "self", ".", "mapToSource", "(", "index", ")", ")"]`
---

**func_name:** `FilesystemSortProxy.canFetchMore` · **partition:** test · **language:** python
**repo:** `4degrees/riffle` · **path:** `source/riffle/model.py` · **sha:** `e5a0d908df8c93ff1ee7abdda8875fd1667df53d`
**url:** https://github.com/4degrees/riffle/blob/e5a0d908df8c93ff1ee7abdda8875fd1667df53d/source/riffle/model.py#L570-L577

**docstring:** Return if more data available for *index*.

**code** (the `original_string` cell is identical):

```python
def canFetchMore(self, index):
    '''Return if more data available for *index*.'''
    sourceModel = self.sourceModel()
    if not sourceModel:
        return False

    return sourceModel.canFetchMore(self.mapToSource(index))
```

**docstring_tokens:** `["Return", "if", "more", "data", "available", "for", "*", "index", "*", "."]`
**code_tokens:** `["def", "canFetchMore", "(", "self", ",", "index", ")", ":", "sourceModel", "=", "self", ".", "sourceModel", "(", ")", "if", "not", "sourceModel", ":", "return", "False", "return", "sourceModel", ".", "canFetchMore", "(", "self", ".", "mapToSource", "(", "index", ")", ")"]`
---

**func_name:** `FilesystemSortProxy.fetchMore` · **partition:** test · **language:** python
**repo:** `4degrees/riffle` · **path:** `source/riffle/model.py` · **sha:** `e5a0d908df8c93ff1ee7abdda8875fd1667df53d`
**url:** https://github.com/4degrees/riffle/blob/e5a0d908df8c93ff1ee7abdda8875fd1667df53d/source/riffle/model.py#L579-L586

**docstring:** Fetch additional data under *index*.

**code** (the `original_string` cell is identical):

```python
def fetchMore(self, index):
    '''Fetch additional data under *index*.'''
    sourceModel = self.sourceModel()
    if not sourceModel:
        return False

    return sourceModel.fetchMore(self.mapToSource(index))
```

**docstring_tokens:** `["Fetch", "additional", "data", "under", "*", "index", "*", "."]`
**code_tokens:** `["def", "fetchMore", "(", "self", ",", "index", ")", ":", "sourceModel", "=", "self", ".", "sourceModel", "(", ")", "if", "not", "sourceModel", ":", "return", "False", "return", "sourceModel", ".", "fetchMore", "(", "self", ".", "mapToSource", "(", "index", ")", ")"]`
---

**func_name:** `IconFactory.icon` · **partition:** test · **language:** python
**repo:** `4degrees/riffle` · **path:** `source/riffle/icon_factory.py` · **sha:** `e5a0d908df8c93ff1ee7abdda8875fd1667df53d`
**url:** https://github.com/4degrees/riffle/blob/e5a0d908df8c93ff1ee7abdda8875fd1667df53d/source/riffle/icon_factory.py#L25-L54

**docstring:**
> Return appropriate icon for *specification*.
> *specification* should be either:
> * An instance of :py:class:`riffle.model.Item`
> * One of the defined icon types (:py:class:`IconType`)

**code** (the `original_string` cell is identical):

```python
def icon(self, specification):
    '''Return appropriate icon for *specification*.

    *specification* should be either:

        * An instance of :py:class:`riffle.model.Item`
        * One of the defined icon types (:py:class:`IconType`)

    '''
    if isinstance(specification, riffle.model.Item):
        specification = self.type(specification)

    icon = None

    if specification == IconType.Computer:
        icon = QtGui.QIcon(':riffle/icon/computer')
    elif specification == IconType.Mount:
        icon = QtGui.QIcon(':riffle/icon/drive')
    elif specification == IconType.Directory:
        icon = QtGui.QIcon(':riffle/icon/folder')
    elif specification == IconType.File:
        icon = QtGui.QIcon(':riffle/icon/file')
    elif specification == IconType.Collection:
        icon = QtGui.QIcon(':riffle/icon/collection')

    return icon
```

**docstring_tokens:** `["Return", "appropriate", "icon", "for", "*", "specification", "*", "."]`
**code_tokens:** `["def", "icon", "(", "self", ",", "specification", ")", ":", "if", "isinstance", "(", "specification", ",", "riffle", ".", "model", ".", "Item", ")", ":", "specification", "=", "self", ".", "type", "(", "specification", ")", "icon", "=", "None", "if", "specification", "==", "IconType", ".", "Computer", ":", "icon", "=", "QtGui", ".", "QIcon", "(", "':riffle/icon/computer'", ")", "elif", "specification", "==", "IconType", ".", "Mount", ":", "icon", "=", "QtGui", ".", "QIcon", "(", "':riffle/icon/drive'", ")", "elif", "specification", "==", "IconType", ".", "Directory", ":", "icon", "=", "QtGui", ".", "QIcon", "(", "':riffle/icon/folder'", ")", "elif", "specification", "==", "IconType", ".", "File", ":", "icon", "=", "QtGui", ".", "QIcon", "(", "':riffle/icon/file'", ")", "elif", "specification", "==", "IconType", ".", "Collection", ":", "icon", "=", "QtGui", ".", "QIcon", "(", "':riffle/icon/collection'", ")", "return", "icon"]`
---

**func_name:** `IconFactory.type` · **partition:** test · **language:** python
**repo:** `4degrees/riffle` · **path:** `source/riffle/icon_factory.py` · **sha:** `e5a0d908df8c93ff1ee7abdda8875fd1667df53d`
**url:** https://github.com/4degrees/riffle/blob/e5a0d908df8c93ff1ee7abdda8875fd1667df53d/source/riffle/icon_factory.py#L56-L75

**docstring:** Return appropriate icon type for *item*.

**code** (the `original_string` cell is identical):

```python
def type(self, item):
    '''Return appropriate icon type for *item*.'''
    iconType = IconType.Unknown

    if isinstance(item, riffle.model.Computer):
        iconType = IconType.Computer

    elif isinstance(item, riffle.model.Mount):
        iconType = IconType.Mount

    elif isinstance(item, riffle.model.Directory):
        iconType = IconType.Directory

    elif isinstance(item, riffle.model.File):
        iconType = IconType.File

    elif isinstance(item, riffle.model.Collection):
        iconType = IconType.Collection

    return iconType
```

**docstring_tokens:** `["Return", "appropriate", "icon", "type", "for", "*", "item", "*", "."]`
**code_tokens:** `["def", "type", "(", "self", ",", "item", ")", ":", "iconType", "=", "IconType", ".", "Unknown", "if", "isinstance", "(", "item", ",", "riffle", ".", "model", ".", "Computer", ")", ":", "iconType", "=", "IconType", ".", "Computer", "elif", "isinstance", "(", "item", ",", "riffle", ".", "model", ".", "Mount", ")", ":", "iconType", "=", "IconType", ".", "Mount", "elif", "isinstance", "(", "item", ",", "riffle", ".", "model", ".", "Directory", ")", ":", "iconType", "=", "IconType", ".", "Directory", "elif", "isinstance", "(", "item", ",", "riffle", ".", "model", ".", "File", ")", ":", "iconType", "=", "IconType", ".", "File", "elif", "isinstance", "(", "item", ",", "riffle", ".", "model", ".", "Collection", ")", ":", "iconType", "=", "IconType", ".", "Collection", "return", "iconType"]`
---

**func_name:** `call` · **partition:** test · **language:** python
**repo:** `BlueDragonX/detach` · **path:** `detach.py` · **sha:** `e2e5a1076e19f508baf3ffb2b586a75934fbae28`
**url:** https://github.com/BlueDragonX/detach/blob/e2e5a1076e19f508baf3ffb2b586a75934fbae28/detach.py#L107-L135

**docstring:**
> Run an external command in a separate process and detach it from the current process. Excepting
> `stdout`, `stderr`, and `stdin` all file descriptors are closed after forking. If `daemonize`
> is True then the parent process exits. All stdio is redirected to `os.devnull` unless
> specified. The `preexec_fn`, `shell`, `cwd`, and `env` parameters are the same as their `Popen`
> counterparts. Return the PID of the child process if not daemonized.

**code** (the `original_string` cell is identical):

```python
def call(args, stdout=None, stderr=None, stdin=None, daemonize=False,
         preexec_fn=None, shell=False, cwd=None, env=None):
    """
    Run an external command in a separate process and detach it from the current process. Excepting
    `stdout`, `stderr`, and `stdin` all file descriptors are closed after forking. If `daemonize`
    is True then the parent process exits. All stdio is redirected to `os.devnull` unless
    specified. The `preexec_fn`, `shell`, `cwd`, and `env` parameters are the same as their `Popen`
    counterparts. Return the PID of the child process if not daemonized.
    """
    stream = lambda s, m: s is None and os.open(os.devnull, m) or s
    stdout = stream(stdout, os.O_WRONLY)
    stderr = stream(stderr, os.O_WRONLY)
    stdin = stream(stdin, os.O_RDONLY)

    shared_pid = Value('i', 0)
    pid = os.fork()
    if pid > 0:
        os.waitpid(pid, 0)
        child_pid = shared_pid.value
        del shared_pid
        if daemonize:
            sys.exit(0)
        return child_pid
    else:
        os.setsid()
        proc = subprocess.Popen(args, stdout=stdout, stderr=stderr, stdin=stdin, close_fds=True,
                                preexec_fn=preexec_fn, shell=shell, cwd=cwd, env=env)
        shared_pid.value = proc.pid
        os._exit(0)
```

**docstring_tokens:** `["Run", "an", "external", "command", "in", "a", "separate", "process", "and", "detach", "it", "from", "the", "current", "process", ".", "Excepting", "stdout", "stderr", "and", "stdin", "all", "file", "descriptors", "are", "closed", "after", "forking", ".", "If", "daemonize", "is", "True", "then", "the", "parent", "process", "exits", ".", "All", "stdio", "is", "redirected", "to", "os", ".", "devnull", "unless", "specified", ".", "The", "preexec_fn", "shell", "cwd", "and", "env", "parameters", "are", "the", "same", "as", "their", "Popen", "counterparts", ".", "Return", "the", "PID", "of", "the", "child", "process", "if", "not", "daemonized", "."]`
**code_tokens:** `["def", "call", "(", "args", ",", "stdout", "=", "None", ",", "stderr", "=", "None", ",", "stdin", "=", "None", ",", "daemonize", "=", "False", ",", "preexec_fn", "=", "None", ",", "shell", "=", "False", ",", "cwd", "=", "None", ",", "env", "=", "None", ")", ":", "stream", "=", "lambda", "s", ",", "m", ":", "s", "is", "None", "and", "os", ".", "open", "(", "os", ".", "devnull", ",", "m", ")", "or", "s", "stdout", "=", "stream", "(", "stdout", ",", "os", ".", "O_WRONLY", ")", "stderr", "=", "stream", "(", "stderr", ",", "os", ".", "O_WRONLY", ")", "stdin", "=", "stream", "(", "stdin", ",", "os", ".", "O_RDONLY", ")", "shared_pid", "=", "Value", "(", "'i'", ",", "0", ")", "pid", "=", "os", ".", "fork", "(", ")", "if", "pid", ">", "0", ":", "os", ".", "waitpid", "(", "pid", ",", "0", ")", "child_pid", "=", "shared_pid", ".", "value", "del", "shared_pid", "if", "daemonize", ":", "sys", ".", "exit", "(", "0", ")", "return", "child_pid", "else", ":", "os", ".", "setsid", "(", ")", "proc", "=", "subprocess", ".", "Popen", "(", "args", ",", "stdout", "=", "stdout", ",", "stderr", "=", "stderr", ",", "stdin", "=", "stdin", ",", "close_fds", "=", "True", ",", "preexec_fn", "=", "preexec_fn", ",", "shell", "=", "shell", ",", "cwd", "=", "cwd", ",", "env", "=", "env", ")", "shared_pid", ".", "value", "=", "proc", ".", "pid", "os", ".", "_exit", "(", "0", ")"]`
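
The `call` record above is self-describing enough to sketch a usage. The example below assumes the `detach` module from the BlueDragonX/detach repo is importable and simply exercises the signature shown in the record:

```python
from detach import call  # assumes the module from the record above

# Launch a command fully detached (new session, stray fds closed) while
# keeping the current process alive; with daemonize=False, `call` returns
# the PID of the detached child.
pid = call(["sleep", "60"])
print("detached child pid:", pid)
```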
---

**func_name:** `Detach._get_max_fd` · **partition:** test · **language:** python
**repo:** `BlueDragonX/detach` · **path:** `detach.py` · **sha:** `e2e5a1076e19f508baf3ffb2b586a75934fbae28`
**url:** https://github.com/BlueDragonX/detach/blob/e2e5a1076e19f508baf3ffb2b586a75934fbae28/detach.py#L42-L48

**docstring:** Return the maximum file descriptor value.

**code** (the `original_string` cell is identical):

```python
def _get_max_fd(self):
    """Return the maximum file descriptor value."""
    limits = resource.getrlimit(resource.RLIMIT_NOFILE)
    result = limits[1]
    if result == resource.RLIM_INFINITY:
        result = maxfd
    return result
```

**docstring_tokens:** `["Return", "the", "maximum", "file", "descriptor", "value", "."]`
**code_tokens:** `["def", "_get_max_fd", "(", "self", ")", ":", "limits", "=", "resource", ".", "getrlimit", "(", "resource", ".", "RLIMIT_NOFILE", ")", "result", "=", "limits", "[", "1", "]", "if", "result", "==", "resource", ".", "RLIM_INFINITY", ":", "result", "=", "maxfd", "return", "result"]`
---

**func_name:** `Detach._close_fd` · **partition:** test · **language:** python
**repo:** `BlueDragonX/detach` · **path:** `detach.py` · **sha:** `e2e5a1076e19f508baf3ffb2b586a75934fbae28`
**url:** https://github.com/BlueDragonX/detach/blob/e2e5a1076e19f508baf3ffb2b586a75934fbae28/detach.py#L50-L57

**docstring:** Close a file descriptor if it is open.

**code** (the `original_string` cell is identical; note the Python 2 `except` syntax in the source):

```python
def _close_fd(self, fd):
    """Close a file descriptor if it is open."""
    try:
        os.close(fd)
    except OSError, exc:
        if exc.errno != errno.EBADF:
            msg = "Failed to close file descriptor {}: {}".format(fd, exc)
            raise Error(msg)
```

**docstring_tokens:** `["Close", "a", "file", "descriptor", "if", "it", "is", "open", "."]`
**code_tokens:** `["def", "_close_fd", "(", "self", ",", "fd", ")", ":", "try", ":", "os", ".", "close", "(", "fd", ")", "except", "OSError", ",", "exc", ":", "if", "exc", ".", "errno", "!=", "errno", ".", "EBADF", ":", "msg", "=", "\"Failed to close file descriptor {}: {}\"", ".", "format", "(", "fd", ",", "exc", ")", "raise", "Error", "(", "msg", ")"]`
---

**func_name:** `Detach._close_open_fds` · **partition:** test · **language:** python
**repo:** `BlueDragonX/detach` · **path:** `detach.py` · **sha:** `e2e5a1076e19f508baf3ffb2b586a75934fbae28`
**url:** https://github.com/BlueDragonX/detach/blob/e2e5a1076e19f508baf3ffb2b586a75934fbae28/detach.py#L59-L64

**docstring:** Close open file descriptors.

**code** (the `original_string` cell is identical):

```python
def _close_open_fds(self):
    """Close open file descriptors."""
    maxfd = self._get_max_fd()
    for fd in reversed(range(maxfd)):
        if fd not in self.exclude_fds:
            self._close_fd(fd)
```

**docstring_tokens:** `["Close", "open", "file", "descriptors", "."]`
**code_tokens:** `["def", "_close_open_fds", "(", "self", ")", ":", "maxfd", "=", "self", ".", "_get_max_fd", "(", ")", "for", "fd", "in", "reversed", "(", "range", "(", "maxfd", ")", ")", ":", "if", "fd", "not", "in", "self", ".", "exclude_fds", ":", "self", ".", "_close_fd", "(", "fd", ")"]`
---

**func_name:** `Detach._redirect` · **partition:** test · **language:** python
**repo:** `BlueDragonX/detach` · **path:** `detach.py` · **sha:** `e2e5a1076e19f508baf3ffb2b586a75934fbae28`
**url:** https://github.com/BlueDragonX/detach/blob/e2e5a1076e19f508baf3ffb2b586a75934fbae28/detach.py#L66-L72

**docstring:** Redirect a system stream to the provided target.

**code** (the `original_string` cell is identical):

```python
def _redirect(self, stream, target):
    """Redirect a system stream to the provided target."""
    if target is None:
        target_fd = os.open(os.devnull, os.O_RDWR)
    else:
        target_fd = target.fileno()
    os.dup2(target_fd, stream.fileno())
```

**docstring_tokens:** `["Redirect", "a", "system", "stream", "to", "the", "provided", "target", "."]`
**code_tokens:** `["def", "_redirect", "(", "self", ",", "stream", ",", "target", ")", ":", "if", "target", "is", "None", ":", "target_fd", "=", "os", ".", "open", "(", "os", ".", "devnull", ",", "os", ".", "O_RDWR", ")", "else", ":", "target_fd", "=", "target", ".", "fileno", "(", ")", "os", ".", "dup2", "(", "target_fd", ",", "stream", ".", "fileno", "(", ")", ")"]`
---

**func_name:** `set_form_widgets_attrs` · **partition:** test · **language:** python
**repo:** `idlesign/django-etc` · **path:** `etc/toolbox.py` · **sha:** `dbfc7e9dfc4fdfe69547f71ba4921989f9e97dbe`
**url:** https://github.com/idlesign/django-etc/blob/dbfc7e9dfc4fdfe69547f71ba4921989f9e97dbe/etc/toolbox.py#L74-L87

**docstring:**
> Applies a given HTML attributes to each field widget of a given form.
> Example:
> set_form_widgets_attrs(my_form, {'class': 'clickable'})

**code** (the `original_string` cell is identical):

```python
def set_form_widgets_attrs(form, attrs):
    """Applies a given HTML attributes to each field widget of a given form.

    Example:

        set_form_widgets_attrs(my_form, {'class': 'clickable'})

    """
    for _, field in form.fields.items():
        attrs_ = dict(attrs)

        for name, val in attrs.items():
            if hasattr(val, '__call__'):
                attrs_[name] = val(field)

        field.widget.attrs = field.widget.build_attrs(attrs_)
```

**docstring_tokens:** `["Applies", "a", "given", "HTML", "attributes", "to", "each", "field", "widget", "of", "a", "given", "form", "."]`
**code_tokens:** `["def", "set_form_widgets_attrs", "(", "form", ",", "attrs", ")", ":", "for", "_", ",", "field", "in", "form", ".", "fields", ".", "items", "(", ")", ":", "attrs_", "=", "dict", "(", "attrs", ")", "for", "name", ",", "val", "in", "attrs", ".", "items", "(", ")", ":", "if", "hasattr", "(", "val", ",", "'__call__'", ")", ":", "attrs_", "[", "name", "]", "=", "val", "(", "field", ")", "field", ".", "widget", ".", "attrs", "=", "field", ".", "widget", ".", "build_attrs", "(", "attrs_", ")"]`
---

**func_name:** `get_model_class_from_string` · **partition:** test · **language:** python
**repo:** `idlesign/django-etc` · **path:** `etc/toolbox.py` · **sha:** `dbfc7e9dfc4fdfe69547f71ba4921989f9e97dbe`
**url:** https://github.com/idlesign/django-etc/blob/dbfc7e9dfc4fdfe69547f71ba4921989f9e97dbe/etc/toolbox.py#L90-L114

**docstring:**
> Returns a certain model as defined in a string formatted `<app_name>.<model_name>`.
> Example:
> model = get_model_class_from_string('myapp.MyModel')

**code** (the `original_string` cell is identical):

```python
def get_model_class_from_string(model_path):
    """Returns a certain model as defined in a string formatted `<app_name>.<model_name>`.

    Example:

        model = get_model_class_from_string('myapp.MyModel')

    """
    try:
        app_name, model_name = model_path.split('.')
    except ValueError:
        raise ImproperlyConfigured('`%s` must have the following format: `app_name.model_name`.' % model_path)

    if apps_get_model is None:
        model = get_model(app_name, model_name)
    else:
        try:
            model = apps_get_model(app_name, model_name)
        except (LookupError, ValueError):
            model = None

    if model is None:
        raise ImproperlyConfigured('`%s` refers to a model `%s` that has not been installed.' % (model_path, model_name))

    return model
```

**docstring_tokens:** `["Returns", "a", "certain", "model", "as", "defined", "in", "a", "string", "formatted", "<app_name", ">", ".", "<model_name", ">", "."]`
**code_tokens:** `["def", "get_model_class_from_string", "(", "model_path", ")", ":", "try", ":", "app_name", ",", "model_name", "=", "model_path", ".", "split", "(", "'.'", ")", "except", "ValueError", ":", "raise", "ImproperlyConfigured", "(", "'`%s` must have the following format: `app_name.model_name`.'", "%", "model_path", ")", "if", "apps_get_model", "is", "None", ":", "model", "=", "get_model", "(", "app_name", ",", "model_name", ")", "else", ":", "try", ":", "model", "=", "apps_get_model", "(", "app_name", ",", "model_name", ")", "except", "(", "LookupError", ",", "ValueError", ")", ":", "model", "=", "None", "if", "model", "is", "None", ":", "raise", "ImproperlyConfigured", "(", "'`%s` refers to a model `%s` that has not been installed.'", "%", "(", "model_path", ",", "model_name", ")", ")", "return", "model"]`
---

**func_name:** `get_site_url` · **partition:** test · **language:** python
**repo:** `idlesign/django-etc` · **path:** `etc/toolbox.py` · **sha:** `dbfc7e9dfc4fdfe69547f71ba4921989f9e97dbe`
**url:** https://github.com/idlesign/django-etc/blob/dbfc7e9dfc4fdfe69547f71ba4921989f9e97dbe/etc/toolbox.py#L139-L187

**docstring:**
> Tries to get a site URL from environment and settings
> in the following order:
> 1. (SITE_PROTO / SITE_SCHEME) + SITE_DOMAIN
> 2. SITE_URL
> 3. Django Sites contrib
> 4. Request object
> :param HttpRequest request: Request object to deduce URL from.
> :rtype: str

**code** (the `original_string` cell is identical):

```python
def get_site_url(request=None):
    """Tries to get a site URL from environment and settings
    in the following order:

    1. (SITE_PROTO / SITE_SCHEME) + SITE_DOMAIN
    2. SITE_URL
    3. Django Sites contrib
    4. Request object

    :param HttpRequest request: Request object to deduce URL from.
    :rtype: str
    """
    env = partial(environ.get)
    settings_ = partial(getattr, settings)

    domain = None
    scheme = None
    url = None

    for src in (env, settings_):
        if url is None:
            url = src('SITE_URL', None)
        if domain is None:
            domain = src('SITE_DOMAIN', None)
        if scheme is None:
            scheme = src('SITE_PROTO', src('SITE_SCHEME', None))

    if domain is None and url is not None:
        scheme, domain = url.split('://')[:2]

    if domain is None:
        site = get_current_site(request or DomainGetter(domain))
        domain = site.domain

    if scheme is None and request:
        scheme = request.scheme

    if domain is None:
        domain = 'undefined-domain.local'

    if scheme is None:
        scheme = 'http'

    domain = domain.rstrip('/')

    return '%s://%s' % (scheme, domain)
```

**docstring_tokens:** `["Tries", "to", "get", "a", "site", "URL", "from", "environment", "and", "settings", "in", "the", "following", "order", ":"]`
**code_tokens:** `["def", "get_site_url", "(", "request", "=", "None", ")", ":", "env", "=", "partial", "(", "environ", ".", "get", ")", "settings_", "=", "partial", "(", "getattr", ",", "settings", ")", "domain", "=", "None", "scheme", "=", "None", "url", "=", "None", "for", "src", "in", "(", "env", ",", "settings_", ")", ":", "if", "url", "is", "None", ":", "url", "=", "src", "(", "'SITE_URL'", ",", "None", ")", "if", "domain", "is", "None", ":", "domain", "=", "src", "(", "'SITE_DOMAIN'", ",", "None", ")", "if", "scheme", "is", "None", ":", "scheme", "=", "src", "(", "'SITE_PROTO'", ",", "src", "(", "'SITE_SCHEME'", ",", "None", ")", ")", "if", "domain", "is", "None", "and", "url", "is", "not", "None", ":", "scheme", ",", "domain", "=", "url", ".", "split", "(", "'://'", ")", "[", ":", "2", "]", "if", "domain", "is", "None", ":", "site", "=", "get_current_site", "(", "request", "or", "DomainGetter", "(", "domain", ")", ")", "domain", "=", "site", ".", "domain", "if", "scheme", "is", "None", "and", "request", ":", "scheme", "=", "request", ".", "scheme", "if", "domain", "is", "None", ":", "domain", "=", "'undefined-domain.local'", "if", "scheme", "is", "None", ":", "scheme", "=", "'http'", "domain", "=", "domain", ".", "rstrip", "(", "'/'", ")", "return", "'%s://%s'", "%", "(", "scheme", ",", "domain", ")"]`
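
A short illustration of the resolution order documented in the `get_site_url` record above. The environment values are hypothetical, and the sketch assumes django-etc and Django are installed:

```python
import os
import django.conf

django.conf.settings.configure()         # minimal Django setup for the demo
os.environ["SITE_PROTO"] = "https"       # hypothetical values; env is checked first
os.environ["SITE_DOMAIN"] = "example.com"

from etc.toolbox import get_site_url     # module path as given in the record

print(get_site_url())                    # -> 'https://example.com'
```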
---

**func_name:** `import_app_module` · **partition:** test · **language:** python
**repo:** `idlesign/django-etc` · **path:** `etc/toolbox.py` · **sha:** `dbfc7e9dfc4fdfe69547f71ba4921989f9e97dbe`
**url:** https://github.com/idlesign/django-etc/blob/dbfc7e9dfc4fdfe69547f71ba4921989f9e97dbe/etc/toolbox.py#L190-L214

**docstring:**
> Returns a module from a given app by its name.
> :param str app_name:
> :param str module_name:
> :rtype: module or None

**code** (the `original_string` cell is identical):

```python
def import_app_module(app_name, module_name):
    """Returns a module from a given app by its name.

    :param str app_name:
    :param str module_name:
    :rtype: module or None
    """
    name_split = app_name.split('.')
    if name_split[-1][0].isupper():  # Seems that we have app config class path here.
        app_name = '.'.join(name_split[:-2])

    module = import_module(app_name)
    try:
        sub_module = import_module('%s.%s' % (app_name, module_name))
        return sub_module
    except:
        # The same bubbling strategy as in autodiscover_modules().
        if module_has_submodule(module, module_name):  # Module is in a package.
            raise
        return None
```

**docstring_tokens:** `["Returns", "a", "module", "from", "a", "given", "app", "by", "its", "name", "."]`
**code_tokens:** `["def", "import_app_module", "(", "app_name", ",", "module_name", ")", ":", "name_split", "=", "app_name", ".", "split", "(", "'.'", ")", "if", "name_split", "[", "-", "1", "]", "[", "0", "]", ".", "isupper", "(", ")", ":", "# Seems that we have app config class path here.", "app_name", "=", "'.'", ".", "join", "(", "name_split", "[", ":", "-", "2", "]", ")", "module", "=", "import_module", "(", "app_name", ")", "try", ":", "sub_module", "=", "import_module", "(", "'%s.%s'", "%", "(", "app_name", ",", "module_name", ")", ")", "return", "sub_module", "except", ":", "# The same bubbling strategy as in autodiscover_modules().", "if", "module_has_submodule", "(", "module", ",", "module_name", ")", ":", "# Module is in a package.", "raise", "return", "None"]`
---

**func_name:** `import_project_modules` · **partition:** test · **language:** python
**repo:** `idlesign/django-etc` · **path:** `etc/toolbox.py` · **sha:** `dbfc7e9dfc4fdfe69547f71ba4921989f9e97dbe`
**url:** https://github.com/idlesign/django-etc/blob/dbfc7e9dfc4fdfe69547f71ba4921989f9e97dbe/etc/toolbox.py#L217-L233

**docstring:**
> Imports modules from registered apps using given module name
> and returns them as a list.
> :param str module_name:
> :rtype: list

**code** (the `original_string` cell is identical):

```python
def import_project_modules(module_name):
    """Imports modules from registered apps using given module name
    and returns them as a list.

    :param str module_name:
    :rtype: list
    """
    from django.conf import settings

    submodules = []
    for app in settings.INSTALLED_APPS:
        module = import_app_module(app, module_name)
        if module is not None:
            submodules.append(module)

    return submodules
```

**docstring_tokens:** `["Imports", "modules", "from", "registered", "apps", "using", "given", "module", "name", "and", "returns", "them", "as", "a", "list", "."]`
**code_tokens:** `["def", "import_project_modules", "(", "module_name", ")", ":", "from", "django", ".", "conf", "import", "settings", "submodules", "=", "[", "]", "for", "app", "in", "settings", ".", "INSTALLED_APPS", ":", "module", "=", "import_app_module", "(", "app", ",", "module_name", ")", "if", "module", "is", "not", "None", ":", "submodules", ".", "append", "(", "module", ")", "return", "submodules"]`
---

**func_name:** `include_` · **partition:** test · **language:** python
**repo:** `idlesign/django-etc` · **path:** `etc/templatetags/etc_misc.py` · **sha:** `dbfc7e9dfc4fdfe69547f71ba4921989f9e97dbe`
**url:** https://github.com/idlesign/django-etc/blob/dbfc7e9dfc4fdfe69547f71ba4921989f9e97dbe/etc/templatetags/etc_misc.py#L105-L160

**docstring:**
> Similar to built-in ``include`` template tag, but allowing
> template variables to be used in template name and a fallback template,
> thus making the tag more dynamic.
> .. warning:: Requires Django 1.8+
> Example:
> {% load etc_misc %}
> {% include_ "sub_{{ postfix_var }}.html" fallback "default.html" %}

**code** (the `original_string` cell is identical):

```python
def include_(parser, token):
    """Similar to built-in ``include`` template tag, but allowing
    template variables to be used in template name and a fallback template,
    thus making the tag more dynamic.

    .. warning:: Requires Django 1.8+

    Example:

        {% load etc_misc %}
        {% include_ "sub_{{ postfix_var }}.html" fallback "default.html" %}

    """
    bits = token.split_contents()

    dynamic = False

    # We fallback to built-in `include` if a template name contains no variables.
    if len(bits) >= 2:
        dynamic = '{{' in bits[1]

    if dynamic:
        fallback = None
        bits_new = []

        for bit in bits:
            if fallback is True:
                # This bit is a `fallback` argument.
                fallback = bit
                continue

            if bit == 'fallback':
                fallback = True
            else:
                bits_new.append(bit)

        if fallback:
            fallback = parser.compile_filter(construct_relative_path_(parser, fallback))

        token.contents = ' '.join(bits_new)

    token.contents = token.contents.replace('include_', 'include')
    include_node = do_include(parser, token)

    if dynamic:
        # swap simple include with dynamic
        include_node = DynamicIncludeNode(
            include_node.template,
            extra_context=include_node.extra_context,
            isolated_context=include_node.isolated_context,
            fallback=fallback or None,
        )

    return include_node
```

**docstring_tokens:** `["Similar", "to", "built", "-", "in", "include", "template", "tag", "but", "allowing", "template", "variables", "to", "be", "used", "in", "template", "name", "and", "a", "fallback", "template", "thus", "making", "the", "tag", "more", "dynamic", "."]`
**code_tokens:** `["def", "include_", "(", "parser", ",", "token", ")", ":", "bits", "=", "token", ".", "split_contents", "(", ")", "dynamic", "=", "False", "# We fallback to built-in `include` if a template name contains no variables.", "if", "len", "(", "bits", ")", ">=", "2", ":", "dynamic", "=", "'{{'", "in", "bits", "[", "1", "]", "if", "dynamic", ":", "fallback", "=", "None", "bits_new", "=", "[", "]", "for", "bit", "in", "bits", ":", "if", "fallback", "is", "True", ":", "# This bit is a `fallback` argument.", "fallback", "=", "bit", "continue", "if", "bit", "==", "'fallback'", ":", "fallback", "=", "True", "else", ":", "bits_new", ".", "append", "(", "bit", ")", "if", "fallback", ":", "fallback", "=", "parser", ".", "compile_filter", "(", "construct_relative_path_", "(", "parser", ",", "fallback", ")", ")", "token", ".", "contents", "=", "' '", ".", "join", "(", "bits_new", ")", "token", ".", "contents", "=", "token", ".", "contents", ".", "replace", "(", "'include_'", ",", "'include'", ")", "include_node", "=", "do_include", "(", "parser", ",", "token", ")", "if", "dynamic", ":", "# swap simple include with dynamic", "include_node", "=", "DynamicIncludeNode", "(", "include_node", ".", "template", ",", "extra_context", "=", "include_node", ".", "extra_context", ",", "isolated_context", "=", "include_node", ".", "isolated_context", ",", "fallback", "=", "fallback", "or", "None", ",", ")", "return", "include_node"]`
---

**func_name:** `RepoCollection.repositories` · **partition:** test · **language:** python
**repo:** `drewsonne/pyum` · **path:** `pyum/repo.py` · **sha:** `5d2955f86575c9430ab7104211b3d67bd4c0febe`
**url:** https://github.com/drewsonne/pyum/blob/5d2955f86575c9430ab7104211b3d67bd4c0febe/pyum/repo.py#L16-L23

**docstring:**
> Return a list of all repository objects in the repofiles in the repo folder specified
> :return:

**code** (the `original_string` cell is identical):

```python
def repositories(self):
    """
    Return a list of all repository objects in the repofiles in the repo folder specified
    :return:
    """
    for repo_path in self.path.glob('*.repo'):
        for id, repository in self._get_repo_file(repo_path).repositories:
            yield id, repository
```

**docstring_tokens:** `["Return", "a", "list", "of", "all", "repository", "objects", "in", "the", "repofiles", "in", "the", "repo", "folder", "specified", ":", "return", ":"]`
**code_tokens:** `["def", "repositories", "(", "self", ")", ":", "for", "repo_path", "in", "self", ".", "path", ".", "glob", "(", "'*.repo'", ")", ":", "for", "id", ",", "repository", "in", "self", ".", "_get_repo_file", "(", "repo_path", ")", ".", "repositories", ":", "yield", "id", ",", "repository"]`
---

**func_name:** `RepoCollection._get_repo_file` · **partition:** test · **language:** python
**repo:** `drewsonne/pyum` · **path:** `pyum/repo.py` · **sha:** `5d2955f86575c9430ab7104211b3d67bd4c0febe`
**url:** https://github.com/drewsonne/pyum/blob/5d2955f86575c9430ab7104211b3d67bd4c0febe/pyum/repo.py#L25-L33

**docstring:**
> Lazy load RepoFile objects on demand.
> :param repo_path:
> :return:

**code** (the `original_string` cell is identical):

```python
def _get_repo_file(self, repo_path):
    """
    Lazy load RepoFile objects on demand.
    :param repo_path:
    :return:
    """
    if repo_path not in self._repo_files:
        self._repo_files[repo_path] = RepoFile(repo_path)
    return self._repo_files[repo_path]
```

**docstring_tokens:** `["Lazy", "load", "RepoFile", "objects", "on", "demand", ".", ":", "param", "repo_path", ":", ":", "return", ":"]`
**code_tokens:** `["def", "_get_repo_file", "(", "self", ",", "repo_path", ")", ":", "if", "repo_path", "not", "in", "self", ".", "_repo_files", ":", "self", ".", "_repo_files", "[", "repo_path", "]", "=", "RepoFile", "(", "repo_path", ")", "return", "self", ".", "_repo_files", "[", "repo_path", "]"]`
---

**func_name:** `Package.from_url` · **partition:** test · **language:** python
**repo:** `drewsonne/pyum` · **path:** `pyum/rpm.py` · **sha:** `5d2955f86575c9430ab7104211b3d67bd4c0febe`
**url:** https://github.com/drewsonne/pyum/blob/5d2955f86575c9430ab7104211b3d67bd4c0febe/pyum/rpm.py#L101-L108

**docstring:**
> Given a URL, return a package
> :param url:
> :return:

**code** (the `original_string` cell is identical):

```python
def from_url(url):
    """
    Given a URL, return a package
    :param url:
    :return:
    """
    package_data = HTTPClient().http_request(url=url, decode=None)
    return Package(raw_data=package_data)
```

**docstring_tokens:** `["Given", "a", "URL", "return", "a", "package", ":", "param", "url", ":", ":", "return", ":"]`
**code_tokens:** `["def", "from_url", "(", "url", ")", ":", "package_data", "=", "HTTPClient", "(", ")", ".", "http_request", "(", "url", "=", "url", ",", "decode", "=", "None", ")", "return", "Package", "(", "raw_data", "=", "package_data", ")"]`
---

**func_name:** `Package.dependencies` · **partition:** test · **language:** python
**repo:** `drewsonne/pyum` · **path:** `pyum/rpm.py` · **sha:** `5d2955f86575c9430ab7104211b3d67bd4c0febe`
**url:** https://github.com/drewsonne/pyum/blob/5d2955f86575c9430ab7104211b3d67bd4c0febe/pyum/rpm.py#L126-L133

**docstring:**
> Read the contents of the rpm itself
> :return:

**code** (the `original_string` cell is identical):

```python
def dependencies(self):
    """
    Read the contents of the rpm itself
    :return:
    """
    cpio = self.rpm.gzip_file.read()
    content = cpio.read()
    return []
```

**docstring_tokens:** `["Read", "the", "contents", "of", "the", "rpm", "itself", ":", "return", ":"]`
**code_tokens:** `["def", "dependencies", "(", "self", ")", ":", "cpio", "=", "self", ".", "rpm", ".", "gzip_file", ".", "read", "(", ")", "content", "=", "cpio", ".", "read", "(", ")", "return", "[", "]"]`
---

**func_name:** `gravatar_get_url` · **partition:** test · **language:** python
**repo:** `idlesign/django-etc` · **path:** `etc/templatetags/gravatar.py` · **sha:** `dbfc7e9dfc4fdfe69547f71ba4921989f9e97dbe`
**url:** https://github.com/idlesign/django-etc/blob/dbfc7e9dfc4fdfe69547f71ba4921989f9e97dbe/etc/templatetags/gravatar.py#L39-L52

**docstring:**
> Returns Gravatar image URL for a given string or UserModel.
> Example:
> {% load gravatar %}
> {% gravatar_get_url user_model %}
> :param UserModel, str obj:
> :param int size:
> :param str default:
> :return:

**code** (the `original_string` cell is identical):

```python
def gravatar_get_url(obj, size=65, default='identicon'):
    """Returns Gravatar image URL for a given string or UserModel.

    Example:

        {% load gravatar %}
        {% gravatar_get_url user_model %}

    :param UserModel, str obj:
    :param int size:
    :param str default:
    :return:
    """
    return get_gravatar_url(obj, size=size, default=default)
```

**docstring_tokens:** `["Returns", "Gravatar", "image", "URL", "for", "a", "given", "string", "or", "UserModel", "."]`
**code_tokens:** `["def", "gravatar_get_url", "(", "obj", ",", "size", "=", "65", ",", "default", "=", "'identicon'", ")", ":", "return", "get_gravatar_url", "(", "obj", ",", "size", "=", "size", ",", "default", "=", "default", ")"]`
test
|
gravatar_get_img
|
Returns Gravatar image HTML tag for a given string or UserModel.
Example:
{% load gravatar %}
{% gravatar_get_img user_model %}
:param UserModel, str obj:
:param int size:
:param str default:
:return:
|
etc/templatetags/gravatar.py
|
def gravatar_get_img(obj, size=65, default='identicon'):
"""Returns Gravatar image HTML tag for a given string or UserModel.
Example:
{% load gravatar %}
{% gravatar_get_img user_model %}
:param UserModel, str obj:
:param int size:
:param str default:
:return:
"""
url = get_gravatar_url(obj, size=size, default=default)
if url:
return safe('<img src="%s" class="gravatar">' % url)
return ''
|
def gravatar_get_img(obj, size=65, default='identicon'):
"""Returns Gravatar image HTML tag for a given string or UserModel.
Example:
{% load gravatar %}
{% gravatar_get_img user_model %}
:param UserModel, str obj:
:param int size:
:param str default:
:return:
"""
url = get_gravatar_url(obj, size=size, default=default)
if url:
return safe('<img src="%s" class="gravatar">' % url)
return ''
|
[
"Returns",
"Gravatar",
"image",
"HTML",
"tag",
"for",
"a",
"given",
"string",
"or",
"UserModel",
"."
] |
idlesign/django-etc
|
python
|
https://github.com/idlesign/django-etc/blob/dbfc7e9dfc4fdfe69547f71ba4921989f9e97dbe/etc/templatetags/gravatar.py#L56-L72
|
[
"def",
"gravatar_get_img",
"(",
"obj",
",",
"size",
"=",
"65",
",",
"default",
"=",
"'identicon'",
")",
":",
"url",
"=",
"get_gravatar_url",
"(",
"obj",
",",
"size",
"=",
"size",
",",
"default",
"=",
"default",
")",
"if",
"url",
":",
"return",
"safe",
"(",
"'<img src=\"%s\" class=\"gravatar\">'",
"%",
"url",
")",
"return",
"''"
] |
dbfc7e9dfc4fdfe69547f71ba4921989f9e97dbe
|
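The HTML variant wraps the same URL helper; a sketch of both outcomes, assuming the same import path:

from etc.templatetags.gravatar import gravatar_get_img

tag = gravatar_get_img('user@example.com', size=48)
# On success: a safe string like <img src="https://..." class="gravatar">
# When get_gravatar_url yields nothing for the input, the function returns ''.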
test
|
Data.parse
|
Parses an xml_path with the inherited xml parser
:param xml_path:
:return:
|
pyum/repometadata/base.py
|
def parse(cls, xml_path):
"""
Parses an xml_path with the inherited xml parser
:param xml_path:
:return:
"""
parser = etree.XMLParser(target=cls.xml_parse())
return etree.parse(xml_path, parser)
|
def parse(cls, xml_path):
"""
Parses an xml_path with the inherited xml parser
:param xml_path:
:return:
"""
parser = etree.XMLParser(target=cls.xml_parse())
return etree.parse(xml_path, parser)
|
[
"Parses",
"an",
"xml_path",
"with",
"the",
"inherited",
"xml",
"parser",
":",
"param",
"xml_path",
":",
":",
"return",
":"
] |
drewsonne/pyum
|
python
|
https://github.com/drewsonne/pyum/blob/5d2955f86575c9430ab7104211b3d67bd4c0febe/pyum/repometadata/base.py#L20-L27
|
[
"def",
"parse",
"(",
"cls",
",",
"xml_path",
")",
":",
"parser",
"=",
"etree",
".",
"XMLParser",
"(",
"target",
"=",
"cls",
".",
"xml_parse",
"(",
")",
")",
"return",
"etree",
".",
"parse",
"(",
"xml_path",
",",
"parser",
")"
] |
5d2955f86575c9430ab7104211b3d67bd4c0febe
|
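A sketch of the target-based parsing Data.parse relies on, assuming lxml.etree; the PrimaryTarget class is hypothetical and stands in for whatever cls.xml_parse() returns:

from lxml import etree

class PrimaryTarget(object):
    """Minimal lxml parser target: records element tag names."""
    def __init__(self):
        self.tags = []
    def start(self, tag, attrib):
        self.tags.append(tag)
    def close(self):
        return self.tags          # etree.parse returns this value when a target is set

parser = etree.XMLParser(target=PrimaryTarget())
tags = etree.parse('repodata/primary.xml', parser)   # path illustrative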
test
|
Data.load
|
Load the repo database from the remote source, and then parse it.
:return:
|
pyum/repometadata/base.py
|
def load(self):
"""
Load the repo database from the remote source, and then parse it.
:return:
"""
data = self.http_request(self.location())
self._parse(data)
return self
|
def load(self):
"""
Load the repo database from the remote source, and then parse it.
:return:
"""
data = self.http_request(self.location())
self._parse(data)
return self
|
[
"Load",
"the",
"repo",
"database",
"from",
"the",
"remote",
"source",
"and",
"then",
"parse",
"it",
".",
":",
"return",
":"
] |
drewsonne/pyum
|
python
|
https://github.com/drewsonne/pyum/blob/5d2955f86575c9430ab7104211b3d67bd4c0febe/pyum/repometadata/base.py#L64-L71
|
[
"def",
"load",
"(",
"self",
")",
":",
"data",
"=",
"self",
".",
"http_request",
"(",
"self",
".",
"location",
"(",
")",
")",
"self",
".",
"_parse",
"(",
"data",
")",
"return",
"self"
] |
5d2955f86575c9430ab7104211b3d67bd4c0febe
|
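Because load() returns self, fetch-and-parse chains into one expression; a sketch with a hypothetical subclass (import path inferred from the file path, constructor arguments elided):

from pyum.repometadata.base import Data

class PrimaryData(Data):              # hypothetical subclass
    def location(self):
        return 'repodata/primary.xml.gz'

metadata = PrimaryData().load()       # http_request(location()) then _parse(), ready to query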
test
|
TaskService.register_task
|
Register a task from a JSON task definition
:param task_def: JSON string defining the gbdx task
|
gbdx_cloud_harness/services/task_service.py
|
def register_task(self, task_def):
'''
Register a task from a JSON task definition
:param task_def: JSON string defining the gbdx task
'''
r = self.session.post(
self.task_url,
data=task_def,
headers={'Content-Type': 'application/json', 'Accept': 'application/json'}
)
task_dict = json.loads(task_def)
if r.status_code == 200:
return r.status_code, 'Task %s registered' % task_dict['name']
else:
return r.status_code, 'Task %s was not registered: %s' % (task_dict['name'], r.text)
|
def register_task(self, task_def):
'''
Register a task from a JSON task definition
:param task_def: JSON string defining the gbdx task
'''
r = self.session.post(
self.task_url,
data=task_def,
headers={'Content-Type': 'application/json', 'Accept': 'application/json'}
)
task_dict = json.loads(task_def)
if r.status_code == 200:
return r.status_code, 'Task %s registered' % task_dict['name']
else:
return r.status_code, 'Task %s was not registered: %s' % (task_dict['name'], r.text)
|
[
"Register",
"a",
"task",
"for",
"a",
"python",
"dict",
":",
"param",
"task_def",
":",
"dict",
"defining",
"gbdx",
"task"
] |
TDG-Platform/cloud-harness
|
python
|
https://github.com/TDG-Platform/cloud-harness/blob/1d8f972f861816b90785a484e9bec5bd4bc2f569/gbdx_cloud_harness/services/task_service.py#L17-L33
|
[
"def",
"register_task",
"(",
"self",
",",
"task_def",
")",
":",
"r",
"=",
"self",
".",
"session",
".",
"post",
"(",
"self",
".",
"task_url",
",",
"data",
"=",
"task_def",
",",
"headers",
"=",
"{",
"'Content-Type'",
":",
"'application/json'",
",",
"'Accept'",
":",
"'application/json'",
"}",
")",
"task_dict",
"=",
"json",
".",
"loads",
"(",
"task_def",
")",
"if",
"r",
".",
"status_code",
"==",
"200",
":",
"return",
"r",
".",
"status_code",
",",
"'Task %s registered'",
"%",
"task_dict",
"[",
"'name'",
"]",
"else",
":",
"return",
"r",
".",
"status_code",
",",
"'Task %s was not registered: %s'",
"%",
"(",
"task_dict",
"[",
"'name'",
"]",
",",
"r",
".",
"text",
")"
] |
1d8f972f861816b90785a484e9bec5bd4bc2f569
|
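A usage sketch; note task_def must already be a JSON string, since the method posts it verbatim and then json.loads() it to recover the task name:

import json

task_def = json.dumps({'name': 'my-task', 'taskType': 'my-task'})   # illustrative fields
status, message = task_service.register_task(task_def)              # task_service: a TaskService instance
if status != 200:
    raise RuntimeError(message)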
test
|
TaskService.delete_task
|
Delete a task from the platform's registry
:param task_name: name of the task to delete
|
gbdx_cloud_harness/services/task_service.py
|
def delete_task(self, task_name):
'''
Delete a task from the platform's registry
:param task_name: name of the task to delete
'''
response = self.session.delete('%s/%s' % (self.task_url, task_name))
if response.status_code == 200:
return response.status_code, 'Task %s deleted' % task_name
elif response.status_code == 400:
return response.status_code, None # Task isn't registered.
else:
return response.status_code, 'Task %s was not deleted: %s' % (task_name, response.text)
|
def delete_task(self, task_name):
'''
Delete a task from the platform's registry
:param task_name: name of the task to delete
'''
response = self.session.delete('%s/%s' % (self.task_url, task_name))
if response.status_code == 200:
return response.status_code, 'Task %s deleted' % task_name
elif response.status_code == 400:
return response.status_code, None # Task isn't registered.
else:
return response.status_code, 'Task %s was not deleted: %s' % (task_name, response.text)
|
[
"Delete",
"a",
"task",
"from",
"the",
"platforms",
"regoistry",
":",
"param",
"task_name",
":",
"name",
"of",
"the",
"task",
"to",
"delete"
] |
TDG-Platform/cloud-harness
|
python
|
https://github.com/TDG-Platform/cloud-harness/blob/1d8f972f861816b90785a484e9bec5bd4bc2f569/gbdx_cloud_harness/services/task_service.py#L35-L47
|
[
"def",
"delete_task",
"(",
"self",
",",
"task_name",
")",
":",
"response",
"=",
"self",
".",
"session",
".",
"delete",
"(",
"'%s/%s'",
"%",
"(",
"self",
".",
"task_url",
",",
"task_name",
")",
")",
"if",
"response",
".",
"status_code",
"==",
"200",
":",
"return",
"response",
".",
"status_code",
",",
"'Task %s deleted'",
"%",
"task_name",
"elif",
"response",
".",
"status_code",
"==",
"400",
":",
"return",
"response",
".",
"status_code",
",",
"None",
"# Task isn't registered.",
"else",
":",
"return",
"response",
".",
"status_code",
",",
"'Task %s was not deleted: %s'",
"%",
"(",
"task_name",
",",
"response",
".",
"text",
")"
] |
1d8f972f861816b90785a484e9bec5bd4bc2f569
|
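Callers branch on the returned pair; a 400 deliberately yields (400, None) because the task was never registered:

status, message = task_service.delete_task('my-task')   # task_service: a TaskService instance
if status == 200:
    print(message)        # 'Task my-task deleted'
elif status == 400:
    pass                  # not registered; nothing to clean up
else:
    raise RuntimeError(message)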
test
|
GbdxTaskInterface.get_input_string_port
|
Get input string port value
:param port_name:
:param default:
:return: :rtype:
|
gbdx_task_template/gbdx_task_interface.py
|
def get_input_string_port(self, port_name, default=None):
"""
Get input string port value
:param port_name:
:param default:
:return: :rtype:
"""
if self.__string_input_ports:
return self.__string_input_ports.get(port_name, default)
return default
|
def get_input_string_port(self, port_name, default=None):
"""
Get input string port value
:param port_name:
:param default:
:return: :rtype:
"""
if self.__string_input_ports:
return self.__string_input_ports.get(port_name, default)
return default
|
[
"Get",
"input",
"string",
"port",
"value",
":",
"param",
"port_name",
":",
":",
"param",
"default",
":",
":",
"return",
":",
":",
"rtype",
":"
] |
TDG-Platform/cloud-harness
|
python
|
https://github.com/TDG-Platform/cloud-harness/blob/1d8f972f861816b90785a484e9bec5bd4bc2f569/gbdx_task_template/gbdx_task_interface.py#L46-L55
|
[
"def",
"get_input_string_port",
"(",
"self",
",",
"port_name",
",",
"default",
"=",
"None",
")",
":",
"if",
"self",
".",
"__string_input_ports",
":",
"return",
"self",
".",
"__string_input_ports",
".",
"get",
"(",
"port_name",
",",
"default",
")",
"return",
"default"
] |
1d8f972f861816b90785a484e9bec5bd4bc2f569
|
test
|
GbdxTaskInterface.set_output_string_port
|
Set output string port value
:param port_name:
:param value:
:return: :rtype:
|
gbdx_task_template/gbdx_task_interface.py
|
def set_output_string_port(self, port_name, value):
"""
Set output string port value
:param port_name:
:param value:
:return: :rtype:
"""
if not self.__string_output_ports:
self.__string_output_ports = {}
self.__string_output_ports[port_name] = value
|
def set_output_string_port(self, port_name, value):
"""
Set output string port value
:param port_name:
:param value:
:return: :rtype:
"""
if not self.__string_output_ports:
self.__string_output_ports = {}
self.__string_output_ports[port_name] = value
|
[
"Set",
"output",
"string",
"port",
"value",
":",
"param",
"port_name",
":",
":",
"param",
"value",
":",
":",
"return",
":",
":",
"rtype",
":"
] |
TDG-Platform/cloud-harness
|
python
|
https://github.com/TDG-Platform/cloud-harness/blob/1d8f972f861816b90785a484e9bec5bd4bc2f569/gbdx_task_template/gbdx_task_interface.py#L73-L83
|
[
"def",
"set_output_string_port",
"(",
"self",
",",
"port_name",
",",
"value",
")",
":",
"if",
"not",
"self",
".",
"__string_output_ports",
":",
"self",
".",
"__string_output_ports",
"=",
"{",
"}",
"self",
".",
"__string_output_ports",
"[",
"port_name",
"]",
"=",
"value"
] |
1d8f972f861816b90785a484e9bec5bd4bc2f569
|
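The getter and setter above are symmetric halves of the string-port API; a sketch of their use inside a task run, where iface stands for a hypothetical GbdxTaskInterface subclass instance:

threshold = iface.get_input_string_port('threshold', default='0.5')   # '0.5' if the port is unset
iface.set_output_string_port('result', 'ok')
# Values set here are what finalize() later serialises into ports.json.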
test
|
GbdxTaskInterface.finalize
|
:param success_or_fail: string that is 'success' or 'fail'
:param message:
|
gbdx_task_template/gbdx_task_interface.py
|
def finalize(self, success_or_fail, message=''):
"""
:param success_or_fail: string that is 'success' or 'fail'
:param message:
"""
self.logit.debug('String OutputPorts: %s' % self.__string_output_ports)
if self.__string_output_ports:
with open(os.path.join(self.output_path, 'ports.json'), 'w') as opf:
json.dump(self.__string_output_ports, opf, indent=4)
self.logit.debug('Ports.json written to %s' % os.path.join(self.output_path, 'ports.json'))
with open(os.path.join(self.base_path, 'status.json'), 'w') as sf:
json.dump({'status': success_or_fail, 'reason': message}, sf, indent=4)
|
def finalize(self, success_or_fail, message=''):
"""
:param success_or_fail: string that is 'success' or 'fail'
:param message:
"""
self.logit.debug('String OutputPorts: %s' % self.__string_output_ports)
if self.__string_output_ports:
with open(os.path.join(self.output_path, 'ports.json'), 'w') as opf:
json.dump(self.__string_output_ports, opf, indent=4)
self.logit.debug('Ports.json written to %s' % os.path.join(self.output_path, 'ports.json'))
with open(os.path.join(self.base_path, 'status.json'), 'w') as sf:
json.dump({'status': success_or_fail, 'reason': message}, sf, indent=4)
|
[
":",
"param",
"success_or_fail",
":",
"string",
"that",
"is",
"success",
"or",
"fail",
":",
"param",
"message",
":"
] |
TDG-Platform/cloud-harness
|
python
|
https://github.com/TDG-Platform/cloud-harness/blob/1d8f972f861816b90785a484e9bec5bd4bc2f569/gbdx_task_template/gbdx_task_interface.py#L93-L105
|
[
"def",
"finalize",
"(",
"self",
",",
"success_or_fail",
",",
"message",
"=",
"''",
")",
":",
"self",
".",
"logit",
".",
"debug",
"(",
"'String OutputPorts: %s'",
"%",
"self",
".",
"__string_output_ports",
")",
"if",
"self",
".",
"__string_output_ports",
":",
"with",
"open",
"(",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"output_path",
",",
"'ports.json'",
")",
",",
"'w'",
")",
"as",
"opf",
":",
"json",
".",
"dump",
"(",
"self",
".",
"__string_output_ports",
",",
"opf",
",",
"indent",
"=",
"4",
")",
"self",
".",
"logit",
".",
"debug",
"(",
"'Ports.json written to %s'",
"%",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"output_path",
",",
"'ports.json'",
")",
")",
"with",
"open",
"(",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"base_path",
",",
"'status.json'",
")",
",",
"'w'",
")",
"as",
"sf",
":",
"json",
".",
"dump",
"(",
"{",
"'status'",
":",
"success_or_fail",
",",
"'reason'",
":",
"message",
"}",
",",
"sf",
",",
"indent",
"=",
"4",
")"
] |
1d8f972f861816b90785a484e9bec5bd4bc2f569
|
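A sketch of the side effects for a successful run with one string output set (iface as above):

iface.finalize('success')
# Writes <base_path>/status.json  -> {"status": "success", "reason": ""}
# and <output_path>/ports.json    -> {"result": "ok"}   (only when string outputs exist)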
test
|
Port.list_files
|
List the ports contents by file type or all.
:param extensions: string extensions, single string or list of extensions.
:return: A list of full path names of each file.
|
gbdx_task_template/port.py
|
def list_files(self, extensions=None):
"""
List the ports contents by file type or all.
:param extensions: string extensions, single string or list of extensions.
:return: A list of full path names of each file.
"""
if self.type.lower() != 'directory':
raise ValueError("Port type is not == directory")
filesystem_location = self.path
for root, dirs, files in os.walk(filesystem_location):
if extensions is None:
return [os.path.join(root, f) for f in files]
elif not isinstance(extensions, list):
extensions = [extensions]
subset_files = []
for f in files:
for extension in extensions:
if f.lower().endswith(extension.lower()):
subset_files.append(os.path.join(root, f))
break
return subset_files
|
def list_files(self, extensions=None):
"""
List the ports contents by file type or all.
:param extensions: string extensions, single string or list of extensions.
:return: A list of full path names of each file.
"""
if self.type.lower() != 'directory':
raise ValueError("Port type is not == directory")
filesystem_location = self.path
for root, dirs, files in os.walk(filesystem_location):
if extensions is None:
return [os.path.join(root, f) for f in files]
elif not isinstance(extensions, list):
extensions = [extensions]
subset_files = []
for f in files:
for extension in extensions:
if f.lower().endswith(extension.lower()):
subset_files.append(os.path.join(root, f))
break
return subset_files
|
[
"List",
"the",
"ports",
"contents",
"by",
"file",
"type",
"or",
"all",
".",
":",
"param",
"extensions",
":",
"string",
"extensions",
"single",
"string",
"or",
"list",
"of",
"extensions",
".",
":",
"return",
":",
"A",
"list",
"of",
"full",
"path",
"names",
"of",
"each",
"file",
"."
] |
TDG-Platform/cloud-harness
|
python
|
https://github.com/TDG-Platform/cloud-harness/blob/1d8f972f861816b90785a484e9bec5bd4bc2f569/gbdx_task_template/port.py#L159-L183
|
[
"def",
"list_files",
"(",
"self",
",",
"extensions",
"=",
"None",
")",
":",
"if",
"self",
".",
"type",
".",
"lower",
"(",
")",
"!=",
"'directory'",
":",
"raise",
"ValueError",
"(",
"\"Port type is not == directory\"",
")",
"filesystem_location",
"=",
"self",
".",
"path",
"for",
"root",
",",
"dirs",
",",
"files",
"in",
"os",
".",
"walk",
"(",
"filesystem_location",
")",
":",
"if",
"extensions",
"is",
"None",
":",
"return",
"[",
"os",
".",
"path",
".",
"join",
"(",
"root",
",",
"f",
")",
"for",
"f",
"in",
"files",
"]",
"elif",
"not",
"isinstance",
"(",
"extensions",
",",
"list",
")",
":",
"extensions",
"=",
"[",
"extensions",
"]",
"subset_files",
"=",
"[",
"]",
"for",
"f",
"in",
"files",
":",
"for",
"extension",
"in",
"extensions",
":",
"if",
"f",
".",
"lower",
"(",
")",
".",
"endswith",
"(",
"extension",
".",
"lower",
"(",
")",
")",
":",
"subset_files",
".",
"append",
"(",
"os",
".",
"path",
".",
"join",
"(",
"root",
",",
"f",
")",
")",
"break",
"return",
"subset_files"
] |
1d8f972f861816b90785a484e9bec5bd4bc2f569
|
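Usage sketch; note the method returns inside the first os.walk iteration, so only the top level of the port directory is listed, and extension matching is case-insensitive:

tiffs = port.list_files(extensions='.tif')    # port: a directory-type Port instance
everything = port.list_files()                # every file directly under the port path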
test
|
Port.is_valid_filesys
|
Checks that the path is valid and exists: it must be absolute, a directory, and not a file.
|
gbdx_task_template/port.py
|
def is_valid_filesys(path):
"""Checks if the path is correct and exists, must be abs-> a dir -> and not a file."""
if os.path.isabs(path) and os.path.isdir(path) and \
not os.path.isfile(path):
return True
else:
raise LocalPortValidationError(
'Port value %s is not a valid filesystem location' % path
)
|
def is_valid_filesys(path):
"""Checks if the path is correct and exists, must be abs-> a dir -> and not a file."""
if os.path.isabs(path) and os.path.isdir(path) and \
not os.path.isfile(path):
return True
else:
raise LocalPortValidationError(
'Port value %s is not a valid filesystem location' % path
)
|
[
"Checks",
"if",
"the",
"path",
"is",
"correct",
"and",
"exists",
"must",
"be",
"abs",
"-",
">",
"a",
"dir",
"-",
">",
"and",
"not",
"a",
"file",
"."
] |
TDG-Platform/cloud-harness
|
python
|
https://github.com/TDG-Platform/cloud-harness/blob/1d8f972f861816b90785a484e9bec5bd4bc2f569/gbdx_task_template/port.py#L214-L222
|
[
"def",
"is_valid_filesys",
"(",
"path",
")",
":",
"if",
"os",
".",
"path",
".",
"isabs",
"(",
"path",
")",
"and",
"os",
".",
"path",
".",
"isdir",
"(",
"path",
")",
"and",
"not",
"os",
".",
"path",
".",
"isfile",
"(",
"path",
")",
":",
"return",
"True",
"else",
":",
"raise",
"LocalPortValidationError",
"(",
"'Port value %s is not a valid filesystem location'",
"%",
"path",
")"
] |
1d8f972f861816b90785a484e9bec5bd4bc2f569
|
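The validator either returns True or raises, so it reads as a guard; a sketch assuming it is exposed as a static method on Port, per its qualified name:

import os

Port.is_valid_filesys(os.getcwd())      # True: absolute, a directory, not a file
Port.is_valid_filesys('relative/path')  # raises LocalPortValidationError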
test
|
Port.is_valid_s3_url
|
Checks if the url contains S3. Not an accurate validation of the url
|
gbdx_task_template/port.py
|
def is_valid_s3_url(url):
"""Checks if the url contains S3. Not an accurate validation of the url"""
# Skip if the url starts with source: (gbdxtools syntax)
if url.startswith('source:'):
return True
scheme, netloc, path, _, _, _ = urlparse(url)
port_except = RemotePortValidationError(
'Port value %s is not a valid s3 location' % url
)
if len(scheme) < 2:
raise port_except
if 's3' in scheme or 's3' in netloc or 's3' in path:
return True
else:
raise port_except
|
def is_valid_s3_url(url):
"""Checks if the url contains S3. Not an accurate validation of the url"""
# Skip if the url starts with source: (gbdxtools syntax)
if url.startswith('source:'):
return True
scheme, netloc, path, _, _, _ = urlparse(url)
port_except = RemotePortValidationError(
'Port value %s is not a valid s3 location' % url
)
if len(scheme) < 2:
raise port_except
if 's3' in scheme or 's3' in netloc or 's3' in path:
return True
else:
raise port_except
|
[
"Checks",
"if",
"the",
"url",
"contains",
"S3",
".",
"Not",
"an",
"accurate",
"validation",
"of",
"the",
"url"
] |
TDG-Platform/cloud-harness
|
python
|
https://github.com/TDG-Platform/cloud-harness/blob/1d8f972f861816b90785a484e9bec5bd4bc2f569/gbdx_task_template/port.py#L225-L243
|
[
"def",
"is_valid_s3_url",
"(",
"url",
")",
":",
"# Skip if the url start with source: (gbdxtools syntax)",
"if",
"url",
".",
"startswith",
"(",
"'source:'",
")",
":",
"return",
"True",
"scheme",
",",
"netloc",
",",
"path",
",",
"_",
",",
"_",
",",
"_",
"=",
"urlparse",
"(",
"url",
")",
"port_except",
"=",
"RemotePortValidationError",
"(",
"'Port value %s is not a valid s3 location'",
"%",
"url",
")",
"if",
"len",
"(",
"scheme",
")",
"<",
"2",
":",
"raise",
"port_except",
"if",
"'s3'",
"in",
"scheme",
"or",
"'s3'",
"in",
"netloc",
"or",
"'s3'",
"in",
"path",
":",
"return",
"True",
"else",
":",
"raise",
"port_except"
] |
1d8f972f861816b90785a484e9bec5bd4bc2f569
|
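Inputs each branch accepts or rejects, again assuming a static method on Port:

Port.is_valid_s3_url('source:some_task:output')            # True: gbdxtools syntax short-circuits
Port.is_valid_s3_url('s3://bucket/prefix/key')             # True: 's3' in the scheme
Port.is_valid_s3_url('https://s3.amazonaws.com/bucket/k')  # True: 's3' in the netloc
Port.is_valid_s3_url('ftp://example.com/file')             # raises RemotePortValidationError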
test
|
TaskController.invoke
|
Execute the command from the arguments.
:return: None or Error
|
gbdx_cloud_harness/controller.py
|
def invoke(self):
"""
Execute the command from the arguments.
:return: None or Error
"""
for key in self.FUNCTION_KEYS.keys():
if self._arguments[key] is True:
self.FUNCTION_KEYS[key]()
|
def invoke(self):
"""
Execute the command from the arguments.
:return: None or Error
"""
for key in self.FUNCTION_KEYS.keys():
if self._arguments[key] is True:
self.FUNCTION_KEYS[key]()
|
[
"Execute",
"the",
"command",
"from",
"the",
"arguments",
".",
":",
"return",
":",
"None",
"or",
"Error"
] |
TDG-Platform/cloud-harness
|
python
|
https://github.com/TDG-Platform/cloud-harness/blob/1d8f972f861816b90785a484e9bec5bd4bc2f569/gbdx_cloud_harness/controller.py#L76-L83
|
[
"def",
"invoke",
"(",
"self",
")",
":",
"for",
"key",
"in",
"self",
".",
"FUNCTION_KEYS",
".",
"keys",
"(",
")",
":",
"if",
"self",
".",
"_arguments",
"[",
"key",
"]",
"is",
"True",
":",
"self",
".",
"FUNCTION_KEYS",
"[",
"key",
"]",
"(",
")"
] |
1d8f972f861816b90785a484e9bec5bd4bc2f569
|
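invoke() is a flag-to-callable dispatch over docopt-style arguments; the same pattern in a standalone sketch:

class Dispatcher(object):
    def __init__(self, arguments):
        self._arguments = arguments
        self.FUNCTION_KEYS = {'create': self._create, 'run': self._run}
    def _create(self):
        print('creating')
    def _run(self):
        print('running')
    def invoke(self):
        for key in self.FUNCTION_KEYS.keys():
            if self._arguments[key] is True:
                self.FUNCTION_KEYS[key]()

Dispatcher({'create': True, 'run': False}).invoke()   # prints 'creating'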
test
|
TaskController._register_anonymous_task
|
Register the anonymous task or overwrite it.
:return: success or fail message.
|
gbdx_cloud_harness/controller.py
|
def _register_anonymous_task(self):
"""
Register the anonymous task or overwrite it.
:return: success or fail message.
"""
is_overwrite = self._arguments.get('--overwrite')
task_name = "CloudHarness_Anonymous_Task"
task_srv = TaskService()
if is_overwrite:
# Delete the task first
code, message = task_srv.delete_task(task_name)
# ignore status if deleted, or not registered
if code not in [200, 400]:
raise TaskRegistryError(message)
task_def_file = os.path.join(
os.path.dirname(os.path.dirname(os.path.abspath(__file__))),
'gbdx_task_template', 'task_definition.json'
)
with open(task_def_file, 'r') as f:
code, message = task_srv.register_task(f.read())
if code == 200:
print(message)
elif code == 409:
print('Task already exists')
else:
raise TaskRegistryError(message)
|
def _register_anonymous_task(self):
"""
Register the anonymous task or overwrite it.
:return: success or fail message.
"""
is_overwrite = self._arguments.get('--overwrite')
task_name = "CloudHarness_Anonymous_Task"
task_srv = TaskService()
if is_overwrite:
# Delete the task first
code, message = task_srv.delete_task(task_name)
# ignore status if deleted, or not registered
if code not in [200, 400]:
raise TaskRegistryError(message)
task_def_file = os.path.join(
os.path.dirname(os.path.dirname(os.path.abspath(__file__))),
'gbdx_task_template', 'task_definition.json'
)
with open(task_def_file, 'r') as f:
code, message = task_srv.register_task(f.read())
if code == 200:
print(message)
elif code == 409:
print('Task already exists')
else:
raise TaskRegistryError(message)
|
[
"Register",
"the",
"anonymouse",
"task",
"or",
"overwrite",
"it",
".",
":",
"return",
":",
"success",
"or",
"fail",
"message",
"."
] |
TDG-Platform/cloud-harness
|
python
|
https://github.com/TDG-Platform/cloud-harness/blob/1d8f972f861816b90785a484e9bec5bd4bc2f569/gbdx_cloud_harness/controller.py#L85-L114
|
[
"def",
"_register_anonymous_task",
"(",
"self",
")",
":",
"is_overwrite",
"=",
"self",
".",
"_arguments",
".",
"get",
"(",
"'--overwrite'",
")",
"task_name",
"=",
"\"CloudHarness_Anonymous_Task\"",
"task_srv",
"=",
"TaskService",
"(",
")",
"if",
"is_overwrite",
":",
"# Delete the task first",
"code",
",",
"message",
"=",
"task_srv",
".",
"delete_task",
"(",
"task_name",
")",
"# ignore status if deleted, or not registered",
"if",
"code",
"not",
"in",
"[",
"200",
",",
"400",
"]",
":",
"raise",
"TaskRegistryError",
"(",
"message",
")",
"task_def_file",
"=",
"os",
".",
"path",
".",
"join",
"(",
"os",
".",
"path",
".",
"dirname",
"(",
"os",
".",
"path",
".",
"dirname",
"(",
"os",
".",
"path",
".",
"abspath",
"(",
"__file__",
")",
")",
")",
",",
"'gbdx_task_template'",
",",
"'task_definition.json'",
")",
"with",
"open",
"(",
"task_def_file",
",",
"'r'",
")",
"as",
"f",
":",
"code",
",",
"message",
"=",
"task_srv",
".",
"register_task",
"(",
"f",
".",
"read",
"(",
")",
")",
"if",
"code",
"==",
"200",
":",
"print",
"(",
"message",
")",
"elif",
"code",
"==",
"409",
":",
"print",
"(",
"'Task already exists'",
")",
"else",
":",
"raise",
"TaskRegistryError",
"(",
"message",
")"
] |
1d8f972f861816b90785a484e9bec5bd4bc2f569
|
test
|
TaskController._create_app
|
Method for creating a new Application Template.
USAGE: cloud-harness create <dir_name> [--destination=<path>]
|
gbdx_cloud_harness/controller.py
|
def _create_app(self):
"""
Method for creating a new Application Template.
USAGE: cloud-harness create <dir_name> [--destination=<path>]
"""
template_path = os.path.join(
os.path.dirname(os.path.dirname(os.path.abspath(__file__))), self.TEMPLATE_FOLDER, self.TEMPLATE_FILENAME
)
new_dir = self._arguments['<dir_name>']
# Make new application directory
override_destination = self._arguments.get('--destination', None)
if override_destination is not None:
if override_destination == '':
raise ValueError('Destination path is empty')
# Check if the new destination is abs and exists.
if os.path.isabs(override_destination) and os.path.isdir(override_destination):
new_dir = os.path.join(override_destination, new_dir)
else:
# Create a path from the cwd, then check if it is valid and exists.
override_path = os.path.join(os.getcwd(), override_destination)
if not os.path.isabs(override_path) or not os.path.isdir(override_path):
raise ValueError('New path parameter %s is not a directory' % override_destination)
new_dir = os.path.join(override_path, new_dir)
else:
if os.path.isabs(new_dir) or os.path.sep in new_dir:
raise ValueError("Directory name is invalid")
# No override, put the folder in the cwd.
new_dir = os.path.join(os.getcwd(), new_dir)
os.makedirs(new_dir)
new_file_path = os.path.join(new_dir, self.DEFAULT_NEW_APP_FILENAME)
# Copy the template the new application location.
shutil.copyfile(template_path, new_file_path)
printer('New Application created at %s' % new_file_path)
|
def _create_app(self):
"""
Method for creating a new Application Template.
USAGE: cloud-harness create <dir_name> [--destination=<path>]
"""
template_path = os.path.join(
os.path.dirname(os.path.dirname(os.path.abspath(__file__))), self.TEMPLATE_FOLDER, self.TEMPLATE_FILENAME
)
new_dir = self._arguments['<dir_name>']
# Make new application directory
override_destination = self._arguments.get('--destination', None)
if override_destination is not None:
if override_destination == '':
raise ValueError('Destination path is empty')
# Check if the new destination is abs and exists.
if os.path.isabs(override_destination) and os.path.isdir(override_destination):
new_dir = os.path.join(override_destination, new_dir)
else:
# Create a path from the cwd, then check if it is valid and exists.
override_path = os.path.join(os.getcwd(), override_destination)
if not os.path.isabs(override_path) or not os.path.isdir(override_path):
raise ValueError('New path parameter %s is not a directory' % override_destination)
new_dir = os.path.join(override_path, new_dir)
else:
if os.path.isabs(new_dir) or os.path.sep in new_dir:
raise ValueError("Directory name is invalid")
# No override, put the folder in the cwd.
new_dir = os.path.join(os.getcwd(), new_dir)
os.makedirs(new_dir)
new_file_path = os.path.join(new_dir, self.DEFAULT_NEW_APP_FILENAME)
# Copy the template the new application location.
shutil.copyfile(template_path, new_file_path)
printer('New Application created at %s' % new_file_path)
|
[
"Method",
"for",
"creating",
"a",
"new",
"Application",
"Template",
".",
"USAGE",
":",
"cloud",
"-",
"harness",
"create",
"<dir_name",
">",
"[",
"--",
"destination",
"=",
"<path",
">",
"]"
] |
TDG-Platform/cloud-harness
|
python
|
https://github.com/TDG-Platform/cloud-harness/blob/1d8f972f861816b90785a484e9bec5bd4bc2f569/gbdx_cloud_harness/controller.py#L116-L155
|
[
"def",
"_create_app",
"(",
"self",
")",
":",
"template_path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"os",
".",
"path",
".",
"dirname",
"(",
"os",
".",
"path",
".",
"dirname",
"(",
"os",
".",
"path",
".",
"abspath",
"(",
"__file__",
")",
")",
")",
",",
"self",
".",
"TEMPLATE_FOLDER",
",",
"self",
".",
"TEMPLATE_FILENAME",
")",
"new_dir",
"=",
"self",
".",
"_arguments",
"[",
"'<dir_name>'",
"]",
"# Make new application directory",
"override_destination",
"=",
"self",
".",
"_arguments",
".",
"get",
"(",
"'--destination'",
",",
"None",
")",
"if",
"override_destination",
"is",
"not",
"None",
":",
"if",
"override_destination",
"==",
"''",
":",
"raise",
"ValueError",
"(",
"'Destination path is empty'",
")",
"# Check if the new destination is abs and exists.",
"if",
"os",
".",
"path",
".",
"isabs",
"(",
"override_destination",
")",
"and",
"os",
".",
"path",
".",
"isdir",
"(",
"override_destination",
")",
":",
"new_dir",
"=",
"os",
".",
"path",
".",
"join",
"(",
"override_destination",
",",
"new_dir",
")",
"else",
":",
"# Create a path from the cwd, then check if it is valid and exists.",
"override_path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"os",
".",
"getcwd",
"(",
")",
",",
"override_destination",
")",
"if",
"not",
"os",
".",
"path",
".",
"isabs",
"(",
"override_path",
")",
"or",
"not",
"os",
".",
"path",
".",
"isdir",
"(",
"override_path",
")",
":",
"raise",
"ValueError",
"(",
"'New path parameter %s is not a directory'",
"%",
"override_destination",
")",
"new_dir",
"=",
"os",
".",
"path",
".",
"join",
"(",
"override_path",
",",
"new_dir",
")",
"else",
":",
"if",
"os",
".",
"path",
".",
"isabs",
"(",
"new_dir",
")",
"or",
"os",
".",
"path",
".",
"sep",
"in",
"new_dir",
":",
"raise",
"ValueError",
"(",
"\"Directory name is invalid\"",
")",
"# No override, put the folder in the cwd.",
"new_dir",
"=",
"os",
".",
"path",
".",
"join",
"(",
"os",
".",
"getcwd",
"(",
")",
",",
"new_dir",
")",
"os",
".",
"makedirs",
"(",
"new_dir",
")",
"new_file_path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"new_dir",
",",
"self",
".",
"DEFAULT_NEW_APP_FILENAME",
")",
"# Copy the template the new application location.",
"shutil",
".",
"copyfile",
"(",
"template_path",
",",
"new_file_path",
")",
"printer",
"(",
"'New Application created at %s'",
"%",
"new_file_path",
")"
] |
1d8f972f861816b90785a484e9bec5bd4bc2f569
|
test
|
TaskController._run_app
|
Method for running a custom Application Template.
NOTES:
* The default name of the application is app.py. So this function is going to look
for app.py, unless the --file option is provided with a different file name.
* The generated source bundle will package everything in the work_path. If large files are
not required for the application source, they need to be ignored. Use a file called "pkg_ignore"
to identify folders and files to ignore.
USAGE: cloud-harness run <file_name> [--remote] [--verbose] [--upload] [--download] [--dry-run]
|
gbdx_cloud_harness/controller.py
|
def _run_app(self):
"""
Method for running a custom Application Template.
NOTES:
* The default name of the application is app.py. So this function is going to look
for app.py, unless the --file option is provided with a different file name.
* The generated source bundle will package everything in the work_path. If large files are
not required for the application source, they need to be ignored. Use a file called "pkg_ignore"
to identify folders and files to ignore.
USAGE: cloud-harness run <file_name> [--remote] [--verbose] [--upload] [--download] [--dry-run]
"""
is_remote_run = self._arguments.get('--remote')
filename = self._arguments.get('<file_name>')
upload_ports = self._arguments.get('--upload')
download_ports = self._arguments.get('--download')
is_verbose = self._arguments.get('--verbose')
# A dry run means, allow ports to be pushed up, but don't allow execution and monitoring.
is_dry_run = self._arguments.get('--dry-run')
if download_ports: # TODO temporary until implemented.
raise NotImplementedError("Downloading of output ports is not implemented yet.")
# Check if the filename passed is actually a class object (gbdxtools functionality)
if not isinstance(filename, str) and issubclass(filename, TaskTemplate):
template_class = filename
template_file = inspect.getfile(template_class)
config_file = self._write_config_file(template_file)
else:
template_file = self._get_template_abs_path(filename)
if not os.path.isfile(template_file):
raise ValueError('The location %s does not exist' % template_file)
config_file = self._write_config_file(template_file)
template_class = self._get_class(template_file)
with template_class() as template:
if is_remote_run: # Means the user is running with --remote, push to S3 and submit workflow.
task = template.task
# Set the source bundle directory to where the template_file is.
task.source_bundle.value = os.path.join(os.path.dirname(template_file), 'tmp_%s' % str(uuid.uuid4()))
task.run_name = '{task_name}_src'.format(task_name=task.name)
src_bundle_dir = task.source_bundle.value
# Create source bundle to be executed on the GBDX platform
self._archive_source(os.path.dirname(src_bundle_dir), src_bundle_dir)
port_service = PortService(task)
if upload_ports:
# Push all port data to S3
port_service.upload_input_ports()
else:
# Only push source bundle port
port_service.upload_input_ports(port_list=[self.SOURCE_BUNDLE_PORT])
# Delete source bundle directory and config after upload.
shutil.rmtree(src_bundle_dir)
os.remove(config_file)
# Get the new task object with uploaded port locations.
task = port_service.task
# Validate task
task.is_valid(remote=True)
workflow = Workflow(task)
if is_verbose:
temp_wf = workflow.json
printer(temp_wf)
if is_dry_run:
return task
try:
workflow.execute()
printer(workflow.id)
except Exception as e:
printer(e.message)
template.reason = "Execution Failed: %s" % e.message
return
# Monitor events of workflow
is_done = workflow.monitor_run()
if not is_done:
template.reason = "Execution Failed during Run"
if download_ports:
# TODO port_service.download_output_port()
pass
else:
# For local and Docker container execution.
# Check that all output locations exist.
template.check_and_create_outputs()
# Validate task
template.task.is_valid()
if is_verbose:
printer(template.task.json())
all_ports = template.task.ports[0] + template.task.ports[1]
printer([port.__str__() for port in all_ports])
if is_dry_run:
template.reason = "Execution Skipped"
return
# Run Task Locally
try:
template.invoke()
except Exception as e:
template.reason = "Failed Exception: %s" % e
if template.reason is None or template.reason == '':
template.reason = "Execution Completed"
|
def _run_app(self):
"""
Method for running a custom Application Template.
NOTES:
* The default name of the application is app.py. So this function is going to look
for app.py, unless the --file option is provided with a different file name.
* The generated source bundle will package everything in the work_path. If large files are
not required for the application source, they need to be ignored. Use a file called "pkg_ignore"
to identify folders and files to ignore.
USAGE: cloud-harness run <file_name> [--remote] [--verbose] [--upload] [--download] [--dry-run]
"""
is_remote_run = self._arguments.get('--remote')
filename = self._arguments.get('<file_name>')
upload_ports = self._arguments.get('--upload')
download_ports = self._arguments.get('--download')
is_verbose = self._arguments.get('--verbose')
# A dry run means, allow ports to be pushed up, but don't allow execution and monitoring.
is_dry_run = self._arguments.get('--dry-run')
if download_ports: # TODO temporary until implemented.
raise NotImplementedError("Downloading of output ports is not implemented yet.")
# Check if the filename passed is actually a class object (gbdxtools functionality)
if not isinstance(filename, str) and issubclass(filename, TaskTemplate):
template_class = filename
template_file = inspect.getfile(template_class)
config_file = self._write_config_file(template_file)
else:
template_file = self._get_template_abs_path(filename)
if not os.path.isfile(template_file):
raise ValueError('The location %s does not exist' % template_file)
config_file = self._write_config_file(template_file)
template_class = self._get_class(template_file)
with template_class() as template:
if is_remote_run: # Means the user is running with --remote, push to S3 and submit workflow.
task = template.task
# Set the source bundle directory to where the template_file is.
task.source_bundle.value = os.path.join(os.path.dirname(template_file), 'tmp_%s' % str(uuid.uuid4()))
task.run_name = '{task_name}_src'.format(task_name=task.name)
src_bundle_dir = task.source_bundle.value
# Create source bundle to be executed on the GBDX platform
self._archive_source(os.path.dirname(src_bundle_dir), src_bundle_dir)
port_service = PortService(task)
if upload_ports:
# Push all port data to S3
port_service.upload_input_ports()
else:
# Only push source bundle port
port_service.upload_input_ports(port_list=[self.SOURCE_BUNDLE_PORT])
# Delete source bundle directory and config after upload.
shutil.rmtree(src_bundle_dir)
os.remove(config_file)
# Get the new task object with uploaded port locations.
task = port_service.task
# Validate task
task.is_valid(remote=True)
workflow = Workflow(task)
if is_verbose:
temp_wf = workflow.json
printer(temp_wf)
if is_dry_run:
return task
try:
workflow.execute()
printer(workflow.id)
except Exception as e:
printer(e.message)
template.reason = "Execution Failed: %s" % e.message
return
# Monitor events of workflow
is_done = workflow.monitor_run()
if not is_done:
template.reason = "Execution Failed during Run"
if download_ports:
# TODO port_service.download_output_port()
pass
else:
# For local and Docker container execution.
# Check that all output locations exist.
template.check_and_create_outputs()
# Validate task
template.task.is_valid()
if is_verbose:
printer(template.task.json())
all_ports = template.task.ports[0] + template.task.ports[1]
printer([port.__str__() for port in all_ports])
if is_dry_run:
template.reason = "Execution Skipped"
return
# Run Task Locally
try:
template.invoke()
except Exception as e:
template.reason = "Failed Exception: %s" % e
if template.reason is None or template.reason == '':
template.reason = "Execution Completed"
|
[
"Method",
"for",
"running",
"a",
"custom",
"Application",
"Templates",
".",
"NOTES",
":",
"*",
"The",
"default",
"name",
"of",
"the",
"application",
"is",
"app",
".",
"py",
".",
"So",
"this",
"function",
"is",
"going",
"to",
"look",
"for",
"app",
".",
"py",
"unless",
"the",
"--",
"file",
"option",
"is",
"provide",
"with",
"a",
"different",
"file",
"name",
".",
"*",
"The",
"generated",
"source",
"bundle",
"will",
"package",
"everything",
"in",
"the",
"work_path",
".",
"If",
"large",
"files",
"not",
"required",
"for",
"the",
"application",
"source",
"they",
"need",
"to",
"be",
"ignored",
".",
"Use",
"a",
"file",
"called",
"pkg_ignore",
"to",
"identify",
"folders",
"and",
"files",
"to",
"ignore",
".",
"USAGE",
":",
"cloud",
"-",
"harness",
"run",
"<file_name",
">",
"[",
"--",
"remote",
"]",
"[",
"--",
"verbose",
"]",
"[",
"--",
"upload",
"]",
"[",
"--",
"download",
"]",
"[",
"--",
"dry",
"-",
"run",
"]"
] |
TDG-Platform/cloud-harness
|
python
|
https://github.com/TDG-Platform/cloud-harness/blob/1d8f972f861816b90785a484e9bec5bd4bc2f569/gbdx_cloud_harness/controller.py#L157-L281
|
[
"def",
"_run_app",
"(",
"self",
")",
":",
"is_remote_run",
"=",
"self",
".",
"_arguments",
".",
"get",
"(",
"'--remote'",
")",
"filename",
"=",
"self",
".",
"_arguments",
".",
"get",
"(",
"'<file_name>'",
")",
"upload_ports",
"=",
"self",
".",
"_arguments",
".",
"get",
"(",
"'--upload'",
")",
"download_ports",
"=",
"self",
".",
"_arguments",
".",
"get",
"(",
"'--download'",
")",
"is_verbose",
"=",
"self",
".",
"_arguments",
".",
"get",
"(",
"'--verbose'",
")",
"# A dry run means, allow port sot be pushed up, but don't allow execution and monitoring.",
"is_dry_run",
"=",
"self",
".",
"_arguments",
".",
"get",
"(",
"'--dry-run'",
")",
"if",
"download_ports",
":",
"# TODO temporary until implemented.",
"raise",
"NotImplementedError",
"(",
"\"Downloading of output ports is not implemented yet.\"",
")",
"# Check if the filename passed is actually a class object (gbdxtools functionality)",
"if",
"not",
"isinstance",
"(",
"filename",
",",
"str",
")",
"and",
"issubclass",
"(",
"filename",
",",
"TaskTemplate",
")",
":",
"template_class",
"=",
"filename",
"template_file",
"=",
"inspect",
".",
"getfile",
"(",
"template_class",
")",
"config_file",
"=",
"self",
".",
"_write_config_file",
"(",
"template_file",
")",
"else",
":",
"template_file",
"=",
"self",
".",
"_get_template_abs_path",
"(",
"filename",
")",
"if",
"not",
"os",
".",
"path",
".",
"isfile",
"(",
"template_file",
")",
":",
"raise",
"ValueError",
"(",
"'The location %s does not exist'",
"%",
"template_file",
")",
"config_file",
"=",
"self",
".",
"_write_config_file",
"(",
"template_file",
")",
"template_class",
"=",
"self",
".",
"_get_class",
"(",
"template_file",
")",
"with",
"template_class",
"(",
")",
"as",
"template",
":",
"if",
"is_remote_run",
":",
"# Means the user is running with --remote, push to S3 and submit workflow.",
"task",
"=",
"template",
".",
"task",
"# Set the source bundle directory to where the template_file is.",
"task",
".",
"source_bundle",
".",
"value",
"=",
"os",
".",
"path",
".",
"join",
"(",
"os",
".",
"path",
".",
"dirname",
"(",
"template_file",
")",
",",
"'tmp_%s'",
"%",
"str",
"(",
"uuid",
".",
"uuid4",
"(",
")",
")",
")",
"task",
".",
"run_name",
"=",
"'{task_name}_src'",
".",
"format",
"(",
"task_name",
"=",
"task",
".",
"name",
")",
"src_bundle_dir",
"=",
"task",
".",
"source_bundle",
".",
"value",
"# Create source bundle to be executed on the GBDX platform",
"self",
".",
"_archive_source",
"(",
"os",
".",
"path",
".",
"dirname",
"(",
"src_bundle_dir",
")",
",",
"src_bundle_dir",
")",
"port_service",
"=",
"PortService",
"(",
"task",
")",
"if",
"upload_ports",
":",
"# Push all port data to S3",
"port_service",
".",
"upload_input_ports",
"(",
")",
"else",
":",
"# Only push source bundle port",
"port_service",
".",
"upload_input_ports",
"(",
"port_list",
"=",
"[",
"self",
".",
"SOURCE_BUNDLE_PORT",
"]",
")",
"# Delete source bundle directory and config after upload.",
"shutil",
".",
"rmtree",
"(",
"src_bundle_dir",
")",
"os",
".",
"remove",
"(",
"config_file",
")",
"# Get the new task object with uploaded port locations.",
"task",
"=",
"port_service",
".",
"task",
"# Validate task",
"task",
".",
"is_valid",
"(",
"remote",
"=",
"True",
")",
"workflow",
"=",
"Workflow",
"(",
"task",
")",
"if",
"is_verbose",
":",
"temp_wf",
"=",
"workflow",
".",
"json",
"printer",
"(",
"temp_wf",
")",
"if",
"is_dry_run",
":",
"return",
"task",
"try",
":",
"workflow",
".",
"execute",
"(",
")",
"printer",
"(",
"workflow",
".",
"id",
")",
"except",
"Exception",
"as",
"e",
":",
"printer",
"(",
"e",
".",
"message",
")",
"template",
".",
"reason",
"=",
"\"Execution Failed: %s\"",
"%",
"e",
".",
"message",
"return",
"# Monitor events of workflow",
"is_done",
"=",
"workflow",
".",
"monitor_run",
"(",
")",
"if",
"not",
"is_done",
":",
"template",
".",
"reason",
"=",
"\"Execution Failed during Run\"",
"if",
"download_ports",
":",
"# TODO port_service.download_output_port()",
"pass",
"else",
":",
"# For local and Docker container execution.",
"# Check that all output locations exist.",
"template",
".",
"check_and_create_outputs",
"(",
")",
"# Validate task",
"template",
".",
"task",
".",
"is_valid",
"(",
")",
"if",
"is_verbose",
":",
"printer",
"(",
"template",
".",
"task",
".",
"json",
"(",
")",
")",
"all_ports",
"=",
"template",
".",
"task",
".",
"ports",
"[",
"0",
"]",
"+",
"template",
".",
"task",
".",
"ports",
"[",
"1",
"]",
"printer",
"(",
"[",
"port",
".",
"__str__",
"(",
")",
"for",
"port",
"in",
"all_ports",
"]",
")",
"if",
"is_dry_run",
":",
"template",
".",
"reason",
"=",
"\"Execution Skipped\"",
"return",
"# Run Task Locally",
"try",
":",
"template",
".",
"invoke",
"(",
")",
"except",
"Exception",
"as",
"e",
":",
"template",
".",
"reason",
"=",
"\"Failed Exception: %s\"",
"%",
"e",
"if",
"template",
".",
"reason",
"is",
"None",
"or",
"template",
".",
"reason",
"==",
"''",
":",
"template",
".",
"reason",
"=",
"\"Execution Completed\""
] |
1d8f972f861816b90785a484e9bec5bd4bc2f569
|
test
|
TaskController._write_config_file
|
Write a config file to the source bundle location to identify the entry point.
:param template_file: path to the task template subclass (executable)
|
gbdx_cloud_harness/controller.py
|
def _write_config_file(template_file):
"""
Write a config file to the source bundle location to identify the entry point.
:param template_file: path to the task template subclass (executable)
"""
config_filename = '.cloud_harness_config.json'
config_path = os.path.dirname(template_file)
filename = os.path.split(template_file)[1]
if filename.endswith('.pyc'):
filename = filename[:-1]
config_file = os.path.join(config_path, config_filename)
with open(config_file, 'w') as f:
f.write(json.dumps({'task_filename': filename}))
return config_file
|
def _write_config_file(template_file):
"""
Write a config file to the source bundle location to identify the entry point.
:param template_file: path to the task template subclass (executable)
"""
config_filename = '.cloud_harness_config.json'
config_path = os.path.dirname(template_file)
filename = os.path.split(template_file)[1]
if filename.endswith('.pyc'):
filename = filename[:-1]
config_file = os.path.join(config_path, config_filename)
with open(config_file, 'w') as f:
f.write(json.dumps({'task_filename': filename}))
return config_file
|
[
"Write",
"a",
"config",
"file",
"to",
"the",
"source",
"bundle",
"location",
"to",
"identify",
"the",
"entry",
"point",
".",
":",
"param",
"template_file",
":",
"path",
"to",
"the",
"task",
"template",
"subclass",
"(",
"executable",
")"
] |
TDG-Platform/cloud-harness
|
python
|
https://github.com/TDG-Platform/cloud-harness/blob/1d8f972f861816b90785a484e9bec5bd4bc2f569/gbdx_cloud_harness/controller.py#L285-L303
|
[
"def",
"_write_config_file",
"(",
"template_file",
")",
":",
"config_filename",
"=",
"'.cloud_harness_config.json'",
"config_path",
"=",
"os",
".",
"path",
".",
"dirname",
"(",
"template_file",
")",
"filename",
"=",
"os",
".",
"path",
".",
"split",
"(",
"template_file",
")",
"[",
"1",
"]",
"if",
"filename",
".",
"endswith",
"(",
"'.pyc'",
")",
":",
"filename",
"=",
"filename",
"[",
":",
"-",
"1",
"]",
"config_file",
"=",
"os",
".",
"path",
".",
"join",
"(",
"config_path",
",",
"config_filename",
")",
"with",
"open",
"(",
"config_file",
",",
"'w'",
")",
"as",
"f",
":",
"f",
".",
"write",
"(",
"json",
".",
"dumps",
"(",
"{",
"'task_filename'",
":",
"filename",
"}",
")",
")",
"return",
"config_file"
] |
1d8f972f861816b90785a484e9bec5bd4bc2f569
|
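Given a compiled template path, the helper records the .py name; a sketch of the call and the marker it leaves behind (paths illustrative):

config = TaskController._write_config_file('/work/app.pyc')
# config == '/work/.cloud_harness_config.json'
# file contents: {"task_filename": "app.py"}   ('.pyc' is truncated back to '.py')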
test
|
TaskController._get_class
|
Import the file and inspect for subclass of TaskTemplate.
:param template_file: filename to import.
|
gbdx_cloud_harness/controller.py
|
def _get_class(template_file):
"""
Import the file and inspect for subclass of TaskTemplate.
:param template_file: filename to import.
"""
with warnings.catch_warnings():
# suppress warning from importing
warnings.filterwarnings("ignore", category=RuntimeWarning)
template_module = imp.load_source('module.name', template_file)
# Find the subclass of TaskTemplate
for name, data in inspect.getmembers(template_module, inspect.isclass):
if issubclass(data, TaskTemplate) and data.__name__ != TaskTemplate.__name__:
return data
|
def _get_class(template_file):
"""
Import the file and inspect for subclass of TaskTemplate.
:param template_file: filename to import.
"""
with warnings.catch_warnings():
# suppress warning from importing
warnings.filterwarnings("ignore", category=RuntimeWarning)
template_module = imp.load_source('module.name', template_file)
# Find the subclass of TaskTemplate
for name, data in inspect.getmembers(template_module, inspect.isclass):
if issubclass(data, TaskTemplate) and data.__name__ != TaskTemplate.__name__:
return data
|
[
"Import",
"the",
"file",
"and",
"inspect",
"for",
"subclass",
"of",
"TaskTemplate",
".",
":",
"param",
"template_file",
":",
"filename",
"to",
"import",
"."
] |
TDG-Platform/cloud-harness
|
python
|
https://github.com/TDG-Platform/cloud-harness/blob/1d8f972f861816b90785a484e9bec5bd4bc2f569/gbdx_cloud_harness/controller.py#L306-L319
|
[
"def",
"_get_class",
"(",
"template_file",
")",
":",
"with",
"warnings",
".",
"catch_warnings",
"(",
")",
":",
"# suppress warning from importing",
"warnings",
".",
"filterwarnings",
"(",
"\"ignore\"",
",",
"category",
"=",
"RuntimeWarning",
")",
"template_module",
"=",
"imp",
".",
"load_source",
"(",
"'module.name'",
",",
"template_file",
")",
"# Find the subclass of TaskTemplate",
"for",
"name",
",",
"data",
"in",
"inspect",
".",
"getmembers",
"(",
"template_module",
",",
"inspect",
".",
"isclass",
")",
":",
"if",
"issubclass",
"(",
"data",
",",
"TaskTemplate",
")",
"and",
"data",
".",
"__name__",
"!=",
"TaskTemplate",
".",
"__name__",
":",
"return",
"data"
] |
1d8f972f861816b90785a484e9bec5bd4bc2f569
|
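The discover-by-import pattern in isolation, using the same Python 2-era imp module as the source; the identity comparison below replaces the original's name check and is my simplification:

import imp
import inspect

def find_subclass(path, base):
    module = imp.load_source('module.name', path)    # executes the file as a module
    for _, obj in inspect.getmembers(module, inspect.isclass):
        if issubclass(obj, base) and obj is not base:
            return obj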
test
|
TaskController._get_template_abs_path
|
Return a valid absolute path. filename can be relative or absolute.
|
gbdx_cloud_harness/controller.py
|
def _get_template_abs_path(filename):
"""
Return a valid absolute path. filename can be relative or absolute.
"""
if os.path.isabs(filename) and os.path.isfile(filename):
return filename
else:
return os.path.join(os.getcwd(), filename)
|
def _get_template_abs_path(filename):
"""
Return a valid absolute path. filename can be relative or absolute.
"""
if os.path.isabs(filename) and os.path.isfile(filename):
return filename
else:
return os.path.join(os.getcwd(), filename)
|
[
"Return",
"a",
"valid",
"absolute",
"path",
".",
"filename",
"can",
"be",
"relative",
"or",
"absolute",
"."
] |
TDG-Platform/cloud-harness
|
python
|
https://github.com/TDG-Platform/cloud-harness/blob/1d8f972f861816b90785a484e9bec5bd4bc2f569/gbdx_cloud_harness/controller.py#L322-L329
|
[
"def",
"_get_template_abs_path",
"(",
"filename",
")",
":",
"if",
"os",
".",
"path",
".",
"isabs",
"(",
"filename",
")",
"and",
"os",
".",
"path",
".",
"isfile",
"(",
"filename",
")",
":",
"return",
"filename",
"else",
":",
"return",
"os",
".",
"path",
".",
"join",
"(",
"os",
".",
"getcwd",
"(",
")",
",",
"filename",
")"
] |
1d8f972f861816b90785a484e9bec5bd4bc2f569
|
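Note the relative branch joins against the cwd without re-checking existence; _run_app() validates with os.path.isfile() afterwards. A sketch, with the cwd assumed to be /home/user/project:

TaskController._get_template_abs_path('/opt/app.py')   # '/opt/app.py' when it exists on disk
TaskController._get_template_abs_path('app.py')        # '/home/user/project/app.py', unchecked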
test
|
AccountStorageService.upload
|
Upload a list of files to a user's account location
:param source_files: list of files to upload, or single file name
:param s3_folder: the user location to upload to.
|
gbdx_cloud_harness/services/account_storage_service.py
|
def upload(self, source_files, s3_folder=None):
"""
Upload a list of files to a users account location
:param source_files: list of files to upload, or single file name
:param s3_folder: the user location to upload to.
"""
if s3_folder is None:
folder = self.prefix
else:
folder = '%s/%s' % (self.prefix, s3_folder)
if isinstance(source_files, list):
for file_tuple in source_files:
self.__upload_file(file_tuple, folder)
elif isinstance(source_files, tuple):
self.__upload_file(source_files, folder)
else:
raise ValueError("Source Files must be a tuple or list of tuples: (filename, keyname)")
|
def upload(self, source_files, s3_folder=None):
"""
Upload a list of files to a users account location
:param source_files: list of files to upload, or single file name
:param s3_folder: the user location to upload to.
"""
if s3_folder is None:
folder = self.prefix
else:
folder = '%s/%s' % (self.prefix, s3_folder)
if isinstance(source_files, list):
for file_tuple in source_files:
self.__upload_file(file_tuple, folder)
elif isinstance(source_files, tuple):
self.__upload_file(source_files, folder)
else:
raise ValueError("Source Files must be a tuple or list of tuples: (filename, keyname)")
|
[
"Upload",
"a",
"list",
"of",
"files",
"to",
"a",
"users",
"account",
"location",
":",
"param",
"source_files",
":",
"list",
"of",
"files",
"to",
"upload",
"or",
"single",
"file",
"name",
":",
"param",
"s3_folder",
":",
"the",
"user",
"location",
"to",
"upload",
"to",
"."
] |
TDG-Platform/cloud-harness
|
python
|
https://github.com/TDG-Platform/cloud-harness/blob/1d8f972f861816b90785a484e9bec5bd4bc2f569/gbdx_cloud_harness/services/account_storage_service.py#L70-L88
|
[
"def",
"upload",
"(",
"self",
",",
"source_files",
",",
"s3_folder",
"=",
"None",
")",
":",
"if",
"s3_folder",
"is",
"None",
":",
"folder",
"=",
"self",
".",
"prefix",
"else",
":",
"folder",
"=",
"'%s/%s'",
"%",
"(",
"self",
".",
"prefix",
",",
"s3_folder",
")",
"if",
"isinstance",
"(",
"source_files",
",",
"list",
")",
":",
"for",
"file_tuple",
"in",
"source_files",
":",
"self",
".",
"__upload_file",
"(",
"file_tuple",
",",
"folder",
")",
"elif",
"isinstance",
"(",
"source_files",
",",
"tuple",
")",
":",
"self",
".",
"__upload_file",
"(",
"source_files",
",",
"folder",
")",
"else",
":",
"raise",
"ValueError",
"(",
"\"Source Files must be a tuple or list of tuples: (filename, keyname)\"",
")"
] |
1d8f972f861816b90785a484e9bec5bd4bc2f569
|
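Every source entry must be a (filename, keyname) tuple, as the ValueError spells out; a sketch with storage as an AccountStorageService instance:

storage.upload(('/tmp/report.txt', 'report.txt'))                      # single file, under <prefix>/
storage.upload([('/tmp/a.tif', 'a.tif'), ('/tmp/b.tif', 'b.tif')],
               s3_folder='my_run/inputs')                              # under <prefix>/my_run/inputs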
test
|
AccountStorageService.download
|
Download all files from a user's account location
:param local_port_path: the local path where the data is to download to
:param key_names: can start with self.prefix or taken as relative to prefix.
Example:
local_port_path = /home/user/myworkflow/input_images/ (sync all data in this folder)
s3_folder = myworkflow/input_images/ (location on s3 that will be synced to local path)
|
gbdx_cloud_harness/services/account_storage_service.py
|
def download(self, local_port_path, key_names): # pragma: no cover
"""
Download all files from a user's account location
:param local_port_path: the local path where the data is to download to
:param key_names: can start with self.prefix or taken as relative to prefix.
Example:
local_port_path = /home/user/myworkflow/input_images/ (sync all data in this folder)
s3_folder = myworkflow/input_images/ (location on s3 that will be synced to local path)
"""
if not os.path.isdir(local_port_path):
raise ValueError("Download path does not exist: %s" % local_port_path)
if not isinstance(key_names, list):
key_names = [key_names]
for key_name in key_names:
is_folder = key_name.endswith('/')
# strip leading and trailing slashes
key_name = key_name.lstrip('/').rstrip('/')
key_parts = key_name.split('/')
# Key names from the list function will include the account prefix
# and any folder namespace.
if key_parts[0] == self.prefix:
path = os.path.join(local_port_path, *key_parts[1:])
if not is_folder:
folder_path = os.path.join(local_port_path, *key_parts[1:-1])
get_key_name = key_name
else:
path = os.path.join(local_port_path, *key_parts)
if not is_folder:
folder_path = os.path.join(local_port_path, *key_parts[:-1])
get_key_name = '%s/%s' % (self.prefix, key_name)
if is_folder and not os.path.isdir(path):
# A directory that doesn't exist
os.makedirs(path)
else:
if not os.path.isdir(folder_path):
os.makedirs(folder_path)
# Assume it is a file
self.__download_file(path, get_key_name)
|
def download(self, local_port_path, key_names): # pragma: no cover
"""
Download all files from a user's account location
:param local_port_path: the local path where the data is to download to
:param key_names: can start with self.prefix or taken as relative to prefix.
Example:
local_port_path = /home/user/myworkflow/input_images/ (sync all data in this folder)
s3_folder = myworkflow/input_images/ (location on s3 that will be synced to local path)
"""
if not os.path.isdir(local_port_path):
raise ValueError("Download path does not exist: %s" % local_port_path)
if not isinstance(key_names, list):
key_names = [key_names]
for key_name in key_names:
is_folder = key_name.endswith('/')
# strip leading and trailing slashes
key_name = key_name.lstrip('/').rstrip('/')
key_parts = key_name.split('/')
# Key names from the list function will include the account prefix
# and any folder namespace.
if key_parts[0] == self.prefix:
path = os.path.join(local_port_path, *key_parts[1:])
if not is_folder:
folder_path = os.path.join(local_port_path, *key_parts[1:-1])
get_key_name = key_name
else:
path = os.path.join(local_port_path, *key_parts)
if not is_folder:
folder_path = os.path.join(local_port_path, *key_parts[:-1])
get_key_name = '%s/%s' % (self.prefix, key_name)
if is_folder and not os.path.isdir(path):
# A directory that doesn't exist
os.makedirs(path)
else:
if not os.path.isdir(folder_path):
os.makedirs(folder_path)
# Assume it is a file
self.__download_file(path, get_key_name)
|
[
"download",
"all",
"files",
"from",
"a",
"users",
"account",
"location",
":",
"param",
"local_port_path",
":",
"the",
"local",
"path",
"where",
"the",
"data",
"is",
"to",
"download",
"to",
":",
"param",
"key_name",
":",
"can",
"start",
"with",
"self",
".",
"prefix",
"or",
"taken",
"as",
"relative",
"to",
"prefix",
"."
] |
TDG-Platform/cloud-harness
|
python
|
https://github.com/TDG-Platform/cloud-harness/blob/1d8f972f861816b90785a484e9bec5bd4bc2f569/gbdx_cloud_harness/services/account_storage_service.py#L95-L139
|
[
"def",
"download",
"(",
"self",
",",
"local_port_path",
",",
"key_names",
")",
":",
"# pragma: no cover",
"if",
"not",
"os",
".",
"path",
".",
"isdir",
"(",
"local_port_path",
")",
":",
"raise",
"ValueError",
"(",
"\"Download path does not exist: %s\"",
"%",
"local_port_path",
")",
"if",
"not",
"isinstance",
"(",
"key_names",
",",
"list",
")",
":",
"key_names",
"=",
"[",
"key_names",
"]",
"for",
"key_name",
"in",
"key_names",
":",
"is_folder",
"=",
"key_name",
".",
"endswith",
"(",
"'/'",
")",
"# strip leading and trailing slashes",
"key_name",
"=",
"key_name",
".",
"lstrip",
"(",
"'/'",
")",
".",
"rstrip",
"(",
"'/'",
")",
"key_parts",
"=",
"key_name",
".",
"split",
"(",
"'/'",
")",
"# Key names from the list function will include the account prefix",
"# and any folder namespace.",
"if",
"key_parts",
"[",
"0",
"]",
"==",
"self",
".",
"prefix",
":",
"path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"local_port_path",
",",
"*",
"key_parts",
"[",
"1",
":",
"]",
")",
"if",
"not",
"is_folder",
":",
"folder_path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"local_port_path",
",",
"*",
"key_parts",
"[",
"1",
":",
"-",
"1",
"]",
")",
"get_key_name",
"=",
"key_name",
"else",
":",
"path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"local_port_path",
",",
"*",
"key_parts",
")",
"if",
"not",
"is_folder",
":",
"folder_path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"local_port_path",
",",
"*",
"key_parts",
"[",
":",
"-",
"1",
"]",
")",
"get_key_name",
"=",
"'%s/%s'",
"%",
"(",
"self",
".",
"prefix",
",",
"key_name",
")",
"if",
"is_folder",
"and",
"not",
"os",
".",
"path",
".",
"isdir",
"(",
"path",
")",
":",
"# A directory that doesn't exist",
"os",
".",
"makedirs",
"(",
"path",
")",
"else",
":",
"if",
"not",
"os",
".",
"path",
".",
"isdir",
"(",
"folder_path",
")",
":",
"os",
".",
"makedirs",
"(",
"folder_path",
")",
"# Assume it is a file",
"self",
".",
"__download_file",
"(",
"path",
",",
"get_key_name",
")"
] |
1d8f972f861816b90785a484e9bec5bd4bc2f569
|
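A usage sketch for the download method above; the construction of the service and the key names below are hypothetical, not taken from the repo:

import os

def fetch_inputs(storage):  # storage: a configured AccountStorageService (hypothetical setup)
    local_dir = '/home/user/myworkflow/input_images'
    os.makedirs(local_dir, exist_ok=True)  # download() raises ValueError for a missing dir
    # a trailing slash marks a folder key; names without one are treated as files
    storage.download(local_dir, ['myworkflow/input_images/', 'myworkflow/config.json'])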
test
|
AccountStorageService.list
|
Get a list of keys for the account
|
gbdx_cloud_harness/services/account_storage_service.py
|
def list(self, s3_folder='', full_key_data=False):
"""Get a list of keys for the accounts"""
if not s3_folder.startswith('/'):
s3_folder = '/' + s3_folder
s3_prefix = self.prefix + s3_folder
bucket_data = self.client.list_objects(Bucket=self.bucket, Prefix=s3_prefix)
if full_key_data:
return bucket_data['Contents']
else:
return [k['Key'] for k in bucket_data['Contents']]
|
def list(self, s3_folder='', full_key_data=False):
"""Get a list of keys for the accounts"""
if not s3_folder.startswith('/'):
s3_folder = '/' + s3_folder
s3_prefix = self.prefix + s3_folder
bucket_data = self.client.list_objects(Bucket=self.bucket, Prefix=s3_prefix)
if full_key_data:
return bucket_data['Contents']
else:
return [k['Key'] for k in bucket_data['Contents']]
|
[
"Get",
"a",
"list",
"of",
"keys",
"for",
"the",
"accounts"
] |
TDG-Platform/cloud-harness
|
python
|
https://github.com/TDG-Platform/cloud-harness/blob/1d8f972f861816b90785a484e9bec5bd4bc2f569/gbdx_cloud_harness/services/account_storage_service.py#L144-L156
|
[
"def",
"list",
"(",
"self",
",",
"s3_folder",
"=",
"''",
",",
"full_key_data",
"=",
"False",
")",
":",
"if",
"not",
"s3_folder",
".",
"startswith",
"(",
"'/'",
")",
":",
"s3_folder",
"=",
"'/'",
"+",
"s3_folder",
"s3_prefix",
"=",
"self",
".",
"prefix",
"+",
"s3_folder",
"bucket_data",
"=",
"self",
".",
"client",
".",
"list_objects",
"(",
"Bucket",
"=",
"self",
".",
"bucket",
",",
"Prefix",
"=",
"s3_prefix",
")",
"if",
"full_key_data",
":",
"return",
"bucket_data",
"[",
"'Contents'",
"]",
"else",
":",
"return",
"[",
"k",
"[",
"'Key'",
"]",
"for",
"k",
"in",
"bucket_data",
"[",
"'Contents'",
"]",
"]"
] |
1d8f972f861816b90785a484e9bec5bd4bc2f569
|
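A usage sketch for list, assuming a configured AccountStorageService. One caveat worth noting: boto3's list_objects omits the 'Contents' key when nothing matches the prefix, so listing an empty folder raises KeyError as written:

def show_keys(storage):  # storage: hypothetical, see the class constructor
    for key in storage.list(s3_folder='myworkflow/input_images'):
        print(key)
    # full metadata (Key, Size, LastModified, ...) instead of bare key names:
    for obj in storage.list(s3_folder='myworkflow', full_key_data=True):
        print(obj['Key'], obj['Size'])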
test
|
Workflow._build_worklfow_json
|
Build a workflow definition from the cloud_harness task.
|
gbdx_cloud_harness/workflow.py
|
def _build_worklfow_json(self):
"""
Build a workflow definition from the cloud_harness task.
"""
wf_json = {'tasks': [], 'name': 'cloud-harness_%s' % str(uuid.uuid4())}
task_def = json.loads(self.task_template.json())
d = {
"name": task_def['name'],
"outputs": [],
"inputs": [],
"taskType": task_def['taskType']
}
# Add input ports
for port in self.task_template.input_ports:
port_value = port.value
if port_value is False:
port_value = 'false'
if port_value is True:
port_value = 'true'
d['inputs'].append({
"name": port._name,
"value": port_value
})
# Add output ports
for port in self.task_template.output_ports:
d['outputs'].append({
"name": port._name
})
# Add task to workflow
wf_json['tasks'].append(d)
# Add port to be saved
for port in self.task_template.output_ports:
# Add save data locations
if hasattr(port, 'stageToS3') and port.stageToS3:
save_location = '{customer_storage}/{run_name}/{port}'.format(
customer_storage=self.storage.location,
run_name=self.task_template.run_name,
port=port.name
)
new_task = dict(**self.STAGE_TO_S3)
new_task['inputs'] = [
{'name': 'data', 'source': '%s:%s' % (task_def['name'], port._name)},
{'name': 'destination', 'value': save_location}
]
wf_json['tasks'].append(new_task)
return wf_json
|
def _build_worklfow_json(self):
"""
Build a workflow definition from the cloud_harness task.
"""
wf_json = {'tasks': [], 'name': 'cloud-harness_%s' % str(uuid.uuid4())}
task_def = json.loads(self.task_template.json())
d = {
"name": task_def['name'],
"outputs": [],
"inputs": [],
"taskType": task_def['taskType']
}
# Add input ports
for port in self.task_template.input_ports:
port_value = port.value
if port_value is False:
port_value = 'false'
if port_value is True:
port_value = 'true'
d['inputs'].append({
"name": port._name,
"value": port_value
})
# Add output ports
for port in self.task_template.output_ports:
d['outputs'].append({
"name": port._name
})
# Add task to workflow
wf_json['tasks'].append(d)
# Add port to be saved
for port in self.task_template.output_ports:
# Add save data locations
if hasattr(port, 'stageToS3') and port.stageToS3:
save_location = '{customer_storage}/{run_name}/{port}'.format(
customer_storage=self.storage.location,
run_name=self.task_template.run_name,
port=port.name
)
new_task = dict(**self.STAGE_TO_S3)
new_task['inputs'] = [
{'name': 'data', 'source': '%s:%s' % (task_def['name'], port._name)},
{'name': 'destination', 'value': save_location}
]
wf_json['tasks'].append(new_task)
return wf_json
|
[
"Build",
"a",
"workflow",
"definition",
"from",
"the",
"cloud_harness",
"task",
"."
] |
TDG-Platform/cloud-harness
|
python
|
https://github.com/TDG-Platform/cloud-harness/blob/1d8f972f861816b90785a484e9bec5bd4bc2f569/gbdx_cloud_harness/workflow.py#L44-L98
|
[
"def",
"_build_worklfow_json",
"(",
"self",
")",
":",
"wf_json",
"=",
"{",
"'tasks'",
":",
"[",
"]",
",",
"'name'",
":",
"'cloud-harness_%s'",
"%",
"str",
"(",
"uuid",
".",
"uuid4",
"(",
")",
")",
"}",
"task_def",
"=",
"json",
".",
"loads",
"(",
"self",
".",
"task_template",
".",
"json",
"(",
")",
")",
"d",
"=",
"{",
"\"name\"",
":",
"task_def",
"[",
"'name'",
"]",
",",
"\"outputs\"",
":",
"[",
"]",
",",
"\"inputs\"",
":",
"[",
"]",
",",
"\"taskType\"",
":",
"task_def",
"[",
"'taskType'",
"]",
"}",
"# Add input ports",
"for",
"port",
"in",
"self",
".",
"task_template",
".",
"input_ports",
":",
"port_value",
"=",
"port",
".",
"value",
"if",
"port_value",
"is",
"False",
":",
"port_value",
"=",
"'false'",
"if",
"port_value",
"is",
"True",
":",
"port_value",
"=",
"'true'",
"d",
"[",
"'inputs'",
"]",
".",
"append",
"(",
"{",
"\"name\"",
":",
"port",
".",
"_name",
",",
"\"value\"",
":",
"port_value",
"}",
")",
"# Add output ports",
"for",
"port",
"in",
"self",
".",
"task_template",
".",
"output_ports",
":",
"d",
"[",
"'outputs'",
"]",
".",
"append",
"(",
"{",
"\"name\"",
":",
"port",
".",
"_name",
"}",
")",
"# Add task to workflow",
"wf_json",
"[",
"'tasks'",
"]",
".",
"append",
"(",
"d",
")",
"# Add port to be saved",
"for",
"port",
"in",
"self",
".",
"task_template",
".",
"output_ports",
":",
"# Add save data locations",
"if",
"hasattr",
"(",
"port",
",",
"'stageToS3'",
")",
"and",
"port",
".",
"stageToS3",
":",
"save_location",
"=",
"'{customer_storage}/{run_name}/{port}'",
".",
"format",
"(",
"customer_storage",
"=",
"self",
".",
"storage",
".",
"location",
",",
"run_name",
"=",
"self",
".",
"task_template",
".",
"run_name",
",",
"port",
"=",
"port",
".",
"name",
")",
"new_task",
"=",
"dict",
"(",
"*",
"*",
"self",
".",
"STAGE_TO_S3",
")",
"new_task",
"[",
"'inputs'",
"]",
"=",
"[",
"{",
"'name'",
":",
"'data'",
",",
"'source'",
":",
"'%s:%s'",
"%",
"(",
"task_def",
"[",
"'name'",
"]",
",",
"port",
".",
"_name",
")",
"}",
",",
"{",
"'name'",
":",
"'destination'",
",",
"'value'",
":",
"save_location",
"}",
"]",
"wf_json",
"[",
"'tasks'",
"]",
".",
"append",
"(",
"new_task",
")",
"return",
"wf_json"
] |
1d8f972f861816b90785a484e9bec5bd4bc2f569
|
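For orientation, this is roughly the shape of the workflow JSON the builder above emits for a task with one input port, one output port, and stageToS3 set; all concrete names are illustrative, and the second task only sketches the inputs, since the STAGE_TO_S3 template is not shown in this record:

example_workflow = {
    'name': 'cloud-harness_<uuid4>',
    'tasks': [
        {
            'name': 'MyTask',
            'taskType': 'MyTaskType',
            'inputs': [{'name': 'data_in', 'value': 's3://bucket/prefix/data_in'}],
            'outputs': [{'name': 'data_out'}],
        },
        {   # appended per output port with stageToS3=True; other fields come from STAGE_TO_S3
            'inputs': [
                {'name': 'data', 'source': 'MyTask:data_out'},
                {'name': 'destination', 'value': '<customer_storage>/<run_name>/data_out'},
            ],
        },
    ],
}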
test
|
Workflow.execute
|
Execute the cloud_harness task.
|
gbdx_cloud_harness/workflow.py
|
def execute(self, override_wf_json=None):
"""
Execute the cloud_harness task.
"""
r = self.gbdx.post(
self.URL,
json=self.json if override_wf_json is None else override_wf_json
)
try:
r.raise_for_status()
except:
print("GBDX API Status Code: %s" % r.status_code)
print("GBDX API Response: %s" % r.text)
self.id = None
return
self.id = r.json()['id']
self._refresh_status()
|
def execute(self, override_wf_json=None):
"""
Execute the cloud_harness task.
"""
r = self.gbdx.post(
self.URL,
json=self.json if override_wf_json is None else override_wf_json
)
try:
r.raise_for_status()
except:
print("GBDX API Status Code: %s" % r.status_code)
print("GBDX API Response: %s" % r.text)
self.id = None
return
self.id = r.json()['id']
self._refresh_status()
|
[
"Execute",
"the",
"cloud_harness",
"task",
"."
] |
TDG-Platform/cloud-harness
|
python
|
https://github.com/TDG-Platform/cloud-harness/blob/1d8f972f861816b90785a484e9bec5bd4bc2f569/gbdx_cloud_harness/workflow.py#L100-L118
|
[
"def",
"execute",
"(",
"self",
",",
"override_wf_json",
"=",
"None",
")",
":",
"r",
"=",
"self",
".",
"gbdx",
".",
"post",
"(",
"self",
".",
"URL",
",",
"json",
"=",
"self",
".",
"json",
"if",
"override_wf_json",
"is",
"None",
"else",
"override_wf_json",
")",
"try",
":",
"r",
".",
"raise_for_status",
"(",
")",
"except",
":",
"print",
"(",
"\"GBDX API Status Code: %s\"",
"%",
"r",
".",
"status_code",
")",
"print",
"(",
"\"GBDX API Response: %s\"",
"%",
"r",
".",
"text",
")",
"self",
".",
"id",
"=",
"None",
"return",
"self",
".",
"id",
"=",
"r",
".",
"json",
"(",
")",
"[",
"'id'",
"]",
"self",
".",
"_refresh_status",
"(",
")"
] |
1d8f972f861816b90785a484e9bec5bd4bc2f569
|
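A minimal calling sketch; the Workflow instance and its gbdx session are assumed to be set up elsewhere:

def run(workflow):  # workflow: a configured Workflow instance (hypothetical)
    workflow.execute()
    if workflow.id is None:
        raise RuntimeError('submission failed; see the printed GBDX API response')
    print('submitted workflow %s' % workflow.id)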
test
|
Workflow.monitor_run
|
Monitor the workflow events and display a spinner while running.
|
gbdx_cloud_harness/workflow.py
|
def monitor_run(self): # pragma: no cover
"""
Monitor the workflow events and display a spinner while running.
"""
spinner = itertools.cycle(['-', '/', '|', '\\'])
while not self.complete:
for i in xrange(300):
sys.stdout.write(spinner.next())
sys.stdout.flush()
sys.stdout.write('\b')
time.sleep(0.03)
if self.succeeded:
sys.stdout.write("\nWorkflow completed successfully\n")
return True
else:
sys.stdout.write("\nWorkflow failed: %s\n" % self.status)
return False
|
def monitor_run(self): # pragma: no cover
"""
Monitor the workflow events and display a spinner while running.
"""
spinner = itertools.cycle(['-', '/', '|', '\\'])
while not self.complete:
for i in xrange(300):
sys.stdout.write(spinner.next())
sys.stdout.flush()
sys.stdout.write('\b')
time.sleep(0.03)
if self.succeeded:
sys.stdout.write("\nWorkflow completed successfully\n")
return True
else:
sys.stdout.write("\nWorkflow failed: %s\n" % self.status)
return False
|
[
"Monitor",
"the",
"workflows",
"events",
"and",
"display",
"spinner",
"while",
"running",
".",
":",
"param",
"workflow",
":",
"the",
"workflow",
"object"
] |
TDG-Platform/cloud-harness
|
python
|
https://github.com/TDG-Platform/cloud-harness/blob/1d8f972f861816b90785a484e9bec5bd4bc2f569/gbdx_cloud_harness/workflow.py#L134-L154
|
[
"def",
"monitor_run",
"(",
"self",
")",
":",
"# pragma: no cover",
"spinner",
"=",
"itertools",
".",
"cycle",
"(",
"[",
"'-'",
",",
"'/'",
",",
"'|'",
",",
"'\\\\'",
"]",
")",
"while",
"not",
"self",
".",
"complete",
":",
"for",
"i",
"in",
"xrange",
"(",
"300",
")",
":",
"sys",
".",
"stdout",
".",
"write",
"(",
"spinner",
".",
"next",
"(",
")",
")",
"sys",
".",
"stdout",
".",
"flush",
"(",
")",
"sys",
".",
"stdout",
".",
"write",
"(",
"'\\b'",
")",
"time",
".",
"sleep",
"(",
"0.03",
")",
"if",
"self",
".",
"succeeded",
":",
"sys",
".",
"stdout",
".",
"write",
"(",
"\"\\nWorkflow completed successfully\\n\"",
")",
"return",
"True",
"else",
":",
"sys",
".",
"stdout",
".",
"write",
"(",
"\"\\nWorkflow failed: %s\\n\"",
"%",
"self",
".",
"status",
")",
"return",
"False"
] |
1d8f972f861816b90785a484e9bec5bd4bc2f569
|
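The spinner above is Python 2 code (xrange, spinner.next()). A Python 3 rendering of the same idea, detached from the class, might look like this sketch; is_complete is an assumed zero-argument callable:

import itertools
import sys
import time

def spin(is_complete, ticks=300, delay=0.03):
    spinner = itertools.cycle('-/|\\')
    while not is_complete():
        for _ in range(ticks):
            sys.stdout.write(next(spinner))
            sys.stdout.flush()
            sys.stdout.write('\b')  # rewind so the next character overwrites
            time.sleep(delay)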
test
|
TaskTemplate.finalize
|
:param success_or_fail: string that is 'success' or 'fail'
:param message: optional reason message
|
gbdx_task_template/base.py
|
def finalize(self, success_or_fail, message=''):
"""
:param success_or_fail: string that is 'success' or 'fail'
:param message: optional reason message
"""
if not self.__remote_run:
return json.dumps({'status': success_or_fail, 'reason': message}, indent=4)
else:
super(TaskTemplate, self).finalize(success_or_fail, message)
|
def finalize(self, success_or_fail, message=''):
"""
:param success_or_fail: string that is 'success' or 'fail'
:param message: optional reason message
"""
if not self.__remote_run:
return json.dumps({'status': success_or_fail, 'reason': message}, indent=4)
else:
super(TaskTemplate, self).finalize(success_or_fail, message)
|
[
":",
"param",
"success_or_fail",
":",
"string",
"that",
"is",
"success",
"or",
"fail",
":",
"param",
"message",
":"
] |
TDG-Platform/cloud-harness
|
python
|
https://github.com/TDG-Platform/cloud-harness/blob/1d8f972f861816b90785a484e9bec5bd4bc2f569/gbdx_task_template/base.py#L45-L53
|
[
"def",
"finalize",
"(",
"self",
",",
"success_or_fail",
",",
"message",
"=",
"''",
")",
":",
"if",
"not",
"self",
".",
"__remote_run",
":",
"return",
"json",
".",
"dumps",
"(",
"{",
"'status'",
":",
"success_or_fail",
",",
"'reason'",
":",
"message",
"}",
",",
"indent",
"=",
"4",
")",
"else",
":",
"super",
"(",
"TaskTemplate",
",",
"self",
")",
".",
"finalize",
"(",
"success_or_fail",
",",
"message",
")"
] |
1d8f972f861816b90785a484e9bec5bd4bc2f569
|
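A sketch of what the local (non-remote) branch returns; the remote branch defers to the parent class, which is not shown in this record:

import json

# equivalent to finalize('success', 'all ports validated') on a local run:
print(json.dumps({'status': 'success', 'reason': 'all ports validated'}, indent=4))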
test
|
TaskTemplate.check_and_create_outputs
|
Iterate through the task outputs.
Three scenarios:
- User is running locally, check that output folders exist.
- User is running remotely, when the docker container runs, check that output folders exist on the filesystem.
- Else, do nothing.
:return: None
|
gbdx_task_template/base.py
|
def check_and_create_outputs(self):
"""
Iterate through the task outputs.
Three scenarios:
- User is running locally, check that output folders exist.
- User is running remotely, when the docker container runs, check that output folders exist on the filesystem.
- Else, do nothing.
:return: None
"""
if self.task is None:
raise TaskTemplateError('A task must be initialized before running a TaskTemplate subclass.')
for output_port in self.task.output_ports:
# Make the dir
if output_port.type == 'directory':
try:
is_file = os.path.isabs(output_port.value) and not os.path.isfile(output_port.value)
is_remote = output_port.is_valid_s3_url(output_port.value)
except LocalPortValidationError:
is_file = False
is_remote = None
except RemotePortValidationError:
is_remote = False
self.logit.debug('Create Outputs: %s -> is_filesys %s, is_valid_s3_url %s' %
(output_port.name, is_file, is_remote))
if is_file and not is_remote:
try:
os.makedirs(output_port.value)
except OSError as e:
self.logit.exception(e)
if 'File exists' not in e.strerror:
raise e
|
def check_and_create_outputs(self):
"""
Iterate through the task outputs.
Three scenarios:
- User is running locally, check that output folders exist.
- User is running remotely, when the docker container runs, check that output folders exist on the filesystem.
- Else, do nothing.
:return: None
"""
if self.task is None:
raise TaskTemplateError('A task must be initialized before running a TaskTemplate subclass.')
for output_port in self.task.output_ports:
# Make the dir
if output_port.type == 'directory':
try:
is_file = os.path.isabs(output_port.value) and not os.path.isfile(output_port.value)
is_remote = output_port.is_valid_s3_url(output_port.value)
except LocalPortValidationError:
is_file = False
is_remote = None
except RemotePortValidationError:
is_remote = False
self.logit.debug('Create Outputs: %s -> is_filesys %s, is_valid_s3_url %s' %
(output_port.name, is_file, is_remote))
if is_file and not is_remote:
try:
os.makedirs(output_port.value)
except OSError as e:
self.logit.exception(e)
if 'File exists' not in e.strerror:
raise e
|
[
"Iterate",
"through",
"the",
"task",
"outputs",
".",
"Two",
"scenarios",
":",
"-",
"User",
"is",
"running",
"locally",
"check",
"that",
"output",
"folders",
"exist",
".",
"-",
"User",
"is",
"running",
"remotely",
"when",
"docker",
"container",
"runs",
"filesystem",
"check",
"that",
"output",
"folders",
"exist",
".",
"-",
"Else",
"do",
"nothing",
".",
":",
"return",
":",
"None"
] |
TDG-Platform/cloud-harness
|
python
|
https://github.com/TDG-Platform/cloud-harness/blob/1d8f972f861816b90785a484e9bec5bd4bc2f569/gbdx_task_template/base.py#L55-L88
|
[
"def",
"check_and_create_outputs",
"(",
"self",
")",
":",
"if",
"self",
".",
"task",
"is",
"None",
":",
"raise",
"TaskTemplateError",
"(",
"'A task must be initialized before running a TaskTemplate subclass.'",
")",
"for",
"output_port",
"in",
"self",
".",
"task",
".",
"output_ports",
":",
"# Make the dir",
"if",
"output_port",
".",
"type",
"==",
"'directory'",
":",
"try",
":",
"is_file",
"=",
"os",
".",
"path",
".",
"isabs",
"(",
"output_port",
".",
"value",
")",
"and",
"not",
"os",
".",
"path",
".",
"isfile",
"(",
"output_port",
".",
"value",
")",
"is_remote",
"=",
"output_port",
".",
"is_valid_s3_url",
"(",
"output_port",
".",
"value",
")",
"except",
"LocalPortValidationError",
":",
"is_file",
"=",
"False",
"is_remote",
"=",
"None",
"except",
"RemotePortValidationError",
":",
"is_remote",
"=",
"False",
"self",
".",
"logit",
".",
"debug",
"(",
"'Create Outputs: %s -> is_filesys %s, is_valid_s3_url %s'",
"%",
"(",
"output_port",
".",
"name",
",",
"is_file",
",",
"is_remote",
")",
")",
"if",
"is_file",
"and",
"not",
"is_remote",
":",
"try",
":",
"os",
".",
"makedirs",
"(",
"output_port",
".",
"value",
")",
"except",
"OSError",
"as",
"e",
":",
"self",
".",
"logit",
".",
"exception",
"(",
"e",
")",
"if",
"'File exists'",
"not",
"in",
"e",
".",
"strerror",
":",
"raise",
"e"
] |
1d8f972f861816b90785a484e9bec5bd4bc2f569
|
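The makedirs/OSError dance above predates exist_ok; on Python 3 the same "create if missing" step collapses to one call, as in this sketch:

import os

def ensure_dir(path):
    os.makedirs(path, exist_ok=True)  # no 'File exists' string check needed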
test
|
PortService.upload_input_ports
|
Takes the workflow value for each port and does the following:
* If local filesystem -> Uploads local files to s3.
S3 location will be as follows:
gbd-customer-data/<acct_id>/<workflow_name>/<task_name>/<port_name>/
* If S3 url -> do nothing.
:returns the updated workflow with S3 urls.
|
gbdx_cloud_harness/services/port_service.py
|
def upload_input_ports(self, port_list=None, exclude_list=None):
"""
Takes the workflow value for each port and does the following:
* If local filesystem -> Uploads local files to s3.
S3 location will be as follows:
gbd-customer-data/<acct_id>/<workflow_name>/<task_name>/<port_name>/
* If S3 url -> do nothing.
:returns the updated workflow with S3 urls.
"""
input_ports = self._task.input_ports
for port in input_ports:
# If port list is not None, then only allow port names in the list
if port_list and port.name not in port_list:
continue
# Exclude ports as provided
if exclude_list and port.name in exclude_list:
continue
# port_value = port.get('value', None)
# Check if the port value is a valid file system location
if not port.value or not os.path.isabs(port.value) or not os.path.isdir(port.value):
continue
# The prefix for each key that is uploaded, not including the acct id.
prefix = '{run_name}/{port}'.format(
run_name=self._task.run_name,
# task=self._task.name,
port=port.name
)
port_files = self._get_port_files(port.value, prefix)
# Update the port value with an S3 url
port.value = '%s/%s' % (self.s3_root, prefix)
if len(port_files) == 0:
printer('Port %s is empty, push to S3 skipped' % port.name)
else:
self.storage.upload(port_files)
printer('Port %s pushed to account storage, %s files' % (port.name, len(port_files)))
|
def upload_input_ports(self, port_list=None, exclude_list=None):
"""
Takes the workflow value for each port and does the following:
* If local filesystem -> Uploads local files to s3.
S3 location will be as follows:
gbd-customer-data/<acct_id>/<workflow_name>/<task_name>/<port_name>/
* If S3 url -> do nothing.
:returns the updated workflow with S3 urls.
"""
input_ports = self._task.input_ports
for port in input_ports:
# If port list is not None, then only allow port names in the list
if port_list and port.name not in port_list:
continue
# Exclude ports as provided
if exclude_list and port.name in exclude_list:
continue
# port_value = port.get('value', None)
# Check if the port value is a valid file system location
if not port.value or not os.path.isabs(port.value) or not os.path.isdir(port.value):
continue
# The prefix for each key that is uploaded, not including the acct id.
prefix = '{run_name}/{port}'.format(
run_name=self._task.run_name,
# task=self._task.name,
port=port.name
)
port_files = self._get_port_files(port.value, prefix)
# Update the port value with an S3 url
port.value = '%s/%s' % (self.s3_root, prefix)
if len(port_files) == 0:
printer('Port %s is empty, push to S3 skipped' % port.name)
else:
self.storage.upload(port_files)
printer('Port %s pushed to account storage, %s files' % (port.name, len(port_files)))
|
[
"Takes",
"the",
"workflow",
"value",
"for",
"each",
"port",
"and",
"does",
"the",
"following",
":",
"*",
"If",
"local",
"filesystem",
"-",
">",
"Uploads",
"locally",
"files",
"to",
"s3",
".",
"S3",
"location",
"will",
"be",
"as",
"follows",
":",
"gbd",
"-",
"customer",
"-",
"data",
"/",
"<acct_id",
">",
"/",
"<workflow_name",
">",
"/",
"<task_name",
">",
"/",
"<port_name",
">",
"/",
"*",
"If",
"S3",
"url",
"-",
">",
"do",
"nothing",
".",
":",
"returns",
"the",
"update",
"workflow",
"with",
"S3",
"urls",
"."
] |
TDG-Platform/cloud-harness
|
python
|
https://github.com/TDG-Platform/cloud-harness/blob/1d8f972f861816b90785a484e9bec5bd4bc2f569/gbdx_cloud_harness/services/port_service.py#L25-L69
|
[
"def",
"upload_input_ports",
"(",
"self",
",",
"port_list",
"=",
"None",
",",
"exclude_list",
"=",
"None",
")",
":",
"input_ports",
"=",
"self",
".",
"_task",
".",
"input_ports",
"for",
"port",
"in",
"input_ports",
":",
"# If port list is not None, then only allow port names in the list",
"if",
"port_list",
"and",
"port",
".",
"name",
"not",
"in",
"port_list",
":",
"continue",
"# Exclude ports as provided",
"if",
"exclude_list",
"and",
"port",
".",
"name",
"in",
"exclude_list",
":",
"continue",
"# port_value = port.get('value', None)",
"# Check if the port value is a valid file system location",
"if",
"not",
"port",
".",
"value",
"or",
"not",
"os",
".",
"path",
".",
"isabs",
"(",
"port",
".",
"value",
")",
"or",
"not",
"os",
".",
"path",
".",
"isdir",
"(",
"port",
".",
"value",
")",
":",
"continue",
"# The prefix for each key that is uploaded, not including the the acct id.",
"prefix",
"=",
"'{run_name}/{port}'",
".",
"format",
"(",
"run_name",
"=",
"self",
".",
"_task",
".",
"run_name",
",",
"# task=self._task.name,",
"port",
"=",
"port",
".",
"name",
")",
"port_files",
"=",
"self",
".",
"_get_port_files",
"(",
"port",
".",
"value",
",",
"prefix",
")",
"# Update the port value with an S3 url",
"port",
".",
"value",
"=",
"'%s/%s'",
"%",
"(",
"self",
".",
"s3_root",
",",
"prefix",
")",
"if",
"len",
"(",
"port_files",
")",
"==",
"0",
":",
"printer",
"(",
"'Port %s is empty, push to S3 skipped'",
"%",
"port",
".",
"name",
")",
"else",
":",
"self",
".",
"storage",
".",
"upload",
"(",
"port_files",
")",
"printer",
"(",
"'Port %s pushed to account storage, %s files'",
"%",
"(",
"port",
".",
"name",
",",
"len",
"(",
"port_files",
")",
")",
")"
] |
1d8f972f861816b90785a484e9bec5bd4bc2f569
|
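A calling sketch for upload_input_ports, assuming a configured PortService; the port names are hypothetical:

def push_inputs(port_service):
    # upload only the named directory port, leaving the rest untouched
    port_service.upload_input_ports(port_list=['data_in'])
    # or upload every eligible port except one
    port_service.upload_input_ports(exclude_list=['mask'])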
test
|
PortService._get_port_files
|
Find files for the local_path and return tuples of filename and key name
:param local_path: the local path to search for files
:param prefix: the S3 prefix for each key name on S3
|
gbdx_cloud_harness/services/port_service.py
|
def _get_port_files(local_path, prefix):
"""
Find files for the local_path and return tuples of filename and key name
:param local_path: the local path to search for files
:param prefix: the S3 prefix for each key name on S3
"""
source_files = []
for root, dirs, files in os.walk(local_path, topdown=False):
for name in files:
fname = os.path.join(root, name)
key_name = '%s/%s' % (prefix, fname[len(local_path) + 1:])
source_files.append((fname, key_name))
return source_files
|
def _get_port_files(local_path, prefix):
"""
Find files for the local_path and return tuples of filename and key name
:param local_path: the local path to search for files
:param prefix: the S3 prefix for each key name on S3
"""
source_files = []
for root, dirs, files in os.walk(local_path, topdown=False):
for name in files:
fname = os.path.join(root, name)
key_name = '%s/%s' % (prefix, fname[len(local_path) + 1:])
source_files.append((fname, key_name))
return source_files
|
[
"Find",
"files",
"for",
"the",
"local_path",
"and",
"return",
"tuples",
"of",
"filename",
"and",
"keynames",
":",
"param",
"local_path",
":",
"the",
"local",
"path",
"to",
"search",
"for",
"files",
":",
"param",
"prefix",
":",
"the",
"S3",
"prefix",
"for",
"each",
"key",
"name",
"on",
"S3"
] |
TDG-Platform/cloud-harness
|
python
|
https://github.com/TDG-Platform/cloud-harness/blob/1d8f972f861816b90785a484e9bec5bd4bc2f569/gbdx_cloud_harness/services/port_service.py#L72-L89
|
[
"def",
"_get_port_files",
"(",
"local_path",
",",
"prefix",
")",
":",
"source_files",
"=",
"[",
"]",
"for",
"root",
",",
"dirs",
",",
"files",
"in",
"os",
".",
"walk",
"(",
"local_path",
",",
"topdown",
"=",
"False",
")",
":",
"for",
"name",
"in",
"files",
":",
"fname",
"=",
"os",
".",
"path",
".",
"join",
"(",
"root",
",",
"name",
")",
"key_name",
"=",
"'%s/%s'",
"%",
"(",
"prefix",
",",
"fname",
"[",
"len",
"(",
"local_path",
")",
"+",
"1",
":",
"]",
")",
"source_files",
".",
"append",
"(",
"(",
"fname",
",",
"key_name",
")",
")",
"return",
"source_files"
] |
1d8f972f861816b90785a484e9bec5bd4bc2f569
|
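A self-contained demonstration of the (filename, key name) pairs the helper builds, using a throwaway directory:

import os
import tempfile

root = tempfile.mkdtemp()
os.makedirs(os.path.join(root, 'sub'))
open(os.path.join(root, 'sub', 'a.txt'), 'w').close()
# mirrors PortService._get_port_files(root, 'run/port')
for dirpath, _, files in os.walk(root, topdown=False):
    for name in files:
        fname = os.path.join(dirpath, name)
        print(fname, '->', 'run/port/%s' % fname[len(root) + 1:])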
test
|
archive
|
Move an active project to the archive.
|
proj/__init__.py
|
def archive(folder, dry_run=False):
"Move an active project to the archive."
# error handling on archive_dir already done in main()
for f in folder:
if not os.path.exists(f):
bail('folder does not exist: ' + f)
_archive_safe(folder, PROJ_ARCHIVE, dry_run=dry_run)
|
def archive(folder, dry_run=False):
"Move an active project to the archive."
# error handling on archive_dir already done in main()
for f in folder:
if not os.path.exists(f):
bail('folder does not exist: ' + f)
_archive_safe(folder, PROJ_ARCHIVE, dry_run=dry_run)
|
[
"Move",
"an",
"active",
"project",
"to",
"the",
"archive",
"."
] |
larsyencken/proj
|
python
|
https://github.com/larsyencken/proj/blob/44fd72aeb9bbf72046d81c4e9e4306a23335dc0a/proj/__init__.py#L55-L63
|
[
"def",
"archive",
"(",
"folder",
",",
"dry_run",
"=",
"False",
")",
":",
"# error handling on archive_dir already done in main()",
"for",
"f",
"in",
"folder",
":",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"f",
")",
":",
"bail",
"(",
"'folder does not exist: '",
"+",
"f",
")",
"_archive_safe",
"(",
"folder",
",",
"PROJ_ARCHIVE",
",",
"dry_run",
"=",
"dry_run",
")"
] |
44fd72aeb9bbf72046d81c4e9e4306a23335dc0a
|
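A calling sketch; PROJ_ARCHIVE is assumed to be configured and validated by main(), and the folder names are hypothetical:

archive(['oldproject'])            # moves ./oldproject under the archive tree
archive(['a', 'b'], dry_run=True)  # prints what would move, moves nothing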
test
|
_mkdir
|
The equivalent of 'mkdir -p' in shell.
|
proj/__init__.py
|
def _mkdir(p):
"The equivalent of 'mkdir -p' in shell."
isdir = os.path.isdir
stack = [os.path.abspath(p)]
while not isdir(stack[-1]):
parent_dir = os.path.dirname(stack[-1])
stack.append(parent_dir)
while stack:
p = stack.pop()
if not isdir(p):
os.mkdir(p)
|
def _mkdir(p):
"The equivalent of 'mkdir -p' in shell."
isdir = os.path.isdir
stack = [os.path.abspath(p)]
while not isdir(stack[-1]):
parent_dir = os.path.dirname(stack[-1])
stack.append(parent_dir)
while stack:
p = stack.pop()
if not isdir(p):
os.mkdir(p)
|
[
"The",
"equivalent",
"of",
"mkdir",
"-",
"p",
"in",
"shell",
"."
] |
larsyencken/proj
|
python
|
https://github.com/larsyencken/proj/blob/44fd72aeb9bbf72046d81c4e9e4306a23335dc0a/proj/__init__.py#L106-L118
|
[
"def",
"_mkdir",
"(",
"p",
")",
":",
"isdir",
"=",
"os",
".",
"path",
".",
"isdir",
"stack",
"=",
"[",
"os",
".",
"path",
".",
"abspath",
"(",
"p",
")",
"]",
"while",
"not",
"isdir",
"(",
"stack",
"[",
"-",
"1",
"]",
")",
":",
"parent_dir",
"=",
"os",
".",
"path",
".",
"dirname",
"(",
"stack",
"[",
"-",
"1",
"]",
")",
"stack",
".",
"append",
"(",
"parent_dir",
")",
"while",
"stack",
":",
"p",
"=",
"stack",
".",
"pop",
"(",
")",
"if",
"not",
"isdir",
"(",
"p",
")",
":",
"os",
".",
"mkdir",
"(",
"p",
")"
] |
44fd72aeb9bbf72046d81c4e9e4306a23335dc0a
|
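On Python 3 the helper is equivalent to a single standard-library call, as in this sketch:

import os

os.makedirs('/tmp/a/b/c', exist_ok=True)  # same effect as _mkdir('/tmp/a/b/c')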
test
|
list
|
List the contents of the archive directory.
|
proj/__init__.py
|
def list(pattern=()):
"List the contents of the archive directory."
# strategy: pick the intersection of all the patterns the user provides
globs = ['*{0}*'.format(p) for p in pattern] + ['*']
matches = []
offset = len(PROJ_ARCHIVE) + 1
for suffix in globs:
glob_pattern = os.path.join(PROJ_ARCHIVE, '*', '*', suffix)
matches.append(set(
f[offset:] for f in glob.glob(glob_pattern)
))
matches = reduce(lambda x, y: x.intersection(y),
matches)
for m in sorted(matches):
print(m)
|
def list(pattern=()):
"List the contents of the archive directory."
# strategy: pick the intersection of all the patterns the user provides
globs = ['*{0}*'.format(p) for p in pattern] + ['*']
matches = []
offset = len(PROJ_ARCHIVE) + 1
for suffix in globs:
glob_pattern = os.path.join(PROJ_ARCHIVE, '*', '*', suffix)
matches.append(set(
f[offset:] for f in glob.glob(glob_pattern)
))
matches = reduce(lambda x, y: x.intersection(y),
matches)
for m in sorted(matches):
print(m)
|
[
"List",
"the",
"contents",
"of",
"the",
"archive",
"directory",
"."
] |
larsyencken/proj
|
python
|
https://github.com/larsyencken/proj/blob/44fd72aeb9bbf72046d81c4e9e4306a23335dc0a/proj/__init__.py#L123-L140
|
[
"def",
"list",
"(",
"pattern",
"=",
"(",
")",
")",
":",
"# strategy: pick the intersection of all the patterns the user provides",
"globs",
"=",
"[",
"'*{0}*'",
".",
"format",
"(",
"p",
")",
"for",
"p",
"in",
"pattern",
"]",
"+",
"[",
"'*'",
"]",
"matches",
"=",
"[",
"]",
"offset",
"=",
"len",
"(",
"PROJ_ARCHIVE",
")",
"+",
"1",
"for",
"suffix",
"in",
"globs",
":",
"glob_pattern",
"=",
"os",
".",
"path",
".",
"join",
"(",
"PROJ_ARCHIVE",
",",
"'*'",
",",
"'*'",
",",
"suffix",
")",
"matches",
".",
"append",
"(",
"set",
"(",
"f",
"[",
"offset",
":",
"]",
"for",
"f",
"in",
"glob",
".",
"glob",
"(",
"glob_pattern",
")",
")",
")",
"matches",
"=",
"reduce",
"(",
"lambda",
"x",
",",
"y",
":",
"x",
".",
"intersection",
"(",
"y",
")",
",",
"matches",
")",
"for",
"m",
"in",
"sorted",
"(",
"matches",
")",
":",
"print",
"(",
"m",
")"
] |
44fd72aeb9bbf72046d81c4e9e4306a23335dc0a
|
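The intersection trick in isolation: each user-supplied pattern produces one set of glob hits, and only entries present in every set survive. A standalone sketch (on Python 3, reduce lives in functools):

from functools import reduce

candidates = [{'2019/05/site', '2019/06/blog'}, {'2019/06/blog'}]  # per-pattern hits
print(sorted(reduce(lambda x, y: x.intersection(y), candidates)))  # ['2019/06/blog']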
test
|
restore
|
Restore a project from the archive.
|
proj/__init__.py
|
def restore(folder):
"Restore a project from the archive."
if os.path.isdir(folder):
bail('a folder of the same name already exists!')
pattern = os.path.join(PROJ_ARCHIVE, '*', '*', folder)
matches = glob.glob(pattern)
if not matches:
bail('no project matches: ' + folder)
if len(matches) > 1:
print('Warning: multiple matches, picking the most recent',
file=sys.stderr)
source = sorted(matches)[-1]
print(source, '-->', folder)
shutil.move(source, '.')
|
def restore(folder):
"Restore a project from the archive."
if os.path.isdir(folder):
bail('a folder of the same name already exists!')
pattern = os.path.join(PROJ_ARCHIVE, '*', '*', folder)
matches = glob.glob(pattern)
if not matches:
bail('no project matches: ' + folder)
if len(matches) > 1:
print('Warning: multiple matches, picking the most recent',
file=sys.stderr)
source = sorted(matches)[-1]
print(source, '-->', folder)
shutil.move(source, '.')
|
[
"Restore",
"a",
"project",
"from",
"the",
"archive",
"."
] |
larsyencken/proj
|
python
|
https://github.com/larsyencken/proj/blob/44fd72aeb9bbf72046d81c4e9e4306a23335dc0a/proj/__init__.py#L145-L161
|
[
"def",
"restore",
"(",
"folder",
")",
":",
"if",
"os",
".",
"path",
".",
"isdir",
"(",
"folder",
")",
":",
"bail",
"(",
"'a folder of the same name already exists!'",
")",
"pattern",
"=",
"os",
".",
"path",
".",
"join",
"(",
"PROJ_ARCHIVE",
",",
"'*'",
",",
"'*'",
",",
"folder",
")",
"matches",
"=",
"glob",
".",
"glob",
"(",
"pattern",
")",
"if",
"not",
"matches",
":",
"bail",
"(",
"'no project matches: '",
"+",
"folder",
")",
"if",
"len",
"(",
"matches",
")",
">",
"1",
":",
"print",
"(",
"'Warning: multiple matches, picking the most recent'",
",",
"file",
"=",
"sys",
".",
"stderr",
")",
"source",
"=",
"sorted",
"(",
"matches",
")",
"[",
"-",
"1",
"]",
"print",
"(",
"source",
",",
"'-->'",
",",
"folder",
")",
"shutil",
".",
"move",
"(",
"source",
",",
"'.'",
")"
] |
44fd72aeb9bbf72046d81c4e9e4306a23335dc0a
|
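A calling sketch with a hypothetical project name:

# restores the newest archived copy of 'blog' into the current directory;
# bails if ./blog already exists or nothing in the archive matches
restore('blog')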
test
|
Client.new
|
Create new storage service client.
Arguments:
environment(str): The service environment to be used for the client.
'prod' or 'dev'.
access_token(str): The access token used to authenticate with the
service
Returns:
A storage_service.Client instance
|
hbp_service_client/storage_service/client.py
|
def new(cls, access_token, environment='prod'):
'''Create new storage service client.
Arguments:
environment(str): The service environment to be used for the client.
'prod' or 'dev'.
access_token(str): The access token used to authenticate with the
service
Returns:
A storage_service.Client instance
'''
api_client = ApiClient.new(access_token, environment)
return cls(api_client)
|
def new(cls, access_token, environment='prod'):
'''Create new storage service client.
Arguments:
environment(str): The service environment to be used for the client.
'prod' or 'dev'.
access_token(str): The access token used to authenticate with the
service
Returns:
A storage_service.Client instance
'''
api_client = ApiClient.new(access_token, environment)
return cls(api_client)
|
[
"Create",
"new",
"storage",
"service",
"client",
"."
] |
HumanBrainProject/hbp-service-client
|
python
|
https://github.com/HumanBrainProject/hbp-service-client/blob/b338fb41a7f0e7b9d654ff28fcf13a56d03bff4d/hbp_service_client/storage_service/client.py#L34-L48
|
[
"def",
"new",
"(",
"cls",
",",
"access_token",
",",
"environment",
"=",
"'prod'",
")",
":",
"api_client",
"=",
"ApiClient",
".",
"new",
"(",
"access_token",
",",
"environment",
")",
"return",
"cls",
"(",
"api_client",
")"
] |
b338fb41a7f0e7b9d654ff28fcf13a56d03bff4d
|
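A construction sketch; the token below is a placeholder, not a working credential:

from hbp_service_client.storage_service.client import Client

client = Client.new('my-oauth-token', environment='prod')  # placeholder token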
test
|
Client.list
|
List the entities found directly under the given path.
Args:
path (str): The path of the entity to be listed. Must start with a '/'.
Returns:
The list of entity names directly under the given path:
u'/12345/folder_1'
Raises:
StorageArgumentException: Invalid arguments
StorageForbiddenException: Server response code 403
StorageNotFoundException: Server response code 404
StorageException: other 400-600 error codes
|
hbp_service_client/storage_service/client.py
|
def list(self, path):
'''List the entities found directly under the given path.
Args:
path (str): The path of the entity to be listed. Must start with a '/'.
Returns:
The list of entity names directly under the given path:
u'/12345/folder_1'
Raises:
StorageArgumentException: Invalid arguments
StorageForbiddenException: Server response code 403
StorageNotFoundException: Server response code 404
StorageException: other 400-600 error codes
'''
self.__validate_storage_path(path)
entity = self.api_client.get_entity_by_query(path=path)
if entity['entity_type'] not in self.__BROWSABLE_TYPES:
raise StorageArgumentException('The entity type "{0}" cannot be '
'listed'.format(entity['entity_type']))
entity_uuid = entity['uuid']
file_names = []
# get files
more_pages = True
page_number = 1
while more_pages:
response = self.api_client.list_folder_content(
entity_uuid, page=page_number, ordering='name')
more_pages = response['next'] is not None
page_number += 1
for child in response['results']:
pattern = '/{name}' if child['entity_type'] == 'folder' else '{name}'
file_names.append(pattern.format(name=child['name']))
return file_names
|
def list(self, path):
'''List the entities found directly under the given path.
Args:
path (str): The path of the entity to be listed. Must start with a '/'.
Returns:
The list of entity names directly under the given path:
u'/12345/folder_1'
Raises:
StorageArgumentException: Invalid arguments
StorageForbiddenException: Server response code 403
StorageNotFoundException: Server response code 404
StorageException: other 400-600 error codes
'''
self.__validate_storage_path(path)
entity = self.api_client.get_entity_by_query(path=path)
if entity['entity_type'] not in self.__BROWSABLE_TYPES:
raise StorageArgumentException('The entity type "{0}" cannot be '
'listed'.format(entity['entity_type']))
entity_uuid = entity['uuid']
file_names = []
# get files
more_pages = True
page_number = 1
while more_pages:
response = self.api_client.list_folder_content(
entity_uuid, page=page_number, ordering='name')
more_pages = response['next'] is not None
page_number += 1
for child in response['results']:
pattern = '/{name}' if child['entity_type'] == 'folder' else '{name}'
file_names.append(pattern.format(name=child['name']))
return file_names
|
[
"List",
"the",
"entities",
"found",
"directly",
"under",
"the",
"given",
"path",
"."
] |
HumanBrainProject/hbp-service-client
|
python
|
https://github.com/HumanBrainProject/hbp-service-client/blob/b338fb41a7f0e7b9d654ff28fcf13a56d03bff4d/hbp_service_client/storage_service/client.py#L50-L88
|
[
"def",
"list",
"(",
"self",
",",
"path",
")",
":",
"self",
".",
"__validate_storage_path",
"(",
"path",
")",
"entity",
"=",
"self",
".",
"api_client",
".",
"get_entity_by_query",
"(",
"path",
"=",
"path",
")",
"if",
"entity",
"[",
"'entity_type'",
"]",
"not",
"in",
"self",
".",
"__BROWSABLE_TYPES",
":",
"raise",
"StorageArgumentException",
"(",
"'The entity type \"{0}\" cannot be'",
"'listed'",
".",
"format",
"(",
"entity",
"[",
"'entity_type'",
"]",
")",
")",
"entity_uuid",
"=",
"entity",
"[",
"'uuid'",
"]",
"file_names",
"=",
"[",
"]",
"# get files",
"more_pages",
"=",
"True",
"page_number",
"=",
"1",
"while",
"more_pages",
":",
"response",
"=",
"self",
".",
"api_client",
".",
"list_folder_content",
"(",
"entity_uuid",
",",
"page",
"=",
"page_number",
",",
"ordering",
"=",
"'name'",
")",
"more_pages",
"=",
"response",
"[",
"'next'",
"]",
"is",
"not",
"None",
"page_number",
"+=",
"1",
"for",
"child",
"in",
"response",
"[",
"'results'",
"]",
":",
"pattern",
"=",
"'/{name}'",
"if",
"child",
"[",
"'entity_type'",
"]",
"==",
"'folder'",
"else",
"'{name}'",
"file_names",
".",
"append",
"(",
"pattern",
".",
"format",
"(",
"name",
"=",
"child",
"[",
"'name'",
"]",
")",
")",
"return",
"file_names"
] |
b338fb41a7f0e7b9d654ff28fcf13a56d03bff4d
|
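A calling sketch, assuming a client built as above; note that folder names come back with a leading slash and file names without one:

def ls(client, path='/12345'):
    for name in client.list(path):
        kind = 'folder' if name.startswith('/') else 'file'
        print(kind, name)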
test
|
Client.download_file
|
Download a file from storage service to local disk.
Existing files on the target path will be overwritten.
The download is not recursive, as it only works on files.
Args:
path (str): The path of the entity to be downloaded. Must start with a '/'.
Returns:
None
Raises:
StorageArgumentException: Invalid arguments
StorageForbiddenException: Server response code 403
StorageNotFoundException: Server response code 404
StorageException: other 400-600 error codes
|
hbp_service_client/storage_service/client.py
|
def download_file(self, path, target_path):
'''Download a file from storage service to local disk.
Existing files on the target path will be overwritten.
The download is not recursive, as it only works on files.
Args:
path (str): The path of the entity to be downloaded. Must start with a '/'.
Returns:
None
Raises:
StorageArgumentException: Invalid arguments
StorageForbiddenException: Server response code 403
StorageNotFoundException: Server response code 404
StorageException: other 400-600 error codes
'''
self.__validate_storage_path(path)
entity = self.api_client.get_entity_by_query(path=path)
if entity['entity_type'] != 'file':
raise StorageArgumentException('Only file entities can be downloaded')
signed_url = self.api_client.get_signed_url(entity['uuid'])
response = self.api_client.download_signed_url(signed_url)
with open(target_path, "wb") as output:
for chunk in response.iter_content(chunk_size=1024):
output.write(chunk)
|
def download_file(self, path, target_path):
'''Download a file from storage service to local disk.
Existing files on the target path will be overwritten.
The download is not recursive, as it only works on files.
Args:
path (str): The path of the entity to be downloaded. Must start with a '/'.
Returns:
None
Raises:
StorageArgumentException: Invalid arguments
StorageForbiddenException: Server response code 403
StorageNotFoundException: Server response code 404
StorageException: other 400-600 error codes
'''
self.__validate_storage_path(path)
entity = self.api_client.get_entity_by_query(path=path)
if entity['entity_type'] != 'file':
raise StorageArgumentException('Only file entities can be downloaded')
signed_url = self.api_client.get_signed_url(entity['uuid'])
response = self.api_client.download_signed_url(signed_url)
with open(target_path, "wb") as output:
for chunk in response.iter_content(chunk_size=1024):
output.write(chunk)
|
[
"Download",
"a",
"file",
"from",
"storage",
"service",
"to",
"local",
"disk",
"."
] |
HumanBrainProject/hbp-service-client
|
python
|
https://github.com/HumanBrainProject/hbp-service-client/blob/b338fb41a7f0e7b9d654ff28fcf13a56d03bff4d/hbp_service_client/storage_service/client.py#L90-L119
|
[
"def",
"download_file",
"(",
"self",
",",
"path",
",",
"target_path",
")",
":",
"self",
".",
"__validate_storage_path",
"(",
"path",
")",
"entity",
"=",
"self",
".",
"api_client",
".",
"get_entity_by_query",
"(",
"path",
"=",
"path",
")",
"if",
"entity",
"[",
"'entity_type'",
"]",
"!=",
"'file'",
":",
"raise",
"StorageArgumentException",
"(",
"'Only file entities can be downloaded'",
")",
"signed_url",
"=",
"self",
".",
"api_client",
".",
"get_signed_url",
"(",
"entity",
"[",
"'uuid'",
"]",
")",
"response",
"=",
"self",
".",
"api_client",
".",
"download_signed_url",
"(",
"signed_url",
")",
"with",
"open",
"(",
"target_path",
",",
"\"wb\"",
")",
"as",
"output",
":",
"for",
"chunk",
"in",
"response",
".",
"iter_content",
"(",
"chunk_size",
"=",
"1024",
")",
":",
"output",
".",
"write",
"(",
"chunk",
")"
] |
b338fb41a7f0e7b9d654ff28fcf13a56d03bff4d
|
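A calling sketch; the storage path and local target are hypothetical:

def fetch(client):
    # streams the file in 1 KiB chunks, overwriting any existing local file
    client.download_file('/myproject/results/out.csv', '/tmp/out.csv')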
test
|
Client.exists
|
Check if a certain path exists in the storage service.
Args:
path (str): The path to be checked
Returns:
True if the path exists, False otherwise
Raises:
StorageArgumentException: Invalid arguments
StorageForbiddenException: Server response code 403
StorageNotFoundException: Server response code 404
StorageException: other 400-600 error codes
|
hbp_service_client/storage_service/client.py
|
def exists(self, path):
'''Check if a certain path exists in the storage service.
Args:
path (str): The path to be checked
Returns:
True if the path exists, False otherwise
Raises:
StorageArgumentException: Invalid arguments
StorageForbiddenException: Server response code 403
StorageNotFoundException: Server response code 404
StorageException: other 400-600 error codes
'''
self.__validate_storage_path(path)
try:
metadata = self.api_client.get_entity_by_query(path=path)
except StorageNotFoundException:
return False
return metadata and 'uuid' in metadata
|
def exists(self, path):
'''Check if a certain path exists in the storage service.
Args:
path (str): The path to be checked
Returns:
True if the path exists, False otherwise
Raises:
StorageArgumentException: Invalid arguments
StorageForbiddenException: Server response code 403
StorageNotFoundException: Server response code 404
StorageException: other 400-600 error codes
'''
self.__validate_storage_path(path)
try:
metadata = self.api_client.get_entity_by_query(path=path)
except StorageNotFoundException:
return False
return metadata and 'uuid' in metadata
|
[
"Check",
"if",
"a",
"certain",
"path",
"exists",
"in",
"the",
"storage",
"service",
"."
] |
HumanBrainProject/hbp-service-client
|
python
|
https://github.com/HumanBrainProject/hbp-service-client/blob/b338fb41a7f0e7b9d654ff28fcf13a56d03bff4d/hbp_service_client/storage_service/client.py#L121-L143
|
[
"def",
"exists",
"(",
"self",
",",
"path",
")",
":",
"self",
".",
"__validate_storage_path",
"(",
"path",
")",
"try",
":",
"metadata",
"=",
"self",
".",
"api_client",
".",
"get_entity_by_query",
"(",
"path",
"=",
"path",
")",
"except",
"StorageNotFoundException",
":",
"return",
"False",
"return",
"metadata",
"and",
"'uuid'",
"in",
"metadata"
] |
b338fb41a7f0e7b9d654ff28fcf13a56d03bff4d
|
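A calling sketch combining exists with mkdir (documented below); the paths are hypothetical:

def ensure_folder(client):
    if not client.exists('/myproject/raw'):
        client.mkdir('/myproject/raw')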
test
|
Client.get_parent
|
Get the parent entity of the entity pointed by the given path.
Args:
path (str): The path of the entity whose parent is needed
Returns:
A JSON object of the parent entity if found.
Raises:
StorageArgumentException: Invalid arguments
StorageForbiddenException: Server response code 403
StorageNotFoundException: Server response code 404
StorageException: other 400-600 error codes
|
hbp_service_client/storage_service/client.py
|
def get_parent(self, path):
'''Get the parent entity of the entity pointed by the given path.
Args:
path (str): The path of the entity whose parent is needed
Returns:
A JSON object of the parent entity if found.
Raises:
StorageArgumentException: Invalid arguments
StorageForbiddenException: Server response code 403
StorageNotFoundException: Server response code 404
StorageException: other 400-600 error codes
'''
self.__validate_storage_path(path, projects_allowed=False)
path_steps = [step for step in path.split('/') if step]
del path_steps[-1]
parent_path = '/{0}'.format('/'.join(path_steps))
return self.api_client.get_entity_by_query(path=parent_path)
|
def get_parent(self, path):
'''Get the parent entity of the entity pointed by the given path.
Args:
path (str): The path of the entity whose parent is needed
Returns:
A JSON object of the parent entity if found.
Raises:
StorageArgumentException: Invalid arguments
StorageForbiddenException: Server response code 403
StorageNotFoundException: Server response code 404
StorageException: other 400-600 error codes
'''
self.__validate_storage_path(path, projects_allowed=False)
path_steps = [step for step in path.split('/') if step]
del path_steps[-1]
parent_path = '/{0}'.format('/'.join(path_steps))
return self.api_client.get_entity_by_query(path=parent_path)
|
[
"Get",
"the",
"parent",
"entity",
"of",
"the",
"entity",
"pointed",
"by",
"the",
"given",
"path",
"."
] |
HumanBrainProject/hbp-service-client
|
python
|
https://github.com/HumanBrainProject/hbp-service-client/blob/b338fb41a7f0e7b9d654ff28fcf13a56d03bff4d/hbp_service_client/storage_service/client.py#L145-L165
|
[
"def",
"get_parent",
"(",
"self",
",",
"path",
")",
":",
"self",
".",
"__validate_storage_path",
"(",
"path",
",",
"projects_allowed",
"=",
"False",
")",
"path_steps",
"=",
"[",
"step",
"for",
"step",
"in",
"path",
".",
"split",
"(",
"'/'",
")",
"if",
"step",
"]",
"del",
"path_steps",
"[",
"-",
"1",
"]",
"parent_path",
"=",
"'/{0}'",
".",
"format",
"(",
"'/'",
".",
"join",
"(",
"path_steps",
")",
")",
"return",
"self",
".",
"api_client",
".",
"get_entity_by_query",
"(",
"path",
"=",
"parent_path",
")"
] |
b338fb41a7f0e7b9d654ff28fcf13a56d03bff4d
|
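A calling sketch; per the code above, the parent of '/a/b/c' is resolved as '/a/b':

def parent_of(client):
    meta = client.get_parent('/myproject/folder/file.txt')
    print(meta['uuid'], meta['entity_type'])  # entity for /myproject/folder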
test
|
Client.mkdir
|
Create a folder in the storage service pointed by the given path.
Args:
path (str): The path of the folder to be created
Returns:
None
Raises:
StorageArgumentException: Invalid arguments
StorageForbiddenException: Server response code 403
StorageNotFoundException: Server response code 404
StorageException: other 400-600 error codes
|
hbp_service_client/storage_service/client.py
|
def mkdir(self, path):
'''Create a folder in the storage service pointed by the given path.
Args:
path (str): The path of the folder to be created
Returns:
None
Raises:
StorageArgumentException: Invalid arguments
StorageForbiddenException: Server response code 403
StorageNotFoundException: Server response code 404
StorageException: other 400-600 error codes
'''
self.__validate_storage_path(path, projects_allowed=False)
parent_metadata = self.get_parent(path)
self.api_client.create_folder(path.split('/')[-1], parent_metadata['uuid'])
|
def mkdir(self, path):
'''Create a folder in the storage service pointed by the given path.
Args:
path (str): The path of the folder to be created
Returns:
None
Raises:
StorageArgumentException: Invalid arguments
StorageForbiddenException: Server response code 403
StorageNotFoundException: Server response code 404
StorageException: other 400-600 error codes
'''
self.__validate_storage_path(path, projects_allowed=False)
parent_metadata = self.get_parent(path)
self.api_client.create_folder(path.split('/')[-1], parent_metadata['uuid'])
|
[
"Create",
"a",
"folder",
"in",
"the",
"storage",
"service",
"pointed",
"by",
"the",
"given",
"path",
"."
] |
HumanBrainProject/hbp-service-client
|
python
|
https://github.com/HumanBrainProject/hbp-service-client/blob/b338fb41a7f0e7b9d654ff28fcf13a56d03bff4d/hbp_service_client/storage_service/client.py#L167-L185
|
[
"def",
"mkdir",
"(",
"self",
",",
"path",
")",
":",
"self",
".",
"__validate_storage_path",
"(",
"path",
",",
"projects_allowed",
"=",
"False",
")",
"parent_metadata",
"=",
"self",
".",
"get_parent",
"(",
"path",
")",
"self",
".",
"api_client",
".",
"create_folder",
"(",
"path",
".",
"split",
"(",
"'/'",
")",
"[",
"-",
"1",
"]",
",",
"parent_metadata",
"[",
"'uuid'",
"]",
")"
] |
b338fb41a7f0e7b9d654ff28fcf13a56d03bff4d
|
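A calling sketch; the parent folder must already exist, since the method resolves it first:

def make_folder(client):
    client.mkdir('/myproject/results')  # '/myproject' must already exist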
test
|
Client.upload_file
|
Upload local file content to a storage service destination folder.
Args:
local_file(str)
dest_path(str):
absolute Storage service path; the '/project' prefix is essential
suffix should be the name the file will have in the destination folder
i.e.: /project/folder/.../file_name
mimetype(str): set the contentType attribute
Returns:
The created file entity (dict), including its uuid and etag
Raises:
StorageArgumentException: Invalid arguments
StorageForbiddenException: Server response code 403
StorageNotFoundException: Server response code 404
StorageException: other 400-600 error codes
|
hbp_service_client/storage_service/client.py
|
def upload_file(self, local_file, dest_path, mimetype):
'''Upload local file content to a storage service destination folder.
Args:
local_file(str)
dest_path(str):
absolute Storage service path; the '/project' prefix is essential
suffix should be the name the file will have in the destination folder
i.e.: /project/folder/.../file_name
mimetype(str): set the contentType attribute
Returns:
The created file entity (dict), including its uuid and etag
Raises:
StorageArgumentException: Invalid arguments
StorageForbiddenException: Server response code 403
StorageNotFoundException: Server response code 404
StorageException: other 400-600 error codes
'''
self.__validate_storage_path(dest_path)
# get the paths of the target dir and the target file name
if dest_path.endswith('/'):
raise StorageArgumentException('Must specify target file name in dest_path argument')
if local_file.endswith(os.path.sep):
raise StorageArgumentException('Must specify source file name in local_file'
' argument, directory upload not supported')
# create the file container
new_file = self.api_client.create_file(
name=dest_path.split('/').pop(),
content_type=mimetype,
parent=self.get_parent(dest_path)['uuid']
)
etag = self.api_client.upload_file_content(new_file['uuid'], source=local_file)
new_file['etag'] = etag
return new_file
|
def upload_file(self, local_file, dest_path, mimetype):
'''Upload local file content to a storage service destination folder.
Args:
local_file(str)
dest_path(str):
absolute Storage service path; the '/project' prefix is essential
suffix should be the name the file will have in the destination folder
i.e.: /project/folder/.../file_name
mimetype(str): set the contentType attribute
Returns:
The created file entity (dict), including its uuid and etag
Raises:
StorageArgumentException: Invalid arguments
StorageForbiddenException: Server response code 403
StorageNotFoundException: Server response code 404
StorageException: other 400-600 error codes
'''
self.__validate_storage_path(dest_path)
# get the paths of the target dir and the target file name
if dest_path.endswith('/'):
raise StorageArgumentException('Must specify target file name in dest_path argument')
if local_file.endswith(os.path.sep):
raise StorageArgumentException('Must specify source file name in local_file'
' argument, directory upload not supported')
# create the file container
new_file = self.api_client.create_file(
name=dest_path.split('/').pop(),
content_type=mimetype,
parent=self.get_parent(dest_path)['uuid']
)
etag = self.api_client.upload_file_content(new_file['uuid'], source=local_file)
new_file['etag'] = etag
return new_file
|
[
"Upload",
"local",
"file",
"content",
"to",
"a",
"storage",
"service",
"destination",
"folder",
"."
] |
HumanBrainProject/hbp-service-client
|
python
|
https://github.com/HumanBrainProject/hbp-service-client/blob/b338fb41a7f0e7b9d654ff28fcf13a56d03bff4d/hbp_service_client/storage_service/client.py#L189-L228
|
[
"def",
"upload_file",
"(",
"self",
",",
"local_file",
",",
"dest_path",
",",
"mimetype",
")",
":",
"self",
".",
"__validate_storage_path",
"(",
"dest_path",
")",
"# get the paths of the target dir and the target file name",
"if",
"dest_path",
".",
"endswith",
"(",
"'/'",
")",
":",
"raise",
"StorageArgumentException",
"(",
"'Must specify target file name in dest_path argument'",
")",
"if",
"local_file",
".",
"endswith",
"(",
"os",
".",
"path",
".",
"sep",
")",
":",
"raise",
"StorageArgumentException",
"(",
"'Must specify source file name in local_file'",
"' argument, directory upload not supported'",
")",
"# create the file container",
"new_file",
"=",
"self",
".",
"api_client",
".",
"create_file",
"(",
"name",
"=",
"dest_path",
".",
"split",
"(",
"'/'",
")",
".",
"pop",
"(",
")",
",",
"content_type",
"=",
"mimetype",
",",
"parent",
"=",
"self",
".",
"get_parent",
"(",
"dest_path",
")",
"[",
"'uuid'",
"]",
")",
"etag",
"=",
"self",
".",
"api_client",
".",
"upload_file_content",
"(",
"new_file",
"[",
"'uuid'",
"]",
",",
"source",
"=",
"local_file",
")",
"new_file",
"[",
"'etag'",
"]",
"=",
"etag",
"return",
"new_file"
] |
b338fb41a7f0e7b9d654ff28fcf13a56d03bff4d
|
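A calling sketch; file name, path, and mimetype are hypothetical:

def upload(client):
    new_file = client.upload_file('report.pdf', '/myproject/docs/report.pdf',
                                  'application/pdf')
    print(new_file['uuid'], new_file['etag'])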
test
|
Client.delete
|
Delete an entity from the storage service using its path.
Args:
path(str): The path of the entity to be deleted
Returns:
None
Raises:
StorageArgumentException: Invalid arguments
StorageForbiddenException: Server response code 403
StorageNotFoundException: Server response code 404
StorageException: other 400-600 error codes
|
hbp_service_client/storage_service/client.py
|
def delete(self, path):
''' Delete an entity from the storage service using its path.
Args:
path(str): The path of the entity to be deleted
Returns:
None
Raises:
StorageArgumentException: Invalid arguments
StorageForbiddenException: Server response code 403
StorageNotFoundException: Server response code 404
StorageException: other 400-600 error codes
'''
self.__validate_storage_path(path, projects_allowed=False)
entity = self.api_client.get_entity_by_query(path=path)
if entity['entity_type'] in self.__BROWSABLE_TYPES:
# At this point it can only be a folder
contents = self.api_client.list_folder_content(entity['uuid'])
if contents['count'] > 0:
raise StorageArgumentException(
'This method cannot delete non-empty folder. Please empty the folder first.')
self.api_client.delete_folder(entity['uuid'])
elif entity['entity_type'] == 'file':
self.api_client.delete_file(entity['uuid'])
|
def delete(self, path):
''' Delete an entity from the storage service using its path.
Args:
path(str): The path of the entity to be deleted
Returns:
None
Raises:
StorageArgumentException: Invalid arguments
StorageForbiddenException: Server response code 403
StorageNotFoundException: Server response code 404
StorageException: other 400-600 error codes
'''
self.__validate_storage_path(path, projects_allowed=False)
entity = self.api_client.get_entity_by_query(path=path)
if entity['entity_type'] in self.__BROWSABLE_TYPES:
# At this point it can only be a folder
contents = self.api_client.list_folder_content(entity['uuid'])
if contents['count'] > 0:
raise StorageArgumentException(
'This method cannot delete non-empty folder. Please empty the folder first.')
self.api_client.delete_folder(entity['uuid'])
elif entity['entity_type'] == 'file':
self.api_client.delete_file(entity['uuid'])
|
[
"Delete",
"an",
"entity",
"from",
"the",
"storage",
"service",
"using",
"its",
"path",
"."
] |
HumanBrainProject/hbp-service-client
|
python
|
https://github.com/HumanBrainProject/hbp-service-client/blob/b338fb41a7f0e7b9d654ff28fcf13a56d03bff4d/hbp_service_client/storage_service/client.py#L230-L259
|
[
"def",
"delete",
"(",
"self",
",",
"path",
")",
":",
"self",
".",
"__validate_storage_path",
"(",
"path",
",",
"projects_allowed",
"=",
"False",
")",
"entity",
"=",
"self",
".",
"api_client",
".",
"get_entity_by_query",
"(",
"path",
"=",
"path",
")",
"if",
"entity",
"[",
"'entity_type'",
"]",
"in",
"self",
".",
"__BROWSABLE_TYPES",
":",
"# At this point it can only be a folder",
"contents",
"=",
"self",
".",
"api_client",
".",
"list_folder_content",
"(",
"entity",
"[",
"'uuid'",
"]",
")",
"if",
"contents",
"[",
"'count'",
"]",
">",
"0",
":",
"raise",
"StorageArgumentException",
"(",
"'This method cannot delete non-empty folder. Please empty the folder first.'",
")",
"self",
".",
"api_client",
".",
"delete_folder",
"(",
"entity",
"[",
"'uuid'",
"]",
")",
"elif",
"entity",
"[",
"'entity_type'",
"]",
"==",
"'file'",
":",
"self",
".",
"api_client",
".",
"delete_file",
"(",
"entity",
"[",
"'uuid'",
"]",
")"
] |
b338fb41a7f0e7b9d654ff28fcf13a56d03bff4d
|
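A calling sketch; because only empty folders can be deleted, files are removed first:

def remove(client):
    client.delete('/myproject/tmp/a.txt')
    client.delete('/myproject/tmp')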
test
|
Client.__validate_storage_path
|
Validate a string as a valid storage path
|
hbp_service_client/storage_service/client.py
|
def __validate_storage_path(cls, path, projects_allowed=True):
'''Validate a string as a valid storage path'''
if not path or not isinstance(path, str) or path[0] != '/' or path == '/':
raise StorageArgumentException(
'The path must be a string, start with a slash (/), and be longer'
' than 1 character.')
if not projects_allowed and len([elem for elem in path.split('/') if elem]) == 1:
raise StorageArgumentException(
'This method does not accept projects in the path.')
|
def __validate_storage_path(cls, path, projects_allowed=True):
'''Validate a string as a valid storage path'''
if not path or not isinstance(path, str) or path[0] != '/' or path == '/':
raise StorageArgumentException(
'The path must be a string, start with a slash (/), and be longer'
' than 1 character.')
if not projects_allowed and len([elem for elem in path.split('/') if elem]) == 1:
raise StorageArgumentException(
'This method does not accept projects in the path.')
|
[
"Validate",
"a",
"string",
"as",
"a",
"valid",
"storage",
"path"
] |
HumanBrainProject/hbp-service-client
|
python
|
https://github.com/HumanBrainProject/hbp-service-client/blob/b338fb41a7f0e7b9d654ff28fcf13a56d03bff4d/hbp_service_client/storage_service/client.py#L262-L271
|
[
"def",
"__validate_storage_path",
"(",
"cls",
",",
"path",
",",
"projects_allowed",
"=",
"True",
")",
":",
"if",
"not",
"path",
"or",
"not",
"isinstance",
"(",
"path",
",",
"str",
")",
"or",
"path",
"[",
"0",
"]",
"!=",
"'/'",
"or",
"path",
"==",
"'/'",
":",
"raise",
"StorageArgumentException",
"(",
"'The path must be a string, start with a slash (/), and be longer'",
"' than 1 character.'",
")",
"if",
"not",
"projects_allowed",
"and",
"len",
"(",
"[",
"elem",
"for",
"elem",
"in",
"path",
".",
"split",
"(",
"'/'",
")",
"if",
"elem",
"]",
")",
"==",
"1",
":",
"raise",
"StorageArgumentException",
"(",
"'This method does not accept projects in the path.'",
")"
] |
b338fb41a7f0e7b9d654ff28fcf13a56d03bff4d
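To make the rules above concrete, here is a standalone restatement of the same checks as a boolean predicate. This is a sketch for illustration only: the real method is private to Client and raises StorageArgumentException instead of returning False.
def is_valid_storage_path(path, projects_allowed=True):
    # Must be a non-empty string, start with '/', and not be just '/'.
    if not path or not isinstance(path, str) or path[0] != '/' or path == '/':
        return False
    # A single path component (e.g. '/my_project') names a project.
    if not projects_allowed and len([e for e in path.split('/') if e]) == 1:
        return False
    return True

assert is_valid_storage_path('/my_project/folder/file.txt')
assert not is_valid_storage_path('relative/path')
assert not is_valid_storage_path('/')
assert not is_valid_storage_path('/my_project', projects_allowed=False)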
|
test
|
Task.is_valid
|
Check cloud-harness code is valid. task schema validation is
left to the API endpoint.
:param remote: Flag indicating if the task is being run on the platform or not.
:return: is valid or not.
|
gbdx_task_template/task.py
|
def is_valid(self, remote=False):
"""
Check cloud-harness code is valid. task schema validation is
left to the API endpoint.
    :param remote: Flag indicating if the task is being run on the platform or not.
:return: is valid or not.
"""
if len(self.input_ports) < 1:
return False
if remote:
        # Ignore output ports as the value will be overridden.
ports = [
port for port in self.input_ports if port.type == 'directory'
]
for port in ports:
# Will raise exception if the port is invalid.
port.is_valid_s3_url(port.value)
else:
all_ports = self.ports[0] + self.ports[1]
ports = [
port for port in all_ports if port.type == 'directory' and port.name != 'source_bundle'
]
for port in ports:
# Will raise exception if the port is invalid.
port.is_valid_filesys(port.value)
return True
|
def is_valid(self, remote=False):
"""
Check cloud-harness code is valid. task schema validation is
left to the API endpoint.
    :param remote: Flag indicating if the task is being run on the platform or not.
:return: is valid or not.
"""
if len(self.input_ports) < 1:
return False
if remote:
        # Ignore output ports as the value will be overridden.
ports = [
port for port in self.input_ports if port.type == 'directory'
]
for port in ports:
# Will raise exception if the port is invalid.
port.is_valid_s3_url(port.value)
else:
all_ports = self.ports[0] + self.ports[1]
ports = [
port for port in all_ports if port.type == 'directory' and port.name != 'source_bundle'
]
for port in ports:
# Will raise exception if the port is invalid.
port.is_valid_filesys(port.value)
return True
|
[
"Check",
"cloud",
"-",
"harness",
"code",
"is",
"valid",
".",
"task",
"schema",
"validation",
"is",
"left",
"to",
"the",
"API",
"endpoint",
".",
":",
"param",
"remote",
":",
"Flag",
"indicating",
"if",
"the",
"task",
"is",
"being",
"ran",
"on",
"the",
"platform",
"or",
"not",
".",
":",
"return",
":",
"is",
"valid",
"or",
"not",
"."
] |
TDG-Platform/cloud-harness
|
python
|
https://github.com/TDG-Platform/cloud-harness/blob/1d8f972f861816b90785a484e9bec5bd4bc2f569/gbdx_task_template/task.py#L101-L129
|
[
"def",
"is_valid",
"(",
"self",
",",
"remote",
"=",
"False",
")",
":",
"if",
"len",
"(",
"self",
".",
"input_ports",
")",
"<",
"1",
":",
"return",
"False",
"if",
"remote",
":",
"# Ignore output ports as value will overriden.",
"ports",
"=",
"[",
"port",
"for",
"port",
"in",
"self",
".",
"input_ports",
"if",
"port",
".",
"type",
"==",
"'directory'",
"]",
"for",
"port",
"in",
"ports",
":",
"# Will raise exception if the port is invalid.",
"port",
".",
"is_valid_s3_url",
"(",
"port",
".",
"value",
")",
"else",
":",
"all_ports",
"=",
"self",
".",
"ports",
"[",
"0",
"]",
"+",
"self",
".",
"ports",
"[",
"1",
"]",
"ports",
"=",
"[",
"port",
"for",
"port",
"in",
"all_ports",
"if",
"port",
".",
"type",
"==",
"'directory'",
"and",
"port",
".",
"name",
"!=",
"'source_bundle'",
"]",
"for",
"port",
"in",
"ports",
":",
"# Will raise exception if the port is invalid.",
"port",
".",
"is_valid_filesys",
"(",
"port",
".",
"value",
")",
"return",
"True"
] |
1d8f972f861816b90785a484e9bec5bd4bc2f569
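A toy illustration of the port filtering performed by is_valid above. The Port namedtuple and the port names are stand-ins, not the real cloud-harness types; note the real local branch draws from both input and output ports.
from collections import namedtuple

Port = namedtuple('Port', ['name', 'type', 'value'])

ports = [
    Port('source_bundle', 'directory', '/tmp/bundle'),
    Port('raster', 'directory', 's3://bucket/key'),
    Port('threshold', 'string', '0.5'),
]
# Remote mode keeps only directory-typed input ports:
remote_ports = [p for p in ports if p.type == 'directory']
# Local mode also drops the special 'source_bundle' port:
local_ports = [p for p in ports
               if p.type == 'directory' and p.name != 'source_bundle']
print([p.name for p in remote_ports])  # ['source_bundle', 'raster']
print([p.name for p in local_ports])   # ['raster']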
|
test
|
median_min_distance
|
This function computes a graph of nearest-neighbors for each sample point in
'data' and returns the median of the distribution of distances between those
nearest-neighbors, the distance metric being specified by 'metric'.
Parameters
----------
data : array of shape (n_samples, n_features)
The data-set, a fraction of whose sample points will be extracted
by density sampling.
metric : string
The distance metric used to determine the nearest-neighbor to each data-point.
The DistanceMetric class defined in scikit-learn's library lists all available
metrics.
Returns
-------
median_min_dist : float
The median of the distribution of distances between nearest-neighbors.
|
Density_Sampling.py
|
def median_min_distance(data, metric):
"""This function computes a graph of nearest-neighbors for each sample point in
'data' and returns the median of the distribution of distances between those
nearest-neighbors, the distance metric being specified by 'metric'.
Parameters
----------
data : array of shape (n_samples, n_features)
The data-set, a fraction of whose sample points will be extracted
by density sampling.
metric : string
The distance metric used to determine the nearest-neighbor to each data-point.
The DistanceMetric class defined in scikit-learn's library lists all available
metrics.
Returns
-------
median_min_dist : float
The median of the distribution of distances between nearest-neighbors.
"""
data = np.atleast_2d(data)
nearest_distances = kneighbors_graph(data, 1, mode = 'distance', metric = metric, include_self = False).data
median_min_dist = np.median(nearest_distances, overwrite_input = True)
return round(median_min_dist, 4)
|
def median_min_distance(data, metric):
"""This function computes a graph of nearest-neighbors for each sample point in
'data' and returns the median of the distribution of distances between those
nearest-neighbors, the distance metric being specified by 'metric'.
Parameters
----------
data : array of shape (n_samples, n_features)
The data-set, a fraction of whose sample points will be extracted
by density sampling.
metric : string
The distance metric used to determine the nearest-neighbor to each data-point.
The DistanceMetric class defined in scikit-learn's library lists all available
metrics.
Returns
-------
median_min_dist : float
The median of the distribution of distances between nearest-neighbors.
"""
data = np.atleast_2d(data)
nearest_distances = kneighbors_graph(data, 1, mode = 'distance', metric = metric, include_self = False).data
median_min_dist = np.median(nearest_distances, overwrite_input = True)
return round(median_min_dist, 4)
|
[
"This",
"function",
"computes",
"a",
"graph",
"of",
"nearest",
"-",
"neighbors",
"for",
"each",
"sample",
"point",
"in",
"data",
"and",
"returns",
"the",
"median",
"of",
"the",
"distribution",
"of",
"distances",
"between",
"those",
"nearest",
"-",
"neighbors",
"the",
"distance",
"metric",
"being",
"specified",
"by",
"metric",
".",
"Parameters",
"----------",
"data",
":",
"array",
"of",
"shape",
"(",
"n_samples",
"n_features",
")",
"The",
"data",
"-",
"set",
"a",
"fraction",
"of",
"whose",
"sample",
"points",
"will",
"be",
"extracted",
"by",
"density",
"sampling",
".",
"metric",
":",
"string",
"The",
"distance",
"metric",
"used",
"to",
"determine",
"the",
"nearest",
"-",
"neighbor",
"to",
"each",
"data",
"-",
"point",
".",
"The",
"DistanceMetric",
"class",
"defined",
"in",
"scikit",
"-",
"learn",
"s",
"library",
"lists",
"all",
"available",
"metrics",
".",
"Returns",
"-------",
"median_min_dist",
":",
"float",
"The",
"median",
"of",
"the",
"distribution",
"of",
"distances",
"between",
"nearest",
"-",
"neighbors",
"."
] |
GGiecold/Density_Sampling
|
python
|
https://github.com/GGiecold/Density_Sampling/blob/8c8e6c63a97fecf958238e12947e5e6542b64102/Density_Sampling.py#L116-L144
|
[
"def",
"median_min_distance",
"(",
"data",
",",
"metric",
")",
":",
"data",
"=",
"np",
".",
"atleast_2d",
"(",
"data",
")",
"nearest_distances",
"=",
"kneighbors_graph",
"(",
"data",
",",
"1",
",",
"mode",
"=",
"'distance'",
",",
"metric",
"=",
"metric",
",",
"include_self",
"=",
"False",
")",
".",
"data",
"median_min_dist",
"=",
"np",
".",
"median",
"(",
"nearest_distances",
",",
"overwrite_input",
"=",
"True",
")",
"return",
"round",
"(",
"median_min_dist",
",",
"4",
")"
] |
8c8e6c63a97fecf958238e12947e5e6542b64102
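The extract above assumes NumPy and scikit-learn imports that live outside the snippet; a self-contained version, runnable as-is on illustrative random data, looks like this.
import numpy as np
from sklearn.neighbors import kneighbors_graph

def median_min_distance(data, metric):
    # Distance from each point to its single nearest neighbor.
    data = np.atleast_2d(data)
    nearest_distances = kneighbors_graph(
        data, 1, mode='distance', metric=metric, include_self=False).data
    return round(np.median(nearest_distances, overwrite_input=True), 4)

rng = np.random.RandomState(0)
print(median_min_distance(rng.rand(100, 2), 'manhattan'))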
|
test
|
get_local_densities
|
For each sample point of the data-set 'data', estimate a local density in feature
space by counting the number of neighboring data-points within a particular
region centered around that sample point.
Parameters
----------
data : array of shape (n_samples, n_features)
The data-set, a fraction of whose sample points will be extracted
by density sampling.
kernel_mult : float, optional (default = 2.0)
        The kernel multiplier, which determines (in terms of the median of the distribution
of distances among nearest neighbors) the extent of the regions centered
around each sample point to consider for the computation of the local density
associated to that particular sample point.
metric : string, optional (default = 'manhattan')
The distance metric used to determine the nearest-neighbor to each data-point.
The DistanceMetric class defined in scikit-learn's library lists all available
metrics.
Returns
-------
local_densities : array of shape (n_samples,)
The i-th entry of this vector corresponds to the local density of the i-th sample
point in the order of the rows of 'data'.
|
Density_Sampling.py
|
def get_local_densities(data, kernel_mult = 2.0, metric = 'manhattan'):
"""For each sample point of the data-set 'data', estimate a local density in feature
space by counting the number of neighboring data-points within a particular
region centered around that sample point.
Parameters
----------
data : array of shape (n_samples, n_features)
The data-set, a fraction of whose sample points will be extracted
by density sampling.
kernel_mult : float, optional (default = 2.0)
        The kernel multiplier, which determines (in terms of the median of the distribution
of distances among nearest neighbors) the extent of the regions centered
around each sample point to consider for the computation of the local density
associated to that particular sample point.
metric : string, optional (default = 'manhattan')
The distance metric used to determine the nearest-neighbor to each data-point.
The DistanceMetric class defined in scikit-learn's library lists all available
metrics.
Returns
-------
local_densities : array of shape (n_samples,)
The i-th entry of this vector corresponds to the local density of the i-th sample
point in the order of the rows of 'data'.
"""
data = np.atleast_2d(data)
assert isinstance(kernel_mult, numbers.Real) and kernel_mult > 0
kernel_width = kernel_mult * median_min_distance(data, metric)
N_samples = data.shape[0]
if 8.0 * get_chunk_size(N_samples, 1) > N_samples:
A = radius_neighbors_graph(data, kernel_width, mode = 'connectivity', metric = metric, include_self = True)
rows, _ = A.nonzero()
with NamedTemporaryFile('w', delete = True, dir = './') as file_name:
fp = np.memmap(file_name, dtype = int, mode = 'w+', shape = rows.shape)
fp[:] = rows[:]
_, counts = np.unique(fp, return_counts = True)
local_densities = np.zeros(N_samples, dtype = int)
for i in xrange(N_samples):
local_densities[i] = counts[i]
else:
local_densities = np.zeros(N_samples, dtype = int)
chunks_size = get_chunk_size(N_samples, 2)
for i in xrange(0, N_samples, chunks_size):
chunk = data[i:min(i + chunks_size, N_samples)]
D = pairwise_distances(chunk, data, metric, n_jobs = 1)
D = (D <= kernel_width)
local_densities[i + np.arange(min(chunks_size, N_samples - i))] = D.sum(axis = 1)
return local_densities
|
def get_local_densities(data, kernel_mult = 2.0, metric = 'manhattan'):
"""For each sample point of the data-set 'data', estimate a local density in feature
space by counting the number of neighboring data-points within a particular
region centered around that sample point.
Parameters
----------
data : array of shape (n_samples, n_features)
The data-set, a fraction of whose sample points will be extracted
by density sampling.
kernel_mult : float, optional (default = 2.0)
        The kernel multiplier, which determines (in terms of the median of the distribution
of distances among nearest neighbors) the extent of the regions centered
around each sample point to consider for the computation of the local density
associated to that particular sample point.
metric : string, optional (default = 'manhattan')
The distance metric used to determine the nearest-neighbor to each data-point.
The DistanceMetric class defined in scikit-learn's library lists all available
metrics.
Returns
-------
local_densities : array of shape (n_samples,)
The i-th entry of this vector corresponds to the local density of the i-th sample
point in the order of the rows of 'data'.
"""
data = np.atleast_2d(data)
assert isinstance(kernel_mult, numbers.Real) and kernel_mult > 0
kernel_width = kernel_mult * median_min_distance(data, metric)
N_samples = data.shape[0]
if 8.0 * get_chunk_size(N_samples, 1) > N_samples:
A = radius_neighbors_graph(data, kernel_width, mode = 'connectivity', metric = metric, include_self = True)
rows, _ = A.nonzero()
with NamedTemporaryFile('w', delete = True, dir = './') as file_name:
fp = np.memmap(file_name, dtype = int, mode = 'w+', shape = rows.shape)
fp[:] = rows[:]
_, counts = np.unique(fp, return_counts = True)
local_densities = np.zeros(N_samples, dtype = int)
for i in xrange(N_samples):
local_densities[i] = counts[i]
else:
local_densities = np.zeros(N_samples, dtype = int)
chunks_size = get_chunk_size(N_samples, 2)
for i in xrange(0, N_samples, chunks_size):
chunk = data[i:min(i + chunks_size, N_samples)]
D = pairwise_distances(chunk, data, metric, n_jobs = 1)
D = (D <= kernel_width)
local_densities[i + np.arange(min(chunks_size, N_samples - i))] = D.sum(axis = 1)
return local_densities
|
[
"For",
"each",
"sample",
"point",
"of",
"the",
"data",
"-",
"set",
"data",
"estimate",
"a",
"local",
"density",
"in",
"feature",
"space",
"by",
"counting",
"the",
"number",
"of",
"neighboring",
"data",
"-",
"points",
"within",
"a",
"particular",
"region",
"centered",
"around",
"that",
"sample",
"point",
".",
"Parameters",
"----------",
"data",
":",
"array",
"of",
"shape",
"(",
"n_samples",
"n_features",
")",
"The",
"data",
"-",
"set",
"a",
"fraction",
"of",
"whose",
"sample",
"points",
"will",
"be",
"extracted",
"by",
"density",
"sampling",
".",
"kernel_mult",
":",
"float",
"optional",
"(",
"default",
"=",
"2",
".",
"0",
")",
"The",
"kernel",
"multiplier",
"which",
"determine",
"(",
"in",
"terms",
"of",
"the",
"median",
"of",
"the",
"distribution",
"of",
"distances",
"among",
"nearest",
"neighbors",
")",
"the",
"extent",
"of",
"the",
"regions",
"centered",
"around",
"each",
"sample",
"point",
"to",
"consider",
"for",
"the",
"computation",
"of",
"the",
"local",
"density",
"associated",
"to",
"that",
"particular",
"sample",
"point",
".",
"metric",
":",
"string",
"optional",
"(",
"default",
"=",
"manhattan",
")",
"The",
"distance",
"metric",
"used",
"to",
"determine",
"the",
"nearest",
"-",
"neighbor",
"to",
"each",
"data",
"-",
"point",
".",
"The",
"DistanceMetric",
"class",
"defined",
"in",
"scikit",
"-",
"learn",
"s",
"library",
"lists",
"all",
"available",
"metrics",
".",
"Returns",
"-------",
"local_densities",
":",
"array",
"of",
"shape",
"(",
"n_samples",
")",
"The",
"i",
"-",
"th",
"entry",
"of",
"this",
"vector",
"corresponds",
"to",
"the",
"local",
"density",
"of",
"the",
"i",
"-",
"th",
"sample",
"point",
"in",
"the",
"order",
"of",
"the",
"rows",
"of",
"data",
"."
] |
GGiecold/Density_Sampling
|
python
|
https://github.com/GGiecold/Density_Sampling/blob/8c8e6c63a97fecf958238e12947e5e6542b64102/Density_Sampling.py#L147-L209
|
[
"def",
"get_local_densities",
"(",
"data",
",",
"kernel_mult",
"=",
"2.0",
",",
"metric",
"=",
"'manhattan'",
")",
":",
"data",
"=",
"np",
".",
"atleast_2d",
"(",
"data",
")",
"assert",
"isinstance",
"(",
"kernel_mult",
",",
"numbers",
".",
"Real",
")",
"and",
"kernel_mult",
">",
"0",
"kernel_width",
"=",
"kernel_mult",
"*",
"median_min_distance",
"(",
"data",
",",
"metric",
")",
"N_samples",
"=",
"data",
".",
"shape",
"[",
"0",
"]",
"if",
"8.0",
"*",
"get_chunk_size",
"(",
"N_samples",
",",
"1",
")",
">",
"N_samples",
":",
"A",
"=",
"radius_neighbors_graph",
"(",
"data",
",",
"kernel_width",
",",
"mode",
"=",
"'connectivity'",
",",
"metric",
"=",
"metric",
",",
"include_self",
"=",
"True",
")",
"rows",
",",
"_",
"=",
"A",
".",
"nonzero",
"(",
")",
"with",
"NamedTemporaryFile",
"(",
"'w'",
",",
"delete",
"=",
"True",
",",
"dir",
"=",
"'./'",
")",
"as",
"file_name",
":",
"fp",
"=",
"np",
".",
"memmap",
"(",
"file_name",
",",
"dtype",
"=",
"int",
",",
"mode",
"=",
"'w+'",
",",
"shape",
"=",
"rows",
".",
"shape",
")",
"fp",
"[",
":",
"]",
"=",
"rows",
"[",
":",
"]",
"_",
",",
"counts",
"=",
"np",
".",
"unique",
"(",
"fp",
",",
"return_counts",
"=",
"True",
")",
"local_densities",
"=",
"np",
".",
"zeros",
"(",
"N_samples",
",",
"dtype",
"=",
"int",
")",
"for",
"i",
"in",
"xrange",
"(",
"N_samples",
")",
":",
"local_densities",
"[",
"i",
"]",
"=",
"counts",
"[",
"i",
"]",
"else",
":",
"local_densities",
"=",
"np",
".",
"zeros",
"(",
"N_samples",
",",
"dtype",
"=",
"int",
")",
"chunks_size",
"=",
"get_chunk_size",
"(",
"N_samples",
",",
"2",
")",
"for",
"i",
"in",
"xrange",
"(",
"0",
",",
"N_samples",
",",
"chunks_size",
")",
":",
"chunk",
"=",
"data",
"[",
"i",
":",
"min",
"(",
"i",
"+",
"chunks_size",
",",
"N_samples",
")",
"]",
"D",
"=",
"pairwise_distances",
"(",
"chunk",
",",
"data",
",",
"metric",
",",
"n_jobs",
"=",
"1",
")",
"D",
"=",
"(",
"D",
"<=",
"kernel_width",
")",
"local_densities",
"[",
"i",
"+",
"np",
".",
"arange",
"(",
"min",
"(",
"chunks_size",
",",
"N_samples",
"-",
"i",
")",
")",
"]",
"=",
"D",
".",
"sum",
"(",
"axis",
"=",
"1",
")",
"return",
"local_densities"
] |
8c8e6c63a97fecf958238e12947e5e6542b64102
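For small inputs, both branches above reduce to one brute-force computation: for each point, count the points within kernel_width under the chosen metric, the point itself included (matching include_self=True). A compact sketch:
import numpy as np
from sklearn.metrics import pairwise_distances

def local_densities_naive(data, kernel_width, metric='manhattan'):
    D = pairwise_distances(np.atleast_2d(data), metric=metric)
    return (D <= kernel_width).sum(axis=1)  # each point counts itself

rng = np.random.RandomState(0)
print(local_densities_naive(rng.rand(50, 2), kernel_width=0.2))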
|
test
|
density_sampling
|
The i-th sample point of the data-set 'data' is selected by density sampling
with a probability given by:
| 0 if outlier_density > LD[i];
P(keep the i-th data-point) = | 1 if outlier_density <= LD[i] <= target_density;
| target_density / LD[i] if LD[i] > target_density.
Here 'LD[i]' denotes the local density of the i-th sample point of the data-set,
whereas 'outlier_density' and 'target_density' are computed as particular percentiles
of that distribution of local densities.
Parameters
----------
data : array of shape (n_samples, n_features)
The data-set, a fraction of whose sample points will be extracted
by density sampling.
local_densities : array of shape (n_samples,), optional (default = None)
The i-th entry of this vector corresponds to the local density of the i-th sample
point in the order of the rows of 'data'.
metric : string, optional (default = 'manhattan')
The distance metric used to determine the nearest-neighbor to each data-point.
The DistanceMetric class defined in scikit-learn's library lists all available
metrics.
kernel_mult : float, optional (default = 2.0)
        The kernel multiplier, which determines (in terms of the median of the distribution
of distances among nearest neighbors) the extent of the regions centered
around each sample point to consider for the computation of the local density
associated to that particular sample point.
outlier_percentile : float, optional (default = 0.01)
Specify the outlier density as a percentile of the distribution of local densities.
target_percentile : float, optional (default = 0.05)
        Specify the target density as a percentile of the distribution of local densities.
Relevant only if 'desired_samples' is left unspecified.
desired_samples : int, optional (default = None)
The number of samples to be selected from the whole data-set such that members
of rare populations and members of more common populations are roughly
        equally represented. To that purpose, a target density is computed so as to select about
'desired_samples' data-points.
Returns
-------
samples_kept : array of shape (n_selected_samples,)
If the 'i'-th sample point of 'data' has been selected by a given instance of
density sampling, number 'i' is featured in the array returned by
the present function.
|
Density_Sampling.py
|
def density_sampling(data, local_densities = None, metric = 'manhattan',
kernel_mult = 2.0, outlier_percentile = 0.01,
target_percentile = 0.05, desired_samples = None):
"""The i-th sample point of the data-set 'data' is selected by density sampling
with a probability given by:
| 0 if outlier_density > LD[i];
P(keep the i-th data-point) = | 1 if outlier_density <= LD[i] <= target_density;
| target_density / LD[i] if LD[i] > target_density.
Here 'LD[i]' denotes the local density of the i-th sample point of the data-set,
whereas 'outlier_density' and 'target_density' are computed as particular percentiles
of that distribution of local densities.
Parameters
----------
data : array of shape (n_samples, n_features)
The data-set, a fraction of whose sample points will be extracted
by density sampling.
local_densities : array of shape (n_samples,), optional (default = None)
The i-th entry of this vector corresponds to the local density of the i-th sample
point in the order of the rows of 'data'.
metric : string, optional (default = 'manhattan')
The distance metric used to determine the nearest-neighbor to each data-point.
The DistanceMetric class defined in scikit-learn's library lists all available
metrics.
kernel_mult : float, optional (default = 2.0)
        The kernel multiplier, which determines (in terms of the median of the distribution
of distances among nearest neighbors) the extent of the regions centered
around each sample point to consider for the computation of the local density
associated to that particular sample point.
outlier_percentile : float, optional (default = 0.01)
Specify the outlier density as a percentile of the distribution of local densities.
target_percentile : float, optional (default = 0.05)
        Specify the target density as a percentile of the distribution of local densities.
Relevant only if 'desired_samples' is left unspecified.
desired_samples : int, optional (default = None)
The number of samples to be selected from the whole data-set such that members
of rare populations and members of more common populations are roughly
        equally represented. To that purpose, a target density is computed so as to select about
'desired_samples' data-points.
Returns
-------
samples_kept : array of shape (n_selected_samples,)
If the 'i'-th sample point of 'data' has been selected by a given instance of
density sampling, number 'i' is featured in the array returned by
the present function.
"""
random_state = np.random.RandomState()
data = np.atleast_2d(data)
for x in (kernel_mult, outlier_percentile, target_percentile):
assert isinstance(x, numbers.Real) and x > 0
for x in (outlier_percentile, target_percentile):
assert x <= 1.0
if local_densities is None:
local_densities = get_local_densities(data, kernel_mult, metric)
if reduce(operator.mul, local_densities.shape, 1) != max(local_densities.shape):
raise ValueError("\nERROR: Density_Sampling: density_sampling: problem with "
"the dimensions of the vector of local densities provided.\n")
else:
local_densities = np.reshape(local_densities, local_densities.size)
outlier_density = np.percentile(local_densities, outlier_percentile)
target_density = np.percentile(local_densities, target_percentile)
samples_kept = np.where(local_densities > outlier_density)[0]
N_kept = samples_kept.size
local_densities = local_densities[samples_kept]
if desired_samples is None:
probs = np.divide(target_density + 0.0, local_densities)
ind = np.where(probs > random_state.uniform(size = N_kept))[0]
samples_kept = samples_kept[ind]
elif desired_samples <= N_kept:
sorted_densities = np.sort(local_densities)
temp = np.reciprocal(sorted_densities[::-1].astype(float))
cdf = np.cumsum(temp)[::-1]
target_density = (desired_samples + 0.0) / cdf[0]
if target_density > sorted_densities[0]:
temp = desired_samples - np.arange(1.0, N_kept + 1.0)
possible_targets = np.divide(temp, cdf)
ind = np.argmax(possible_targets < sorted_densities)
target_density = possible_targets[ind]
probs = np.divide(target_density + 0.0, local_densities)
ind = np.where(probs > random_state.uniform(size = N_kept))[0]
samples_kept = samples_kept[ind]
else:
print("\nERROR: Density_Sampling: density_sampling: 'desired_samples' has been "
"assigned a value of {desired_samples}, larger than {N_kept}, "
"the number of samples whose local densities are high enough "
"(i.e. excluded are the local densities in the lowest {outlier_percentile} "
"percentile).\n".format(**locals()))
exit(1)
return samples_kept
|
def density_sampling(data, local_densities = None, metric = 'manhattan',
kernel_mult = 2.0, outlier_percentile = 0.01,
target_percentile = 0.05, desired_samples = None):
"""The i-th sample point of the data-set 'data' is selected by density sampling
with a probability given by:
| 0 if outlier_density > LD[i];
P(keep the i-th data-point) = | 1 if outlier_density <= LD[i] <= target_density;
| target_density / LD[i] if LD[i] > target_density.
Here 'LD[i]' denotes the local density of the i-th sample point of the data-set,
whereas 'outlier_density' and 'target_density' are computed as particular percentiles
of that distribution of local densities.
Parameters
----------
data : array of shape (n_samples, n_features)
The data-set, a fraction of whose sample points will be extracted
by density sampling.
local_densities : array of shape (n_samples,), optional (default = None)
The i-th entry of this vector corresponds to the local density of the i-th sample
point in the order of the rows of 'data'.
metric : string, optional (default = 'manhattan')
The distance metric used to determine the nearest-neighbor to each data-point.
The DistanceMetric class defined in scikit-learn's library lists all available
metrics.
kernel_mult : float, optional (default = 2.0)
        The kernel multiplier, which determines (in terms of the median of the distribution
of distances among nearest neighbors) the extent of the regions centered
around each sample point to consider for the computation of the local density
associated to that particular sample point.
outlier_percentile : float, optional (default = 0.01)
Specify the outlier density as a percentile of the distribution of local densities.
target_percentile : float, optional (default = 0.05)
        Specify the target density as a percentile of the distribution of local densities.
Relevant only if 'desired_samples' is left unspecified.
desired_samples : int, optional (default = None)
The number of samples to be selected from the whole data-set such that members
of rare populations and members of more common populations are roughly
        equally represented. To that purpose, a target density is computed so as to select about
'desired_samples' data-points.
Returns
-------
samples_kept : array of shape (n_selected_samples,)
If the 'i'-th sample point of 'data' has been selected by a given instance of
density sampling, number 'i' is featured in the array returned by
the present function.
"""
random_state = np.random.RandomState()
data = np.atleast_2d(data)
for x in (kernel_mult, outlier_percentile, target_percentile):
assert isinstance(x, numbers.Real) and x > 0
for x in (outlier_percentile, target_percentile):
assert x <= 1.0
if local_densities is None:
local_densities = get_local_densities(data, kernel_mult, metric)
if reduce(operator.mul, local_densities.shape, 1) != max(local_densities.shape):
raise ValueError("\nERROR: Density_Sampling: density_sampling: problem with "
"the dimensions of the vector of local densities provided.\n")
else:
local_densities = np.reshape(local_densities, local_densities.size)
outlier_density = np.percentile(local_densities, outlier_percentile)
target_density = np.percentile(local_densities, target_percentile)
samples_kept = np.where(local_densities > outlier_density)[0]
N_kept = samples_kept.size
local_densities = local_densities[samples_kept]
if desired_samples is None:
probs = np.divide(target_density + 0.0, local_densities)
ind = np.where(probs > random_state.uniform(size = N_kept))[0]
samples_kept = samples_kept[ind]
elif desired_samples <= N_kept:
sorted_densities = np.sort(local_densities)
temp = np.reciprocal(sorted_densities[::-1].astype(float))
cdf = np.cumsum(temp)[::-1]
target_density = (desired_samples + 0.0) / cdf[0]
if target_density > sorted_densities[0]:
temp = desired_samples - np.arange(1.0, N_kept + 1.0)
possible_targets = np.divide(temp, cdf)
ind = np.argmax(possible_targets < sorted_densities)
target_density = possible_targets[ind]
probs = np.divide(target_density + 0.0, local_densities)
ind = np.where(probs > random_state.uniform(size = N_kept))[0]
samples_kept = samples_kept[ind]
else:
print("\nERROR: Density_Sampling: density_sampling: 'desired_samples' has been "
"assigned a value of {desired_samples}, larger than {N_kept}, "
"the number of samples whose local densities are high enough "
"(i.e. excluded are the local densities in the lowest {outlier_percentile} "
"percentile).\n".format(**locals()))
exit(1)
return samples_kept
|
[
"The",
"i",
"-",
"th",
"sample",
"point",
"of",
"the",
"data",
"-",
"set",
"data",
"is",
"selected",
"by",
"density",
"sampling",
"with",
"a",
"probability",
"given",
"by",
":",
"|",
"0",
"if",
"outlier_density",
">",
"LD",
"[",
"i",
"]",
";",
"P",
"(",
"keep",
"the",
"i",
"-",
"th",
"data",
"-",
"point",
")",
"=",
"|",
"1",
"if",
"outlier_density",
"<",
"=",
"LD",
"[",
"i",
"]",
"<",
"=",
"target_density",
";",
"|",
"target_density",
"/",
"LD",
"[",
"i",
"]",
"if",
"LD",
"[",
"i",
"]",
">",
"target_density",
".",
"Here",
"LD",
"[",
"i",
"]",
"denotes",
"the",
"local",
"density",
"of",
"the",
"i",
"-",
"th",
"sample",
"point",
"of",
"the",
"data",
"-",
"set",
"whereas",
"outlier_density",
"and",
"target_density",
"are",
"computed",
"as",
"particular",
"percentiles",
"of",
"that",
"distribution",
"of",
"local",
"densities",
".",
"Parameters",
"----------",
"data",
":",
"array",
"of",
"shape",
"(",
"n_samples",
"n_features",
")",
"The",
"data",
"-",
"set",
"a",
"fraction",
"of",
"whose",
"sample",
"points",
"will",
"be",
"extracted",
"by",
"density",
"sampling",
".",
"local_densities",
":",
"array",
"of",
"shape",
"(",
"n_samples",
")",
"optional",
"(",
"default",
"=",
"None",
")",
"The",
"i",
"-",
"th",
"entry",
"of",
"this",
"vector",
"corresponds",
"to",
"the",
"local",
"density",
"of",
"the",
"i",
"-",
"th",
"sample",
"point",
"in",
"the",
"order",
"of",
"the",
"rows",
"of",
"data",
".",
"metric",
":",
"string",
"optional",
"(",
"default",
"=",
"manhattan",
")",
"The",
"distance",
"metric",
"used",
"to",
"determine",
"the",
"nearest",
"-",
"neighbor",
"to",
"each",
"data",
"-",
"point",
".",
"The",
"DistanceMetric",
"class",
"defined",
"in",
"scikit",
"-",
"learn",
"s",
"library",
"lists",
"all",
"available",
"metrics",
".",
"kernel_mult",
":",
"float",
"optional",
"(",
"default",
"=",
"2",
".",
"0",
")",
"The",
"kernel",
"multiplier",
"which",
"determine",
"(",
"in",
"terms",
"of",
"the",
"median",
"of",
"the",
"distribution",
"of",
"distances",
"among",
"nearest",
"neighbors",
")",
"the",
"extent",
"of",
"the",
"regions",
"centered",
"around",
"each",
"sample",
"point",
"to",
"consider",
"for",
"the",
"computation",
"of",
"the",
"local",
"density",
"associated",
"to",
"that",
"particular",
"sample",
"point",
".",
"outlier_percentile",
":",
"float",
"optional",
"(",
"default",
"=",
"0",
".",
"01",
")",
"Specify",
"the",
"outlier",
"density",
"as",
"a",
"percentile",
"of",
"the",
"distribution",
"of",
"local",
"densities",
".",
"target_percentile",
":",
"float",
"optional",
"(",
"default",
"=",
"0",
".",
"05",
")",
"Specifiy",
"the",
"target",
"density",
"as",
"a",
"percentile",
"of",
"the",
"distribution",
"of",
"local",
"densities",
".",
"Relevant",
"only",
"if",
"desired_samples",
"is",
"left",
"unspecified",
".",
"desired_samples",
":",
"int",
"optional",
"(",
"default",
"=",
"None",
")",
"The",
"number",
"of",
"samples",
"to",
"be",
"selected",
"from",
"the",
"whole",
"data",
"-",
"set",
"such",
"that",
"members",
"of",
"rare",
"populations",
"and",
"members",
"of",
"more",
"common",
"populations",
"are",
"roughly",
"equally",
"represented",
".",
"To",
"that",
"purpose",
"a",
"target",
"density",
"is",
"computed",
"that",
"to",
"selects",
"about",
"desired_samples",
"data",
"-",
"points",
".",
"Returns",
"-------",
"samples_kept",
":",
"array",
"of",
"shape",
"(",
"n_selected_samples",
")",
"If",
"the",
"i",
"-",
"th",
"sample",
"point",
"of",
"data",
"has",
"been",
"selected",
"by",
"a",
"given",
"instance",
"of",
"density",
"sampling",
"number",
"i",
"is",
"featured",
"in",
"the",
"array",
"returned",
"by",
"the",
"present",
"function",
"."
] |
GGiecold/Density_Sampling
|
python
|
https://github.com/GGiecold/Density_Sampling/blob/8c8e6c63a97fecf958238e12947e5e6542b64102/Density_Sampling.py#L212-L323
|
[
"def",
"density_sampling",
"(",
"data",
",",
"local_densities",
"=",
"None",
",",
"metric",
"=",
"'manhattan'",
",",
"kernel_mult",
"=",
"2.0",
",",
"outlier_percentile",
"=",
"0.01",
",",
"target_percentile",
"=",
"0.05",
",",
"desired_samples",
"=",
"None",
")",
":",
"random_state",
"=",
"np",
".",
"random",
".",
"RandomState",
"(",
")",
"data",
"=",
"np",
".",
"atleast_2d",
"(",
"data",
")",
"for",
"x",
"in",
"(",
"kernel_mult",
",",
"outlier_percentile",
",",
"target_percentile",
")",
":",
"assert",
"isinstance",
"(",
"x",
",",
"numbers",
".",
"Real",
")",
"and",
"x",
">",
"0",
"for",
"x",
"in",
"(",
"outlier_percentile",
",",
"target_percentile",
")",
":",
"assert",
"x",
"<=",
"1.0",
"if",
"local_densities",
"is",
"None",
":",
"local_densities",
"=",
"get_local_densities",
"(",
"data",
",",
"kernel_mult",
",",
"metric",
")",
"if",
"reduce",
"(",
"operator",
".",
"mul",
",",
"local_densities",
".",
"shape",
",",
"1",
")",
"!=",
"max",
"(",
"local_densities",
".",
"shape",
")",
":",
"raise",
"ValueError",
"(",
"\"\\nERROR: Density_Sampling: density_sampling: problem with \"",
"\"the dimensions of the vector of local densities provided.\\n\"",
")",
"else",
":",
"local_densities",
"=",
"np",
".",
"reshape",
"(",
"local_densities",
",",
"local_densities",
".",
"size",
")",
"outlier_density",
"=",
"np",
".",
"percentile",
"(",
"local_densities",
",",
"outlier_percentile",
")",
"target_density",
"=",
"np",
".",
"percentile",
"(",
"local_densities",
",",
"target_percentile",
")",
"samples_kept",
"=",
"np",
".",
"where",
"(",
"local_densities",
">",
"outlier_density",
")",
"[",
"0",
"]",
"N_kept",
"=",
"samples_kept",
".",
"size",
"local_densities",
"=",
"local_densities",
"[",
"samples_kept",
"]",
"if",
"desired_samples",
"is",
"None",
":",
"probs",
"=",
"np",
".",
"divide",
"(",
"target_density",
"+",
"0.0",
",",
"local_densities",
")",
"ind",
"=",
"np",
".",
"where",
"(",
"probs",
">",
"random_state",
".",
"uniform",
"(",
"size",
"=",
"N_kept",
")",
")",
"[",
"0",
"]",
"samples_kept",
"=",
"samples_kept",
"[",
"ind",
"]",
"elif",
"desired_samples",
"<=",
"N_kept",
":",
"sorted_densities",
"=",
"np",
".",
"sort",
"(",
"local_densities",
")",
"temp",
"=",
"np",
".",
"reciprocal",
"(",
"sorted_densities",
"[",
":",
":",
"-",
"1",
"]",
".",
"astype",
"(",
"float",
")",
")",
"cdf",
"=",
"np",
".",
"cumsum",
"(",
"temp",
")",
"[",
":",
":",
"-",
"1",
"]",
"target_density",
"=",
"(",
"desired_samples",
"+",
"0.0",
")",
"/",
"cdf",
"[",
"0",
"]",
"if",
"target_density",
">",
"sorted_densities",
"[",
"0",
"]",
":",
"temp",
"=",
"desired_samples",
"-",
"np",
".",
"arange",
"(",
"1.0",
",",
"N_kept",
"+",
"1.0",
")",
"possible_targets",
"=",
"np",
".",
"divide",
"(",
"temp",
",",
"cdf",
")",
"ind",
"=",
"np",
".",
"argmax",
"(",
"possible_targets",
"<",
"sorted_densities",
")",
"target_density",
"=",
"possible_targets",
"[",
"ind",
"]",
"probs",
"=",
"np",
".",
"divide",
"(",
"target_density",
"+",
"0.0",
",",
"local_densities",
")",
"ind",
"=",
"np",
".",
"where",
"(",
"probs",
">",
"random_state",
".",
"uniform",
"(",
"size",
"=",
"N_kept",
")",
")",
"[",
"0",
"]",
"samples_kept",
"=",
"samples_kept",
"[",
"ind",
"]",
"else",
":",
"print",
"(",
"\"\\nERROR: Density_Sampling: density_sampling: 'desired_samples' has been \"",
"\"assigned a value of {desired_samples}, larger than {N_kept}, \"",
"\"the number of samples whose local densities are high enough \"",
"\"(i.e. excluded are the local densities in the lowest {outlier_percentile} \"",
"\"percentile).\\n\"",
".",
"format",
"(",
"*",
"*",
"locals",
"(",
")",
")",
")",
"exit",
"(",
"1",
")",
"return",
"samples_kept"
] |
8c8e6c63a97fecf958238e12947e5e6542b64102
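The keep-probability rule from the docstring can be written out directly. The density values and thresholds below are made-up numbers; the real function derives outlier_density and target_density from percentiles of the local-density distribution.
import numpy as np

local_densities = np.array([1.0, 3.0, 5.0, 8.0, 20.0])
outlier_density, target_density = 2.0, 8.0

keep_probs = np.where(
    local_densities < outlier_density,
    0.0,                                                 # outliers: drop
    np.minimum(1.0, target_density / local_densities))   # cap at 1 in between
print(keep_probs)  # [0.  1.  1.  1.  0.4]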
|
test
|
Client.new
|
Creates a new cross-service client.
|
hbp_service_client/client.py
|
def new(cls, access_token, environment='prod'):
'''Creates a new cross-service client.'''
return cls(
storage_client=StorageClient.new(access_token, environment=environment))
|
def new(cls, access_token, environment='prod'):
'''Creates a new cross-service client.'''
return cls(
storage_client=StorageClient.new(access_token, environment=environment))
|
[
"Creates",
"a",
"new",
"cross",
"-",
"service",
"client",
"."
] |
HumanBrainProject/hbp-service-client
|
python
|
https://github.com/HumanBrainProject/hbp-service-client/blob/b338fb41a7f0e7b9d654ff28fcf13a56d03bff4d/hbp_service_client/client.py#L16-L20
|
[
"def",
"new",
"(",
"cls",
",",
"access_token",
",",
"environment",
"=",
"'prod'",
")",
":",
"return",
"cls",
"(",
"storage_client",
"=",
"StorageClient",
".",
"new",
"(",
"access_token",
",",
"environment",
"=",
"environment",
")",
")"
] |
b338fb41a7f0e7b9d654ff28fcf13a56d03bff4d
|
test
|
ApiClient.new
|
Create a new storage service REST client.
Arguments:
environment: The service environment to be used for the client
access_token: The access token used to authenticate with the
service
Returns:
A storage_service.api.ApiClient instance
Example:
>>> storage_client = ApiClient.new(my_access_token)
|
hbp_service_client/storage_service/api.py
|
def new(cls, access_token, environment='prod'):
'''Create a new storage service REST client.
Arguments:
environment: The service environment to be used for the client
access_token: The access token used to authenticate with the
service
Returns:
A storage_service.api.ApiClient instance
Example:
>>> storage_client = ApiClient.new(my_access_token)
'''
request = RequestBuilder \
.request(environment) \
.to_service(cls.SERVICE_NAME, cls.SERVICE_VERSION) \
.throw(
StorageForbiddenException,
lambda resp: 'You are forbidden to do this.'
if resp.status_code == 403 else None
) \
.throw(
StorageNotFoundException,
lambda resp: 'The entity is not found'
if resp.status_code == 404 else None
) \
.throw(
StorageException,
lambda resp: 'Server response: {0} - {1}'.format(resp.status_code, resp.text)
if not resp.ok else None
)
authenticated_request = request.with_token(access_token)
return cls(request, authenticated_request)
|
def new(cls, access_token, environment='prod'):
'''Create a new storage service REST client.
Arguments:
environment: The service environment to be used for the client
access_token: The access token used to authenticate with the
service
Returns:
A storage_service.api.ApiClient instance
Example:
>>> storage_client = ApiClient.new(my_access_token)
'''
request = RequestBuilder \
.request(environment) \
.to_service(cls.SERVICE_NAME, cls.SERVICE_VERSION) \
.throw(
StorageForbiddenException,
lambda resp: 'You are forbidden to do this.'
if resp.status_code == 403 else None
) \
.throw(
StorageNotFoundException,
lambda resp: 'The entity is not found'
if resp.status_code == 404 else None
) \
.throw(
StorageException,
lambda resp: 'Server response: {0} - {1}'.format(resp.status_code, resp.text)
if not resp.ok else None
)
authenticated_request = request.with_token(access_token)
return cls(request, authenticated_request)
|
[
"Create",
"a",
"new",
"storage",
"service",
"REST",
"client",
"."
] |
HumanBrainProject/hbp-service-client
|
python
|
https://github.com/HumanBrainProject/hbp-service-client/blob/b338fb41a7f0e7b9d654ff28fcf13a56d03bff4d/hbp_service_client/storage_service/api.py#L46-L82
|
[
"def",
"new",
"(",
"cls",
",",
"access_token",
",",
"environment",
"=",
"'prod'",
")",
":",
"request",
"=",
"RequestBuilder",
".",
"request",
"(",
"environment",
")",
".",
"to_service",
"(",
"cls",
".",
"SERVICE_NAME",
",",
"cls",
".",
"SERVICE_VERSION",
")",
".",
"throw",
"(",
"StorageForbiddenException",
",",
"lambda",
"resp",
":",
"'You are forbidden to do this.'",
"if",
"resp",
".",
"status_code",
"==",
"403",
"else",
"None",
")",
".",
"throw",
"(",
"StorageNotFoundException",
",",
"lambda",
"resp",
":",
"'The entity is not found'",
"if",
"resp",
".",
"status_code",
"==",
"404",
"else",
"None",
")",
".",
"throw",
"(",
"StorageException",
",",
"lambda",
"resp",
":",
"'Server response: {0} - {1}'",
".",
"format",
"(",
"resp",
".",
"status_code",
",",
"resp",
".",
"text",
")",
"if",
"not",
"resp",
".",
"ok",
"else",
"None",
")",
"authenticated_request",
"=",
"request",
".",
"with_token",
"(",
"access_token",
")",
"return",
"cls",
"(",
"request",
",",
"authenticated_request",
")"
] |
b338fb41a7f0e7b9d654ff28fcf13a56d03bff4d
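The chained calls above follow a fluent, immutable-builder style: each step returns a builder, so partially configured chains (like request vs. authenticated_request) can be reused. A minimal sketch of the pattern, which does not reproduce the real RequestBuilder internals:
class MiniBuilder:
    def __init__(self, parts=None):
        self._parts = parts or []

    def _with(self, part):
        # Return a fresh builder so earlier chain stages stay reusable.
        return MiniBuilder(self._parts + [part])

    def to_service(self, name, version):
        return self._with(('service', name, version))

    def throw(self, exc_type, matcher):
        return self._with(('throw', exc_type.__name__, matcher))

    def describe(self):
        return self._parts

chain = (MiniBuilder()
         .to_service('storage', 'v1')
         .throw(RuntimeError, lambda resp: None))
print(chain.describe())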
|
test
|
ApiClient._prep_params
|
Remove empty (None) valued keywords and self from function parameters
|
hbp_service_client/storage_service/api.py
|
def _prep_params(params):
'''Remove empty (None) valued keywords and self from function parameters'''
return {k: v for (k, v) in params.items() if v is not None and k != 'self'}
|
def _prep_params(params):
'''Remove empty (None) valued keywords and self from function parameters'''
return {k: v for (k, v) in params.items() if v is not None and k != 'self'}
|
[
"Remove",
"empty",
"(",
"None",
")",
"valued",
"keywords",
"and",
"self",
"from",
"function",
"parameters"
] |
HumanBrainProject/hbp-service-client
|
python
|
https://github.com/HumanBrainProject/hbp-service-client/blob/b338fb41a7f0e7b9d654ff28fcf13a56d03bff4d/hbp_service_client/storage_service/api.py#L85-L88
|
[
"def",
"_prep_params",
"(",
"params",
")",
":",
"return",
"{",
"k",
":",
"v",
"for",
"(",
"k",
",",
"v",
")",
"in",
"params",
".",
"items",
"(",
")",
"if",
"v",
"is",
"not",
"None",
"and",
"k",
"!=",
"'self'",
"}"
] |
b338fb41a7f0e7b9d654ff28fcf13a56d03bff4d
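A quick demonstration of _prep_params on a snapshot like the locals().copy() used by the query methods below: None-valued keywords and 'self' are dropped before the rest are sent as query parameters.
def _prep_params(params):
    return {k: v for (k, v) in params.items() if v is not None and k != 'self'}

snapshot = {'self': object(), 'uuid': None, 'path': '/my_project/myfile'}
print(_prep_params(snapshot))  # {'path': '/my_project/myfile'}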
|
test
|
ApiClient.get_entity_details
|
Get generic entity by UUID.
Args:
entity_id (str): The UUID of the requested entity.
Returns:
A dictionary describing the entity::
{
u'collab_id': 2271,
u'created_by': u'303447',
u'created_on': u'2017-03-10T12:50:06.077891Z',
u'description': u'',
u'entity_type': u'project',
u'modified_by': u'303447',
u'modified_on': u'2017-03-10T12:50:06.077946Z',
u'name': u'2271',
u'uuid': u'3abd8742-d069-44cf-a66b-2370df74a682'
}
Raises:
StorageArgumentException: Invalid arguments
StorageForbiddenException: Server response code 403
StorageNotFoundException: Server response code 404
StorageException: other 400-600 error codes
|
hbp_service_client/storage_service/api.py
|
def get_entity_details(self, entity_id):
'''Get generic entity by UUID.
Args:
entity_id (str): The UUID of the requested entity.
Returns:
A dictionary describing the entity::
{
u'collab_id': 2271,
u'created_by': u'303447',
u'created_on': u'2017-03-10T12:50:06.077891Z',
u'description': u'',
u'entity_type': u'project',
u'modified_by': u'303447',
u'modified_on': u'2017-03-10T12:50:06.077946Z',
u'name': u'2271',
u'uuid': u'3abd8742-d069-44cf-a66b-2370df74a682'
}
Raises:
StorageArgumentException: Invalid arguments
StorageForbiddenException: Server response code 403
StorageNotFoundException: Server response code 404
StorageException: other 400-600 error codes
'''
if not is_valid_uuid(entity_id):
raise StorageArgumentException(
'Invalid UUID for entity_id: {0}'.format(entity_id))
return self._authenticated_request \
.to_endpoint('entity/{}/'.format(entity_id)) \
.return_body() \
.get()
|
def get_entity_details(self, entity_id):
'''Get generic entity by UUID.
Args:
entity_id (str): The UUID of the requested entity.
Returns:
A dictionary describing the entity::
{
u'collab_id': 2271,
u'created_by': u'303447',
u'created_on': u'2017-03-10T12:50:06.077891Z',
u'description': u'',
u'entity_type': u'project',
u'modified_by': u'303447',
u'modified_on': u'2017-03-10T12:50:06.077946Z',
u'name': u'2271',
u'uuid': u'3abd8742-d069-44cf-a66b-2370df74a682'
}
Raises:
StorageArgumentException: Invalid arguments
StorageForbiddenException: Server response code 403
StorageNotFoundException: Server response code 404
StorageException: other 400-600 error codes
'''
if not is_valid_uuid(entity_id):
raise StorageArgumentException(
'Invalid UUID for entity_id: {0}'.format(entity_id))
return self._authenticated_request \
.to_endpoint('entity/{}/'.format(entity_id)) \
.return_body() \
.get()
|
[
"Get",
"generic",
"entity",
"by",
"UUID",
"."
] |
HumanBrainProject/hbp-service-client
|
python
|
https://github.com/HumanBrainProject/hbp-service-client/blob/b338fb41a7f0e7b9d654ff28fcf13a56d03bff4d/hbp_service_client/storage_service/api.py#L90-L123
|
[
"def",
"get_entity_details",
"(",
"self",
",",
"entity_id",
")",
":",
"if",
"not",
"is_valid_uuid",
"(",
"entity_id",
")",
":",
"raise",
"StorageArgumentException",
"(",
"'Invalid UUID for entity_id: {0}'",
".",
"format",
"(",
"entity_id",
")",
")",
"return",
"self",
".",
"_authenticated_request",
".",
"to_endpoint",
"(",
"'entity/{}/'",
".",
"format",
"(",
"entity_id",
")",
")",
".",
"return_body",
"(",
")",
".",
"get",
"(",
")"
] |
b338fb41a7f0e7b9d654ff28fcf13a56d03bff4d
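A hypothetical call sketch: the access token is a placeholder and the UUID is the one from the docstring example above.
from hbp_service_client.storage_service.api import ApiClient

api = ApiClient.new('my-access-token')  # placeholder token
details = api.get_entity_details('3abd8742-d069-44cf-a66b-2370df74a682')
print(details['entity_type'], details['name'])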
|
test
|
ApiClient.get_entity_by_query
|
Retrieve entity by query param which can be either uuid/path/metadata.
Args:
uuid (str): The UUID of the requested entity.
path (str): The path of the requested entity.
        metadata (dict): A dictionary of one metadata {key: value} pair of the
            requested entity.
Returns:
The details of the entity, if found::
{
u'content_type': u'plain/text',
u'created_by': u'303447',
u'created_on': u'2017-03-13T10:52:23.275087Z',
u'description': u'',
u'entity_type': u'file',
u'modified_by': u'303447',
u'modified_on': u'2017-03-13T10:52:23.275126Z',
u'name': u'myfile',
u'parent': u'3abd8742-d069-44cf-a66b-2370df74a682',
u'uuid': u'e2c25c1b-f6a9-4cf6-b8d2-271e628a9a56'
}
Raises:
StorageArgumentException: Invalid arguments
StorageForbiddenException: Server response code 403
StorageNotFoundException: Server response code 404
StorageException: other 400-600 error codes
|
hbp_service_client/storage_service/api.py
|
def get_entity_by_query(self, uuid=None, path=None, metadata=None):
'''Retrieve entity by query param which can be either uuid/path/metadata.
Args:
uuid (str): The UUID of the requested entity.
path (str): The path of the requested entity.
        metadata (dict): A dictionary of one metadata {key: value} pair of the
            requested entity.
Returns:
The details of the entity, if found::
{
u'content_type': u'plain/text',
u'created_by': u'303447',
u'created_on': u'2017-03-13T10:52:23.275087Z',
u'description': u'',
u'entity_type': u'file',
u'modified_by': u'303447',
u'modified_on': u'2017-03-13T10:52:23.275126Z',
u'name': u'myfile',
u'parent': u'3abd8742-d069-44cf-a66b-2370df74a682',
u'uuid': u'e2c25c1b-f6a9-4cf6-b8d2-271e628a9a56'
}
Raises:
StorageArgumentException: Invalid arguments
StorageForbiddenException: Server response code 403
StorageNotFoundException: Server response code 404
StorageException: other 400-600 error codes
'''
if not (uuid or path or metadata):
raise StorageArgumentException('No parameter given for the query.')
if uuid and not is_valid_uuid(uuid):
raise StorageArgumentException(
'Invalid UUID for uuid: {0}'.format(uuid))
params = locals().copy()
if metadata:
if not isinstance(metadata, dict):
raise StorageArgumentException('The metadata needs to be provided'
' as a dictionary.')
key, value = next(iter(metadata.items()))
params[key] = value
del params['metadata']
params = self._prep_params(params)
return self._authenticated_request \
.to_endpoint('entity/') \
.with_params(params) \
.return_body() \
.get()
|
def get_entity_by_query(self, uuid=None, path=None, metadata=None):
'''Retrieve entity by query param which can be either uuid/path/metadata.
Args:
uuid (str): The UUID of the requested entity.
path (str): The path of the requested entity.
        metadata (dict): A dictionary of one metadata {key: value} pair of the
            requested entity.
Returns:
The details of the entity, if found::
{
u'content_type': u'plain/text',
u'created_by': u'303447',
u'created_on': u'2017-03-13T10:52:23.275087Z',
u'description': u'',
u'entity_type': u'file',
u'modified_by': u'303447',
u'modified_on': u'2017-03-13T10:52:23.275126Z',
u'name': u'myfile',
u'parent': u'3abd8742-d069-44cf-a66b-2370df74a682',
u'uuid': u'e2c25c1b-f6a9-4cf6-b8d2-271e628a9a56'
}
Raises:
StorageArgumentException: Invalid arguments
StorageForbiddenException: Server response code 403
StorageNotFoundException: Server response code 404
StorageException: other 400-600 error codes
'''
if not (uuid or path or metadata):
raise StorageArgumentException('No parameter given for the query.')
if uuid and not is_valid_uuid(uuid):
raise StorageArgumentException(
'Invalid UUID for uuid: {0}'.format(uuid))
params = locals().copy()
if metadata:
if not isinstance(metadata, dict):
raise StorageArgumentException('The metadata needs to be provided'
' as a dictionary.')
key, value = next(iter(metadata.items()))
params[key] = value
del params['metadata']
params = self._prep_params(params)
return self._authenticated_request \
.to_endpoint('entity/') \
.with_params(params) \
.return_body() \
.get()
|
[
"Retrieve",
"entity",
"by",
"query",
"param",
"which",
"can",
"be",
"either",
"uuid",
"/",
"path",
"/",
"metadata",
"."
] |
HumanBrainProject/hbp-service-client
|
python
|
https://github.com/HumanBrainProject/hbp-service-client/blob/b338fb41a7f0e7b9d654ff28fcf13a56d03bff4d/hbp_service_client/storage_service/api.py#L175-L225
|
[
"def",
"get_entity_by_query",
"(",
"self",
",",
"uuid",
"=",
"None",
",",
"path",
"=",
"None",
",",
"metadata",
"=",
"None",
")",
":",
"if",
"not",
"(",
"uuid",
"or",
"path",
"or",
"metadata",
")",
":",
"raise",
"StorageArgumentException",
"(",
"'No parameter given for the query.'",
")",
"if",
"uuid",
"and",
"not",
"is_valid_uuid",
"(",
"uuid",
")",
":",
"raise",
"StorageArgumentException",
"(",
"'Invalid UUID for uuid: {0}'",
".",
"format",
"(",
"uuid",
")",
")",
"params",
"=",
"locals",
"(",
")",
".",
"copy",
"(",
")",
"if",
"metadata",
":",
"if",
"not",
"isinstance",
"(",
"metadata",
",",
"dict",
")",
":",
"raise",
"StorageArgumentException",
"(",
"'The metadata needs to be provided'",
"' as a dictionary.'",
")",
"key",
",",
"value",
"=",
"next",
"(",
"iter",
"(",
"metadata",
".",
"items",
"(",
")",
")",
")",
"params",
"[",
"key",
"]",
"=",
"value",
"del",
"params",
"[",
"'metadata'",
"]",
"params",
"=",
"self",
".",
"_prep_params",
"(",
"params",
")",
"return",
"self",
".",
"_authenticated_request",
".",
"to_endpoint",
"(",
"'entity/'",
")",
".",
"with_params",
"(",
"params",
")",
".",
"return_body",
"(",
")",
".",
"get",
"(",
")"
] |
b338fb41a7f0e7b9d654ff28fcf13a56d03bff4d
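The three alternative query styles side by side; the token, UUID, path, and metadata values are placeholders.
from hbp_service_client.storage_service.api import ApiClient

api = ApiClient.new('my-access-token')
by_uuid = api.get_entity_by_query(uuid='e2c25c1b-f6a9-4cf6-b8d2-271e628a9a56')
by_path = api.get_entity_by_query(path='/my_project/myfile')
by_meta = api.get_entity_by_query(metadata={'foo': '100'})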
|
test
|
ApiClient.set_metadata
|
Set metadata for an entity.
Args:
entity_type (str): Type of the entity. Admitted values: ['project',
'folder', 'file'].
entity_id (str): The UUID of the entity to be modified.
metadata (dict): A dictionary of key/value pairs to be written as
metadata.
Warning:
It will replace all existing metadata with the provided dictionary.
Returns:
A dictionary of the updated metadata::
{
u'bar': u'200',
u'foo': u'100'
}
Raises:
StorageArgumentException: Invalid arguments
StorageForbiddenException: Server response code 403
StorageNotFoundException: Server response code 404
StorageException: other 400-600 error codes
|
hbp_service_client/storage_service/api.py
|
def set_metadata(self, entity_type, entity_id, metadata):
'''Set metadata for an entity.
Args:
entity_type (str): Type of the entity. Admitted values: ['project',
'folder', 'file'].
entity_id (str): The UUID of the entity to be modified.
metadata (dict): A dictionary of key/value pairs to be written as
metadata.
Warning:
It will replace all existing metadata with the provided dictionary.
Returns:
A dictionary of the updated metadata::
{
u'bar': u'200',
u'foo': u'100'
}
Raises:
StorageArgumentException: Invalid arguments
StorageForbiddenException: Server response code 403
StorageNotFoundException: Server response code 404
StorageException: other 400-600 error codes
'''
if not is_valid_uuid(entity_id):
raise StorageArgumentException(
'Invalid UUID for entity_id: {0}'.format(entity_id))
if not isinstance(metadata, dict):
raise StorageArgumentException('The metadata was not provided as a '
'dictionary')
return self._authenticated_request \
.to_endpoint('{}/{}/metadata/'.format(entity_type, entity_id)) \
.with_json_body(metadata) \
.return_body() \
.post()
|
def set_metadata(self, entity_type, entity_id, metadata):
'''Set metadata for an entity.
Args:
entity_type (str): Type of the entity. Admitted values: ['project',
'folder', 'file'].
entity_id (str): The UUID of the entity to be modified.
metadata (dict): A dictionary of key/value pairs to be written as
metadata.
Warning:
It will replace all existing metadata with the provided dictionary.
Returns:
A dictionary of the updated metadata::
{
u'bar': u'200',
u'foo': u'100'
}
Raises:
StorageArgumentException: Invalid arguments
StorageForbiddenException: Server response code 403
StorageNotFoundException: Server response code 404
StorageException: other 400-600 error codes
'''
if not is_valid_uuid(entity_id):
raise StorageArgumentException(
'Invalid UUID for entity_id: {0}'.format(entity_id))
if not isinstance(metadata, dict):
raise StorageArgumentException('The metadata was not provided as a '
'dictionary')
return self._authenticated_request \
.to_endpoint('{}/{}/metadata/'.format(entity_type, entity_id)) \
.with_json_body(metadata) \
.return_body() \
.post()
|
[
"Set",
"metadata",
"for",
"an",
"entity",
"."
] |
HumanBrainProject/hbp-service-client
|
python
|
https://github.com/HumanBrainProject/hbp-service-client/blob/b338fb41a7f0e7b9d654ff28fcf13a56d03bff4d/hbp_service_client/storage_service/api.py#L231-L269
|
[
"def",
"set_metadata",
"(",
"self",
",",
"entity_type",
",",
"entity_id",
",",
"metadata",
")",
":",
"if",
"not",
"is_valid_uuid",
"(",
"entity_id",
")",
":",
"raise",
"StorageArgumentException",
"(",
"'Invalid UUID for entity_id: {0}'",
".",
"format",
"(",
"entity_id",
")",
")",
"if",
"not",
"isinstance",
"(",
"metadata",
",",
"dict",
")",
":",
"raise",
"StorageArgumentException",
"(",
"'The metadata was not provided as a '",
"'dictionary'",
")",
"return",
"self",
".",
"_authenticated_request",
".",
"to_endpoint",
"(",
"'{}/{}/metadata/'",
".",
"format",
"(",
"entity_type",
",",
"entity_id",
")",
")",
".",
"with_json_body",
"(",
"metadata",
")",
".",
"return_body",
"(",
")",
".",
"post",
"(",
")"
] |
b338fb41a7f0e7b9d654ff28fcf13a56d03bff4d
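Because set_metadata replaces the entire metadata dictionary, read-modify-write is the safe pattern when only one key should change. The token and UUID are placeholders; get_metadata and update_metadata are the methods documented next.
from hbp_service_client.storage_service.api import ApiClient

api = ApiClient.new('my-access-token')
file_id = 'e2c25c1b-f6a9-4cf6-b8d2-271e628a9a56'

meta = api.get_metadata('file', file_id)   # read current metadata
meta['reviewed'] = 'true'                  # change one key locally
api.set_metadata('file', file_id, meta)    # write everything back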
|
test
|
ApiClient.get_metadata
|
Get metadata of an entity.
Args:
entity_type (str): Type of the entity. Admitted values: ['project',
'folder', 'file'].
entity_id (str): The UUID of the entity to be modified.
Returns:
A dictionary of the metadata::
{
u'bar': u'200',
u'foo': u'100'
}
Raises:
StorageArgumentException: Invalid arguments
StorageForbiddenException: Server response code 403
StorageNotFoundException: Server response code 404
StorageException: other 400-600 error codes
|
hbp_service_client/storage_service/api.py
|
def get_metadata(self, entity_type, entity_id):
'''Get metadata of an entity.
Args:
entity_type (str): Type of the entity. Admitted values: ['project',
'folder', 'file'].
entity_id (str): The UUID of the entity to be modified.
Returns:
A dictionary of the metadata::
{
u'bar': u'200',
u'foo': u'100'
}
Raises:
StorageArgumentException: Invalid arguments
StorageForbiddenException: Server response code 403
StorageNotFoundException: Server response code 404
StorageException: other 400-600 error codes
'''
if not is_valid_uuid(entity_id):
raise StorageArgumentException(
'Invalid UUID for entity_id: {0}'.format(entity_id))
return self._authenticated_request \
.to_endpoint('{}/{}/metadata/'.format(entity_type, entity_id)) \
.return_body() \
.get()
|
def get_metadata(self, entity_type, entity_id):
'''Get metadata of an entity.
Args:
entity_type (str): Type of the entity. Admitted values: ['project',
'folder', 'file'].
entity_id (str): The UUID of the entity to be modified.
Returns:
A dictionary of the metadata::
{
u'bar': u'200',
u'foo': u'100'
}
Raises:
StorageArgumentException: Invalid arguments
StorageForbiddenException: Server response code 403
StorageNotFoundException: Server response code 404
StorageException: other 400-600 error codes
'''
if not is_valid_uuid(entity_id):
raise StorageArgumentException(
'Invalid UUID for entity_id: {0}'.format(entity_id))
return self._authenticated_request \
.to_endpoint('{}/{}/metadata/'.format(entity_type, entity_id)) \
.return_body() \
.get()
|
[
"Get",
"metadata",
"of",
"an",
"entity",
"."
] |
HumanBrainProject/hbp-service-client
|
python
|
https://github.com/HumanBrainProject/hbp-service-client/blob/b338fb41a7f0e7b9d654ff28fcf13a56d03bff4d/hbp_service_client/storage_service/api.py#L271-L300
|
[
"def",
"get_metadata",
"(",
"self",
",",
"entity_type",
",",
"entity_id",
")",
":",
"if",
"not",
"is_valid_uuid",
"(",
"entity_id",
")",
":",
"raise",
"StorageArgumentException",
"(",
"'Invalid UUID for entity_id: {0}'",
".",
"format",
"(",
"entity_id",
")",
")",
"return",
"self",
".",
"_authenticated_request",
".",
"to_endpoint",
"(",
"'{}/{}/metadata/'",
".",
"format",
"(",
"entity_type",
",",
"entity_id",
")",
")",
".",
"return_body",
"(",
")",
".",
"get",
"(",
")"
] |
b338fb41a7f0e7b9d654ff28fcf13a56d03bff4d
|
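The read-side counterpart, under the same assumption of an authenticated client and a placeholder file_uuid; malformed UUIDs fail on the client before any request is sent:
metadata = client.get_metadata('file', file_uuid)
print(metadata.get('foo'))  # '100'
# client.get_metadata('file', 'not-a-uuid') would raise
# StorageArgumentException without touching the network.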
test
|
ApiClient.update_metadata
|
Update the metadata of an entity.
Existing non-modified metadata will not be affected.
Args:
entity_type (str): Type of the entity. Admitted values: 'project',
'folder', 'file'.
entity_id (str): The UUID of the entity to be modified.
metadata (dict): A dictionary of key/value pairs to be written as
metadata.
Returns:
A dictionary of the updated object metadata::
{
u'bar': u'200',
u'foo': u'100'
}
Raises:
StorageArgumentException: Invalid arguments
StorageForbiddenException: Server response code 403
StorageNotFoundException: Server response code 404
StorageException: other 400-600 error codes
|
hbp_service_client/storage_service/api.py
|
def update_metadata(self, entity_type, entity_id, metadata):
'''Update the metadata of an entity.
Existing non-modified metadata will not be affected.
Args:
entity_type (str): Type of the entity. Admitted values: 'project',
'folder', 'file'.
entity_id (str): The UUID of the entity to be modified.
metadata (dict): A dictionary of key/value pairs to be written as
metadata.
Returns:
A dictionary of the updated object metadata::
{
u'bar': u'200',
u'foo': u'100'
}
Raises:
StorageArgumentException: Invalid arguments
StorageForbiddenException: Server response code 403
StorageNotFoundException: Server response code 404
StorageException: other 400-600 error codes
'''
if not is_valid_uuid(entity_id):
raise StorageArgumentException(
'Invalid UUID for entity_id: {0}'.format(entity_id))
if not isinstance(metadata, dict):
raise StorageArgumentException('The metadata was not provided as a '
'dictionary')
return self._authenticated_request \
.to_endpoint('{}/{}/metadata/'.format(entity_type, entity_id)) \
.with_json_body(metadata) \
.return_body() \
.put()
|
def update_metadata(self, entity_type, entity_id, metadata):
'''Update the metadata of an entity.
Existing non-modified metadata will not be affected.
Args:
entity_type (str): Type of the entity. Admitted values: 'project',
'folder', 'file'.
entity_id (str): The UUID of the entity to be modified.
metadata (dict): A dictionary of key/value pairs to be written as
metadata.
Returns:
A dictionary of the updated object metadata::
{
u'bar': u'200',
u'foo': u'100'
}
Raises:
StorageArgumentException: Invalid arguments
StorageForbiddenException: Server response code 403
StorageNotFoundException: Server response code 404
StorageException: other 400-600 error codes
'''
if not is_valid_uuid(entity_id):
raise StorageArgumentException(
'Invalid UUID for entity_id: {0}'.format(entity_id))
if not isinstance(metadata, dict):
raise StorageArgumentException('The metadata was not provided as a '
'dictionary')
return self._authenticated_request \
.to_endpoint('{}/{}/metadata/'.format(entity_type, entity_id)) \
.with_json_body(metadata) \
.return_body() \
.put()
|
[
"Update",
"the",
"metadata",
"of",
"an",
"entity",
"."
] |
HumanBrainProject/hbp-service-client
|
python
|
https://github.com/HumanBrainProject/hbp-service-client/blob/b338fb41a7f0e7b9d654ff28fcf13a56d03bff4d/hbp_service_client/storage_service/api.py#L302-L339
|
[
"def",
"update_metadata",
"(",
"self",
",",
"entity_type",
",",
"entity_id",
",",
"metadata",
")",
":",
"if",
"not",
"is_valid_uuid",
"(",
"entity_id",
")",
":",
"raise",
"StorageArgumentException",
"(",
"'Invalid UUID for entity_id: {0}'",
".",
"format",
"(",
"entity_id",
")",
")",
"if",
"not",
"isinstance",
"(",
"metadata",
",",
"dict",
")",
":",
"raise",
"StorageArgumentException",
"(",
"'The metadata was not provided as a '",
"'dictionary'",
")",
"return",
"self",
".",
"_authenticated_request",
".",
"to_endpoint",
"(",
"'{}/{}/metadata/'",
".",
"format",
"(",
"entity_type",
",",
"entity_id",
")",
")",
".",
"with_json_body",
"(",
"metadata",
")",
".",
"return_body",
"(",
")",
".",
"put",
"(",
")"
] |
b338fb41a7f0e7b9d654ff28fcf13a56d03bff4d
|
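A sketch contrasting the merge semantics of update_metadata with the replace-all semantics of set_metadata, again assuming an authenticated client and a valid file_uuid:
client.set_metadata('file', file_uuid, {'foo': '100'})
merged = client.update_metadata('file', file_uuid, {'bar': '200'})
# update_metadata (HTTP PUT) merges, so both keys are now present:
print(merged)  # {u'bar': u'200', u'foo': u'100'}
# A second set_metadata call here would instead leave only its own keys.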
test
|
ApiClient.delete_metadata
|
Delete the selected metadata entries of an entity.
Only deletes the selected metadata keys; for a complete wipe, use set_metadata.
Args:
entity_type (str): Type of the entity. Admitted values: ['project',
'folder', 'file'].
entity_id (str): The UUID of the entity to be modified.
metadata_keys (list): A list of metadata keys to be deleted.
Returns:
A dictionary of the updated object metadata::
{
u'bar': u'200',
u'foo': u'100'
}
Raises:
StorageArgumentException: Invalid arguments
StorageForbiddenException: Server response code 403
StorageNotFoundException: Server response code 404
StorageException: other 400-600 error codes
|
hbp_service_client/storage_service/api.py
|
def delete_metadata(self, entity_type, entity_id, metadata_keys):
'''Delete the selected metadata entries of an entity.
Only deletes the selected metadata keys; for a complete wipe, use set_metadata.
Args:
entity_type (str): Type of the entity. Admitted values: ['project',
'folder', 'file'].
entity_id (str): The UUID of the entity to be modified.
metadata_keys (list): A list of metadata keys to be deleted.
Returns:
A dictionary of the updated object metadata::
{
u'bar': u'200',
u'foo': u'100'
}
Raises:
StorageArgumentException: Invalid arguments
StorageForbiddenException: Server response code 403
StorageNotFoundException: Server response code 404
StorageException: other 400-600 error codes
'''
if not is_valid_uuid(entity_id):
raise StorageArgumentException(
'Invalid UUID for entity_id: {0}'.format(entity_id))
if not isinstance(metadata_keys, list):
raise StorageArgumentException('The metadata keys were not '
'provided as a list')
return self._authenticated_request \
.to_endpoint('{}/{}/metadata/'.format(entity_type, entity_id)) \
.with_json_body({'keys': metadata_keys}) \
.return_body() \
.delete()
|
def delete_metadata(self, entity_type, entity_id, metadata_keys):
'''Delete the selected metadata entries of an entity.
Only deletes the selected metadata keys; for a complete wipe, use set_metadata.
Args:
entity_type (str): Type of the entity. Admitted values: ['project',
'folder', 'file'].
entity_id (str): The UUID of the entity to be modified.
metadata_keys (list): A list of metadata keys to be deleted.
Returns:
A dictionary of the updated object metadata::
{
u'bar': u'200',
u'foo': u'100'
}
Raises:
StorageArgumentException: Invalid arguments
StorageForbiddenException: Server response code 403
StorageNotFoundException: Server response code 404
StorageException: other 400-600 error codes
'''
if not is_valid_uuid(entity_id):
raise StorageArgumentException(
'Invalid UUID for entity_id: {0}'.format(entity_id))
if not isinstance(metadata_keys, list):
raise StorageArgumentException('The metadata keys were not '
'provided as a list')
return self._authenticated_request \
.to_endpoint('{}/{}/metadata/'.format(entity_type, entity_id)) \
.with_json_body({'keys': metadata_keys}) \
.return_body() \
.delete()
|
[
"Delete",
"the",
"selected",
"metadata",
"entries",
"of",
"an",
"entity",
"."
] |
HumanBrainProject/hbp-service-client
|
python
|
https://github.com/HumanBrainProject/hbp-service-client/blob/b338fb41a7f0e7b9d654ff28fcf13a56d03bff4d/hbp_service_client/storage_service/api.py#L341-L377
|
[
"def",
"delete_metadata",
"(",
"self",
",",
"entity_type",
",",
"entity_id",
",",
"metadata_keys",
")",
":",
"if",
"not",
"is_valid_uuid",
"(",
"entity_id",
")",
":",
"raise",
"StorageArgumentException",
"(",
"'Invalid UUID for entity_id: {0}'",
".",
"format",
"(",
"entity_id",
")",
")",
"if",
"not",
"isinstance",
"(",
"metadata_keys",
",",
"list",
")",
":",
"raise",
"StorageArgumentException",
"(",
"'The metadata was not provided as a '",
"'dictionary'",
")",
"return",
"self",
".",
"_authenticated_request",
".",
"to_endpoint",
"(",
"'{}/{}/metadata/'",
".",
"format",
"(",
"entity_type",
",",
"entity_id",
")",
")",
".",
"with_json_body",
"(",
"{",
"'keys'",
":",
"metadata_keys",
"}",
")",
".",
"return_body",
"(",
")",
".",
"delete",
"(",
")"
] |
b338fb41a7f0e7b9d654ff28fcf13a56d03bff4d
|
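Selective deletion, with the same client and file_uuid placeholders; the keys argument must be a list, not a dictionary:
remaining = client.delete_metadata('file', file_uuid, ['foo'])
# Only 'foo' is removed; every other key survives. Passing anything
# other than a list raises StorageArgumentException before any request.
print(remaining)  # e.g. {u'bar': u'200'}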
test
|
ApiClient.list_projects
|
List all the projects the user has access to.
This function does not retrieve all results, pages have
to be manually retrieved by the caller.
Args:
hpc (bool): If 'true', the result will contain only the HPC projects
(Unicore projects).
access (str): If provided, the result will contain only projects
where the user has the provided access.
Admitted values: ['read', 'write'].
name (str): Filter on the project name.
collab_id (int): Filter on the collab id.
page_size (int): Number of elements per page.
page (int): Number of the page.
ordering (str): Indicate on which fields to sort the result.
Prepend '-' to invert order. Multiple values can be provided.
Ordering is supported on: ['name', 'created_on', 'modified_on'].
Example: ordering='name,created_on'
Returns:
A dictionary of the results::
{
u'count': 256,
u'next': u'http://link.to.next/page',
u'previous': None,
u'results': [{u'collab_id': 2079,
u'created_by': u'258666',
u'created_on': u'2017-02-23T15:09:27.626973Z',
u'description': u'',
u'entity_type': u'project',
u'modified_by': u'258666',
u'modified_on': u'2017-02-23T15:09:27.627025Z',
u'name': u'2079',
u'uuid': u'64a6ad2e-acd1-44a3-a4cd-6bd96e3da2b0'}]
}
Raises:
StorageForbiddenException: Server response code 403
StorageNotFoundException: Server response code 404
StorageException: other 400-600 error codes
|
hbp_service_client/storage_service/api.py
|
def list_projects(self, hpc=None, access=None, name=None, collab_id=None,
page_size=DEFAULT_PAGE_SIZE, page=None, ordering=None):
'''List all the projects the user has access to.
This function does not retrieve all results, pages have
to be manually retrieved by the caller.
Args:
hpc (bool): If 'true', the result will contain only the HPC projects
(Unicore projects).
access (str): If provided, the result will contain only projects
where the user has the provided access.
Admitted values: ['read', 'write'].
name (str): Filter on the project name.
collab_id (int): Filter on the collab id.
page_size (int): Number of elements per page.
page (int): Number of the page.
ordering (str): Indicate on which fields to sort the result.
Prepend '-' to invert order. Multiple values can be provided.
Ordering is supported on: ['name', 'created_on', 'modified_on'].
Example: ordering='name,created_on'
Returns:
A dictionary of the results::
{
u'count': 256,
u'next': u'http://link.to.next/page',
u'previous': None,
u'results': [{u'collab_id': 2079,
u'created_by': u'258666',
u'created_on': u'2017-02-23T15:09:27.626973Z',
u'description': u'',
u'entity_type': u'project',
u'modified_by': u'258666',
u'modified_on': u'2017-02-23T15:09:27.627025Z',
u'name': u'2079',
u'uuid': u'64a6ad2e-acd1-44a3-a4cd-6bd96e3da2b0'}]
}
Raises:
StorageForbiddenException: Server response code 403
StorageNotFoundException: Server response code 404
StorageException: other 400-600 error codes
'''
return self._authenticated_request \
.to_endpoint('project/') \
.with_params(self._prep_params(locals())) \
.return_body() \
.get()
|
def list_projects(self, hpc=None, access=None, name=None, collab_id=None,
page_size=DEFAULT_PAGE_SIZE, page=None, ordering=None):
'''List all the projects the user has access to.
This function does not retrieve all results, pages have
to be manually retrieved by the caller.
Args:
hpc (bool): If 'true', the result will contain only the HPC projects
(Unicore projects).
access (str): If provided, the result will contain only projects
where the user has the provided access.
Admitted values: ['read', 'write'].
name (str): Filter on the project name.
collab_id (int): Filter on the collab id.
page_size (int): Number of elements per page.
page (int): Number of the page.
ordering (str): Indicate on which fields to sort the result.
Prepend '-' to invert order. Multiple values can be provided.
Ordering is supported on: ['name', 'created_on', 'modified_on'].
Example: ordering='name,created_on'
Returns:
A dictionary of the results::
{
u'count': 256,
u'next': u'http://link.to.next/page',
u'previous': None,
u'results': [{u'collab_id': 2079,
u'created_by': u'258666',
u'created_on': u'2017-02-23T15:09:27.626973Z',
u'description': u'',
u'entity_type': u'project',
u'modified_by': u'258666',
u'modified_on': u'2017-02-23T15:09:27.627025Z',
u'name': u'2079',
u'uuid': u'64a6ad2e-acd1-44a3-a4cd-6bd96e3da2b0'}]
}
Raises:
StorageForbiddenException: Server response code 403
StorageNotFoundException: Server response code 404
StorageException: other 400-600 error codes
'''
return self._authenticated_request \
.to_endpoint('project/') \
.with_params(self._prep_params(locals())) \
.return_body() \
.get()
|
[
"List",
"all",
"the",
"projects",
"the",
"user",
"have",
"access",
"to",
"."
] |
HumanBrainProject/hbp-service-client
|
python
|
https://github.com/HumanBrainProject/hbp-service-client/blob/b338fb41a7f0e7b9d654ff28fcf13a56d03bff4d/hbp_service_client/storage_service/api.py#L383-L433
|
[
"def",
"list_projects",
"(",
"self",
",",
"hpc",
"=",
"None",
",",
"access",
"=",
"None",
",",
"name",
"=",
"None",
",",
"collab_id",
"=",
"None",
",",
"page_size",
"=",
"DEFAULT_PAGE_SIZE",
",",
"page",
"=",
"None",
",",
"ordering",
"=",
"None",
")",
":",
"return",
"self",
".",
"_authenticated_request",
".",
"to_endpoint",
"(",
"'project/'",
")",
".",
"with_params",
"(",
"self",
".",
"_prep_params",
"(",
"locals",
"(",
")",
")",
")",
".",
"return_body",
"(",
")",
".",
"get",
"(",
")"
] |
b338fb41a7f0e7b9d654ff28fcf13a56d03bff4d
|
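Because list_projects returns a single page, callers paginate themselves. A sketch of a full walk, assuming an authenticated client and that page numbering starts at 1 (an assumption; the docstring does not state the first page number):
projects, page = [], 1
while True:
    batch = client.list_projects(ordering='name', page_size=100, page=page)
    projects.extend(batch['results'])
    if not batch['next']:  # no further pages advertised by the service
        break
    page += 1
print(len(projects))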
test
|
ApiClient.get_project_details
|
Get information on a given project
Args:
project_id (str): The UUID of the requested project.
Returns:
A dictionary describing the project::
{
u'collab_id': 2271,
u'created_by': u'303447',
u'created_on': u'2017-03-10T12:50:06.077891Z',
u'description': u'',
u'entity_type': u'project',
u'modified_by': u'303447',
u'modified_on': u'2017-03-10T12:50:06.077946Z',
u'name': u'2271',
u'uuid': u'3abd8742-d069-44cf-a66b-2370df74a682'
}
Raises:
StorageForbiddenException: Server response code 403
StorageNotFoundException: Server response code 404
StorageException: other 400-600 error codes
|
hbp_service_client/storage_service/api.py
|
def get_project_details(self, project_id):
'''Get information on a given project
Args:
project_id (str): The UUID of the requested project.
Returns:
A dictionary describing the project::
{
u'collab_id': 2271,
u'created_by': u'303447',
u'created_on': u'2017-03-10T12:50:06.077891Z',
u'description': u'',
u'entity_type': u'project',
u'modified_by': u'303447',
u'modified_on': u'2017-03-10T12:50:06.077946Z',
u'name': u'2271',
u'uuid': u'3abd8742-d069-44cf-a66b-2370df74a682'
}
Raises:
StorageForbiddenException: Server response code 403
StorageNotFoundException: Server response code 404
StorageException: other 400-600 error codes
'''
if not is_valid_uuid(project_id):
raise StorageArgumentException(
'Invalid UUID for project_id: {0}'.format(project_id))
return self._authenticated_request \
.to_endpoint('project/{}/'.format(project_id)) \
.return_body() \
.get()
|
def get_project_details(self, project_id):
'''Get information on a given project
Args:
project_id (str): The UUID of the requested project.
Returns:
A dictionary describing the project::
{
u'collab_id': 2271,
u'created_by': u'303447',
u'created_on': u'2017-03-10T12:50:06.077891Z',
u'description': u'',
u'entity_type': u'project',
u'modified_by': u'303447',
u'modified_on': u'2017-03-10T12:50:06.077946Z',
u'name': u'2271',
u'uuid': u'3abd8742-d069-44cf-a66b-2370df74a682'
}
Raises:
StorageForbiddenException: Server response code 403
StorageNotFoundException: Server response code 404
StorageException: other 400-600 error codes
'''
if not is_valid_uuid(project_id):
raise StorageArgumentException(
'Invalid UUID for project_id: {0}'.format(project_id))
return self._authenticated_request \
.to_endpoint('project/{}/'.format(project_id)) \
.return_body() \
.get()
|
[
"Get",
"information",
"on",
"a",
"given",
"project"
] |
HumanBrainProject/hbp-service-client
|
python
|
https://github.com/HumanBrainProject/hbp-service-client/blob/b338fb41a7f0e7b9d654ff28fcf13a56d03bff4d/hbp_service_client/storage_service/api.py#L435-L468
|
[
"def",
"get_project_details",
"(",
"self",
",",
"project_id",
")",
":",
"if",
"not",
"is_valid_uuid",
"(",
"project_id",
")",
":",
"raise",
"StorageArgumentException",
"(",
"'Invalid UUID for project_id: {0}'",
".",
"format",
"(",
"project_id",
")",
")",
"return",
"self",
".",
"_authenticated_request",
".",
"to_endpoint",
"(",
"'project/{}/'",
".",
"format",
"(",
"project_id",
")",
")",
".",
"return_body",
"(",
")",
".",
"get",
"(",
")"
] |
b338fb41a7f0e7b9d654ff28fcf13a56d03bff4d
|
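Fetching one project by UUID, with the usual authenticated-client assumption; the UUID below is the one from the docstring's sample output:
details = client.get_project_details('3abd8742-d069-44cf-a66b-2370df74a682')
print(details['name'], details['collab_id'])
# A well-formed but unknown UUID yields StorageNotFoundException (404);
# a malformed one raises StorageArgumentException client-side.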
test
|
ApiClient.create_project
|
Create a new project.
Args:
collab_id (int): The id of the collab the project should be created in.
Returns:
A dictionary of details of the created project::
{
u'collab_id': 12998,
u'created_by': u'303447',
u'created_on': u'2017-03-21T14:06:32.293902Z',
u'description': u'',
u'entity_type': u'project',
u'modified_by': u'303447',
u'modified_on': u'2017-03-21T14:06:32.293967Z',
u'name': u'12998',
u'uuid': u'2516442e-1e26-4de1-8ed8-94523224cc40'
}
Raises:
StorageForbiddenException: Server response code 403
StorageNotFoundException: Server response code 404
StorageException: other 400-600 error codes
|
hbp_service_client/storage_service/api.py
|
def create_project(self, collab_id):
'''Create a new project.
Args:
collab_id (int): The id of the collab the project should be created in.
Returns:
A dictionary of details of the created project::
{
u'collab_id': 12998,
u'created_by': u'303447',
u'created_on': u'2017-03-21T14:06:32.293902Z',
u'description': u'',
u'entity_type': u'project',
u'modified_by': u'303447',
u'modified_on': u'2017-03-21T14:06:32.293967Z',
u'name': u'12998',
u'uuid': u'2516442e-1e26-4de1-8ed8-94523224cc40'
}
Raises:
StorageForbiddenException: Server response code 403
StorageNotFoundException: Server response code 404
StorageException: other 400-600 error codes
'''
return self._authenticated_request \
.to_endpoint('project/') \
.with_json_body(self._prep_params(locals())) \
.return_body() \
.post()
|
def create_project(self, collab_id):
'''Create a new project.
Args:
collab_id (int): The id of the collab the project should be created in.
Returns:
A dictionary of details of the created project::
{
u'collab_id': 12998,
u'created_by': u'303447',
u'created_on': u'2017-03-21T14:06:32.293902Z',
u'description': u'',
u'entity_type': u'project',
u'modified_by': u'303447',
u'modified_on': u'2017-03-21T14:06:32.293967Z',
u'name': u'12998',
u'uuid': u'2516442e-1e26-4de1-8ed8-94523224cc40'
}
Raises:
StorageForbiddenException: Server response code 403
StorageNotFoundException: Server response code 404
StorageException: other 400-600 error codes
'''
return self._authenticated_request \
.to_endpoint('project/') \
.with_json_body(self._prep_params(locals())) \
.return_body() \
.post()
|
[
"Create",
"a",
"new",
"project",
"."
] |
HumanBrainProject/hbp-service-client
|
python
|
https://github.com/HumanBrainProject/hbp-service-client/blob/b338fb41a7f0e7b9d654ff28fcf13a56d03bff4d/hbp_service_client/storage_service/api.py#L527-L557
|
[
"def",
"create_project",
"(",
"self",
",",
"collab_id",
")",
":",
"return",
"self",
".",
"_authenticated_request",
".",
"to_endpoint",
"(",
"'project/'",
")",
".",
"with_json_body",
"(",
"self",
".",
"_prep_params",
"(",
"locals",
"(",
")",
")",
")",
".",
"return_body",
"(",
")",
".",
"post",
"(",
")"
] |
b338fb41a7f0e7b9d654ff28fcf13a56d03bff4d
|
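Creating a project for a collab, assuming client is authenticated and the collab id is one you can write to (placeholder value):
collab_id = 12998  # placeholder; must be an existing, writable collab
project = client.create_project(collab_id)
print(project['uuid'])  # e.g. u'2516442e-1e26-4de1-8ed8-94523224cc40'
# The sample response above suggests the service derives the project
# name from the collab id.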
test
|
ApiClient.delete_project
|
Delete a project. It will recursively delete all the content.
Args:
project (str): The UUID of the project to be deleted.
Returns:
None
Raises:
StorageArgumentException: Invalid arguments
StorageForbiddenException: 403
StorageNotFoundException: 404
HTTPError: other non-20x error codes
|
hbp_service_client/storage_service/api.py
|
def delete_project(self, project):
'''Delete a project. It will recursively delete all the content.
Args:
project (str): The UUID of the project to be deleted.
Returns:
None
Raises:
StorageArgumentException: Invalid arguments
StorageForbiddenException: 403
StorageNotFoundException: 404
HTTPError: other non-20x error codes
'''
if not is_valid_uuid(project):
raise StorageArgumentException(
'Invalid UUID for project: {0}'.format(project))
self._authenticated_request \
.to_endpoint('project/{}/'.format(project)) \
.delete()
|
def delete_project(self, project):
'''Delete a project. It will recursively delete all the content.
Args:
project (str): The UUID of the project to be deleted.
Returns:
None
Raises:
StorageArgumentException: Invalid arguments
StorageForbiddenException: 403
StorageNotFoundException: 404
HTTPError: other non-20x error codes
'''
if not is_valid_uuid(project):
raise StorageArgumentException(
'Invalid UUID for project: {0}'.format(project))
self._authenticated_request \
.to_endpoint('project/{}/'.format(project)) \
.delete()
|
[
"Delete",
"a",
"project",
".",
"It",
"will",
"recursively",
"delete",
"all",
"the",
"content",
"."
] |
HumanBrainProject/hbp-service-client
|
python
|
https://github.com/HumanBrainProject/hbp-service-client/blob/b338fb41a7f0e7b9d654ff28fcf13a56d03bff4d/hbp_service_client/storage_service/api.py#L559-L579
|
[
"def",
"delete_project",
"(",
"self",
",",
"project",
")",
":",
"if",
"not",
"is_valid_uuid",
"(",
"project",
")",
":",
"raise",
"StorageArgumentException",
"(",
"'Invalid UUID for project: {0}'",
".",
"format",
"(",
"project",
")",
")",
"self",
".",
"_authenticated_request",
".",
"to_endpoint",
"(",
"'project/{}/'",
".",
"format",
"(",
"project",
")",
")",
".",
"delete",
"(",
")"
] |
b338fb41a7f0e7b9d654ff28fcf13a56d03bff4d
|
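Deletion is recursive and permanent, so a sketch like this gates it behind an explicit check; project is the dictionary returned by create_project above (an assumption of this example):
if project['name'] == '12998':  # guard against deleting the wrong project
    client.delete_project(project['uuid'])  # returns None on success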
test
|
ApiClient.create_folder
|
Create a new folder.
Args:
name (str): The name of the folder.
parent (str): The UUID of the parent entity. The parent must be a
project or a folder.
Returns:
A dictionary of details of the created folder::
{
u'created_by': u'303447',
u'created_on': u'2017-03-21T14:06:32.293902Z',
u'description': u'',
u'entity_type': u'folder',
u'modified_by': u'303447',
u'modified_on': u'2017-03-21T14:06:32.293967Z',
u'name': u'myfolder',
u'parent': u'3abd8742-d069-44cf-a66b-2370df74a682',
u'uuid': u'2516442e-1e26-4de1-8ed8-94523224cc40'
}
Raises:
StorageArgumentException: Invalid arguments
StorageForbiddenException: Server response code 403
StorageNotFoundException: Server response code 404
StorageException: other 400-600 error codes
|
hbp_service_client/storage_service/api.py
|
def create_folder(self, name, parent):
'''Create a new folder.
Args:
name (str): The name of the folder.
parent (str): The UUID of the parent entity. The parent must be a
project or a folder.
Returns:
A dictionary of details of the created folder::
{
u'created_by': u'303447',
u'created_on': u'2017-03-21T14:06:32.293902Z',
u'description': u'',
u'entity_type': u'folder',
u'modified_by': u'303447',
u'modified_on': u'2017-03-21T14:06:32.293967Z',
u'name': u'myfolder',
u'parent': u'3abd8742-d069-44cf-a66b-2370df74a682',
u'uuid': u'2516442e-1e26-4de1-8ed8-94523224cc40'
}
Raises:
StorageArgumentException: Invalid arguments
StorageForbiddenException: Server response code 403
StorageNotFoundException: Server response code 404
StorageException: other 400-600 error codes
'''
if not is_valid_uuid(parent):
raise StorageArgumentException(
'Invalid UUID for parent: {0}'.format(parent))
return self._authenticated_request \
.to_endpoint('folder/') \
.with_json_body(self._prep_params(locals())) \
.return_body() \
.post()
|
def create_folder(self, name, parent):
'''Create a new folder.
Args:
name (str): The name of the folder.
parent (str): The UUID of the parent entity. The parent must be a
project or a folder.
Returns:
A dictionary of details of the created folder::
{
u'created_by': u'303447',
u'created_on': u'2017-03-21T14:06:32.293902Z',
u'description': u'',
u'entity_type': u'folder',
u'modified_by': u'303447',
u'modified_on': u'2017-03-21T14:06:32.293967Z',
u'name': u'myfolder',
u'parent': u'3abd8742-d069-44cf-a66b-2370df74a682',
u'uuid': u'2516442e-1e26-4de1-8ed8-94523224cc40'
}
Raises:
StorageArgumentException: Invalid arguments
StorageForbiddenException: Server response code 403
StorageNotFoundException: Server response code 404
StorageException: other 400-600 error codes
'''
if not is_valid_uuid(parent):
raise StorageArgumentException(
'Invalid UUID for parent: {0}'.format(parent))
return self._authenticated_request \
.to_endpoint('folder/') \
.with_json_body(self._prep_params(locals())) \
.return_body() \
.post()
|
[
"Create",
"a",
"new",
"folder",
"."
] |
HumanBrainProject/hbp-service-client
|
python
|
https://github.com/HumanBrainProject/hbp-service-client/blob/b338fb41a7f0e7b9d654ff28fcf13a56d03bff4d/hbp_service_client/storage_service/api.py#L585-L622
|
[
"def",
"create_folder",
"(",
"self",
",",
"name",
",",
"parent",
")",
":",
"if",
"not",
"is_valid_uuid",
"(",
"parent",
")",
":",
"raise",
"StorageArgumentException",
"(",
"'Invalid UUID for parent: {0}'",
".",
"format",
"(",
"parent",
")",
")",
"return",
"self",
".",
"_authenticated_request",
".",
"to_endpoint",
"(",
"'folder/'",
")",
".",
"with_json_body",
"(",
"self",
".",
"_prep_params",
"(",
"locals",
"(",
")",
")",
")",
".",
"return_body",
"(",
")",
".",
"post",
"(",
")"
] |
b338fb41a7f0e7b9d654ff28fcf13a56d03bff4d
|
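Building a small folder layout under the project created earlier (same placeholder assumptions):
parent = project['uuid']
for name in ('raw', 'processed', 'results'):
    folder = client.create_folder(name, parent)
    # These are siblings; to nest instead, chain with: parent = folder['uuid']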
test
|
ApiClient.get_folder_details
|
Get information on a given folder.
Args:
folder (str): The UUID of the requested folder.
Returns:
A dictionary of the folder details if found::
{
u'created_by': u'303447',
u'created_on': u'2017-03-21T14:06:32.293902Z',
u'description': u'',
u'entity_type': u'folder',
u'modified_by': u'303447',
u'modified_on': u'2017-03-21T14:06:32.293967Z',
u'name': u'myfolder',
u'parent': u'3abd8742-d069-44cf-a66b-2370df74a682',
u'uuid': u'2516442e-1e26-4de1-8ed8-94523224cc40'
}
Raises:
StorageArgumentException: Invalid arguments
StorageForbiddenException: Server response code 403
StorageNotFoundException: Server response code 404
StorageException: other 400-600 error codes
|
hbp_service_client/storage_service/api.py
|
def get_folder_details(self, folder):
'''Get information on a given folder.
Args:
folder (str): The UUID of the requested folder.
Returns:
A dictionary of the folder details if found::
{
u'created_by': u'303447',
u'created_on': u'2017-03-21T14:06:32.293902Z',
u'description': u'',
u'entity_type': u'folder',
u'modified_by': u'303447',
u'modified_on': u'2017-03-21T14:06:32.293967Z',
u'name': u'myfolder',
u'parent': u'3abd8742-d069-44cf-a66b-2370df74a682',
u'uuid': u'2516442e-1e26-4de1-8ed8-94523224cc40'
}
Raises:
StorageArgumentException: Invalid arguments
StorageForbiddenException: Server response code 403
StorageNotFoundException: Server response code 404
StorageException: other 400-600 error codes
'''
if not is_valid_uuid(folder):
raise StorageArgumentException(
'Invalid UUID for folder: {0}'.format(folder))
return self._authenticated_request \
.to_endpoint('folder/{}/'.format(folder)) \
.return_body() \
.get()
|
def get_folder_details(self, folder):
'''Get information on a given folder.
Args:
folder (str): The UUID of the requested folder.
Returns:
A dictionary of the folder details if found::
{
u'created_by': u'303447',
u'created_on': u'2017-03-21T14:06:32.293902Z',
u'description': u'',
u'entity_type': u'folder',
u'modified_by': u'303447',
u'modified_on': u'2017-03-21T14:06:32.293967Z',
u'name': u'myfolder',
u'parent': u'3abd8742-d069-44cf-a66b-2370df74a682',
u'uuid': u'2516442e-1e26-4de1-8ed8-94523224cc40'
}
Raises:
StorageArgumentException: Invalid arguments
StorageForbiddenException: Server response code 403
StorageNotFoundException: Server response code 404
StorageException: other 400-600 error codes
'''
if not is_valid_uuid(folder):
raise StorageArgumentException(
'Invalid UUID for folder: {0}'.format(folder))
return self._authenticated_request \
.to_endpoint('folder/{}/'.format(folder)) \
.return_body() \
.get()
|
[
"Get",
"information",
"on",
"a",
"given",
"folder",
"."
] |
HumanBrainProject/hbp-service-client
|
python
|
https://github.com/HumanBrainProject/hbp-service-client/blob/b338fb41a7f0e7b9d654ff28fcf13a56d03bff4d/hbp_service_client/storage_service/api.py#L624-L657
|
[
"def",
"get_folder_details",
"(",
"self",
",",
"folder",
")",
":",
"if",
"not",
"is_valid_uuid",
"(",
"folder",
")",
":",
"raise",
"StorageArgumentException",
"(",
"'Invalid UUID for folder: {0}'",
".",
"format",
"(",
"folder",
")",
")",
"return",
"self",
".",
"_authenticated_request",
".",
"to_endpoint",
"(",
"'folder/{}/'",
".",
"format",
"(",
"folder",
")",
")",
".",
"return_body",
"(",
")",
".",
"get",
"(",
")"
] |
b338fb41a7f0e7b9d654ff28fcf13a56d03bff4d
|
test
|
ApiClient.list_folder_content
|
List files and folders (not recursively) contained in the folder.
This function does not retrieve all results, pages have
to be manually retrieved by the caller.
Args:
folder (str): The UUID of the requested folder.
name (str): Optional filter on entity name.
entity_type (str): Optional filter on entity type.
Admitted values: ['file', 'folder'].
content_type (str): Optional filter on entity content type (only
files are returned).
page_size (int): Number of elements per page.
page (int): Number of the page.
ordering (str): Indicate on which fields to sort the result. Prepend
'-' to invert order. Multiple values can be provided.
Ordering is supported on: ['name', 'created_on', 'modified_on'].
Example: 'ordering=name,created_on'
Returns:
A dictionary of the results::
{
u'count': 1,
u'next': None,
u'previous': None,
u'results': [{u'content_type': u'plain/text',
u'created_by': u'303447',
u'created_on': u'2017-03-13T10:17:01.688472Z',
u'description': u'',
u'entity_type': u'file',
u'modified_by': u'303447',
u'modified_on': u'2017-03-13T10:17:01.688632Z',
u'name': u'file_1',
u'parent': u'eac11058-4ae0-4ea9-ada8-d3ea23887509',
u'uuid': u'0e17eaac-cb00-4336-b9d7-657026844281'}]
}
Raises:
StorageArgumentException: Invalid arguments
StorageForbiddenException: Server response code 403
StorageNotFoundException: Server response code 404
StorageException: other 400-600 error codes
|
hbp_service_client/storage_service/api.py
|
def list_folder_content(self, folder, name=None, entity_type=None,
content_type=None, page_size=DEFAULT_PAGE_SIZE,
page=None, ordering=None):
'''List files and folders (not recursively) contained in the folder.
This function does not retrieve all results, pages have
to be manually retrieved by the caller.
Args:
folder (str): The UUID of the requested folder.
name (str): Optional filter on entity name.
entity_type (str): Optional filter on entity type.
Admitted values: ['file', 'folder'].
content_type (str): Optional filter on entity content type (only
files are returned).
page_size (int): Number of elements per page.
page (int): Number of the page.
ordering (str): Indicate on which fields to sort the result. Prepend
'-' to invert order. Multiple values can be provided.
Ordering is supported on: ['name', 'created_on', 'modified_on'].
Example: 'ordering=name,created_on'
Returns:
A dictionary of the results::
{
u'count': 1,
u'next': None,
u'previous': None,
u'results': [{u'content_type': u'plain/text',
u'created_by': u'303447',
u'created_on': u'2017-03-13T10:17:01.688472Z',
u'description': u'',
u'entity_type': u'file',
u'modified_by': u'303447',
u'modified_on': u'2017-03-13T10:17:01.688632Z',
u'name': u'file_1',
u'parent': u'eac11058-4ae0-4ea9-ada8-d3ea23887509',
u'uuid': u'0e17eaac-cb00-4336-b9d7-657026844281'}]
}
Raises:
StorageArgumentException: Invalid arguments
StorageForbiddenException: Server response code 403
StorageNotFoundException: Server response code 404
StorageException: other 400-600 error codes
'''
if not is_valid_uuid(folder):
raise StorageArgumentException(
'Invalid UUID for folder: {0}'.format(folder))
params = self._prep_params(locals())
del params['folder'] # not a query parameter
return self._authenticated_request \
.to_endpoint('folder/{}/children/'.format(folder)) \
.with_params(params) \
.return_body() \
.get()
|
def list_folder_content(self, folder, name=None, entity_type=None,
content_type=None, page_size=DEFAULT_PAGE_SIZE,
page=None, ordering=None):
'''List files and folders (not recursively) contained in the folder.
This function does not retrieve all results, pages have
to be manually retrieved by the caller.
Args:
folder (str): The UUID of the requested folder.
name (str): Optional filter on entity name.
entity_type (str): Optional filter on entity type.
Admitted values: ['file', 'folder'].
content_type (str): Optional filter on entity content type (only
files are returned).
page_size (int): Number of elements per page.
page (int): Number of the page.
ordering (str): Indicate on which fields to sort the result. Prepend
'-' to invert order. Multiple values can be provided.
Ordering is supported on: ['name', 'created_on', 'modified_on'].
Example: 'ordering=name,created_on'
Returns:
A dictionary of the results::
{
u'count': 1,
u'next': None,
u'previous': None,
u'results': [{u'content_type': u'plain/text',
u'created_by': u'303447',
u'created_on': u'2017-03-13T10:17:01.688472Z',
u'description': u'',
u'entity_type': u'file',
u'modified_by': u'303447',
u'modified_on': u'2017-03-13T10:17:01.688632Z',
u'name': u'file_1',
u'parent': u'eac11058-4ae0-4ea9-ada8-d3ea23887509',
u'uuid': u'0e17eaac-cb00-4336-b9d7-657026844281'}]
}
Raises:
StorageArgumentException: Invalid arguments
StorageForbiddenException: Server response code 403
StorageNotFoundException: Server response code 404
StorageException: other 400-600 error codes
'''
if not is_valid_uuid(folder):
raise StorageArgumentException(
'Invalid UUID for folder: {0}'.format(folder))
params = self._prep_params(locals())
del params['folder'] # not a query parameter
return self._authenticated_request \
.to_endpoint('folder/{}/children/'.format(folder)) \
.with_params(params) \
.return_body() \
.get()
|
[
"List",
"files",
"and",
"folders",
"(",
"not",
"recursively",
")",
"contained",
"in",
"the",
"folder",
"."
] |
HumanBrainProject/hbp-service-client
|
python
|
https://github.com/HumanBrainProject/hbp-service-client/blob/b338fb41a7f0e7b9d654ff28fcf13a56d03bff4d/hbp_service_client/storage_service/api.py#L659-L715
|
[
"def",
"list_folder_content",
"(",
"self",
",",
"folder",
",",
"name",
"=",
"None",
",",
"entity_type",
"=",
"None",
",",
"content_type",
"=",
"None",
",",
"page_size",
"=",
"DEFAULT_PAGE_SIZE",
",",
"page",
"=",
"None",
",",
"ordering",
"=",
"None",
")",
":",
"if",
"not",
"is_valid_uuid",
"(",
"folder",
")",
":",
"raise",
"StorageArgumentException",
"(",
"'Invalid UUID for folder: {0}'",
".",
"format",
"(",
"folder",
")",
")",
"params",
"=",
"self",
".",
"_prep_params",
"(",
"locals",
"(",
")",
")",
"del",
"params",
"[",
"'folder'",
"]",
"# not a query parameter",
"return",
"self",
".",
"_authenticated_request",
".",
"to_endpoint",
"(",
"'folder/{}/children/'",
".",
"format",
"(",
"folder",
")",
")",
".",
"with_params",
"(",
"params",
")",
".",
"return_body",
"(",
")",
".",
"get",
"(",
")"
] |
b338fb41a7f0e7b9d654ff28fcf13a56d03bff4d
|
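A filtered listing of one folder, assuming folder is the dictionary returned by create_folder above; only filters named in the docstring are used:
page = client.list_folder_content(
    folder['uuid'],
    entity_type='file',
    content_type='plain/text',
    ordering='-modified_on')  # newest first
for entry in page['results']:
    print(entry['name'], entry['uuid'])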
test
|
ApiClient.delete_folder
|
Delete a folder. It will recursively delete all the content.
Args:
folder (str): The UUID of the folder to be deleted.
Returns:
None
Raises:
StorageArgumentException: Invalid arguments
StorageForbiddenException: 403
StorageNotFoundException: 404
HTTPError: other non-20x error codes
|
hbp_service_client/storage_service/api.py
|
def delete_folder(self, folder):
'''Delete a folder. It will recursively delete all the content.
Args:
folder (str): The UUID of the folder to be deleted.
Returns:
None
Raises:
StorageArgumentException: Invalid arguments
StorageForbiddenException: 403
StorageNotFoundException: 404
HTTPError: other non-20x error codes
'''
if not is_valid_uuid(folder):
raise StorageArgumentException(
'Invalid UUID for folder: {0}'.format(folder))
self._authenticated_request \
.to_endpoint('folder/{}/'.format(folder)) \
.delete()
|
def delete_folder(self, folder):
'''Delete a folder. It will recursively delete all the content.
Args:
folder (str): The UUID of the folder to be deleted.
Returns:
None
Raises:
StorageArgumentException: Invalid arguments
StorageForbiddenException: 403
StorageNotFoundException: 404
HTTPError: other non-20x error codes
'''
if not is_valid_uuid(folder):
raise StorageArgumentException(
'Invalid UUID for folder: {0}'.format(folder))
self._authenticated_request \
.to_endpoint('folder/{}/'.format(folder)) \
.delete()
|
[
"Delete",
"a",
"folder",
".",
"It",
"will",
"recursively",
"delete",
"all",
"the",
"content",
"."
] |
HumanBrainProject/hbp-service-client
|
python
|
https://github.com/HumanBrainProject/hbp-service-client/blob/b338fb41a7f0e7b9d654ff28fcf13a56d03bff4d/hbp_service_client/storage_service/api.py#L717-L737
|
[
"def",
"delete_folder",
"(",
"self",
",",
"folder",
")",
":",
"if",
"not",
"is_valid_uuid",
"(",
"folder",
")",
":",
"raise",
"StorageArgumentException",
"(",
"'Invalid UUID for folder: {0}'",
".",
"format",
"(",
"folder",
")",
")",
"self",
".",
"_authenticated_request",
".",
"to_endpoint",
"(",
"'folder/{}/'",
".",
"format",
"(",
"folder",
")",
")",
".",
"delete",
"(",
")"
] |
b338fb41a7f0e7b9d654ff28fcf13a56d03bff4d
|
test
|
ApiClient.upload_file_content
|
Upload a file content. The file entity must already exist.
If an ETag is provided the file stored on the server is verified
against it. If it does not match, StorageException is raised.
This means the client needs to update its knowledge of the resource
before attempting to update again. This can be used for optimistic
concurrency control.
Args:
file_id (str): The UUID of the file whose content is written.
etag (str): The etag to match the contents against.
source (str): The path of the local file whose content is to be uploaded.
content (str): A string of the content to be uploaded.
Note:
ETags should be enclosed in double quotes::
my_etag = '"71e1ed9ee52e565a56aec66bc648a32c"'
Returns:
The ETag of the file upload::
'"71e1ed9ee52e565a56aec66bc648a32c"'
Raises:
IOError: The source cannot be opened.
StorageArgumentException: Invalid arguments
StorageForbiddenException: Server response code 403
StorageNotFoundException: Server response code 404
StorageException: other 400-600 error codes
|
hbp_service_client/storage_service/api.py
|
def upload_file_content(self, file_id, etag=None, source=None, content=None):
'''Upload a file content. The file entity must already exist.
If an ETag is provided the file stored on the server is verified
against it. If it does not match, StorageException is raised.
This means the client needs to update its knowledge of the resource
before attempting to update again. This can be used for optimistic
concurrency control.
Args:
file_id (str): The UUID of the file whose content is written.
etag (str): The etag to match the contents against.
source (str): The path of the local file whose content is to be uploaded.
content (str): A string of the content to be uploaded.
Note:
ETags should be enclosed in double quotes::
my_etag = '"71e1ed9ee52e565a56aec66bc648a32c"'
Returns:
The ETag of the file upload::
'"71e1ed9ee52e565a56aec66bc648a32c"'
Raises:
IOError: The source cannot be opened.
StorageArgumentException: Invalid arguments
StorageForbiddenException: Server response code 403
StorageNotFoundException: Server response code 404
StorageException: other 400-600 error codes
'''
if not is_valid_uuid(file_id):
raise StorageArgumentException(
'Invalid UUID for file_id: {0}'.format(file_id))
if not (source or content) or (source and content):
raise StorageArgumentException('Either one of source file or content '
'has to be provided.')
resp = self._authenticated_request \
.to_endpoint('file/{}/content/upload/'.format(file_id)) \
.with_body(content or open(source, 'rb')) \
.with_headers({'If-Match': etag} if etag else {}) \
.post()
if 'ETag' not in resp.headers:
raise StorageException('No ETag received from the service after the upload')
return resp.headers['ETag']
|
def upload_file_content(self, file_id, etag=None, source=None, content=None):
'''Upload a file content. The file entity must already exist.
If an ETag is provided the file stored on the server is verified
against it. If it does not match, StorageException is raised.
This means the client needs to update its knowledge of the resource
before attempting to update again. This can be used for optimistic
concurrency control.
Args:
file_id (str): The UUID of the file whose content is written.
etag (str): The etag to match the contents against.
source (str): The path of the local file whose content is to be uploaded.
content (str): A string of the content to be uploaded.
Note:
ETags should be enclosed in double quotes::
my_etag = '"71e1ed9ee52e565a56aec66bc648a32c"'
Returns:
The ETag of the file upload::
'"71e1ed9ee52e565a56aec66bc648a32c"'
Raises:
IOError: The source cannot be opened.
StorageArgumentException: Invalid arguments
StorageForbiddenException: Server response code 403
StorageNotFoundException: Server response code 404
StorageException: other 400-600 error codes
'''
if not is_valid_uuid(file_id):
raise StorageArgumentException(
'Invalid UUID for file_id: {0}'.format(file_id))
if not (source or content) or (source and content):
raise StorageArgumentException('Either one of source file or content '
'has to be provided.')
resp = self._authenticated_request \
.to_endpoint('file/{}/content/upload/'.format(file_id)) \
.with_body(content or open(source, 'rb')) \
.with_headers({'If-Match': etag} if etag else {}) \
.post()
if 'ETag' not in resp.headers:
raise StorageException('No ETag received from the service after the upload')
return resp.headers['ETag']
|
[
"Upload",
"a",
"file",
"content",
".",
"The",
"file",
"entity",
"must",
"already",
"exist",
"."
] |
HumanBrainProject/hbp-service-client
|
python
|
https://github.com/HumanBrainProject/hbp-service-client/blob/b338fb41a7f0e7b9d654ff28fcf13a56d03bff4d/hbp_service_client/storage_service/api.py#L819-L868
|
[
"def",
"upload_file_content",
"(",
"self",
",",
"file_id",
",",
"etag",
"=",
"None",
",",
"source",
"=",
"None",
",",
"content",
"=",
"None",
")",
":",
"if",
"not",
"is_valid_uuid",
"(",
"file_id",
")",
":",
"raise",
"StorageArgumentException",
"(",
"'Invalid UUID for file_id: {0}'",
".",
"format",
"(",
"file_id",
")",
")",
"if",
"not",
"(",
"source",
"or",
"content",
")",
"or",
"(",
"source",
"and",
"content",
")",
":",
"raise",
"StorageArgumentException",
"(",
"'Either one of source file or content '",
"'has to be provided.'",
")",
"resp",
"=",
"self",
".",
"_authenticated_request",
".",
"to_endpoint",
"(",
"'file/{}/content/upload/'",
".",
"format",
"(",
"file_id",
")",
")",
".",
"with_body",
"(",
"content",
"or",
"open",
"(",
"source",
",",
"'rb'",
")",
")",
".",
"with_headers",
"(",
"{",
"'If-Match'",
":",
"etag",
"}",
"if",
"etag",
"else",
"{",
"}",
")",
".",
"post",
"(",
")",
"if",
"'ETag'",
"not",
"in",
"resp",
".",
"headers",
":",
"raise",
"StorageException",
"(",
"'No ETag received from the service after the upload'",
")",
"return",
"resp",
".",
"headers",
"[",
"'ETag'",
"]"
] |
b338fb41a7f0e7b9d654ff28fcf13a56d03bff4d
|
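An optimistic-concurrency sketch with ETags, assuming file_uuid names an existing file entity; source and content are mutually exclusive, so exactly one must be given:
etag = client.upload_file_content(file_uuid, content='Hello world!')
new_etag = client.upload_file_content(file_uuid, etag=etag, content='v2')
# If someone else changed the file between the two uploads, the ETag no
# longer matches and StorageException is raised instead of overwriting.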
test
|
ApiClient.copy_file_content
|
Copy file content from source file to target file.
Args:
file_id (str): The UUID of the file whose content is written.
source_file (str): The UUID of the file whose content is copied.
Returns:
None
Raises:
StorageArgumentException: Invalid arguments
StorageForbiddenException: Server response code 403
StorageNotFoundException: Server response code 404
StorageException: other 400-600 error codes
|
hbp_service_client/storage_service/api.py
|
def copy_file_content(self, file_id, source_file):
'''Copy file content from source file to target file.
Args:
file_id (str): The UUID of the file whose content is written.
source_file (str): The UUID of the file whose content is copied.
Returns:
None
Raises:
StorageArgumentException: Invalid arguments
StorageForbiddenException: Server response code 403
StorageNotFoundException: Server response code 404
StorageException: other 400-600 error codes
'''
if not is_valid_uuid(file_id):
raise StorageArgumentException(
'Invalid UUID for file_id: {0}'.format(file_id))
if not is_valid_uuid(source_file):
raise StorageArgumentException(
'Invalid UUID for source_file: {0}'.format(source_file))
self._authenticated_request \
.to_endpoint('file/{}/content/'.format(file_id)) \
.with_headers({'X-Copy-From': source_file}) \
.put()
|
def copy_file_content(self, file_id, source_file):
'''Copy file content from source file to target file.
Args:
file_id (str): The UUID of the file whose content is written.
source_file (str): The UUID of the file whose content is copied.
Returns:
None
Raises:
StorageArgumentException: Invalid arguments
StorageForbiddenException: Server response code 403
StorageNotFoundException: Server response code 404
StorageException: other 400-600 error codes
'''
if not is_valid_uuid(file_id):
raise StorageArgumentException(
'Invalid UUID for file_id: {0}'.format(file_id))
if not is_valid_uuid(source_file):
raise StorageArgumentException(
'Invalid UUID for source_file: {0}'.format(source_file))
self._authenticated_request \
.to_endpoint('file/{}/content/'.format(file_id)) \
.with_headers({'X-Copy-From': source_file}) \
.put()
|
[
"Copy",
"file",
"content",
"from",
"source",
"file",
"to",
"target",
"file",
"."
] |
HumanBrainProject/hbp-service-client
|
python
|
https://github.com/HumanBrainProject/hbp-service-client/blob/b338fb41a7f0e7b9d654ff28fcf13a56d03bff4d/hbp_service_client/storage_service/api.py#L870-L897
|
[
"def",
"copy_file_content",
"(",
"self",
",",
"file_id",
",",
"source_file",
")",
":",
"if",
"not",
"is_valid_uuid",
"(",
"file_id",
")",
":",
"raise",
"StorageArgumentException",
"(",
"'Invalid UUID for file_id: {0}'",
".",
"format",
"(",
"file_id",
")",
")",
"if",
"not",
"is_valid_uuid",
"(",
"source_file",
")",
":",
"raise",
"StorageArgumentException",
"(",
"'Invalid UUID for source_file: {0}'",
".",
"format",
"(",
"source_file",
")",
")",
"self",
".",
"_authenticated_request",
".",
"to_endpoint",
"(",
"'file/{}/content/'",
".",
"format",
"(",
"file_id",
")",
")",
".",
"with_headers",
"(",
"{",
"'X-Copy-From'",
":",
"source_file",
"}",
")",
".",
"put",
"(",
")"
] |
b338fb41a7f0e7b9d654ff28fcf13a56d03bff4d
|
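A server-side copy between two existing files (placeholder UUIDs); no content travels through the client, only the X-Copy-From header:
source_uuid = '0e17eaac-cb00-4336-b9d7-657026844281'  # placeholder
target_uuid = '2516442e-1e26-4de1-8ed8-94523224cc40'  # placeholder
client.copy_file_content(target_uuid, source_uuid)  # returns None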
test
|
ApiClient.download_file_content
|
Download file content.
Args:
file_id (str): The UUID of the file whose content is requested
etag (str): If the content is not changed since the provided ETag,
the content won't be downloaded. If the content is changed, it
will be downloaded and returned with its new ETag.
Note:
ETags should be enclosed in double quotes::
my_etag = '"71e1ed9ee52e565a56aec66bc648a32c"'
Returns:
A tuple of ETag and content (etag, content) if the content was
retrieved. If an ETag was provided and the content didn't change,
(None, None) is returned::
('"71e1ed9ee52e565a56aec66bc648a32c"', 'Hello world!')
Raises:
StorageArgumentException: Invalid arguments
StorageForbiddenException: Server response code 403
StorageNotFoundException: Server response code 404
StorageException: other 400-600 error codes
|
hbp_service_client/storage_service/api.py
|
def download_file_content(self, file_id, etag=None):
'''Download file content.
Args:
file_id (str): The UUID of the file whose content is requested
etag (str): If the content is not changed since the provided ETag,
the content won't be downloaded. If the content is changed, it
will be downloaded and returned with its new ETag.
Note:
ETags should be enclosed in double quotes::
my_etag = '"71e1ed9ee52e565a56aec66bc648a32c"'
Returns:
A tuple of ETag and content (etag, content) if the content was
retrieved. If an ETag was provided and the content didn't change,
(None, None) is returned::
('"71e1ed9ee52e565a56aec66bc648a32c"', 'Hello world!')
Raises:
StorageArgumentException: Invalid arguments
StorageForbiddenException: Server response code 403
StorageNotFoundException: Server response code 404
StorageException: other 400-600 error codes
'''
if not is_valid_uuid(file_id):
raise StorageArgumentException(
'Invalid UUID for file_id: {0}'.format(file_id))
headers = {'Accept': '*/*'}
if etag:
headers['If-None-Match'] = etag
resp = self._authenticated_request \
.to_endpoint('file/{}/content/'.format(file_id)) \
.with_headers(headers) \
.get()
if resp.status_code == 304:
return (None, None)
if 'ETag' not in resp.headers:
raise StorageException('No ETag received from the service with the download')
return (resp.headers['ETag'], resp.content)
|
def download_file_content(self, file_id, etag=None):
'''Download file content.
Args:
file_id (str): The UUID of the file whose content is requested
etag (str): If the content is not changed since the provided ETag,
the content won't be downloaded. If the content is changed, it
will be downloaded and returned with its new ETag.
Note:
ETags should be enclosed in double quotes::
my_etag = '"71e1ed9ee52e565a56aec66bc648a32c"'
Returns:
A tuple of ETag and content (etag, content) if the content was
retrieved. If an ETag was provided and the content didn't change,
(None, None) is returned::
('"71e1ed9ee52e565a56aec66bc648a32c"', 'Hello world!')
Raises:
StorageArgumentException: Invalid arguments
StorageForbiddenException: Server response code 403
StorageNotFoundException: Server response code 404
StorageException: other 400-600 error codes
'''
if not is_valid_uuid(file_id):
raise StorageArgumentException(
'Invalid UUID for file_id: {0}'.format(file_id))
headers = {'Accept': '*/*'}
if etag:
headers['If-None-Match'] = etag
resp = self._authenticated_request \
.to_endpoint('file/{}/content/'.format(file_id)) \
.with_headers(headers) \
.get()
if resp.status_code == 304:
return (None, None)
if 'ETag' not in resp.headers:
raise StorageException('No ETag received from the service with the download')
return (resp.headers['ETag'], resp.content)
|
[
"Download",
"file",
"content",
"."
] |
HumanBrainProject/hbp-service-client
|
python
|
https://github.com/HumanBrainProject/hbp-service-client/blob/b338fb41a7f0e7b9d654ff28fcf13a56d03bff4d/hbp_service_client/storage_service/api.py#L899-L946
|
[
"def",
"download_file_content",
"(",
"self",
",",
"file_id",
",",
"etag",
"=",
"None",
")",
":",
"if",
"not",
"is_valid_uuid",
"(",
"file_id",
")",
":",
"raise",
"StorageArgumentException",
"(",
"'Invalid UUID for file_id: {0}'",
".",
"format",
"(",
"file_id",
")",
")",
"headers",
"=",
"{",
"'Accept'",
":",
"'*/*'",
"}",
"if",
"etag",
":",
"headers",
"[",
"'If-None-Match'",
"]",
"=",
"etag",
"resp",
"=",
"self",
".",
"_authenticated_request",
".",
"to_endpoint",
"(",
"'file/{}/content/'",
".",
"format",
"(",
"file_id",
")",
")",
".",
"with_headers",
"(",
"headers",
")",
".",
"get",
"(",
")",
"if",
"resp",
".",
"status_code",
"==",
"304",
":",
"return",
"(",
"None",
",",
"None",
")",
"if",
"'ETag'",
"not",
"in",
"resp",
".",
"headers",
":",
"raise",
"StorageException",
"(",
"'No ETag received from the service with the download'",
")",
"return",
"(",
"resp",
".",
"headers",
"[",
"'ETag'",
"]",
",",
"resp",
".",
"content",
")"
] |
b338fb41a7f0e7b9d654ff28fcf13a56d03bff4d
|
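Conditional download with ETag caching, under the same client and file_uuid assumptions; a 304 from the service surfaces as (None, None):
etag, content = client.download_file_content(file_uuid)
maybe_etag, maybe_content = client.download_file_content(file_uuid, etag=etag)
if maybe_etag is None:
    print('cached copy is still valid; nothing was re-downloaded')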
test
|
ApiClient.get_signed_url
|
Get a signed unauthenticated URL.
It can be used to download the file content without the need for a
token. The signed URL expires after 5 seconds.
Args:
file_id (str): The UUID of the file to get the link for.
Returns:
The signed url as a string
Raises:
StorageArgumentException: Invalid arguments
StorageForbiddenException: Server response code 403
StorageNotFoundException: Server response code 404
StorageException: other 400-600 error codes
|
hbp_service_client/storage_service/api.py
|
def get_signed_url(self, file_id):
'''Get a signed unauthenticated URL.
It can be used to download the file content without the need for a
token. The signed URL expires after 5 seconds.
Args:
file_id (str): The UUID of the file to get the link for.
Returns:
The signed url as a string
Raises:
StorageArgumentException: Invalid arguments
StorageForbiddenException: Server response code 403
StorageNotFoundException: Server response code 404
StorageException: other 400-600 error codes
'''
if not is_valid_uuid(file_id):
raise StorageArgumentException(
'Invalid UUID for file_id: {0}'.format(file_id))
return self._authenticated_request \
.to_endpoint('file/{}/content/secure_link/'.format(file_id)) \
.return_body() \
.get()['signed_url']
|
def get_signed_url(self, file_id):
'''Get a signed unauthenticated URL.
It can be used to download the file content without the need for a
token. The signed URL expires after 5 seconds.
Args:
file_id (str): The UUID of the file to get the link for.
Returns:
The signed url as a string
Raises:
StorageArgumentException: Invalid arguments
StorageForbiddenException: Server response code 403
StorageNotFoundException: Server response code 404
StorageException: other 400-600 error codes
'''
if not is_valid_uuid(file_id):
raise StorageArgumentException(
'Invalid UUID for file_id: {0}'.format(file_id))
return self._authenticated_request \
.to_endpoint('file/{}/content/secure_link/'.format(file_id)) \
.return_body() \
.get()['signed_url']
|
[
"Get",
"a",
"signed",
"unauthenticated",
"URL",
"."
] |
HumanBrainProject/hbp-service-client
|
python
|
https://github.com/HumanBrainProject/hbp-service-client/blob/b338fb41a7f0e7b9d654ff28fcf13a56d03bff4d/hbp_service_client/storage_service/api.py#L948-L973
|
[
"def",
"get_signed_url",
"(",
"self",
",",
"file_id",
")",
":",
"if",
"not",
"is_valid_uuid",
"(",
"file_id",
")",
":",
"raise",
"StorageArgumentException",
"(",
"'Invalid UUID for file_id: {0}'",
".",
"format",
"(",
"file_id",
")",
")",
"return",
"self",
".",
"_authenticated_request",
".",
"to_endpoint",
"(",
"'file/{}/content/secure_link/'",
".",
"format",
"(",
"file_id",
")",
")",
".",
"return_body",
"(",
")",
".",
"get",
"(",
")",
"[",
"'signed_url'",
"]"
] |
b338fb41a7f0e7b9d654ff28fcf13a56d03bff4d
|
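Dereferencing the short-lived link, assuming the requests library is available and that the returned value is directly fetchable (if the service returns a relative path, it would first need joining to the service host, not shown here):
import requests  # assumed available; any HTTP client would do
url = client.get_signed_url(file_uuid)
response = requests.get(url)  # no token needed, but act within ~5 seconds
response.raise_for_status()
data = response.content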
test
|
ApiClient.delete_file
|
Delete a file.
Args:
file_id (str): The UUID of the file to delete.
Returns:
None
Raises:
StorageArgumentException: Invalid arguments
StorageForbiddenException: Server response code 403
StorageNotFoundException: Server response code 404
StorageException: other 400-600 error codes
|
hbp_service_client/storage_service/api.py
|
def delete_file(self, file_id):
'''Delete a file.
Args:
file_id (str): The UUID of the file to delete.
Returns:
None
Raises:
StorageArgumentException: Invalid arguments
StorageForbiddenException: Server response code 403
StorageNotFoundException: Server response code 404
StorageException: other 400-600 error codes
'''
if not is_valid_uuid(file_id):
raise StorageArgumentException(
'Invalid UUID for file_id: {0}'.format(file_id))
self._authenticated_request \
.to_endpoint('file/{}/'.format(file_id)) \
.delete()
|
def delete_file(self, file_id):
'''Delete a file.
Args:
file_id (str): The UUID of the file to delete.
Returns:
None
Raises:
StorageArgumentException: Invalid arguments
StorageForbiddenException: Server response code 403
StorageNotFoundException: Server response code 404
StorageException: other 400-600 error codes
'''
if not is_valid_uuid(file_id):
raise StorageArgumentException(
'Invalid UUID for file_id: {0}'.format(file_id))
self._authenticated_request \
.to_endpoint('file/{}/'.format(file_id)) \
.delete()
|
[
"Delete",
"a",
"file",
"."
] |
HumanBrainProject/hbp-service-client
|
python
|
https://github.com/HumanBrainProject/hbp-service-client/blob/b338fb41a7f0e7b9d654ff28fcf13a56d03bff4d/hbp_service_client/storage_service/api.py#L975-L996
|
[
"def",
"delete_file",
"(",
"self",
",",
"file_id",
")",
":",
"if",
"not",
"is_valid_uuid",
"(",
"file_id",
")",
":",
"raise",
"StorageArgumentException",
"(",
"'Invalid UUID for file_id: {0}'",
".",
"format",
"(",
"file_id",
")",
")",
"self",
".",
"_authenticated_request",
".",
"to_endpoint",
"(",
"'file/{}/'",
".",
"format",
"(",
"file_id",
")",
")",
".",
"delete",
"(",
")"
] |
b338fb41a7f0e7b9d654ff28fcf13a56d03bff4d
|
test
|
MongoDBHandler.emit
|
pymongo expects a dict
|
jsonklog/handlers/mongodbhandler.py
|
def emit(self, record):
""" pymongo expects a dict """
msg = self.format(record)
if not isinstance(msg, dict):
msg = json.loads(msg)
self.collection.insert(msg)
|
def emit(self, record):
""" pymongo expects a dict """
msg = self.format(record)
if not isinstance(msg, dict):
msg = json.loads(msg)
self.collection.insert(msg)
|
[
"pymongo",
"expects",
"a",
"dict"
] |
neogenix/jsonklog
|
python
|
https://github.com/neogenix/jsonklog/blob/ac4b8f5b75b4a0be60ecad9e71d624bad08c3fa1/jsonklog/handlers/mongodbhandler.py#L35-L43
|
[
"def",
"emit",
"(",
"self",
",",
"record",
")",
":",
"msg",
"=",
"self",
".",
"format",
"(",
"record",
")",
"if",
"not",
"isinstance",
"(",
"msg",
",",
"dict",
")",
":",
"msg",
"=",
"json",
".",
"loads",
"(",
"msg",
")",
"self",
".",
"collection",
".",
"insert",
"(",
"msg",
")"
] |
ac4b8f5b75b4a0be60ecad9e71d624bad08c3fa1
|
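Wiring the handler into stdlib logging; only emit() is shown above, so the no-argument constructor and the directly assigned collection attribute are assumptions for illustration. Note that pymongo 3 deprecated Collection.insert in favour of insert_one:
import logging
import pymongo
collection = pymongo.MongoClient()['logs_db']['entries']  # placeholder database/collection
handler = MongoDBHandler()        # constructor signature assumed
handler.collection = collection   # attribute read by emit() above
# emit() runs json.loads on the formatted record, so attach a formatter
# that produces JSON (jsonklog presumably ships one; not shown here).
log = logging.getLogger('app')
log.addHandler(handler)
log.error('something happened')   # stored as a MongoDB document via emit()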
test
|
RequestBuilder.to_service
|
Sets the service name and version the request should target
Args:
service (str): The name of the service as displayed in the services.json file
version (str): The version of the service as displayed in the services.json file
Returns:
The request builder instance in order to chain calls
|
hbp_service_client/request/request_builder.py
|
def to_service(self, service, version):
'''Sets the service name and version the request should target
Args:
service (str): The name of the service as displayed in the services.json file
version (str): The version of the service as displayed in the services.json file
Returns:
The request builder instance in order to chain calls
'''
service_url = self._service_locator.get_service_url(service, version)
return self.__copy_and_set('service_url', self.__strip_trailing_slashes(service_url))
|
def to_service(self, service, version):
'''Sets the service name and version the request should target
Args:
service (str): The name of the service as displayed in the services.json file
version (str): The version of the service as displayed in the services.json file
Returns:
The request builder instance in order to chain calls
'''
service_url = self._service_locator.get_service_url(service, version)
return self.__copy_and_set('service_url', self.__strip_trailing_slashes(service_url))
|
[
"Sets",
"the",
"service",
"name",
"and",
"version",
"the",
"request",
"should",
"target"
] |
HumanBrainProject/hbp-service-client
|
python
|
https://github.com/HumanBrainProject/hbp-service-client/blob/b338fb41a7f0e7b9d654ff28fcf13a56d03bff4d/hbp_service_client/request/request_builder.py#L85-L96
|
[
"def",
"to_service",
"(",
"self",
",",
"service",
",",
"version",
")",
":",
"service_url",
"=",
"self",
".",
"_service_locator",
".",
"get_service_url",
"(",
"service",
",",
"version",
")",
"return",
"self",
".",
"__copy_and_set",
"(",
"'service_url'",
",",
"self",
".",
"__strip_trailing_slashes",
"(",
"service_url",
")",
")"
] |
b338fb41a7f0e7b9d654ff28fcf13a56d03bff4d
|
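Because to_service copies the builder and sets a field, calls chain without mutating the original; the other chained methods below mirror the calls made on _authenticated_request in the ApiClient code earlier in this section, and the service name, version, and builder construction are placeholders:
request = RequestBuilder()  # construction details assumed
body = request \
    .to_service('storage', 'v1') \
    .to_endpoint('project/') \
    .return_body() \
    .get()
# Each step returns a fresh copy, so request itself is left unchanged.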