# NOTE: dataset-export artifact removed. The original export prepended a
# "sentence1 | sentence2 | label" table header here, which is not Python code.
def get_media_backend(fail_silently=True, handles_media_types=None,
    handles_file_extensions=None, supports_thumbnails=None):
    """
    Returns the MediaBackend subclass that is configured for use with
    media_tree.

    Any criterion passed as ``None`` is ignored; each criterion that is
    given must be supported by the backend. Returns ``False`` when no
    suitable backend exists and ``fail_silently`` is true; otherwise
    raises ``ImproperlyConfigured``.
    """
    backend_paths = app_settings.MEDIA_TREE_MEDIA_BACKENDS
    if not backend_paths:
        if fail_silently:
            return False
        raise ImproperlyConfigured('There is no media backend configured.'
            ' Please define `MEDIA_TREE_MEDIA_BACKENDS` in your settings.')
    # Traverse backends until there is one supporting what's requested.
    # Guard clauses keep the original short-circuit evaluation order.
    for backend_path in backend_paths:
        candidate = get_module_attr(backend_path)
        if handles_media_types and not candidate.handles_media_types(handles_media_types):
            continue
        if handles_file_extensions and not candidate.handles_file_extensions(handles_file_extensions):
            continue
        if supports_thumbnails and not candidate.supports_thumbnails():
            continue
        return candidate
    if fail_silently:
        return False
    raise ImproperlyConfigured('There is no media backend configured to handle'
        ' the specified file types.')
def thumbnail_size(parser, token):
    """Returns a pre-configured thumbnail size, or assigns it to a context
    variable.

    Basic tag syntax::

        {% thumbnail_size [size_name] [as var_name] %}

    The ``size`` parameter can be any of the size names configured using the
    setting ``MEDIA_TREE_THUMBNAIL_SIZES``. If omitted, the default size will
    be returned.

    If the ``as var_name`` clause is present, the size will be assigned to
    the respective context variable instead of being returned.

    You can use this template tag in conjunction with :func:`thumbnail` in order
    to use pre-configured thumbnail sizes in your templates. For example::

        {% thumbnail_size "large" as large_size %}
        {% thumbnail some_file large_size as large_thumb %}
        <img src="{{ large_thumb.url }} width="{{ large_thumb.width }} ... />
    """
    bits = token.split_contents()
    tag_name = bits.pop(0)
    # Peel off a trailing "as <var>" clause, if present.
    context_name = None
    if len(bits) >= 2 and bits[-2] == 'as':
        context_name = bits[-1]
        bits = bits[:-2]
    # What remains may be at most a single size name.
    size_name = None
    if len(bits) == 1:
        size_name = bits[0]
    elif len(bits) > 1:
        raise template.TemplateSyntaxError("Invalid syntax. Expected "
            "'{%% %s [\"size_name\"] [as context_var] %%}'" % tag_name)
    return ThumbnailSizeNode(size_name=size_name, context_name=context_name)
def request(method, path, params=None, data=None, auto_retry=True):
    """
    Perform an API request and decode the JSON response body.

    method - HTTP method. e.g. get, put, post, etc.
    path - Path to resource. e.g. /loss_sets/1234
    params - Parameter to pass in the query string
    data - Dictionary of parameters to pass in the request body
    """
    # Serialize the request body, if any, with datetime support.
    body = None if data is None else json.dumps(data, cls=utils.DateTimeEncoder)
    headers = {
        'accept': 'application/json',
        'content-type': 'application/json',
        'user-agent': analyzere.user_agent,
    }
    resp = request_raw(method, path, params=params, body=body, headers=headers,
                       auto_retry=auto_retry)
    content = resp.text
    if not content:
        # Empty body: return it unparsed (empty string).
        return content
    try:
        return json.loads(content, cls=utils.DateTimeDecoder)
    except ValueError:
        raise errors.ServerError('Unable to parse JSON response returned '
                                 'from server.', resp, resp.status_code)
def file_length(file_obj):
    """
    Returns the length in bytes of a given file object.

    Necessary because os.fstat only works on real files and not file-like
    objects. This works on more types of streams, primarily StringIO.
    Note that the stream is rewound to position 0 as a side effect.
    """
    # Seek to the end to measure the total size, then rewind to the start.
    file_obj.seek(0, 2)
    size = file_obj.tell()
    file_obj.seek(0)
    return size
def read_in_chunks(file_obj, chunk_size):
    """Generator to read a file piece by piece.

    Yields ``(data, offset)`` pairs, where ``offset`` is the position of
    the chunk within the stream, until the stream is exhausted.
    """
    position = 0
    chunk = file_obj.read(chunk_size)
    while chunk:
        yield chunk, position
        position += len(chunk)
        chunk = file_obj.read(chunk_size)
def parse_href(href):
    """Parses an Analyze Re href into collection name and ID"""
    # The path has the form "/<collection>/<id>[/...]"; index 0 is the
    # empty string before the leading slash.
    segments = urlparse(href).path.split('/')
    return segments[1], segments[2]
def vectorize(values):
    """
    Takes a value or list of values and returns a single result, joined by ","
    if necessary.
    """
    if not isinstance(values, list):
        # Scalars pass through untouched.
        return values
    return ','.join(map(str, values))
def vectorize_range(values):
    """
    This function is for url encoding.

    Takes a value or a tuple or list of tuples and returns a single result,
    tuples are joined by "," if necessary, elements in tuple are joined by '_'
    """
    def encode(tup):
        # Join the elements of one tuple with underscores.
        return '_'.join(str(element) for element in tup)

    if isinstance(values, tuple):
        return encode(values)
    if isinstance(values, list):
        if any(not isinstance(item, tuple) for item in values):
            raise TypeError('Items in the list must be tuples')
        return ','.join(encode(item) for item in values)
    return str(values)
def label_from_instance(self, obj):
    """
    Creates labels which represent the tree level of each node when
    generating option labels.
    """
    # One indicator per level below the root (depth starts at 1).
    indent = self.level_indicator * (obj.depth - 1)
    return u'%s %s %i' % (indent, smart_unicode(obj), obj.depth)
def upload_data(self, file_or_str, chunk_size=analyzere.upload_chunk_size,
                poll_interval=analyzere.upload_poll_interval,
                upload_callback=lambda x: None,
                commit_callback=lambda x: None):
    """
    Accepts a file-like object or string and uploads it. Files are
    automatically uploaded in chunks. The default chunk size is 16MiB and
    can be overwritten by specifying the number of bytes in the
    ``chunk_size`` variable.

    Accepts an optional poll_interval for temporarily overriding the
    default value `analyzere.upload_poll_interval`.

    Implements the tus protocol.

    Takes optional callbacks that return the percentage complete for the
    given "phase" of upload: upload/commit.
    Callback values are returned as 10.0 for 10%

    Blocks until the server reports processing has finished and returns
    the final upload-status resource.
    """
    if not callable(upload_callback):
        raise Exception('provided upload_callback is not callable')
    if not callable(commit_callback):
        raise Exception('provided commit_callback is not callable')
    # Wrap plain strings in an in-memory stream so both cases are
    # handled uniformly below.
    file_obj = StringIO(file_or_str) if isinstance(
        file_or_str, six.string_types) else file_or_str
    # Upload file with known entity size if file object supports random
    # access.
    length = None
    if hasattr(file_obj, 'seek'):
        length = utils.file_length(file_obj)
        # Initiate upload session
        request_raw('post', self._data_path,
                    headers={'Entity-Length': str(length)})
    else:
        request_raw('post', self._data_path)
    # Upload chunks
    for chunk, offset in utils.read_in_chunks(file_obj, chunk_size):
        headers = {'Offset': str(offset),
                   'Content-Type': 'application/offset+octet-stream'}
        request_raw('patch', self._data_path, headers=headers, body=chunk)
        # if there is a known size, and an upload callback, call it
        # (offset is the position *before* this chunk, so progress lags
        # by one chunk until the final 100.0 below)
        if length:
            upload_callback(offset * 100.0 / length)
    upload_callback(100.0)
    # Commit the session
    request_raw('post', self._commit_path)
    # Block until data has finished processing, polling the status
    # resource every poll_interval seconds.
    while True:
        resp = self.upload_status
        if (resp.status == 'Processing Successful' or resp.status == 'Processing Failed'):
            commit_callback(100.0)
            return resp
        else:
            commit_callback(float(resp.commit_progress))
        time.sleep(poll_interval)
def delete_orphaned_files(modeladmin, request, queryset=None):
    """
    Deletes orphaned files, i.e. media files existing in storage that are not in the database.

    Renders a confirmation form listing orphaned files; when the request
    is POSTed with ``execute`` set, the selected files are deleted from
    storage. Database nodes whose files are missing from storage
    ("broken" nodes) are listed so they can be fixed manually.
    """
    execute = request.POST.get('execute')
    storage = get_media_storage()
    broken_node_links = []
    orphaned_files_choices = []
    broken_nodes, orphaned_files = get_broken_media()
    # Admin links for nodes whose file is missing from storage.
    for node in broken_nodes:
        link = mark_safe('<a href="%s">%s</a>' % (node.get_admin_url(), node.__unicode__()))
        broken_node_links.append(link)
    # (storage_name, link) choices for files that have no database node.
    for storage_name in orphaned_files:
        file_path = storage.path(storage_name)
        link = mark_safe('<a href="%s">%s</a>' % (
            storage.url(storage_name), file_path))
        orphaned_files_choices.append((storage_name, link))
    if not len(orphaned_files_choices) and not len(broken_node_links):
        messages.success(request, message=_('There are no orphaned files.'))
        return HttpResponseRedirect('')
    if execute:
        # Deletion confirmed: validate the selection and delete the files.
        form = DeleteOrphanedFilesForm(queryset, orphaned_files_choices, request.POST)
        if form.is_valid():
            form.save()
            node = FileNode.get_top_node()
            messages.success(request, message=ungettext('Deleted %i file from storage.', 'Deleted %i files from storage.', len(form.success_files)) % len(form.success_files))
            if form.error_files:
                messages.error(request, message=_('The following files could not be deleted from storage:')+' '+repr(form.error_files))
            return HttpResponseRedirect(node.get_admin_url())
    if not execute:
        if len(orphaned_files_choices) > 0:
            form = DeleteOrphanedFilesForm(queryset, orphaned_files_choices)
        else:
            form = None
    # Render the confirmation page (also reached when an executed form
    # failed validation, re-displaying the bound form).
    c = get_actions_context(modeladmin)
    c.update({
        'title': _('Orphaned files'),
        'submit_label': _('Delete selected files'),
        'form': form,
        'select_all': 'selected_files',
        'node_list_title': _('The following files in the database do not exist in storage. You should fix these media objects:'),
        'node_list': broken_node_links,
    })
    return render_to_response('admin/media_tree/filenode/actions_form.html', c, context_instance=RequestContext(request))
def rebuild_tree(modeladmin, request, queryset=None):
    """
    Rebuilds whole tree in database using `parent` link.

    The ``queryset`` argument is ignored; the entire tree is always
    rebuilt. Redirects back to the current URL after flashing a success
    message.
    """
    # The rebuilt tree object itself is not needed here (the original
    # bound it to an unused local).
    FileNode.tree.rebuild()
    messages.success(request, message=_('The node tree was rebuilt.'))
    return HttpResponseRedirect('')
def clear_cache(modeladmin, request, queryset=None):
    """
    Clears media cache files such as thumbnails.

    Renders a confirmation form listing all cache files; when POSTed with
    ``execute`` set, the selected files are deleted from storage.
    """
    execute = request.POST.get('execute')
    storage = get_media_storage()
    cache_files_choices = []
    # Build (storage_name, link) choices for every cache file.
    # (Removed the unused `files_in_storage` local from the original.)
    for storage_name in get_cache_files():
        link = mark_safe('<a href="%s">%s</a>' % (
            storage.url(storage_name), storage_name))
        cache_files_choices.append((storage_name, link))
    if not cache_files_choices:
        messages.warning(request, message=_('There are no cache files.'))
        return HttpResponseRedirect('')
    if execute:
        # Deletion confirmed: validate the selection and delete the files.
        form = DeleteCacheFilesForm(queryset, cache_files_choices, request.POST)
        if form.is_valid():
            form.save()
            node = FileNode.get_top_node()
            message = ungettext('Deleted %i cache file.', 'Deleted %i cache files.', len(form.success_files)) % len(form.success_files)
            if len(form.success_files) == len(cache_files_choices):
                message = '%s %s' % (_('The cache was cleared.'), message)
            messages.success(request, message=message)
            if form.error_files:
                messages.error(request, message=_('The following files could not be deleted:')+' '+repr(form.error_files))
            return HttpResponseRedirect(node.get_admin_url())
    if not execute:
        if len(cache_files_choices) > 0:
            form = DeleteCacheFilesForm(queryset, cache_files_choices)
        else:
            form = None
        c = get_actions_context(modeladmin)
        c.update({
            'title': _('Clear cache'),
            'submit_label': _('Delete selected files'),
            'form': form,
            'select_all': 'selected_files',
        })
        return render_to_response('admin/media_tree/filenode/actions_form.html', c, context_instance=RequestContext(request))
    # Reached only when execute was set but the form failed validation.
    return HttpResponseRedirect('')
def south_field_triple(self):
    """Returns a suitable description of this field for South."""
    from south.modelsinspector import introspector
    # Introspect our own constructor arguments, but report the base
    # CharField class so South migrations stay portable.
    args, kwargs = introspector(self)
    return ("django.db.models.fields.CharField", args, kwargs)
def autodiscover_media_extensions():
    """
    Auto-discover INSTALLED_APPS media_extensions.py modules and fail silently when
    not present. This forces an import on them to register any media extension bits
    they may want.

    Rip of django.contrib.admin.autodiscover()
    """
    # (Removed the unused `import copy` from the original.)
    from django.conf import settings
    from django.utils.module_loading import module_has_submodule
    for app in settings.INSTALLED_APPS:
        mod = import_module(app)
        try:
            import_module('%s.media_extension' % app)
        except Exception:
            # Fixed: was a bare `except:` which also swallowed SystemExit/
            # KeyboardInterrupt. Re-raise only when the submodule actually
            # exists but failed to import; a missing module is ignored.
            if module_has_submodule(mod, 'media_extension'):
                raise
def multi_splitext(basename):
    """
    Similar to os.path.splitext(), but with special handling for files with
    multiple extensions, such as "archive.tar.gz": Returns a list containing
    three elements, the first being the name without any extensions (taking
    into account hidden files/leading periods), the second being the "full"
    extension, the third being the extension as returned by os.path.splitext.

    Examples:

        os.path.splitext('foo.bar')      # => ('foo', '.bar')
        multi_splitext('foo.bar')        # => ['foo', '.bar', '.bar']
        os.path.splitext('foo.tar.gz')   # => ('foo.tar', '.gz')
        multi_splitext('foo.tar.gz')     # => ['foo', '.tar.gz', '.gz']
        os.path.splitext('.foo.tar.gz')  # => ('.foo.tar', '.gz')
        multi_splitext('.foo.tar.gz')    # => ['.foo', '.tar.gz', '.gz']
        os.path.splitext('.htaccess')    # => ('.htaccess', '')
        multi_splitext('.htaccess')      # => ['.htaccess', '', '']
        os.path.splitext('.foo.bar.')    # => ('.foo.bar', '.')
        multi_splitext('.foo.bar.')      # => ['.foo.bar', '.', '.']
    """
    groups = list(RE_SPLITEXT.match(basename).groups())
    # When the last capture group is empty, fall back to the full
    # extension so both extension slots are populated consistently
    # (presumably single-extension names -- depends on RE_SPLITEXT,
    # which is defined elsewhere in this module).
    if not groups[2]:
        groups[2] = groups[1]
    return groups
def join_formatted(text, new_text, glue_format_if_true = u'%s%s', glue_format_if_false = u'%s%s', condition=None, format = u'%s', escape=False):
    """
    Joins two strings, optionally escaping the second, and using one of two
    string formats for glueing them together, depending on whether a condition
    is True or False.

    This function is a shorthand for complicated code blocks when you want to
    format some strings and link them together. A typical use case might be:
    Wrap string B with <strong> tags, but only if it is not empty, and join it
    with A with a comma in between, but only if A is not empty, etc.
    """
    # Default condition: both pieces are non-empty.
    if condition is None:
        condition = text and new_text
    piece = conditional_escape(new_text) if escape else new_text
    # Apply the format only to a non-empty second piece.
    if piece:
        piece = format % piece
    if condition:
        return glue_format_if_true % (text, piece)
    return glue_format_if_false % (text, piece)
def widthratio(value, max_value, max_width):
    """
    Does the same like Django's `widthratio` template tag (scales max_width to factor value/max_value)
    """
    # Scale max_width by value/max_value and round to the nearest int.
    fraction = float(value) / float(max_value)
    return int(round(fraction * max_width))
def _initialize(self, path):
    """ Find metadata tree root, detect format version """
    # Find the tree root: walk upwards from the given path until a
    # directory containing a ".fmf" subdirectory is found.
    root = os.path.abspath(path)
    try:
        while ".fmf" not in next(os.walk(root))[1]:
            if root == "/":
                raise utils.RootError(
                    "Unable to find tree root for '{0}'.".format(
                        os.path.abspath(path)))
            root = os.path.abspath(os.path.join(root, os.pardir))
    except StopIteration:
        # os.walk() yielded nothing, i.e. the path is not a directory
        raise utils.FileError("Invalid directory path: {0}".format(root))
    log.info("Root directory found: {0}".format(root))
    self.root = root
    # Detect the format version stored in the ".fmf/version" file
    try:
        with open(os.path.join(self.root, ".fmf", "version")) as version:
            self.version = int(version.read())
        log.info("Format version detected: {0}".format(self.version))
    except IOError as error:
        raise utils.FormatError(
            "Unable to detect format version: {0}".format(error))
    except ValueError:
        # File existed but did not contain an integer
        raise utils.FormatError("Invalid version format")
def merge(self, parent=None):
    """ Merge parent data """
    # Check parent, append source files
    if parent is None:
        parent = self.parent
    if parent is None:
        return
    self.sources = parent.sources + self.sources
    # Merge child data with parent data: parent values form the base,
    # child values override them, and keys ending with "+" extend them.
    data = copy.deepcopy(parent.data)
    for key, value in sorted(self.data.items()):
        # Handle attribute adding (keys with a "+" suffix)
        if key.endswith('+'):
            key = key.rstrip('+')
            if key in data:
                # Use dict.update() for merging dictionaries
                if type(data[key]) == type(value) == dict:
                    data[key].update(value)
                    continue
                try:
                    # Otherwise rely on the "+" operator (lists, strings,
                    # numbers); incompatible types raise MergeError
                    value = data[key] + value
                except TypeError as error:
                    raise utils.MergeError(
                        "MergeError: Key '{0}' in {1} ({2}).".format(
                            key, self.name, str(error)))
        # And finally update the value
        data[key] = value
    self.data = data
def inherit(self):
    """ Apply inheritance """
    # Preserve original data and merge parent
    # (original data needed for custom inheritance extensions)
    self.original_data = self.data
    self.merge()
    log.debug("Data for '{0}' inherited.".format(self))
    log.data(pretty(self.data))
    # Apply inheritance recursively to the whole subtree
    for child in self.children.values():
        child.inherit()
def update(self, data):
    """ Update metadata, handle virtual hierarchy """
    # Nothing to do if no data
    if data is None:
        return
    for key, value in sorted(data.items()):
        if not key.startswith('/'):
            # Regular attribute: store it directly
            self.data[key] = value
            continue
        # A leading slash names a child node (virtual hierarchy)
        name = key.lstrip('/')
        # Deeper nesting (e.g. /one/two/three) keeps only the first
        # hierarchy level as the child name and pushes the remainder
        # down as nested child data
        nested = re.search("([^/]+)(/.*)", name)
        if nested:
            name = nested.group(1)
            value = {nested.group(2): value}
        # Update existing child or create a new one
        self.child(name, value)
    log.debug("Data for '{0}' updated.".format(self))
    log.data(pretty(self.data))
def get(self, name=None, default=None):
    """
    Get attribute value or return default

    Whole data dictionary is returned when no attribute provided.
    Supports direct values retrieval from deep dictionaries as well.
    Dictionary path should be provided as list. The following two
    examples are equal:

        tree.data['hardware']['memory']['size']
        tree.get(['hardware', 'memory', 'size'])

    However the latter approach will also correctly handle providing
    default value when any of the dictionary keys does not exist.
    """
    # No attribute requested: hand back the whole data dictionary
    if name is None:
        return self.data
    # Normalize a single key into a one-element path
    keys = name if isinstance(name, list) else [name]
    value = self.data
    for key in keys:
        try:
            value = value[key]
        except KeyError:
            # Any missing key along the path yields the default
            return default
    return value
def child(self, name, data, source=None):
    """ Create or update child with given data """
    # EAFP: try to update an existing child; the KeyError raised when
    # the child does not exist yet triggers creation of a new subtree.
    try:
        if isinstance(data, dict):
            self.children[name].update(data)
        else:
            # Non-dict data is treated as a directory path to explore
            self.children[name].grow(data)
    except KeyError:
        self.children[name] = Tree(data, name, parent=self)
    # Save source file
    if source is not None:
        self.children[name].sources.append(source)
def grow(self, path):
    """
    Grow the metadata tree for the given directory path

    Note: For each path, grow() should be run only once. Growing the tree
    from the same path multiple times with attribute adding using the "+"
    sign leads to adding the value more than once!
    """
    if path is None:
        return
    path = path.rstrip("/")
    log.info("Walking through directory {0}".format(
        os.path.abspath(path)))
    dirpath, dirnames, filenames = next(os.walk(path))
    # Investigate main.fmf as the first file (for correct inheritance)
    filenames = sorted(
        [filename for filename in filenames if filename.endswith(SUFFIX)])
    try:
        filenames.insert(0, filenames.pop(filenames.index(MAIN)))
    except ValueError:
        # main.fmf is not present in this directory
        pass
    # Check every metadata file and load data (ignore hidden)
    for filename in filenames:
        if filename.startswith("."):
            continue
        fullpath = os.path.abspath(os.path.join(dirpath, filename))
        log.info("Checking file {0}".format(fullpath))
        try:
            with open(fullpath) as datafile:
                data = yaml.load(datafile, Loader=FullLoader)
        except yaml.scanner.ScannerError as error:
            raise(utils.FileError("Failed to parse '{0}'\n{1}".format(
                fullpath, error)))
        log.data(pretty(data))
        # Handle main.fmf as data for self
        if filename == MAIN:
            self.sources.append(fullpath)
            self.update(data)
        # Handle other *.fmf files as children
        else:
            self.child(os.path.splitext(filename)[0], data, fullpath)
    # Explore every child directory (ignore hidden dirs and subtrees)
    for dirname in sorted(dirnames):
        if dirname.startswith("."):
            continue
        # Ignore metadata subtrees (they have their own ".fmf" root)
        if os.path.isdir(os.path.join(path, dirname, SUFFIX)):
            log.debug("Ignoring metadata tree '{0}'.".format(dirname))
            continue
        self.child(dirname, os.path.join(path, dirname))
    # Remove empty children (ignore directories without metadata)
    for name in list(self.children.keys()):
        child = self.children[name]
        if not child.data and not child.children:
            del(self.children[name])
            log.debug("Empty tree '{0}' removed.".format(child.name))
    # Apply inheritance when all scattered data are gathered.
    # This is done only once, from the top parent object.
    if self.parent is None:
        self.inherit()
def climb(self, whole=False):
    """ Climb through the tree (iterate leaf/all nodes) """
    # Yield this node when iterating everything, or when it is a leaf.
    if whole or not self.children:
        yield self
    # Recurse depth-first into every child subtree.
    for child in self.children.values():
        for node in child.climb(whole):
            yield node
def find(self, name):
    """ Find node with given name """
    # Walk all nodes (inner ones included) and return the first match,
    # or None when nothing matches.
    matches = (node for node in self.climb(whole=True) if node.name == name)
    return next(matches, None)
def prune(self, whole=False, keys=[], names=[], filters=[]):
    """ Filter tree nodes based on given criteria """
    for node in self.climb(whole):
        # Node must contain every requested key
        if any(key not in node.data for key in keys):
            continue
        # Node name must match at least one given regular expression
        if names and not any(re.search(name, node.name) for name in names):
            continue
        # Node must pass all advanced filters; a missing attribute
        # (FilterError) counts as a failed filter
        try:
            if not all(utils.filter(filter, node.data, regexp=True)
                       for filter in filters):
                continue
        except utils.FilterError:
            continue
        # All criteria met, thus yield the node
        yield node
def show(self, brief=False, formatting=None, values=[]):
    """
    Show metadata

    Returns a human-readable string for the node, or ``None`` when the
    node has no data. With ``brief``, only the colored node name is
    shown. A custom ``formatting`` string ({}-style placeholders) may be
    given together with ``values``, a list of expressions evaluated to
    fill the placeholders (name/data/root/sources are in scope).
    """
    # Show nothing if there's nothing
    if not self.data:
        return None
    # Custom formatting
    if formatting is not None:
        formatting = re.sub("\\\\n", "\n", formatting)
        # Expose commonly useful variables to the evaluated expressions
        name = self.name
        data = self.data
        root = self.root
        sources = self.sources
        evaluated = []
        for value in values:
            # SECURITY: eval() of user-supplied expressions -- only
            # acceptable for trusted command-line input
            evaluated.append(eval(value))
        return formatting.format(*evaluated)
    # Show the name
    output = utils.color(self.name, 'red')
    if brief:
        return output + "\n"
    # List available attributes
    for key, value in sorted(self.data.items()):
        output += "\n{0}: ".format(utils.color(key, 'green'))
        if isinstance(value, type("")):
            output += value
        elif isinstance(value, list) and all(
                [isinstance(item, type("")) for item in value]):
            output += utils.listed(value)
        else:
            output += pretty(value)
    # Fixed: removed a stray no-op `output` expression statement that
    # the original had at the end of the loop.
    return output + "\n"
def label_from_instance(self, obj):
    """
    Creates labels which represent the tree level of each node when
    generating option labels.
    """
    # Repeat the (escaped) level indicator once per tree level.
    depth = getattr(obj, obj._mptt_meta.level_attr)
    indent = mark_safe(conditional_escape(self.level_indicator) * depth)
    label = conditional_escape(smart_unicode(obj))
    return mark_safe(u'%s %s' % (indent, label))
def save(self):
    """
    Attempts to move the node using the selected target and
    position.

    If an invalid move is attempted, the related error message will
    be added to the form's non-field errors and the error will be
    re-raised. Callers should attempt to catch ``InvalidMove`` to
    redisplay the form with the error, should it occur.
    """
    try:
        self.node.move_to(self.cleaned_data['target'],
                          self.cleaned_data['position'])
        return self.node
    except InvalidMove as e:
        # Fixed: `except InvalidMove, e` is Python-2-only syntax; the
        # `as` form works on both Python 2.6+ and 3.
        self.errors[NON_FIELD_ERRORS] = ErrorList(e)
        raise
def find(names, dirs, file_ext):
    """
    Iterating a set of dirs under the static root, this method tries to find
    a file named like one of the names and file ext passed, and returns the
    storage path to the first file it encounters (or ``None``).

    Using this method makes it possible to override static files (such as
    icon sets) in a similar way like templates in different locations can
    override others that have the same file name.
    """
    # Fixed precedence bug: the original condition
    # `not isinstance(names, list) or isinstance(names, tuple)` was true
    # for tuples, wrapping an already-iterable tuple in another tuple.
    if not isinstance(names, (list, tuple)):
        names = (names,)
    for dir_name in dirs:
        for name in names:
            path = os.path.join(dir_name, name + file_ext)
            if path not in EXISTING_PATHS:
                # Check storage once, then cache the existence result
                EXISTING_PATHS[path] = STATIC_STORAGE.exists(path)
            if EXISTING_PATHS[path]:
                return path
def find(file_node, dirs=ICON_DIRS, default_name=None, file_ext='.png'):
    """
    Iterating all icon dirs, try to find a file called like the node's
    extension / mime subtype / mime type (in that order).

    For instance, for an MP3 file ("audio/mpeg"), this would look for:
    "mp3.png" / "audio/mpeg.png" / "audio.png"
    """
    # Collect non-empty candidate names in priority order.
    candidates = []
    for attr_name in ('extension', 'mimetype', 'mime_supertype'):
        value = getattr(file_node, attr_name)
        if value:
            candidates.append(value)
    if default_name:
        candidates.append(default_name)
    icon_path = StaticPathFinder.find(candidates, dirs, file_ext)
    if icon_path:
        return StaticIconFile(file_node, icon_path)
def result_tree_flat(context, cl, request):
    """
    Added 'filtered' param, so the template's js knows whether the results have
    been affected by a GET param or not. Only when the results are not filtered
    you can drag and sort the tree
    """
    # Materialize the result list once, then hand the template a lazy
    # generator of table rows.
    rows = list(cl.result_list)
    return {
        #'filtered': is_filtered_cl(cl, request),
        'results': (th_for_result(cl, row) for row in rows),
    }
def get_object(self, queryset=None):
    """
    Returns the object the view is displaying.

    By default this requires `self.queryset` and a `pk` or `path` argument
    in the URLconf, but subclasses can override this to return any object.
    """
    # Use a custom queryset if provided; this is required for subclasses
    # like DateDetailView
    if queryset is None:
        queryset = self.get_queryset()
    path = self.kwargs.get('path', None)
    if path is None:
        # No path given: fall back to the default pk/slug based lookup.
        return super(FileNodeDetailView, self).get_object(queryset)
    # Look the node up by its tree path.
    filter_args = FileNode.objects.get_filter_args_with_path(
        for_self=True, path=path)
    try:
        return queryset.filter(**filter_args).get()
    except FileNode.DoesNotExist:
        raise Http404(_(u"No %(verbose_name)s found matching the query") %
                      {'verbose_name': queryset.model._meta.verbose_name})
def get_detail_view(self, request, object, opts=None):
    """
    Instantiates and returns the view class that will generate the actual
    context for this plugin.
    """
    # Build the view, then attach the object it should display.
    detail_view = self.get_view(request, self.view_class, opts)
    detail_view.object = object
    return detail_view
def get_listing_view(self, request, queryset, opts=None):
"""
Instantiates and returns the view class that will generate the
actual context for this plugin.
``queryset`` can be an actual QuerySet or any iterable.
"""
view = self.get_view(request, self.view_class, opts)
view.queryset = queryset
return view | Instantiates and returns the view class that will generate the
actual context for this plugin.
``queryset`` can be an actual QuerySet or any iterable. | entailment |
def get_view(self, request, view_class, opts=None):
"""
Instantiates and returns the view class that will generate the
actual context for this plugin.
"""
kwargs = {}
if opts:
if not isinstance(opts, dict):
opts = opts.__dict__
else:
opts = {}
if not view_class in VALID_MIXIN_OPTIONS:
valid_options = view_class.__dict__.keys()
for cls in view_class.__bases__:
if cls != object:
valid_options += cls.__dict__.keys()
VALID_MIXIN_OPTIONS[view_class] = valid_options
for key in VALID_MIXIN_OPTIONS[view_class]:
if key in opts:
kwargs[key] = opts[key]
elif hasattr(self, key):
kwargs[key] = getattr(self, key)
view = view_class(**kwargs)
view.request = request
view.kwargs = {}
return view | Instantiates and returns the view class that will generate the
actual context for this plugin. | entailment |
def options_formatting(self):
""" Formating options """
group = self.parser.add_argument_group("Format")
group.add_argument(
"--format", dest="formatting", default=None,
help="Custom output format using the {} expansion")
group.add_argument(
"--value", dest="values", action="append", default=[],
help="Values for the custom formatting string") | Formating options | entailment |
def options_utils(self):
""" Utilities """
group = self.parser.add_argument_group("Utils")
group.add_argument(
"--path", action="append", dest="paths",
help="Path to the metadata tree (default: current directory)")
group.add_argument(
"--verbose", action="store_true",
help="Print information about parsed files to stderr")
group.add_argument(
"--debug", action="store_true",
help="Turn on debugging output, do not catch exceptions") | Utilities | entailment |
def command_ls(self):
""" List names """
self.parser = argparse.ArgumentParser(
description="List names of available objects")
self.options_select()
self.options_utils()
self.options = self.parser.parse_args(self.arguments[2:])
self.show(brief=True) | List names | entailment |
def command_show(self):
""" Show metadata """
self.parser = argparse.ArgumentParser(
description="Show metadata of available objects")
self.options_select()
self.options_formatting()
self.options_utils()
self.options = self.parser.parse_args(self.arguments[2:])
self.show(brief=False) | Show metadata | entailment |
def command_init(self):
""" Initialize tree """
self.parser = argparse.ArgumentParser(
description="Initialize a new metadata tree")
self.options_utils()
self.options = self.parser.parse_args(self.arguments[2:])
# For each path create an .fmf directory and version file
for path in self.options.paths or ["."]:
root = os.path.abspath(os.path.join(path, ".fmf"))
if os.path.exists(root):
raise utils.FileError("{0} '{1}' already exists.".format(
"Directory" if os.path.isdir(root) else "File", root))
os.makedirs(root)
with open(os.path.join(root, "version"), "w") as version:
version.write("{0}\n".format(utils.VERSION))
print("Metadata tree '{0}' successfully initialized.".format(root)) | Initialize tree | entailment |
def show(self, brief=False):
""" Show metadata for each path given """
output = []
for path in self.options.paths or ["."]:
if self.options.verbose:
utils.info("Checking {0} for metadata.".format(path))
tree = fmf.Tree(path)
for node in tree.prune(
self.options.whole, self.options.keys, self.options.names,
self.options.filters):
if brief:
show = node.show(brief=True)
else:
show = node.show(
brief=False,
formatting=self.options.formatting,
values=self.options.values)
# List source files when in debug mode
if self.options.debug:
for source in node.sources:
show += utils.color("{0}\n".format(source), "blue")
if show is not None:
output.append(show)
# Print output and summary
if brief or self.options.formatting:
joined = "".join(output)
else:
joined = "\n".join(output)
try: # pragma: no cover
print(joined, end="")
except UnicodeEncodeError: # pragma: no cover
print(joined.encode('utf-8'), end="")
if self.options.verbose:
utils.info("Found {0}.".format(
utils.listed(len(output), "object")))
self.output = joined | Show metadata for each path given | entailment |
def filter(filter, data, sensitive=True, regexp=False):
"""
Return true if provided filter matches given dictionary of values
Filter supports disjunctive normal form with '|' used for OR, '&'
for AND and '-' for negation. Individual values are prefixed with
'value:', leading/trailing white-space is stripped. For example::
tag: Tier1 | tag: Tier2 | tag: Tier3
category: Sanity, Security & tag: -destructive
Note that multiple comma-separated values can be used as a syntactic
sugar to shorten the filter notation::
tag: A, B, C ---> tag: A | tag: B | tag: C
Values should be provided as a dictionary of lists each describing
the values against which the filter is to be matched. For example::
data = {tag: ["Tier1", "TIPpass"], category: ["Sanity"]}
Other types of dictionary values are converted into a string.
A FilterError exception is raised when a dimension parsed from the
filter is not found in the data dictionary. Set option 'sensitive'
to False to enable case-insensitive matching. If 'regexp' option is
True, regular expressions can be used in the filter values as well.
"""
def match_value(pattern, text):
""" Match value against data (simple or regexp) """
if regexp:
return re.match("^{0}$".format(pattern), text)
else:
return pattern == text
def check_value(dimension, value):
""" Check whether the value matches data """
# E.g. value = 'A, B' or value = "C" or value = "-D"
# If there are multiple values, at least one must match
for atom in re.split("\s*,\s*", value):
# Handle negative values (check the whole data for non-presence)
if atom.startswith("-"):
atom = atom[1:]
# Check each value for given dimension
for dato in data[dimension]:
if match_value(atom, dato):
break
# Pattern not found ---> good
else:
return True
# Handle positive values (return True upon first successful match)
else:
# Check each value for given dimension
for dato in data[dimension]:
if match_value(atom, dato):
# Pattern found ---> good
return True
# No value matched the data
return False
def check_dimension(dimension, values):
""" Check whether all values for given dimension match data """
# E.g. dimension = 'tag', values = ['A, B', 'C', '-D']
# Raise exception upon unknown dimension
if dimension not in data:
raise FilterError("Invalid filter '{0}'".format(dimension))
# Every value must match at least one value for data
return all([check_value(dimension, value) for value in values])
def check_clause(clause):
""" Split into literals and check whether all match """
# E.g. clause = 'tag: A, B & tag: C & tag: -D'
# Split into individual literals by dimension
literals = dict()
for literal in re.split("\s*&\s*", clause):
# E.g. literal = 'tag: A, B'
# Make sure the literal matches dimension:value format
matched = re.match("^(.*)\s*:\s*(.*)$", literal)
if not matched:
raise FilterError("Invalid filter '{0}'".format(literal))
dimension, value = matched.groups()
values = [value]
# Append the literal value(s) to corresponding dimension list
literals.setdefault(dimension, []).extend(values)
# For each dimension all literals must match given data
return all([check_dimension(dimension, values)
for dimension, values in literals.items()])
# Default to True if no filter given, bail out if weird data given
if filter is None or filter == "": return True
if not isinstance(data, dict):
raise FilterError("Invalid data type '{0}'".format(type(data)))
# Make sure that data dictionary contains lists of strings
data = copy.deepcopy(data)
try: # pragma: no cover
for key in data:
if isinstance(data[key], list):
data[key] = [unicode(item) for item in data[key]]
else:
data[key] = [unicode(data[key])]
except NameError: # pragma: no cover
for key in data:
if isinstance(data[key], list):
data[key] = [str(item) for item in data[key]]
else:
data[key] = [str(data[key])]
# Turn all data into lowercase if sensitivity is off
if not sensitive:
filter = filter.lower()
lowered = dict()
for key, values in data.items():
lowered[key.lower()] = [value.lower() for value in values]
data = lowered
# At least one clause must be true
return any([check_clause(clause)
for clause in re.split("\s*\|\s*", filter)]) | Return true if provided filter matches given dictionary of values
Filter supports disjunctive normal form with '|' used for OR, '&'
for AND and '-' for negation. Individual values are prefixed with
'value:', leading/trailing white-space is stripped. For example::
tag: Tier1 | tag: Tier2 | tag: Tier3
category: Sanity, Security & tag: -destructive
Note that multiple comma-separated values can be used as a syntactic
sugar to shorten the filter notation::
tag: A, B, C ---> tag: A | tag: B | tag: C
Values should be provided as a dictionary of lists each describing
the values against which the filter is to be matched. For example::
data = {tag: ["Tier1", "TIPpass"], category: ["Sanity"]}
Other types of dictionary values are converted into a string.
A FilterError exception is raised when a dimension parsed from the
filter is not found in the data dictionary. Set option 'sensitive'
to False to enable case-insensitive matching. If 'regexp' option is
True, regular expressions can be used in the filter values as well. | entailment |
def color(text, color=None, background=None, light=False, enabled="auto"):
"""
Return text in desired color if coloring enabled
Available colors: black red green yellow blue magenta cyan white.
Alternatively color can be prefixed with "light", e.g. lightgreen.
"""
colors = {"black": 30, "red": 31, "green": 32, "yellow": 33,
"blue": 34, "magenta": 35, "cyan": 36, "white": 37}
# Nothing do do if coloring disabled
if enabled == "auto":
enabled = Coloring().enabled()
if not enabled:
return text
# Prepare colors (strip 'light' if present in color)
if color and color.startswith("light"):
light = True
color = color[5:]
color = color and ";{0}".format(colors[color]) or ""
background = background and ";{0}".format(colors[background] + 10) or ""
light = light and 1 or 0
# Starting and finishing sequence
start = "\033[{0}{1}{2}m".format(light, color, background)
finish = "\033[1;m"
return "".join([start, text, finish]) | Return text in desired color if coloring enabled
Available colors: black red green yellow blue magenta cyan white.
Alternatively color can be prefixed with "light", e.g. lightgreen. | entailment |
def _create_logger(name='fmf', level=None):
""" Create fmf logger """
# Create logger, handler and formatter
logger = logging.getLogger(name)
handler = logging.StreamHandler()
handler.setFormatter(Logging.ColoredFormatter())
logger.addHandler(handler)
# Save log levels in the logger itself (backward compatibility)
for level in Logging.LEVELS:
setattr(logger, level, getattr(logging, level))
# Additional logging constants and methods for cache and xmlrpc
logger.DATA = LOG_DATA
logger.CACHE = LOG_CACHE
logger.ALL = LOG_ALL
logger.cache = lambda message: logger.log(LOG_CACHE, message) # NOQA
logger.data = lambda message: logger.log(LOG_DATA, message) # NOQA
logger.all = lambda message: logger.log(LOG_ALL, message) # NOQA
return logger | Create fmf logger | entailment |
def filter(self, *args, **kwargs):
"""
Works just like the default Manager's :func:`filter` method, but
you can pass an additional keyword argument named ``path`` specifying
the full **path of the folder whose immediate child objects** you
want to retrieve, e.g. ``"path/to/folder"``.
"""
if 'path' in kwargs:
kwargs = self.get_filter_args_with_path(False, **kwargs)
return super(FileNodeManager, self).filter(*args, **kwargs) | Works just like the default Manager's :func:`filter` method, but
you can pass an additional keyword argument named ``path`` specifying
the full **path of the folder whose immediate child objects** you
want to retrieve, e.g. ``"path/to/folder"``. | entailment |
def exclude(self, *args, **kwargs):
"""
Works just like the default Manager's :func:`exclude` method, but
you can pass an additional keyword argument named ``path`` specifying
the full **path of the folder whose immediate child objects** you
want to exclude, e.g. ``"path/to/folder"``.
"""
if 'path' in kwargs:
kwargs = self.get_filter_args_with_path(False, **kwargs)
return super(FileNodeManager, self).exclude(*args, **kwargs) | Works just like the default Manager's :func:`exclude` method, but
you can pass an additional keyword argument named ``path`` specifying
the full **path of the folder whose immediate child objects** you
want to exclude, e.g. ``"path/to/folder"``. | entailment |
def get(self, *args, **kwargs):
"""
Works just like the default Manager's :func:`get` method, but
you can pass an additional keyword argument named ``path`` specifying
the full path of the object you want to retrieve, e.g.
``"path/to/folder/readme.txt"``.
"""
if 'path' in kwargs:
kwargs = self.get_filter_args_with_path(True, **kwargs)
return super(FileNodeManager, self).get(
*args, **kwargs) | Works just like the default Manager's :func:`get` method, but
you can pass an additional keyword argument named ``path`` specifying
the full path of the object you want to retrieve, e.g.
``"path/to/folder/readme.txt"``. | entailment |
def validate(self, value, model_instance):
"""
Validates value and throws ValidationError. Subclasses should override
this to provide validation logic.
"""
if not self.editable:
# Skip validation for non-editable fields.
return
if self._choices and value:
print(value)
l = value
if type(value) != list:
l = [ value ]
for v in value:
for option_key, option_value in self.choices:
if isinstance(option_value, (list, tuple)):
# This is an optgroup, so look inside the group for options.
for optgroup_key, optgroup_value in option_value:
if v == optgroup_key:
return
elif v == option_key:
return
raise ValidationError(self.error_messages['invalid_choice'] % {'value': value})
if value is None and not self.null:
raise ValidationError(self.error_messages['null'])
if not self.blank and value in validators.EMPTY_VALUES:
raise ValidationError(self.error_messages['blank'])
return super(MultipleChoiceCommaSeparatedIntegerField, self).validate(value, model_instance) | Validates value and throws ValidationError. Subclasses should override
this to provide validation logic. | entailment |
def get_qualified_file_url(self, field_name='file'):
"""Returns a fully qualified URL for the :attr:`file` field, including
protocol, domain and port. In most cases, you can just use ``file.url``
instead, which (depending on your ``MEDIA_URL``) may or may not contain
the domain. In some cases however, you always need a fully qualified
URL. This includes, for instance, embedding a flash video player from a
remote domain and passing it a video URL.
"""
url = getattr(self, field_name).url
if '://' in url:
# `MEDIA_URL` already contains domain
return url
protocol = getattr(settings, 'PROTOCOL', 'http')
domain = Site.objects.get_current().domain
port = getattr(settings, 'PORT', '')
return '%(protocol)s://%(domain)s%(port)s%(url)s' % {
'protocol': 'http',
'domain': domain.rstrip('/'),
'port': ':'+port if port else '',
'url': url,
} | Returns a fully qualified URL for the :attr:`file` field, including
protocol, domain and port. In most cases, you can just use ``file.url``
instead, which (depending on your ``MEDIA_URL``) may or may not contain
the domain. In some cases however, you always need a fully qualified
URL. This includes, for instance, embedding a flash video player from a
remote domain and passing it a video URL. | entailment |
def get_admin_url(self, query_params=None, use_path=False):
"""Returns the URL for viewing a FileNode in the admin."""
if not query_params:
query_params = {}
url = ''
if self.is_top_node():
url = reverse('admin:media_tree_filenode_changelist');
elif use_path and (self.is_folder() or self.pk):
url = reverse('admin:media_tree_filenode_open_path', args=(self.get_path(),));
elif self.is_folder():
url = reverse('admin:media_tree_filenode_changelist');
query_params['folder_id'] = self.pk
elif self.pk:
return reverse('admin:media_tree_filenode_change', args=(self.pk,));
if len(query_params):
params = ['%s=%s' % (key, value) for key, value in query_params.items()]
url = '%s?%s' % (url, "&".join(params))
return url | Returns the URL for viewing a FileNode in the admin. | entailment |
def get_metadata_display(self, field_formats = {}, escape=True):
"""Returns object metadata that has been selected to be displayed to
users, compiled as a string.
"""
def field_format(field):
if field in field_formats:
return field_formats[field]
return u'%s'
t = join_formatted('', self.title, format=field_format('title'), escape=escape)
t = join_formatted(t, self.description, u'%s: %s', escape=escape)
if self.publish_author:
t = join_formatted(t, self.author, u'%s' + u' – ' + u'Author: %s', u'%s' + u'Author: %s', escape=escape)
if self.publish_copyright:
t = join_formatted(t, self.copyright, u'%s, %s', escape=escape)
if self.publish_date_time and self.date_time:
date_time_formatted = dateformat.format(self.date_time, get_format('DATE_FORMAT'))
t = join_formatted(t, date_time_formatted, u'%s (%s)', '%s%s', escape=escape)
return t | Returns object metadata that has been selected to be displayed to
users, compiled as a string. | entailment |
def get_caption_formatted(self, field_formats = app_settings.MEDIA_TREE_METADATA_FORMATS, escape=True):
"""Returns object metadata that has been selected to be displayed to
users, compiled as a string including default formatting, for example
bold titles.
You can use this method in templates where you want to output image
captions.
"""
if self.override_caption != '':
return self.override_caption
else:
return mark_safe(self.get_metadata_display(field_formats, escape=escape)) | Returns object metadata that has been selected to be displayed to
users, compiled as a string including default formatting, for example
bold titles.
You can use this method in templates where you want to output image
captions. | entailment |
def alt(self):
"""Returns object metadata suitable for use as the HTML ``alt``
attribute. You can use this method in templates::
<img src="{{ node.file.url }}" alt="{{ node.alt }}" />
"""
if self.override_alt != '' and self.override_alt is not None:
return self.override_alt
elif self.override_caption != '' and self.override_caption is not None:
return self.override_caption
else:
return self.get_metadata_display() | Returns object metadata suitable for use as the HTML ``alt``
attribute. You can use this method in templates::
<img src="{{ node.file.url }}" alt="{{ node.alt }}" /> | entailment |
def save(self):
"""
Attempts to move the nodes using the selected target and
position.
If an invalid move is attempted, the related error message will
be added to the form's non-field errors and the error will be
re-raised. Callers should attempt to catch ``InvalidMove`` to
redisplay the form with the error, should it occur.
"""
self.success_count = 0
for node in self.get_selected_nodes():
self.move_node(node, self.cleaned_data['target_node']) | Attempts to move the nodes using the selected target and
position.
If an invalid move is attempted, the related error message will
be added to the form's non-field errors and the error will be
re-raised. Callers should attempt to catch ``InvalidMove`` to
redisplay the form with the error, should it occur. | entailment |
def save(self):
"""
Deletes the selected files from storage
"""
storage = get_media_storage()
for storage_name in self.cleaned_data['selected_files']:
full_path = storage.path(storage_name)
try:
storage.delete(storage_name)
self.success_files.append(full_path)
except OSError:
self.error_files.append(full_path) | Deletes the selected files from storage | entailment |
def get_merged_filenode_list(nodes, filter_media_types=None, exclude_media_types=None, filter=None, ordering=None, processors=None, max_depth=None, max_nodes=None):
"""
Almost the same as :func:`get_nested_filenode_list`, but returns a flat (one-dimensional) list.
Using the same QuerySet as in the example for `get_nested_filenode_list`, this method would return::
[
<FileNode: Empty folder>,
<FileNode: Photo folder>,
<FileNode: photo1.jpg>,
<FileNode: photo2.jpg>,
<FileNode: photo3.jpg>,
<FileNode: file.txt>
]
"""
return __get_filenode_list(nodes, filter_media_types=filter_media_types, exclude_media_types=exclude_media_types,
filter=filter, ordering=ordering, processors=processors, list_method='extend', max_depth=max_depth, max_nodes=max_nodes) | Almost the same as :func:`get_nested_filenode_list`, but returns a flat (one-dimensional) list.
Using the same QuerySet as in the example for `get_nested_filenode_list`, this method would return::
[
<FileNode: Empty folder>,
<FileNode: Photo folder>,
<FileNode: photo1.jpg>,
<FileNode: photo2.jpg>,
<FileNode: photo3.jpg>,
<FileNode: file.txt>
] | entailment |
def get_file_link(node, use_metadata=False, include_size=False, include_extension=False, include_icon=False, href=None, extra_class='', extra=''):
"""
Returns a formatted HTML link tag to the FileNode's file, optionally including some meta information about the file.
"""
link_text = None
if use_metadata:
link_text = node.get_metadata_display()
if not link_text:
link_text = node.__unicode__()
if node.node_type != media_types.FOLDER:
if include_extension:
if extra != '':
extra += ' '
extra = '<span class="file-extension">%s</span>' % node.extension.upper()
if include_size:
if extra != '':
extra += ', '
extra += '<span class="file-size">%s</span>' % filesizeformat(node.size)
if extra:
extra = ' <span class="details">(%s)</span>' % extra
link_class = 'file %s' % node.extension
else:
link_class = 'folder'
if extra_class:
link_class = '%s %s' % (link_class, extra_class)
if node.node_type != media_types.FOLDER and not href:
href = node.file.url
icon = ''
if include_icon:
icon_file = node.get_icon_file()
if icon_file:
icon = '<span class="icon"><img src="%s" alt="%s" /></span>' % (
icon_file.url, node.alt)
if href:
link = u'<a class="%s" href="%s">%s%s</a>%s' % (
link_class, href, icon, link_text, extra)
else:
link = u'<span class="%s">%s%s</span>%s' % (
link_class, icon, link_text, extra)
return force_unicode(mark_safe(link)) | Returns a formatted HTML link tag to the FileNode's file, optionally including some meta information about the file. | entailment |
def delete_selected_tree(self, modeladmin, request, queryset):
"""
Deletes multiple instances and makes sure the MPTT fields get recalculated properly.
(Because merely doing a bulk delete doesn't trigger the post_delete hooks.)
"""
# If the user has not yet confirmed the deletion, call the regular delete
# action that will present a confirmation page
if not request.POST.get('post'):
return actions.delete_selected(modeladmin, request, queryset)
# Otherwise, delete objects one by one
n = 0
for obj in queryset:
obj.delete()
n += 1
self.message_user(request, _("Successfully deleted %s items." % n)) | Deletes multiple instances and makes sure the MPTT fields get recalculated properly.
(Because merely doing a bulk delete doesn't trigger the post_delete hooks.) | entailment |
def norm_name(build_module: str, target_name: str):
"""Return a normalized canonical target name for the `target_name`
observed in build module `build_module`.
A normalized canonical target name is of the form "<build module>:<name>",
where <build module> is the relative normalized path from the project root
to the target build module (POSIX), and <name> is a valid target name
(see `validate_name()`).
"""
if ':' not in target_name:
raise ValueError(
"Must provide fully-qualified target name (with `:') to avoid "
"possible ambiguity - `{}' not valid".format(target_name))
mod, name = split(target_name)
return '{}:{}'.format(
PurePath(norm_proj_path(mod, build_module)).as_posix().strip('.'),
validate_name(name)) | Return a normalized canonical target name for the `target_name`
observed in build module `build_module`.
A normalized canonical target name is of the form "<build module>:<name>",
where <build module> is the relative normalized path from the project root
to the target build module (POSIX), and <name> is a valid target name
(see `validate_name()`). | entailment |
def expand_target_selector(target_selector: str, conf: Config):
"""Return a normalized target name (where `**:*` is the normalized form of
itself).
Target specifier can be:
- `**:*` - means to recursively build all targets under current
working dir.
- relative path from current working directory to another directory -
means to build all targets defined in that build module.
- a name of a target - means to build this named target in the build module
in the current working directory.
- a named target in another build module, with the build module given as a
relative path from the current working directory (e.g. `../foo:bar`) -
means to build the specified named target in the specified build
module.
- in cases where a relative path can be specified, it should be given using
standard POSIX relative path construction.
"""
if target_selector == '**:*':
return target_selector
if ':' not in target_selector:
target_selector += ':*'
build_module, target_name = split(target_selector)
build_module = normpath(join(conf.get_rel_work_dir(), build_module))
return '{}:{}'.format(PurePath(build_module).as_posix().strip('.'),
validate_name(target_name)) | Return a normalized target name (where `**:*` is the normalized form of
itself).
Target specifier can be:
- `**:*` - means to recursively build all targets under current
working dir.
- relative path from current working directory to another directory -
means to build all targets defined in that build module.
- a name of a target - means to build this named target in the build module
in the current working directory.
- a named target in another build module, with the build module given as a
relative path from the current working directory (e.g. `../foo:bar`) -
means to build the specified named target in the specified build
module.
- in cases where a relative path can be specified, it should be given using
standard POSIX relative path construction. | entailment |
def hashify_targets(targets: list, build_context) -> list:
"""Return sorted hashes of `targets`."""
return sorted(build_context.targets[target_name].hash(build_context)
for target_name in listify(targets)) | Return sorted hashes of `targets`. | entailment |
def hashify_files(files: list) -> dict:
"""Return mapping from file path to file hash."""
return {filepath.replace('\\', '/'): hash_tree(filepath)
for filepath in listify(files)} | Return mapping from file path to file hash. | entailment |
def process_prop(prop_type: PT, value, build_context):
"""Return a cachable representation of the prop `value` given its type."""
if prop_type in (PT.Target, PT.TargetList):
return hashify_targets(value, build_context)
elif prop_type in (PT.File, PT.FileList):
return hashify_files(value)
return value | Return a cachable representation of the prop `value` given its type. | entailment |
def compute_json(self, build_context):
"""Compute and store a JSON serialization of this target for caching
purposes.
The serialization includes:
- The build flavor
- The builder name
- Target tags
- Hashes of target dependencies & buildenv
- Processed props (where target props are replaced with their hashes,
and file props are replaced with mapping from file name to its hash)
It specifically does NOT include:
- Artifacts produced by the target
The target name is currently included, although it would be better off
to leave it out, and allow targets to be renamed without affecting
their caching status (if it's just a rename).
It is currently included because it's the easy way to account for the
fact that when cached artifacts are restored, their path may be a
function of the target name in non-essential ways (such as a workspace
dir name).
"""
props = {}
test_props = {}
for prop in self.props:
if prop in self._prop_json_blacklist:
continue
sig_spec = Plugin.builders[self.builder_name].sig.get(prop)
if sig_spec is None:
continue
if prop in self._prop_json_testlist:
test_props[prop] = process_prop(sig_spec.type,
self.props[prop],
build_context)
else:
props[prop] = process_prop(sig_spec.type, self.props[prop],
build_context)
json_dict = dict(
# TODO: avoid including the name in the hashed json...
name=self.name,
builder_name=self.builder_name,
deps=hashify_targets(self.deps, build_context),
props=props,
buildenv=hashify_targets(self.buildenv, build_context),
tags=sorted(list(self.tags)),
flavor=build_context.conf.flavor, # TODO: any other conf args?
# yabt_version=__version__, # TODO: is this needed?
)
json_test_dict = dict(
props=test_props,
)
self._json = json.dumps(json_dict, sort_keys=True, indent=4)
self._test_json = json.dumps(json_test_dict, sort_keys=True, indent=4) | Compute and store a JSON serialization of this target for caching
purposes.
The serialization includes:
- The build flavor
- The builder name
- Target tags
- Hashes of target dependencies & buildenv
- Processed props (where target props are replaced with their hashes,
and file props are replaced with mapping from file name to its hash)
It specifically does NOT include:
- Artifacts produced by the target
The target name is currently included, although it would be better off
to leave it out, and allow targets to be renamed without affecting
their caching status (if it's just a rename).
It is currently included because it's the easy way to account for the
fact that when cached artifacts are restored, their path may be a
function of the target name in non-essential ways (such as a workspace
dir name). | entailment |
def json(self, build_context) -> str:
"""Return JSON serialization of this target for caching purposes."""
if self._json is None:
self.compute_json(build_context)
return self._json | Return JSON serialization of this target for caching purposes. | entailment |
def compute_hash(self, build_context):
"""Compute and store the hash of this target for caching purposes.
The hash is computed over the target JSON representation.
"""
m = md5()
m.update(self.json(build_context).encode('utf8'))
self._hash = m.hexdigest()
m = md5()
m.update(self.test_json(build_context).encode('utf8'))
self._test_hash = m.hexdigest() | Compute and store the hash of this target for caching purposes.
The hash is computed over the target JSON representation. | entailment |
def hash(self, build_context) -> str:
"""Return the hash of this target for caching purposes."""
if self._hash is None:
self.compute_hash(build_context)
return self._hash | Return the hash of this target for caching purposes. | entailment |
def handle_build_cache(
conf: Config, name: str, tag: str, icb: ImageCachingBehavior):
"""Handle Docker image build cache.
Return image ID if image is cached, and there's no need to redo the build.
Return None if need to build the image (whether cached locally or not).
Raise RuntimeError if not allowed to build the image because of state of
local cache.
TODO(itamar): figure out a better name for this function, that reflects
what it returns (e.g. `get_cached_image_id`),
without "surprising" the caller with the potential of long
and non-trivial operations that are not usually expected from functions
with such names.
"""
if icb.pull_if_cached or (icb.pull_if_not_cached and
get_cached_image_id(icb.remote_image) is None):
try:
pull_docker_image(icb.remote_image, conf.docker_pull_cmd)
except CalledProcessError:
pass
local_image = '{}:{}'.format(name, tag)
if (icb.skip_build_if_cached and
get_cached_image_id(icb.remote_image) is not None):
tag_docker_image(icb.remote_image, local_image)
return get_cached_image_id(local_image)
if ((not icb.allow_build_if_not_cached) and
get_cached_image_id(icb.remote_image) is None):
raise RuntimeError('No cached image for {}'.format(local_image))
return None | Handle Docker image build cache.
Return image ID if image is cached, and there's no need to redo the build.
Return None if need to build the image (whether cached locally or not).
Raise RuntimeError if not allowed to build the image because of state of
local cache.
TODO(itamar): figure out a better name for this function, that reflects
what it returns (e.g. `get_cached_image_id`),
without "surprising" the caller with the potential of long
and non-trivial operations that are not usually expected from functions
with such names. | entailment |
def build_docker_image(
        build_context, name: str, tag: str, base_image, deps: list=None,
        env: dict=None, work_dir: str=None,
        entrypoint: list=None, cmd: list=None, full_path_cmd: bool=False,
        distro: dict=None, image_caching_behavior: dict=None,
        runtime_params: dict=None, ybt_bin_path: str=None,
        build_user: str=None, run_user: str=None, labels: dict=None,
        no_artifacts: bool=False):
    """Build Docker image, and return a (image_id, image_name:tag) tuple of
    built image, if built successfully.

    Notes:
    Using the given image name & tag as they are, but using the global host
    Docker image namespace (as opposed to a private-project-workspace),
    so collisions between projects are possible (and very likely, e.g., when
    used in a CI environment, or shared machine use-case).
    Trying to address this issue to some extent by using the image ID after
    it is built, which is unique.
    There's a race condition between "build" and "get ID" - ignoring this at
    the moment.
    Also, I'm not sure about Docker's garbage collection...
    If I use the image ID in other places, and someone else "grabbed" my image
    name and tag (so now my image ID is floating), is it still safe to use
    the ID? Or is it going to be garbage collected and cleaned up sometime?
    From my experiments, the "floating image ID" was left alone (usable),
    but prone to "manual cleanups".
    Also ignoring this at the moment...
    Thought about an alternative approach based on first building an image
    with a randomly generated tag, so I can use that safely later, and tag it
    to the requested tag.
    Decided against it, seeing that every run increases the local Docker
    images spam significantly with a bunch of random tags, making it even less
    useful.
    Documenting it here to remember it was considered, and to discuss it
    further in case anyone thinks it's a better idea than what I went with.
    """
    docker_image = '{}:{}'.format(name, tag)
    # create directory for this target under a private builder workspace
    workspace_dir = build_context.get_workspace('DockerBuilder', docker_image)
    # generate Dockerfile and build it
    dockerfile_path = join(workspace_dir, 'Dockerfile')
    dockerfile = [
        'FROM {}\n'.format(format_qualified_image_name(base_image)),
        'ARG DEBIAN_FRONTEND=noninteractive\n',
    ]
    if build_user:
        dockerfile.append('USER {}\n'.format(build_user))
    workspace_src_dir = join(workspace_dir, 'src')
    rmtree(workspace_src_dir)
    num_linked = 0
    apt_repo_deps = []
    effective_env = {}
    effective_labels = {}
    KNOWN_RUNTIME_PARAMS = frozenset((
        'ports', 'volumes', 'container_name', 'daemonize', 'interactive',
        'term', 'auto_it', 'rm', 'env', 'work_dir', 'impersonate'))
    # Normalize runtime_params so the per-dep merge below can assume the
    # 'ports' / 'volumes' / 'env' keys exist with list/dict values.
    if runtime_params is None:
        runtime_params = {}
    runtime_params['ports'] = listify(runtime_params.get('ports'))
    runtime_params['volumes'] = listify(runtime_params.get('volumes'))
    runtime_params['env'] = dict(runtime_params.get('env', {}))
    env_manipulations = {}
    packaging_layers = []

    def add_package(pkg_type, pkg_spec):
        """Add package specification of certain package type.
        Uses last layer if matches package type, otherwise opens a new layer.
        This can result "Docker layer fragmentation", by opening and closing
        many layers.
        No optimization is performed on detecting opportunities to merge layers
        that were split just because of arbitrary topological sort decision
        (tie breaker), and not a real topology in the target graph.
        Such an optimization could be done here by inspecting the graph
        directly, but decided not to go into it at this stage, since it's not
        clear it's beneficial overall (e.g. better to have more layers so
        some of them can remain cached if others change).
        A better optimization (also not implemented) could be to do topological
        sort tie breaking based on Docker-cache optimization - e.g., move new
        things to new layers in order to keep old things in cached layers.
        """
        if len(packaging_layers) == 0:
            layer = (pkg_type, list())
            packaging_layers.append(layer)
        else:
            layer = packaging_layers[-1]
            if pkg_type != layer[0]:
                layer = (pkg_type, list())
                packaging_layers.append(layer)
        if isinstance(pkg_spec, list):
            layer[1].extend(pkg_spec)
        else:
            layer[1].append(pkg_spec)

    def check_env_overrides(new_vars: set, op_kind: str, vars_source: str):
        """Raise ValueError if `new_vars` collide with previously set env."""
        overridden_vars = new_vars.intersection(effective_env.keys())
        if overridden_vars:
            raise ValueError(
                'Following env vars {} from {} override previously set vars '
                'during build of Docker image "{}": {}'.format(
                    op_kind, vars_source, docker_image,
                    ', '.join(overridden_vars)))
        if op_kind == 'set':
            overridden_vars = new_vars.intersection(env_manipulations.keys())
            if overridden_vars:
                raise ValueError(
                    'Following env vars {} from {} override previous '
                    'manipulations during build of Docker image "{}": {}'
                    .format(op_kind, vars_source, docker_image,
                            ', '.join(overridden_vars)))

    def check_label_overrides(new_labels: set, labels_source: str):
        """Raise ValueError if `new_labels` collide with existing labels."""
        overridden_labels = new_labels.intersection(effective_labels.keys())
        if overridden_labels:
            raise ValueError(
                'Following labels set from {} override previously set labels '
                'during build of Docker image "{}": {}'.format(
                    labels_source, docker_image, ', '.join(overridden_labels)))

    def update_runtime_params(new_rt_param: dict, params_source: str):
        """Merge runtime params from `params_source` into `runtime_params`."""
        invalid_keys = set(
            new_rt_param.keys()).difference(KNOWN_RUNTIME_PARAMS)
        if invalid_keys:
            raise ValueError(
                'Unknown keys in runtime params of {}: {}'.format(
                    params_source, ', '.join(invalid_keys)))
        # TODO(itamar): check for invalid values and inconsistencies
        runtime_params['ports'].extend(listify(new_rt_param.get('ports')))
        runtime_params['volumes'].extend(listify(new_rt_param.get('volumes')))
        # BUG FIX: merge env vars from the *new* runtime params.  Previously
        # this updated runtime_params['env'] with itself (a no-op), silently
        # dropping env vars contributed by dependencies.
        runtime_params['env'].update(dict(new_rt_param.get('env', {})))
        for param in ('container_name', 'daemonize', 'interactive', 'term',
                      'auto_it', 'rm', 'work_dir', 'impersonate'):
            if param in new_rt_param:
                # TODO(itamar): check conflicting overrides
                runtime_params[param] = new_rt_param[param]

    if deps is None:
        deps = []
    # Get all base image deps, so when building this image we can skip adding
    # deps that already exist in the base image.
    base_image_deps = set(build_context.generate_dep_names(base_image))
    for dep in deps:
        if not distro and 'distro' in dep.props:
            distro = dep.props.distro
        if 'runtime_params' in dep.props:
            update_runtime_params(dep.props.runtime_params,
                                  'dependency {}'.format(dep.name))
        if dep.name in base_image_deps:
            logger.debug('Skipping base image dep {}', dep.name)
            continue
        if not no_artifacts:
            num_linked += dep.artifacts.link_for_image(
                workspace_src_dir, build_context.conf)
        PACKAGING_PARAMS = frozenset(
            ('set_env', 'semicolon_join_env', 'set_label'))
        invalid_keys = set(
            dep.props.packaging_params.keys()).difference(PACKAGING_PARAMS)
        if invalid_keys:
            raise ValueError(
                'Unknown keys in packaging params of target "{}": {}'.format(
                    dep.name, ', '.join(invalid_keys)))
        if 'set_env' in dep.props.packaging_params:
            dep_env = dep.props.packaging_params['set_env']
            check_env_overrides(
                set(dep_env.keys()), 'set', 'dependency {}'.format(dep.name))
            effective_env.update(dep_env)
        if 'semicolon_join_env' in dep.props.packaging_params:
            append_env = dep.props.packaging_params['semicolon_join_env']
            check_env_overrides(set(append_env.keys()), 'manipulations',
                                'dependency {}'.format(dep.name))
            for key, value in append_env.items():
                # Seed each manipulation with a reference to the var's
                # existing value, so Docker expands it at build time.
                env_manip = env_manipulations.setdefault(
                    key, ['${{{}}}'.format(key)])
                if value not in env_manip:
                    env_manip.append(value)
        if 'set_label' in dep.props.packaging_params:
            dep_labels = dep.props.packaging_params['set_label']
            check_label_overrides(
                set(dep_labels.keys()), 'dependency {}'.format(dep.name))
            effective_labels.update(dep_labels)
        if 'apt-repository' in dep.tags:
            apt_repo_deps.append(dep)
        if 'apt-installable' in dep.tags:
            add_package('apt', format_apt_specifier(dep))
        if 'pip-installable' in dep.tags:
            add_package(dep.props.pip, format_pypi_specifier(dep))
        if 'custom-installer' in dep.tags:
            add_package('custom', get_installer_desc(build_context, dep))
        if 'npm-installable' in dep.tags:
            if dep.props.global_install:
                add_package('npm-global', format_npm_specifier(dep))
            else:
                add_package('npm-local', format_npm_specifier(dep))
        if 'gem-installable' in dep.tags:
            add_package('gem', format_gem_specifier(dep))
    # Add environment variables (one layer)
    if env:
        check_env_overrides(set(env.keys()), 'set', 'the target')
        effective_env.update(env)
    for key, value in env_manipulations.items():
        effective_env[key] = ':'.join(value)
    if effective_env:
        dockerfile.append(
            'ENV {}\n'.format(
                ' '.join('{}="{}"'.format(key, value)
                         for key, value in sorted(effective_env.items()))))
    apt_key_cmds = []
    apt_repositories = []
    for dep in apt_repo_deps:
        source_line, apt_key_cmd = parse_apt_repository(
            build_context, dep, distro)
        apt_repositories.append(source_line)
        if apt_key_cmd:
            apt_key_cmds.append(apt_key_cmd)
    # Handle apt keys (one layer for all)
    if apt_key_cmds:
        dockerfile.append(
            'RUN {}\n'.format(' && '.join(apt_key_cmds)))
    # Handle apt repositories (one layer for all)
    if apt_repositories:
        list_name = '{}.list'.format(name)
        apt_src_file = join(workspace_dir, list_name)
        if make_apt_sources_list(apt_repositories, apt_src_file):
            dockerfile.append(
                'COPY {} /etc/apt/sources.list.d/\n'.format(list_name))
    custom_cnt = 0
    pip_req_cnt = defaultdict(int)

    def install_npm(npm_packages: list, global_install: bool):
        """Append Dockerfile lines installing `npm_packages` via npm."""
        if npm_packages:
            if not global_install:
                dockerfile.append('WORKDIR /usr/src\n')
            dockerfile.append(
                'RUN npm install {} {}\n'.format(
                    ' '.join(npm_packages),
                    '--global' if global_install else '&& npm dedupe'))

    for layer in packaging_layers:
        pkg_type, packages = layer
        if pkg_type == 'apt':
            dockerfile.append(
                'RUN apt-get update -y && apt-get install '
                '--no-install-recommends -y {} '
                '&& rm -rf /var/lib/apt/lists/*\n'.format(
                    ' '.join(sorted(packages))))
        elif pkg_type == 'custom':
            # Handle custom installers (2 layers per occurrence)
            custom_cnt += 1
            packages_dir = 'packages{}'.format(custom_cnt)
            tmp_install = '/tmp/install{}'.format(custom_cnt)
            workspace_packages_dir = join(workspace_dir, packages_dir)
            rmtree(workspace_packages_dir)
            os.makedirs(workspace_packages_dir)
            run_installers = []
            for custom_installer_desc in packages:
                target_name, install_script, package = custom_installer_desc
                package_tar = basename(package)
                link_node(package, join(workspace_packages_dir, package_tar))
                run_installers.extend([
                    'tar -xf {0}/{1} -C {0}'.format(tmp_install, package_tar),
                    'cd {}/{}'.format(tmp_install, target_name),
                    'cat {} | tr -d \'\\r\' | bash'.format(install_script),
                ])
            dockerfile.extend([
                'COPY {} {}\n'.format(packages_dir, tmp_install),
                'RUN {} && cd / && rm -rf {}\n'.format(
                    ' && '.join(run_installers), tmp_install),
            ])
        elif pkg_type.startswith('pip'):
            # Handle pip packages (2 layers per occurrence)
            req_fname = 'requirements_{}_{}.txt'.format(
                pkg_type, pip_req_cnt[pkg_type] + 1)
            pip_req_file = join(workspace_dir, req_fname)
            if make_pip_requirements(packages, pip_req_file):
                # Upgrade pip itself only once per pip flavor.
                upgrade_pip = (
                    '{pip} install --no-cache-dir --upgrade pip && '
                    .format(pip=pkg_type)
                    if pip_req_cnt[pkg_type] == 0 else '')
                dockerfile.extend([
                    'COPY {} /usr/src/\n'.format(req_fname),
                    'RUN {upgrade_pip}'
                    '{pip} install --no-cache-dir -r /usr/src/{reqs}\n'
                    .format(upgrade_pip=upgrade_pip, pip=pkg_type,
                            reqs=req_fname)
                ])
                pip_req_cnt[pkg_type] += 1
        elif pkg_type == 'npm-global':
            # Handle npm global packages (1 layer per occurrence)
            install_npm(packages, True)
        elif pkg_type == 'npm-local':
            # Handle npm local packages (1 layer per occurrence)
            install_npm(packages, False)
        elif pkg_type == 'gem':
            # Handle gem (ruby) packages (1 layer per occurrence)
            dockerfile.append(
                'RUN gem install {}\n'.format(' '.join(packages)))
    if work_dir:
        dockerfile.append('WORKDIR {}\n'.format(work_dir))
    if num_linked > 0:
        dockerfile.append('COPY src /usr/src\n')
    # Add labels (one layer)
    if labels:
        check_label_overrides(set(labels.keys()), 'the target')
        effective_labels.update(labels)
    if effective_labels:
        dockerfile.append(
            'LABEL {}\n'.format(
                ' '.join('"{}"="{}"'.format(key, value)
                         for key, value in sorted(effective_labels.items()))))

    def format_docker_cmd(docker_cmd):
        """Quote each element for Dockerfile JSON-array (exec) form."""
        return ('"{}"'.format(cmd) for cmd in docker_cmd)

    if run_user:
        dockerfile.append('USER {}\n'.format(run_user))
    # Add ENTRYPOINT (one layer)
    if entrypoint:
        # TODO(itamar): Consider adding tini as entrypoint also if given
        # Docker CMD without a Docker ENTRYPOINT?
        entrypoint[0] = PurePath(entrypoint[0]).as_posix()
        if full_path_cmd:
            entrypoint[0] = (PurePath('/usr/src/app') /
                             entrypoint[0]).as_posix()
        if build_context.conf.with_tini_entrypoint:
            entrypoint = ['tini', '--'] + entrypoint
        dockerfile.append(
            'ENTRYPOINT [{}]\n'.format(
                ', '.join(format_docker_cmd(entrypoint))))
    # Add CMD (one layer)
    if cmd:
        cmd[0] = PurePath(cmd[0]).as_posix()
        if full_path_cmd:
            cmd[0] = (PurePath('/usr/src/app') / cmd[0]).as_posix()
        dockerfile.append(
            'CMD [{}]\n'.format(', '.join(format_docker_cmd(cmd))))
    # TODO(itamar): write only if changed?
    with open(dockerfile_path, 'w') as dockerfile_f:
        dockerfile_f.writelines(dockerfile)
    docker_build_cmd = ['docker', 'build']
    if build_context.conf.no_docker_cache:
        docker_build_cmd.append('--no-cache')
    docker_build_cmd.extend(['-t', docker_image, workspace_dir])
    logger.info('Building docker image "{}" using command {}',
                docker_image, docker_build_cmd)
    run(docker_build_cmd, check=True)
    # TODO(itamar): race condition here
    image_id = get_cached_image_id(docker_image)
    metadata = {
        'image_id': image_id,
        'images': [{
            'name': docker_image,
            'pushed': False,
        }],
    }
    icb = ImageCachingBehavior(name, tag, image_caching_behavior)
    if icb.push_image_after_build:
        tag_docker_image(image_id, icb.remote_image)
        push_docker_image(icb.remote_image, build_context.conf.docker_push_cmd)
        metadata['images'].append({
            'name': icb.remote_image,
            'pushed': True,
        })
    # Generate ybt_bin scripts
    if ybt_bin_path:
        if ybt_bin_path.startswith('//'):
            ybt_bin_path = join(build_context.conf.get_bin_path(),
                                ybt_bin_path[2:])
        # Make sure ybt_bin's are created only under bin_path
        assert (build_context.conf.get_bin_path() ==
                commonpath([build_context.conf.get_bin_path(), ybt_bin_path]))

        def format_docker_run_params(params: dict):
            """Render runtime params as a `docker run` options string."""
            param_strings = []
            if 'container_name' in params:
                param_strings.extend(['--name', params['container_name']])
            if params.get('interactive'):
                param_strings.append('-i')
            if params.get('term'):
                param_strings.append('-t')
            if params.get('rm'):
                param_strings.append('--rm')
            if params.get('daemonize'):
                param_strings.append('-d')
            if params.get('impersonate') and platform.system() == 'Linux':
                # Run as the invoking host user (Linux only), mapping user
                # databases read-only so the uid/gid resolve in-container.
                param_strings.extend([
                    '-u', '$( id -u ):$( id -g )',
                    '-v', '/etc/passwd:/etc/passwd:ro',
                    '-v', '/etc/group:/etc/group:ro'])
            for port in params['ports']:
                param_strings.extend(['-p', port])
            for volume in params['volumes']:
                param_strings.extend(['-v', volume])
            if params.get('work_dir'):
                param_strings.extend(['-w', params['work_dir']])
            for var, value in params['env'].items():
                param_strings.extend(['-e', '{}="{}"'.format(var, value)])
            return ' '.join(param_strings)

        with open(join(dirname(abspath(__file__)),
                       'ybtbin.sh.tmpl'), 'r') as tmpl_f:
            ybt_bin = tmpl_f.read().format(
                image_name=docker_image, image_id=image_id,
                docker_opts=format_docker_run_params(runtime_params),
                default_opts='$IT' if runtime_params.get('auto_it') else '')
        with open(ybt_bin_path, 'w') as ybt_bin_f:
            ybt_bin_f.write(ybt_bin)
        os.chmod(ybt_bin_path, 0o755)
        metadata['ybt_bin'] = ybt_bin_path
    return metadata
def add_stream_handler(logger, stream):
    """Attach a brace-style-formatting stream handler to `logger`.

    The handler writes to `stream` and formats records with str.format-style
    ('{') placeholders.
    """
    # Brace Formatter (see
    # https://docs.python.org/3.5/howto/logging-cookbook.html#use-of-alternative-formatting-styles)
    brace_formatter = logging.Formatter(
        '{asctime} {name:24s} {levelname:8s} {message}', style='{')
    stream_handler = logging.StreamHandler(stream=stream)
    stream_handler.setFormatter(brace_formatter)
    logger.addHandler(stream_handler)
def configure_logging(conf):
    """Initialize and configure logging."""
    # Level name comes from config (e.g. 'info' -> logging.INFO attribute).
    root_logger = logging.getLogger()
    root_logger.setLevel(getattr(logging, conf.loglevel.upper()))
    # Both handlers may be attached if both flags are set.
    if conf.logtostderr:
        add_stream_handler(root_logger, sys.stderr)
    if conf.logtostdout:
        add_stream_handler(root_logger, sys.stdout) | Initialize and configure logging. | entailment |
def iglob(pathname, *, recursive=False):
    """Return an iterator which yields the paths matching a pathname pattern.
    The pattern may contain simple shell-style wildcards a la
    fnmatch. However, unlike fnmatch, filenames starting with a
    dot are special cases that are not matched by '*' and '?'
    patterns.
    If recursive is true, the pattern '**' will match any files and
    zero or more directories and subdirectories.
    """
    it = _iglob(pathname, recursive)
    if recursive and _isrecursive(pathname):
        # A pattern that is entirely '**' yields a leading empty string;
        # consume (and sanity-check) it so callers only see real paths.
        s = next(it)  # skip empty string
        assert not s
    return it | Return an iterator which yields the paths matching a pathname pattern.
The pattern may contain simple shell-style wildcards a la
fnmatch. However, unlike fnmatch, filenames starting with a
dot are special cases that are not matched by '*' and '?'
patterns.
If recursive is true, the pattern '**' will match any files and
zero or more directories and subdirectories. | entailment |
def register_sig(self, builder_name: str, sig: list, docstring: str,
                 cachable: bool=True, attempts=1):
    """Register a builder signature & docstring for `builder_name`.
    The input for the builder signature is a list of "sig-spec"s
    representing the builder function arguments.
    Each sig-spec in the list can be:
    1. A string. This represents a simple untyped positional argument name,
       with no default value.
    2. A 1-tuple with one string element. Same as #1.
    3. A 2-tuple with ('arg-name', arg_type). This represents a typed
       positional argument, if arg_type is an instance of PropType enum.
    4. A 2-tuple with ('arg-name', default_value). This represents an
       un-typed keyword argument with a default value.
    5. A 3-tuple with ('arg-name', arg_type, default_value). This
       represents a typed keyword argument with a default value,
       if arg_type is an instance of PropType enum.
    In addition to the args specified in the `sig` list, there are several
    *injected* args:
    1. A positional arg `name` of type TargetName is always the first arg.
    2. A keyword arg `deps` of type TargetList and default value `None`
       (or empty list) is always the first after all builder args.
    3. A keyword arg `cachable` of type bool and default value taken from
       the signature registration call (`cachable` arg).
    4. A keyword arg `license` of type StrList and default value [].
    5. A keyword arg `policies` of type StrList and default value [].
    6. A keyword arg `packaging_params` of type dict and default value {}
       (empty dict).
    7. A keyword arg `runtime_params` of type dict and default value {}
       (empty dict).
    8. A keyword arg `build_params` of type dict and default value {}
       (empty dict).
    9. A keyword arg `attempts` of type int and default value 1.
    """
    # A signature may be registered at most once per builder.
    if self.sig is not None:
        raise KeyError('{} already registered a signature!'
                       .format(builder_name))
    # The injected `name` positional arg always comes first.
    self.sig = OrderedDict(name=ArgSpec(PropType.TargetName, Empty))
    self.docstring = docstring
    # Tracks whether we've entered the "has default value" section, to
    # enforce Python's positional-before-keyword argument ordering.
    kwargs_section = False
    for arg_spec in listify(sig):
        arg_name, sig_spec = evaluate_arg_spec(arg_spec)
        if arg_name in self.sig or arg_name in INJECTED_ARGS:
            raise SyntaxError(
                "duplicate argument '{}' in function definition"
                .format(arg_name))
        self.sig[arg_name] = sig_spec
        if sig_spec.default == Empty:
            if kwargs_section:
                # TODO(itamar): how to give syntax error source annotation?
                # (see: http://stackoverflow.com/questions/33717804)
                raise SyntaxError(
                    'non-default argument follows default argument')
            self.min_positional_args += 1
        else:
            kwargs_section = True
    # Injected keyword args, appended after all builder-specific args.
    self.sig['deps'] = ArgSpec(PropType.TargetList, None)
    self.sig['cachable'] = ArgSpec(PropType.bool, cachable)
    self.sig['license'] = ArgSpec(PropType.StrList, None)
    self.sig['policies'] = ArgSpec(PropType.StrList, None)
    self.sig['packaging_params'] = ArgSpec(PropType.dict, None)
    self.sig['runtime_params'] = ArgSpec(PropType.dict, None)
    self.sig['build_params'] = ArgSpec(PropType.dict, None)
    self.sig['attempts'] = ArgSpec(PropType.numeric, 1) | Register a builder signature & docstring for `builder_name`.
The input for the builder signature is a list of "sig-spec"s
representing the builder function arguments.
Each sig-spec in the list can be:
1. A string. This represents a simple untyped positional argument name,
with no default value.
2. A 1-tuple with one string element. Same as #1.
3. A 2-tuple with ('arg-name', arg_type). This represents a typed
positional argument, if arg_type is an instance of PropType enum.
4. A 2-tuple with ('arg-name', default_value). This represents an
un-typed keyword argument with a default value.
5. A 3-tuple with ('arg-name', arg_type, default_value). This
represents a typed keyword argument with a default value,
if arg_type is an instance of PropType enum.
In addition to the args specified in the `sig` list, there are several
*injected* args:
1. A positional arg `name` of type TargetName is always the first arg.
2. A keyword arg `deps` of type TargetList and default value `None`
(or empty list) is always the first after all builder args.
3. A keyword arg `cachable` of type bool and default value taken from
the signature registration call (`cachable` arg).
4. A keyword arg `license` of type StrList and default value [].
5. A keyword arg `policies` of type StrList and default value [].
6. A keyword arg `packaging_params` of type dict and default value {}
(empty dict).
7. A keyword arg `runtime_params` of type dict and default value {}
(empty dict).
8. A keyword arg `build_params` of type dict and default value {}
(empty dict).
9. A keyword arg `attempts` of type int and default value 1. | entailment |
def remove_builder(cls, builder_name: str):
    """Unregister the builder `builder_name` and all its hooks.

    No reason to use this except for tests.
    """
    # pop() with a default makes this idempotent for unknown builders.
    cls.builders.pop(builder_name, None)
    for per_builder_hooks in cls.hooks.values():
        per_builder_hooks.pop(builder_name, None)
def to_build_module(build_file_path: str, conf: Config) -> str:
    """Return a normalized build module name for `build_file_path`.

    The module name is the build file's directory, as a POSIX-style path
    relative to the project root ('' for the root module itself).
    """
    project_root = Path(conf.project_root)
    rel_dir = Path(build_file_path).resolve().relative_to(project_root).parent
    return rel_dir.as_posix().strip('.')
def cmd_version(unused_conf):
    """Print out version information about YABT and detected builders."""
    # Imported locally - presumably to avoid the pkg_resources import cost
    # on every other command.  # NOTE(review): assumption - confirm
    import pkg_resources
    print('This is {} version {}, imported from {}'
          .format(__oneliner__, __version__, __file__))
    if len(Plugin.builders) > 0:
        print('setuptools registered builders:')
        for entry_point in pkg_resources.iter_entry_points('yabt.builders'):
            print(' {0.module_name}.{0.name} (dist {0.dist})'.format(entry_point)) | Print out version information about YABT and detected builders. | entailment |
def cmd_list(unused_conf: Config):
    """Print out information on loaded builders and hooks."""
    # Sorted by builder name for stable, readable output.
    for name, builder in sorted(Plugin.builders.items()):
        if builder.func:
            print('+- {0:16s} implemented in {1.__module__}.{1.__name__}()'
                  .format(name, builder.func))
        else:
            # A builder can be registered without a builder function attached.
            print('+- {0:16s} loaded with no builder function'.format(name))
        for hook_name, hook_func in sorted(Plugin.get_hooks_for_builder(name)):
            print(' +- {0} hook implemented in '
                  '{1.__module__}.{1.__name__}()'
                  .format(hook_name, hook_func)) | Print out information on loaded builders and hooks. | entailment |
def cmd_build(conf: Config, run_tests: bool=False):
    """Build requested targets, and their dependencies."""
    build_context = BuildContext(conf)
    # Parse build files and resolve the requested targets + their deps.
    populate_targets_graph(build_context, conf)
    build_context.build_graph(run_tests=run_tests)
    # Record artifacts metadata after a successful graph build.
    build_context.write_artifacts_metadata() | Build requested targets, and their dependencies. | entailment |
def cmd_dot(conf: Config):
    """Print out a neat targets dependency tree based on requested targets.
    Use graphviz to render the dot file, e.g.:
    > ybt dot :foo :bar | dot -Tpng -o graph.png
    """
    build_context = BuildContext(conf)
    populate_targets_graph(build_context, conf)
    # Write dot output to stdout unless an output file was configured.
    if conf.output_dot_file is None:
        write_dot(build_context, conf, sys.stdout)
    else:
        with open(conf.output_dot_file, 'w') as out_file:
            write_dot(build_context, conf, out_file) | Print out a neat targets dependency tree based on requested targets.
Use graphviz to render the dot file, e.g.:
> ybt dot :foo :bar | dot -Tpng -o graph.png | entailment |
def cmd_tree(conf: Config):
    """Print out a neat targets dependency tree based on requested targets."""
    build_context = BuildContext(conf)
    populate_targets_graph(build_context, conf)

    def print_target_with_deps(target, depth=2):
        # Indentation depth visualizes the dependency nesting level.
        print('{: >{}}{}'.format('+-', depth, target.name))
        for dep in sorted(
                build_context.target_graph.neighbors(target.name)):
            print_target_with_deps(build_context.targets[dep], depth + 2)

    if conf.targets:
        for target_name in sorted(parse_target_selectors(conf.targets, conf)):
            mod, name = split(target_name)
            # '*' selects all targets in the module.
            if name == '*':
                # NOTE(review): this inner loop rebinds `target_name`,
                # shadowing the outer loop variable - works, but fragile.
                for target_name in sorted(
                        build_context.targets_by_module[mod]):
                    print_target_with_deps(build_context.targets[target_name])
            else:
                print_target_with_deps(build_context.targets[target_name])
    else:
        # No explicit targets requested - print every known target.
        for _, target in sorted(build_context.targets.items()):
            print_target_with_deps(target) | Print out a neat targets dependency tree based on requested targets. | entailment |
def main():
    """Main `ybt` console script entry point - run YABT from command-line."""
    conf = init_and_get_conf()
    logger = make_logger(__name__)
    logger.info('YaBT version {}', __version__)
    # Map sub-command name -> handler; some commands (version, list-builders)
    # don't require running inside a YABT project.
    handlers = {
        'build': YabtCommand(func=cmd_build, requires_project=True),
        'dot': YabtCommand(func=cmd_dot, requires_project=True),
        'test': YabtCommand(func=cmd_test, requires_project=True),
        'tree': YabtCommand(func=cmd_tree, requires_project=True),
        'version': YabtCommand(func=cmd_version, requires_project=False),
        'list-builders': YabtCommand(func=cmd_list, requires_project=False),
    }
    command = handlers[conf.cmd]
    if command.requires_project and not conf.in_yabt_project():
        fatal('Not a YABT project (or any of the parent directories): {}',
              BUILD_PROJ_FILE)
    try:
        command.func(conf)
    except Exception as ex:
        # Top-level boundary: turn any uncaught error into a fatal exit.
        fatal('{}', ex) | Main `ybt` console script entry point - run YABT from command-line. | entailment |
def cpp_app_builder(build_context, target):
    """Pack a C++ binary as a Docker image with its runtime dependencies.
    TODO(itamar): Dynamically analyze the binary and copy shared objects
    from its buildenv image to the runtime image, unless they're installed.
    """
    yprint(build_context.conf, 'Build CppApp', target)
    # `executable` (pre-existing path) and `main` (prog target) are two
    # mutually exclusive ways of specifying the app entrypoint.
    if target.props.executable and target.props.main:
        raise KeyError(
            '`main` and `executable` arguments are mutually exclusive')
    if target.props.executable:
        if target.props.executable not in target.artifacts.get(AT.app):
            target.artifacts.add(AT.app, target.props.executable)
        entrypoint = [target.props.executable]
    elif target.props.main:
        # Use the (single) binary artifact of the referenced prog target.
        prog = build_context.targets[target.props.main]
        binary = list(prog.artifacts.get(AT.binary).keys())[0]
        entrypoint = ['/usr/src/bin/' + binary]
    else:
        raise KeyError('Must specify either `main` or `executable` argument')
    build_app_docker_and_bin(
        build_context, target, entrypoint=entrypoint) | Pack a C++ binary as a Docker image with its runtime dependencies.
TODO(itamar): Dynamically analyze the binary and copy shared objects
from its buildenv image to the runtime image, unless they're installed. | entailment |
def make_pre_build_hook(extra_compiler_config_params):
    """Return a pre-build hook function for C++ builders.
    When called, during graph build, it computes and stores the compiler-config
    object on the target, as well as adding it to the internal_dict prop for
    hashing purposes.
    """
    def pre_build_hook(build_context, target):
        target.compiler_config = CompilerConfig(
            build_context, target, extra_compiler_config_params)
        # Mirror the config into _internal_dict_ so it participates in the
        # target hash (cache invalidation on compiler-config changes).
        target.props._internal_dict_['compiler_config'] = (
            target.compiler_config.as_dict())
    return pre_build_hook | Return a pre-build hook function for C++ builders.
When called, during graph build, it computes and stores the compiler-config
object on the target, as well as adding it to the internal_dict prop for
hashing purposes. | entailment |
def compile_cc(build_context, compiler_config, buildenv, sources,
               workspace_dir, buildenv_workspace, cmd_env):
    """Compile list of C++ source files in a buildenv image
    and return list of generated object file.
    """
    objects = []
    for src in sources:
        # foo/bar.cc -> foo/bar.o (path relative to the workspace)
        obj_rel_path = '{}.o'.format(splitext(src)[0])
        obj_file = join(buildenv_workspace, obj_rel_path)
        # The workspace itself is first on the include path, so generated
        # and linked headers are found before configured include dirs.
        include_paths = [buildenv_workspace] + compiler_config.include_path
        compile_cmd = (
            [compiler_config.compiler, '-o', obj_file, '-c'] +
            compiler_config.compile_flags +
            ['-I{}'.format(path) for path in include_paths] +
            [join(buildenv_workspace, src)])
        # TODO: capture and transform error messages from compiler so file
        # paths match host paths for smooth(er) editor / IDE integration
        build_context.run_in_buildenv(buildenv, compile_cmd, cmd_env)
        # Record object paths relative to the project root (host side).
        objects.append(
            join(relpath(workspace_dir, build_context.conf.project_root),
                 obj_rel_path))
    return objects | Compile list of C++ source files in a buildenv image
and return list of generated object file. | entailment |
def link_cpp_artifacts(build_context, target, workspace_dir,
                       include_objects: bool):
    """Link required artifacts from dependencies under target workspace dir.
    Return list of object files of dependencies (if `include_objects`).
    Includes:
    - Generated code from proto dependencies
    - Header files from all dependencies
    - Generated header files from all dependencies
    - If `include_objects` is True, also object files from all dependencies
      (these will be returned without linking)
    """
    # include the source & header files of the current target
    # add objects of all dependencies (direct & transitive), if needed
    source_files = target.props.sources + target.props.headers
    generated_srcs = {}
    objects = []
    # add headers of dependencies
    for dep in build_context.generate_all_deps(target):
        source_files.extend(dep.props.get('headers', []))
    link_files(source_files, workspace_dir, None, build_context.conf)
    # add generated headers and collect objects of dependencies
    for dep in build_context.generate_all_deps(target):
        dep.artifacts.link_types(workspace_dir, [AT.gen_h], build_context.conf)
        if include_objects:
            # Objects are collected (not linked) - the caller passes them
            # straight to the linker command line.
            objects.extend(dep.artifacts.get(AT.object).values())
    # add generated code from proto dependencies
    for proto_dep_name in target.props.protos:
        proto_dep = build_context.targets[proto_dep_name]
        proto_dep.artifacts.link_types(workspace_dir, [AT.gen_cc],
                                       build_context.conf)
    return objects | Link required artifacts from dependencies under target workspace dir.
Return list of object files of dependencies (if `include_objects`).
Includes:
- Generated code from proto dependencies
- Header files from all dependencies
- Generated header files from all dependencies
- If `include_objects` is True, also object files from all dependencies
(these will be returned without linking) | entailment |
def get_source_files(target, build_context) -> list:
    """Return list of source files for `target`.

    Includes the target's own sources, plus generated C++ sources from
    any proto dependencies of the target.
    """
    # Copy, so extending below doesn't mutate the target's props.
    all_sources = list(target.props.sources)
    for proto_dep_name in target.props.protos:
        proto_dep = build_context.targets[proto_dep_name]
        all_sources.extend(proto_dep.artifacts.get(AT.gen_cc).keys())
    return all_sources
def build_cpp(build_context, target, compiler_config, workspace_dir):
    """Compile and link a C++ binary for `target`.

    Rebuilds the workspace from scratch, compiles the target's sources
    (including generated proto sources), links them together with all
    dependency objects, and registers the resulting binary artifact.
    """
    # Start from a clean workspace to avoid stale artifacts.
    rmtree(workspace_dir)
    binary = join(*split(target.name))
    # Collect dependency objects while populating the workspace.
    objects = link_cpp_artifacts(build_context, target, workspace_dir, True)
    buildenv_workspace = build_context.conf.host_to_buildenv_path(
        workspace_dir)
    objects.extend(compile_cc(
        build_context, compiler_config, target.props.in_buildenv,
        get_source_files(target, build_context), workspace_dir,
        buildenv_workspace, target.props.cmd_env))
    bin_file = join(buildenv_workspace, binary)
    # Link all objects (own + dependencies) into the final executable.
    link_cmd = (
        [compiler_config.linker, '-o', bin_file] +
        objects + compiler_config.link_flags)
    build_context.run_in_buildenv(
        target.props.in_buildenv, link_cmd, target.props.cmd_env)
    target.artifacts.add(
        AT.binary,
        relpath(join(workspace_dir, binary), build_context.conf.project_root),
        binary)
def cpp_prog_builder(build_context, target):
    """Build a C++ binary executable for a CppProg `target`."""
    yprint(build_context.conf, 'Build CppProg', target)
    workspace_dir = build_context.get_workspace('CppProg', target.name)
    build_cpp(build_context, target, target.compiler_config, workspace_dir)
def cpp_lib_builder(build_context, target):
    """Build C++ object files for a CppLib `target`.

    Compiles (but does not link) the target's sources, and registers each
    resulting object file as an artifact.
    """
    yprint(build_context.conf, 'Build CppLib', target)
    workspace_dir = build_context.get_workspace('CppLib', target.name)
    workspace_src_dir = join(workspace_dir, 'src')
    # Start from a clean source workspace to avoid stale files.
    rmtree(workspace_src_dir)
    link_cpp_artifacts(build_context, target, workspace_src_dir, False)
    buildenv_workspace = build_context.conf.host_to_buildenv_path(
        workspace_src_dir)
    objects = compile_cc(
        build_context, target.compiler_config, target.props.in_buildenv,
        get_source_files(target, build_context), workspace_src_dir,
        buildenv_workspace, target.props.cmd_env)
    for obj_file in objects:
        target.artifacts.add(AT.object, obj_file)
def get(self, param, config, target, fallback):
    """Return the value of `param`, according to priority / expansion.

    First priority - the target itself.
    Second priority - the project config.
    Third priority - a global default ("fallback").

    In list-params, a '$*' term is processed as an "expansion term",
    meaning it is replaced with all terms from the config-level.
    """
    target_val = target.props.get(param)
    config_val = config.get(param, fallback)
    # Falsy target value (missing / empty) defers to config / fallback.
    if not target_val:
        return config_val
    if isinstance(target_val, list):
        val = []
        for el in target_val:
            if el == '$*':
                # Expansion term: splice in the config-level value.
                val.extend(listify(config_val))
            else:
                val.append(el)
        return val
    return target_val
def standard_licenses_only(build_context, target) -> str:
    """A policy function for allowing specifying only known licenses.

    Return error message (string) if policy for `target` is violated,
    otherwise return `None`.

    To apply in project, include this function in the list returned by the
    `get_policies` function implemented in the project `YSettings` file.
    See example in tests/errors.
    """
    for license_name in target.props.license:
        if license_name not in KNOWN_LICENSES:
            # TODO: include suggestion for similar known license
            return 'Unknown license: {}'.format(license_name)
    return None
def whitelist_licenses_policy(policy_name: str, allowed_licenses: set):
    """A policy factory for making license-based whitelist policies.

    To apply in project, include the function returned from this factory
    in the list returned by the `get_policies` function implemented in the
    project `YSettings` file.

    The factory returns a policy function named
    `whitelist_{policy_name}_licenses` that applies to targets with
    `policy_name` in their policies list.
    The returned policy asserts that all licenses contained in the target
    (including through explicit & implicit dependencies) are in the whitelist
    defined by `allowed_licenses`.
    See example in tests/errors.
    """
    def policy_func(build_context, target):
        """whitelist_{policy_name}_licenses policy function.

        Return error message (string) if policy for `target` is violated,
        otherwise return `None`.
        """
        if policy_name in target.props.policies:
            # Collect licenses of the target & all of its dependencies.
            licenses = set(target.props.license)
            for dep in build_context.generate_all_deps(target):
                licenses.update(dep.props.license)
            # Anything left after removing the whitelist is a violation.
            licenses.difference_update(allowed_licenses)
            if licenses:
                return 'Invalid licenses for {} policy: {}'.format(
                    policy_name, ', '.join(sorted(licenses)))
        return None
    policy_func.__name__ = 'whitelist_{}_licenses'.format(policy_name)
    return policy_func
def make_parser(project_config_file: str) -> configargparse.ArgumentParser:
    """Return the argument parser.

    :param project_config_file: Absolute path to project-specific config file.

    If cached parser already exists - return it immediately.
    Otherwise, initialize a new `ConfigArgParser` that is able to take default
    values from a hierarchy of config files and environment variables, as well
    as standard ArgParse command-line parsing behavior.

    We take default values from configuration files:
    - System-wide (see code for location)
    - User-level overrides (see code for location, hopefully under home dir)
    - If a project-specific config file is available, it will override both
      of the above.

    Environment variables will override all configuration files.
    For an option `--foo-bar`, if an environment variable named `YBT_FOO_VAR`
    exists, the option value will be taken from there.

    Of course, options specified directly on the command-line always win.
    """
    global PARSER  # pylint: disable=global-statement
    if PARSER is None:
        # Later files in this list override earlier ones.
        config_files = ['/etc/yabt.conf', '~/.yconfig']
        if project_config_file:
            config_files.append(project_config_file)
        PARSER = configargparse.getArgumentParser(
            # Support loading default values from system-wide or
            # user-specific config files (user-level overrides system-wide)
            default_config_files=config_files,
            # Show default values in help message
            formatter_class=configargparse.DefaultsFormatter,
            auto_env_var_prefix='ybt_',
            args_for_setting_config_path=['--config'],
            args_for_writing_out_config_file=['--write-out-config-file'])
        # PARSER.add('--config', is_config_file=True, help='Config file path')
        PARSER.add('--artifacts-metadata-file',
                   help='Output file to write artifacts metadata to')
        PARSER.add('--continue-after-fail', default=False, action='store_true',
                   help='If a target fails continue independent targets')
        PARSER.add('--bin-output-dir', default='ybt_bin')
        PARSER.add('--build-file-name', default='YBuild')
        PARSER.add('--build-base-images', action='store_true')
        PARSER.add('--builders-workspace-dir', default='yabtwork')
        PARSER.add('--default-target-name', default='@default')
        PARSER.add('--docker-pull-cmd', default='docker pull',
                   help='Command to use for pulling images from registries')
        PARSER.add('--docker-push-cmd', default='docker push',
                   help='Command to use for pushing images to registries')
        PARSER.add('--docker-volume',
                   help='Use the specified docker volume as buildenv /project')
        PARSER.add('-f', '--flavor', help='Choose build flavor (AKA profile)')
        PARSER.add('--force-pull', action='store_true')
        PARSER.add('-j', '--jobs', type=int, default=1)
        # TODO(itamar): support auto-detection of interactivity-mode
        PARSER.add('--non-interactive', action='store_true')
        PARSER.add('--offline', action='store_true')
        PARSER.add('--output-dot-file', default=None,
                   help='Output file for dot graph (default: stdin)')
        # TODO(itamar): this flag should come from the builder, not from here
        PARSER.add('--push', action='store_true')
        PARSER.add('--scm-provider')
        PARSER.add('--no-build-cache', action='store_true',
                   help='Disable YBT build cache')
        PARSER.add('--no-docker-cache', action='store_true',
                   help='Disable YBT Docker cache')
        PARSER.add('--no-policies', action='store_true')
        PARSER.add('--no-test-cache', action='store_true',
                   help='Disable YBT test cache')
        PARSER.add('--test-attempts', type=int, default=1)
        PARSER.add('-v', '--verbose', action='store_true',
                   help='More verbose output to STDOUT')
        PARSER.add('--with-tini-entrypoint', action='store_true')
        # Logging flags
        PARSER.add('--logtostderr', action='store_true',
                   help='Whether to log to STDERR')
        PARSER.add('--logtostdout', action='store_true',
                   help='Whether to log to STDOUT')
        PARSER.add('--loglevel', default='INFO', choices=LOG_LEVELS_CHOICES,
                   help='Log level threshold')
        PARSER.add('--show-buildenv-deps', type=bool, default=False,
                   help='When running dot, if set to True then the buildenv '
                        'targets are printed to the graph too')
        # Positional arguments: the command and its targets.
        PARSER.add('cmd', choices=['build', 'dot', 'test', 'tree', 'version'])
        PARSER.add('targets', nargs='*')
    return PARSER
def find_project_config_file(project_root: str) -> str:
    """Return absolute path to project-specific config file, if it exists.

    :param project_root: Absolute path to project root directory.

    A project config file is a file named `YCONFIG_FILE` found at the top
    level of the project root dir.

    Return `None` if project root dir is not specified,
    or if no such file is found.
    """
    if project_root:
        project_config_file = os.path.join(project_root, YCONFIG_FILE)
        if os.path.isfile(project_config_file):
            return project_config_file
    return None
def get_user_settings_module(project_root: str):
    """Return project-specific user settings module, if it exists.

    :param project_root: Absolute path to project root directory.

    A project settings file is a file named `YSETTINGS_FILE` found at the top
    level of the project root dir.

    Return `None` if project root dir is not specified,
    or if no such file is found.
    Raise an exception if a file is found, but not importable.

    The YSettings file can define 2 special module-level functions that
    interact with the YABT CLI & config system:
    1. `extend_cli`, if defined, takes the YABT `parser` object and may extend
       it, to add custom command-line flags for the project.
       (careful not to collide with YABT flags...)
    2. `extend_config`, if defined, takes the YABT `config` object and the
       parsed `args` object (returned by the the parser), and may extend the
       config - should be used to reflect custom project CLI flags in the
       config object.

    Beyond that, the settings module is available in YBuild's under
    `conf.settings` (except for the 2 special functions that are removed).
    """
    if project_root:
        project_settings_file = os.path.join(project_root, YSETTINGS_FILE)
        if os.path.isfile(project_settings_file):
            settings_loader = SourceFileLoader(
                'settings', project_settings_file)
            # NOTE(review): load_module() is deprecated in modern Python in
            # favor of exec_module(); kept as-is to preserve behavior.
            return settings_loader.load_module()
    return None
def call_user_func(settings_module, func_name, *args, **kwargs):
    """Call a user-supplied settings function and clean it up afterwards.

    `settings_module` may be None, or the function may not exist - in
    either case, return `None` without doing anything.
    If the function exists, it is called with the specified *args and
    **kwargs, and the result is returned; the function attribute is
    removed from the module afterwards (even if the call raises).
    """
    if settings_module:
        if hasattr(settings_module, func_name):
            func = getattr(settings_module, func_name)
            try:
                return func(*args, **kwargs)
            finally:
                # cleanup user function so it isn't visible via
                # conf.settings later
                delattr(settings_module, func_name)
    return None
def get_build_flavor(settings_module, args):
    """Update the flavor arg based on the settings API.

    If a flavor was given on the command line, validate it against the
    project's known flavors. Otherwise, take the project default flavor
    (if defined) and validate that.

    :raises ValueError: If the requested (or default) flavor is not one
        of the project's known flavors.
    """
    known_flavors = listify(call_user_func(settings_module, 'known_flavors'))
    if args.flavor:
        if args.flavor not in known_flavors:
            raise ValueError('Unknown build flavor: {}'.format(args.flavor))
    else:
        args.flavor = call_user_func(settings_module, 'default_flavor')
        if args.flavor and args.flavor not in known_flavors:
            raise ValueError(
                'Unknown default build flavor: {}'.format(args.flavor))
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.