code
stringlengths
51
2.34k
docstring
stringlengths
11
171
def sendCommand(self, **msg):
    """Send a raw command to the Slack server, attaching an auto-generated id.

    Requires a 'type' key in the message; returns the id assigned to it.
    """
    assert 'type' in msg, 'Message type is required.'
    message_id = self.next_message_id
    msg['id'] = message_id
    # advance the counter, wrapping back to 1 before it reaches maxint
    self.next_message_id = 1 if message_id + 1 >= maxint else message_id + 1
    self.sendMessage(json.dumps(msg))
    return message_id
Sends a raw command to the Slack server, generating a message ID automatically.
def save(self, filename='classifier.dump'):
    """Pickle the classifier and dump it into ``filename``.

    Fix: the file is now opened in binary mode ('wb'); pickle requires a
    binary stream on Python 3, where the previous 'w+' text handle failed.
    The with-statement guarantees the handle is closed even on error.
    """
    with open(filename, 'wb') as ofile:
        pickle.dump(self.classifier, ofile)
Pickles the classifier and dumps it into a file
def export_data(self): def export_field(value): try: return value.export_data() except AttributeError: return value if self.__modified_data__ is not None: return [export_field(value) for value in self.__modified_data__] return [...
Retrieves the data in a jsoned form
def _check_copy_number_changes(svtype, cn, minor_cn, data): if svtype == "LOH" and minor_cn == 0: return svtype elif svtype == "amplification" and cn > dd.get_ploidy(data): return svtype else: return "std"
Check if copy number changes match the expected svtype.
def read_resource_list(self, uri): self.logger.info("Reading resource list %s" % (uri)) try: resource_list = ResourceList(allow_multifile=self.allow_multifile, mapper=self.mapper) resource_list.read(uri=uri) except Exception as e: ...
Read resource list from specified URI else raise exception.
def _starting_consonants_only(self, letters: list) -> list: for idx, letter in enumerate(letters): if not self._contains_vowels(letter) and self._contains_consonants(letter): return [idx] if self._contains_vowels(letter): return [] if self._con...
Return a list of starting consonant positions.
def login(): " View function which handles an authentication request. " form = LoginForm(request.form) if form.validate_on_submit(): user = User.query.filter_by(email=form.email.data).first() if user and user.check_password(form.password.data): users.login(user) flash...
View function which handles an authentication request.
def manage_initial_service_status_brok(self, b): host_name = b.data['host_name'] service_description = b.data['service_description'] service_id = host_name+"/"+service_description logger.debug("got initial service status: %s", service_id) if host_name not in self.hosts_cache: ...
Prepare the known services cache
async def viewreaction(self, ctx, *, reactor : str): data = self.config.get(ctx.message.server.id, {}) keyword = data.get(reactor, {}) if not keyword: await self.bot.responses.failure(message="Reaction '{}' was not found.".format(reactor)) return response = data.g...
Views a specific reaction
def _get_help_for_estimator(prefix, estimator, defaults=None): from numpydoc.docscrape import ClassDoc defaults = defaults or {} estimator = _extract_estimator_cls(estimator) yield "<{}> options:".format(estimator.__name__) doc = ClassDoc(estimator) yield from _get_help_for_params( doc['...
Yield help lines for the given estimator and prefix.
def clip_rect(self, x: float, y: float, w: float, h: float) -> None:
    """Clip further output to the rectangle at (x, y) with size (w, h).

    No-op stub — presumably concrete backends override this; confirm.
    """
    pass
Clip further output to this rect.
def reset(self): self._frame_counter = 0 ob_real = self.real_env.reset() self.sim_env.add_to_initial_stack(ob_real) for _ in range(3): ob_real, _, _, _ = self.real_env.step(self.name_to_action_num["NOOP"]) self.sim_env.add_to_initial_stack(ob_real) ob_sim = self.sim_env.reset() asser...
Reset simulated and real environments.
def with_legacy_dict(self, legacy_dict_object):
    """Deprecated: load a Lexicon 2.x-style config dict as a config source."""
    message = ('Legacy configuration object has been used '
               'to load the ConfigResolver.')
    warnings.warn(DeprecationWarning(message))
    source = LegacyDictConfigSource(legacy_dict_object)
    return self.with_config_source(source)
Configure a source that consumes the dict format that was used on Lexicon 2.x
def arbiters(self):
    """Return one descriptor dict per member in state 7 (arbiters)."""
    descriptors = []
    for member in self.get_members_in_state(7):
        descriptors.append({
            "_id": self.host2id(member),
            "host": member,
            "server_id": self._servers.host_to_server_id(member),
        })
    return descriptors
return list of arbiters
def validate(ctx, schema, all_schemata): database = ctx.obj['db'] if schema is None: if all_schemata is False: log('No schema given. Read the help', lvl=warn) return else: schemata = database.objectmodels.keys() else: schemata = [schema] for sc...
Validates all objects or all objects of a given schema.
def favorites_add(photo_id):
    """Add the photo with ``photo_id`` to the authenticated user's favorites."""
    _dopost('flickr.favorites.add', auth=True, photo_id=photo_id)
    return True
Add a photo to the user's favorites.
def store_oath_entry(args, nonce, aead, oath_c): data = {"key": args.uid, "aead": aead.data.encode('hex'), "nonce": nonce.encode('hex'), "key_handle": args.key_handle, "oath_C": oath_c, "oath_T": None, } entry = ValOathEntry(data) db = ...
Store the AEAD in the database.
def status(self): response = requests.get( "https://tccna.honeywell.com/WebAPI/emea/api/v1/" "location/%s/status?includeTemperatureControlSystems=True" % self.locationId, headers=self.client._headers() ) response.raise_for_status() data = r...
Retrieves the location status.
def add_restriction(self, subject, predicate, object_): if type(object_) != rdflib.URIRef: object_ = self.check_thing(object_) if type(predicate) != rdflib.URIRef: predicate = self.check_thing(predicate) if type(subject) != infixowl.Class: if type(subject) != ...
Lift normal triples into restrictions using someValuesFrom.
def tokenize_list(self, text):
    """Split *text* into separate word tokens, one per analyzer record."""
    return list(map(self.get_record_token, self.analyze(text)))
Split a text into separate words.
def strip_accents(s):
    """Strip combining accent marks to prepare for slugification.

    Fix: the original called the Python 2-only ``unicode`` builtin, which
    raises NameError on Python 3; ``str`` is the correct equivalent.
    """
    # NFKD decomposition separates base characters from combining marks
    nfkd = unicodedata.normalize('NFKD', str(s))
    return u''.join(ch for ch in nfkd if not unicodedata.combining(ch))
Strip accents to prepare for slugification.
def check_params(features_spec, groups_spec, num_bins, edge_range_spec, trim_outliers, trim_percentile): if isinstance(features_spec, str) and isinstance(groups_spec, str): features, groups = read_features_and_groups(features_spec, groups_spec) else: features, groups = features_spec, groups_spec...
Necessary check on values, ranges, and types.
def _initStormLibs(self):
    """Register the built-in Storm libraries (str and time)."""
    builtins = (
        ('str', s_stormtypes.LibStr),
        ('time', s_stormtypes.LibTime),
    )
    for name, ctor in builtins:
        self.addStormLib((name,), ctor)
Registration for built-in Storm Libraries
def alias_field(model, field):
    """Return the '<Model>-<leaf>' prefix name for a lookup-separated field."""
    *relations, leaf = field.split(LOOKUP_SEP)
    # walk every intermediate relation to reach the final model
    for relation in relations:
        model = associate_model(model, relation)
    return model.__name__ + "-" + leaf
Return the prefix name of a field
def save_to_file_object(self, fd, format=None, **kwargs):
    """Save this object to file-like *fd* in *format* (default 'pickle').

    Dispatches to a ``save_<format>`` method on self; raises ValueError
    when no such method exists.
    """
    if format is None:
        format = 'pickle'
    saver = getattr(self, "save_%s" % format, None)
    if saver is None:
        raise ValueError("Unknown format '%s'." % format)
    saver(fd, **kwargs)
Save the object to a given file like object in the given format.
def _get_range(self, endpoint_name):
    """Fetch the named endpoint and build a Range from its JSON payload.

    Returns None when the HTTP response is falsy (error status).
    """
    endpoint = self._endpoints.get(endpoint_name)
    response = self.session.get(self.build_url(endpoint))
    if not response:
        return None
    payload = response.json()
    return self.range_constructor(parent=self, **{self._cloud_data_key: payload})
Returns a Range based on the endpoint name
def build(ctx, builder="html", options=""): sourcedir = ctx.config.sphinx.sourcedir destdir = Path(ctx.config.sphinx.destdir or "build")/builder destdir = destdir.abspath() with cd(sourcedir): destdir_relative = Path(".").relpathto(destdir) command = "sphinx-build {opts} -b {builder} {so...
Build docs with sphinx-build
def shuffle_characters(s):
    """Return a new string with the characters of *s* in random order."""
    chars = list(s)
    random.shuffle(chars)
    return ''.join(chars)
Randomly shuffle the characters in a string
def convert_bam_to_fastq(in_file, work_dir, data, dirs, config):
    """Convert a BAM input file into FASTQ files.

    Thin wrapper around ``alignprep.prep_fastq_inputs``; work_dir, dirs and
    config are unused here but kept so existing callers keep working.
    """
    return alignprep.prep_fastq_inputs([in_file], data)
Convert BAM input file into FASTQ files.
def reactivate(credit_card_id: str) -> None:
    """Reactivate the credit card with the given primary key.

    The lookup, state change and save run inside one atomic transaction.
    """
    logger.info('reactivating-credit-card', credit_card_id=credit_card_id)
    with transaction.atomic():
        card = CreditCard.objects.get(pk=credit_card_id)
        card.reactivate()
        card.save()
Reactivates a credit card.
def get(self, key, default=None):
    """Return the value stored under *key*, or *default* when absent."""
    if key not in self:
        return default
    # double indirection: per-type index maps key -> items_dict slot
    return self.items_dict[self.__dict__[str(type(key))][key]]
Return the value at index specified as key.
def clean_honeypot(self):
    """Validate that nothing was entered into the honeypot field."""
    value = self.cleaned_data["honeypot"]
    if not value:
        return value
    # a non-empty honeypot indicates an automated submission
    raise forms.ValidationError(self.fields["honeypot"].label)
Check that nothing's been entered into the honeypot.
def with_category(category: str) -> Callable:
    """Build a decorator that tags a command function with *category*."""
    def decorator(func):
        categorize(func, category)
        return func
    return decorator
A decorator to apply a category to a command function.
def optgroups(self, name, value, attrs=None):
    """Prepend an empty option for clearable single-valued selects."""
    if not (self.is_required or self.allow_multiple_selected):
        # the blank choice lets the widget render a clearable empty state
        self.choices = list(chain([('', '')], self.choices))
    return super(Select2Mixin, self).optgroups(name, value, attrs=attrs)
Add empty option for clearable selects.
def _prompt_changer(attr, val):
    """Change the current prompt theme.

    attr and val are accepted for observer-callback compatibility but are
    not read; the theme comes from ``conf``.
    """
    # Recolor the plain interpreter prompt; swallow any theme failure.
    try:
        sys.ps1 = conf.color_theme.prompt(conf.prompt)
    except Exception:
        pass
    # Restyle IPython when present; NameError means get_ipython is absent.
    try:
        apply_ipython_style(get_ipython())
    except NameError:
        pass
Change the current prompt theme
def term_to_binary(term, compressed=False): data_uncompressed = _term_to_binary(term) if compressed is False: return b_chr(_TAG_VERSION) + data_uncompressed else: if compressed is True: compressed = 6 if compressed < 0 or compressed > 9: raise InputException('...
Encode Python types into Erlang terms in binary data
def create_room(self, payload):
    """Create a stream (room) via the pod API; return (status_code, response)."""
    result = self.__pod__.Streams.post_v2_room_create(payload=payload)
    response, status_code = result.result()
    self.logger.debug('%s: %s' % (status_code, response))
    return status_code, response
create a stream in a non-inclusive manner
def recordHostname(self, basedir): "Record my hostname in twistd.hostname, for user convenience" log.msg("recording hostname in twistd.hostname") filename = os.path.join(basedir, "twistd.hostname") try: hostname = os.uname()[1] except AttributeError: hostn...
Record my hostname in twistd.hostname, for user convenience
def parse_cdhit_clstr_file(lines): clusters = [] curr_cluster = [] for l in lines: if l.startswith('>Cluster'): if not curr_cluster: continue clusters.append(curr_cluster) curr_cluster = [] else: curr_cluster.append(clean_cluste...
Returns a list of list of sequence ids representing clusters
def __find_a_zero(self): row = -1 col = -1 i = 0 n = self.n done = False while not done: j = 0 while True: if (self.C[i][j] == 0) and \ (not self.row_covered[i]) and \ (not self.col_co...
Find the first uncovered element with value 0
def make_title(self, chan, cycle, stage, evt_type): cyc_str = None if cycle is not None: cyc_str = [str(c[2]) for c in cycle] cyc_str[0] = 'cycle ' + cyc_str[0] title = [' + '.join([str(x) for x in y]) for y in [chan, cyc_str, stage, evt_type] if y is not...
Make a title for plots, etc.
def apply_array_ufunc(func, *args, dask='forbidden'): if any(isinstance(arg, dask_array_type) for arg in args): if dask == 'forbidden': raise ValueError('apply_ufunc encountered a dask array on an ' 'argument, but handling for dask arrays has not ' ...
Apply a ndarray level function over ndarray objects.
def generate_url(self, name: str, **kwargs) -> str:
    """Generate a URL for route *name* using the configured urlmapper."""
    mapper = self.urlmapper
    return mapper.generate(name, **kwargs)
generate url with urlgenerator used by urldispatch
def drag_drop_cb(self, viewer, urls):
    """Forward drag-and-drop URIs to the shell's current channel.

    Returns True when the drop was handled, None when no channel exists.
    """
    channel = self.fv.get_current_channel()
    if channel is None:
        return
    self.fv.open_uris(urls, chname=channel.name, bulk_add=True)
    return True
Punt drag-drops to the ginga shell.
def _GetBindingNamespace(self): return (list(self.zeep_client.wsdl.bindings.itervalues())[0] .port_name.namespace)
Return a string with the namespace of the service binding in the WSDL.
def make_anchor_id(self):
    """Return a URL-anchor-safe id built from this comment's user and timestamp.

    Every character outside [a-zA-Z0-9_] is replaced with '_'.
    """
    raw = self.user + '_' + self.timestamp
    return re.sub('[^a-zA-Z0-9_]', '_', raw)
Return string to use as URL anchor for this comment.
def makeExecutable(fp):
    """Add the executable bits to the file at path *fp* (mode masked to 0o7777)."""
    new_mode = (os.stat(fp).st_mode | 0o555) & 0o7777
    setup_log.info("Adding executable bit to %s (mode is now %o)", fp, new_mode)
    os.chmod(fp, new_mode)
Adds the executable bit to the file at filepath `fp`
def _make_output(value, output_script, version=None):
    """Build a TxOut for the current network (DecredTxOut on decred chains)."""
    network = riemann.get_current_network_name()
    if 'decred' in network:
        return tx.DecredTxOut(
            value=value, version=version, output_script=output_script)
    return tx.TxOut(value=value, output_script=output_script)
byte-like, byte-like -> TxOut
def addRequest(self, service, *args): wrapper = RequestWrapper(self, '/%d' % self.request_number, service, *args) self.request_number += 1 self.requests.append(wrapper) if self.logger: self.logger.debug('Adding request %s%r', wrapper.service, args) return ...
Adds a request to be sent to the remoting gateway.
def doc_parser():
    """Return the top-level ambry argparse parser (for Sphinx documentation)."""
    description = ('Ambry {}. Management interface for ambry, libraries '
                   'and repositories. '.format(ambry._meta.__version__))
    return argparse.ArgumentParser(prog='ambry', description=description)
Utility function to allow getting the arguments for a single command, for Sphinx documentation
def doctor(ctx, client): click.secho('\n'.join(textwrap.wrap(DOCTOR_INFO)) + '\n', bold=True) from . import _checks is_ok = True for attr in _checks.__all__: is_ok &= getattr(_checks, attr)(client) if is_ok: click.secho('Everything seems to be ok.', fg='green') ctx.exit(0 if is_o...
Check your system and repository for potential problems.
def _detect_timezone_windows(): global win32timezone_to_en tzi = DTZI_c() kernel32 = ctypes.windll.kernel32 getter = kernel32.GetTimeZoneInformation getter = getattr(kernel32, "GetDynamicTimeZoneInformation", getter) _ = getter(ctypes.byref(tzi)) win32tz_key_name = tzi.key_name if not win32tz_key_name: ...
Detect timezone on the windows platform.
def find_next_comma(self, node, sub):
    """Find the comma after *sub* and append its position node to *node*.

    NOTE(review): calls a module-level helper that shares this method's
    name — the global, not the method, is what resolves here; confirm.
    """
    start = (sub.last_line, sub.last_col)
    first, last = find_next_comma(self.lcode, start)
    if first:
        node.op_pos.append(NodeWithPosition(last, first))
Find the comma after sub and add a NodeWithPosition to node
def cast_item(cls, key, value): schema_type = cls.schema.get(key) if schema_type is None: if cls.strict: raise TypeError(f'Invalid key {key!r}') elif not isinstance(value, schema_type): try: return schema_type(value) exc...
Cast schema item to the appropriate tag type.
def itermovieshash(self):
    """Yield every movie-hash key stored in the dbm-style database."""
    key = self._db.firstkey()
    # dbm traversal: nextkey(k) returns None after the last key
    while key is not None:
        yield key
        key = self._db.nextkey(key)
Iterate over movies hash stored in the database.
def process_url(self, page_num, page_size, url): params = dict() if page_num is not None: url = re.sub('page=\d+', '', url) params['page'] = page_num if page_size is not None: url = re.sub('per_page=\d+', '', url) params['per_page'] = page_size ...
When slicing, remove the per_page and page parameters and pass to requests in the params dict
def next_haab(month, jd): if jd < EPOCH: raise IndexError("Input day is before Mayan epoch.") hday, hmonth = to_haab(jd) if hmonth == month: days = 1 - hday else: count1 = _haab_count(hday, hmonth) count2 = _haab_count(1, month) days = (count2 - count1) % 365 ...
For a given haab month and a julian day count, find the next start of that month on or after the JDC
def visit_paragraph(self, node):
    """Warn when a reST paragraph contains a Markdown-style link."""
    match = re.search(r'\[[^\]]+\]\([^\)]+\)', node.rawsource)
    if match is None:
        return
    self.document.reporter.warning(
        '(rst) Link is formatted in Markdown style.', base_node=node)
Check syntax of reStructuredText.
def compile(self, name, folder=None, data=None): template_name = name.replace(os.sep, "") if folder is None: folder = "" full_name = os.path.join( folder.strip(os.sep), template_name) if data is None: data = {} try: self.te...
renders template_name + self.extension file with data using jinja
def convert_to_si(self):
    """Convert the stored values and the header unit to SI, in place."""
    converted, si_unit = self._header.data_type.to_si(
        self._values, self._header.unit)
    self._values = converted
    self._header._unit = si_unit
Convert the Data Collection to SI units.
def cancel(self):
    """Detach from the sensor (when observing) and cancel ioloop timeouts."""
    if self.OBSERVE_UPDATES:
        self.detach()
    # timeout cancellation must run on the ioloop thread
    self.ioloop.add_callback(self.cancel_timeouts)
Detach strategy from its sensor and cancel ioloop callbacks.
def save(self, filename):
    """Serialize this object's metadata to *filename* as UTF-8 XML."""
    xml_text = self.xml()
    with io.open(filename, 'w', encoding='utf-8') as handle:
        handle.write(xml_text)
Save metadata to XML file
def hashing_type(self, cluster='main'): if not self.config.has_section(cluster): raise SystemExit("Cluster '%s' not defined in %s" % (cluster, self.config_file)) hashing_type = 'carbon_ch' try: return self.config.get(cluster, 'hashing_type') ...
Hashing type of cluster.
def _isdictclass(obj):
    """Return True when *obj*'s class is one of the registered dict classes."""
    cls = getattr(obj, '__class__', None)
    if not cls:
        # preserve the original's falsy passthrough (returns cls itself)
        return cls
    return cls.__name__ in _dict_classes.get(cls.__module__, ())
Return True for known dict objects.
def _map_trajectory(self): self.trajectory_map = {} with open(self.filepath, 'r') as trajectory_file: with closing( mmap( trajectory_file.fileno(), 0, access=ACCESS_READ)) as mapped_file: progress = 0 ...
Return filepath as a class attribute
def item_after(self, item):
    """Return the item that follows *item*, or None when *item* is last."""
    successor = self._next_iter_for(item)
    if successor is None:
        return None
    return self._object_at_iter(successor)
The item after an item
def list(self, list_id):
    """Retrieve the given list for this user from the Kippt API as JSON."""
    url = "https://kippt.com/api/users/%s/lists/%s" % (self.id, list_id)
    response = requests.get(url, headers=self.kippt.header)
    return response.json()
Retrieve the list given for the user.
def sweep(crypto, private_key, to_address, fee=None, password=None, **modes): from moneywagon.tx import Transaction tx = Transaction(crypto, verbose=modes.get('verbose', False)) tx.add_inputs(private_key=private_key, password=password, **modes) tx.change_address = to_address tx.fee(fee) return t...
Move all funds by private key to another address.
def _get_tags_by_num(self): by_revision = operator.attrgetter('revision') tags = sorted(self.get_tags(), key=by_revision) revision_tags = itertools.groupby(tags, key=by_revision) def get_id(rev): return rev.split(':', 1)[0] return dict( (get_id(rev), [tr.tag for tr in tr_list]) for rev, tr_list in re...
Return a dictionary mapping revision number to tags for that number.
def post_save_update_cache(sender, instance, created, raw, **kwargs): if raw: return name = sender.__name__ if name in cached_model_names: delay_cache = getattr(instance, '_delay_cache', False) if not delay_cache: from .tasks import update_cache_for_instance u...
Update the cache when an instance is created or modified.
def gen_and(src1, src2, dst):
    """Return a REIL AND instruction combining src1 and src2 into dst.

    Both source operands must have the same size.
    """
    assert src1.size == src2.size
    return ReilBuilder.build(ReilMnemonic.AND, src1, src2, dst)
Return an AND instruction.
def _apply_filter(self, filters, candidates): if filters: filter_input = candidates for fetch_vector_filter in filters: filter_input = fetch_vector_filter.filter_vectors(filter_input) return filter_input else: return candidates
Apply vector filters if specified and return filtered list
def login(config, api_key=""): if not api_key: info_out( "If you don't have an API Key, go to:\n" "https://bugzilla.mozilla.org/userprefs.cgi?tab=apikey\n" ) api_key = getpass.getpass("API Key: ") url = urllib.parse.urljoin(config.bugzilla_url, "/rest/whoami") ...
Store your Bugzilla API Key
def _model_mask(self, wavelengths=None): if wavelengths is None: wavelengths = self.wavelengths wavelengths = np.array(wavelengths) mask = np.ones_like(wavelengths, dtype=bool) model_mask = self._configuration.get("masks", {}).get("model", []) logger.debug("Applying m...
Apply pre-defined model masks.
def package_ensure_apt(*packages): package = " ".join(packages) status = run("dpkg-query -W -f='${{Status}} ' {p}; true".format(p=package)) status = status.lower() if 'no packages found' in status or 'not-installed' in status: sudo("apt-get --yes install " + package) return False els...
Ensure apt packages are installed
def run_preassembly(): if request.method == 'OPTIONS': return {} response = request.body.read().decode('utf-8') body = json.loads(response) stmts_json = body.get('statements') stmts = stmts_from_json(stmts_json) scorer = body.get('scorer') return_toplevel = body.get('return_toplevel'...
Run preassembly on a list of INDRA Statements.
def cast(cls, c):
    """Convert *c* (Complete, Translation or Rotation) into a Complete.

    NOTE(review): any other type falls through and returns None — confirm
    callers expect that.
    """
    if isinstance(c, Complete):
        return c
    if isinstance(c, Translation):
        return Complete(np.identity(3, float), c.t)
    if isinstance(c, Rotation):
        return Complete(c.r, np.zeros(3, float))
Convert the first argument into a Complete object
def find_cookie(self): return_cookies = [] origin_domain = self.request_object.dest_addr for cookie in self.cookiejar: for cookie_morsals in cookie[0].values(): cover_domain = cookie_morsals['domain'] if cover_domain == '': if origi...
Find a list of all cookies for a given domain
def _parse_port_ranges(pool_str): ports = set() for range_str in pool_str.split(','): try: a, b = range_str.split('-', 1) start, end = int(a), int(b) except ValueError: log.error('Ignoring unparsable port range %r.', range_str) continue if ...
Given a 'N-P,X-Y' description of port ranges, return a set of ints.
def flip(self, reactions):
    """Toggle each listed reaction's membership in the flipped set."""
    for reaction in reactions:
        if reaction in self._flipped:
            self._flipped.discard(reaction)
        else:
            self._flipped.add(reaction)
Flip the specified reactions.
def status(self, *msg):
    """Print a status message under a yellow STATUS label."""
    self._msg(colors.yellow("STATUS"), *msg)
Prints a status message
def permission_required(action): def decorator(f): @wraps(f) def inner(community, *args, **kwargs): permission = current_permission_factory(community, action=action) if not permission.can(): abort(403) return f(community, *args, **kwargs) r...
Decorator to require permission.
def insertReadGroup(self, readGroup): statsJson = json.dumps(protocol.toJsonDict(readGroup.getStats())) experimentJson = json.dumps( protocol.toJsonDict(readGroup.getExperiment())) try: models.Readgroup.create( id=readGroup.getId(), readgro...
Inserts the specified readGroup into the DB.
def _get_interfaces(): v1_interfaces = INTERFACE[USB_BACKEND].get_all_connected_interfaces() v2_interfaces = INTERFACE[USB_BACKEND_V2].get_all_connected_interfaces() devices_in_both = [v1 for v1 in v1_interfaces for v2 in v2_interfaces if _get_unique_id(v1) == _get_unique_id(v2)] ...
Get the connected USB devices
def create(klass, account, **kwargs): params = {} params.update(kwargs) if 'media_ids' in params and isinstance(params['media_ids'], list): params['media_ids'] = ','.join(map(str, params['media_ids'])) resource = klass.TWEET_CREATE.format(account_id=account.id) respon...
Creates a "Promoted-Only" Tweet using the specialized Ads API end point.
def _get_buffer(self, index): if not 0 <= index < self.count: raise IndexError() size = struct.calcsize(self.format) buf = bytearray(size + 1) buf[0] = self.first_register + size * index return buf
Shared bounds checking and buffer creation.
def insert(self, table, columns, values, execute=True): cols, vals = get_col_val_str(columns) statement = "INSERT INTO {0} ({1}) VALUES ({2})".format(wrap(table), cols, vals) if execute: self._cursor.execute(statement, values) self._commit() self._printer('\tM...
Insert a single row into a table.
def _check_cats(cats, vtypes, df, prep, callers): out = [] for cat in cats: all_vals = [] for vtype in vtypes: vals, labels, maxval = _get_chart_info(df, vtype, cat, prep, callers) all_vals.extend(vals) if sum(all_vals) / float(len(all_vals)) > 2: out....
Only include categories in the final output if they have values.
def decode_cpu_id(self, cpuid):
    """Decode a colon-separated hex CPU id string into a tuple of ints.

    Fix: parse each field with ``int(field, 16)`` instead of
    ``eval('0x' + field)`` — eval executed arbitrary text from the input
    and broke on anything that was not a bare hex literal.
    """
    return tuple(int(field, 16) for field in cpuid.split(':'))
Decode the colon-separated CPU id string into a tuple of integers
def synchronise(func): def inner(request, *args): lock_id = '%s-%s-built-%s' % ( datetime.date.today(), func.__name__, ",".join([str(a) for a in args])) if cache.add(lock_id, 'true', LOCK_EXPIRE): result = func(request, *args) cache.set(lock_id, result...
If task already queued, running, or finished, don't restart.
def local_users(self):
    """Return an iterable of User objects for users found on the filesystem.

    NOTE(review): on Python 3 this is a lazy map object, not a list —
    confirm callers only iterate it once.
    """
    entries = os.listdir(self.userdata_location())
    userdirs = filter(self._is_user_directory, entries)
    return map(lambda userdir: user.User(self, int(userdir)), userdirs)
Returns an array of user ids for users on the filesystem
def _weighted_formula(form, weight_func):
    """Yield (element, count, weight) for each non-hydrogen formula element."""
    for element, count in form.items():
        if element == Atom.H:
            # hydrogen is excluded from the weighting
            continue
        yield element, count, weight_func(element)
Yield weight of each formula element.
def import_csv(csv_file, **kwargs):
    """Import data from *csv_file* and verify all required columns exist.

    Extra keyword arguments are forwarded to ``get_imported_data``;
    returns the imported records object.
    """
    records = get_imported_data(csv_file, **kwargs)
    _check_required_columns(csv_file, records.results)
    return records
Imports data and checks that all required columns are there.
def update_nodes_published(self):
    """Propagate this layer's is_published flag to all of its nodes.

    Does nothing for an unsaved layer (no primary key yet).
    """
    if not self.pk:
        return
    self.node_set.all().update(is_published=self.is_published)
publish or unpublish nodes of current layer
def function(self, new_function):
    """Set this monitor's function by posting the new state to the API."""
    payload = {'Monitor[Function]': new_function.value}
    self._client.change_state(self._monitor_url, payload)
Set the MonitorState of this Monitor.
def dotted(self):
    """Return this OID's dotted-decimal representation as a str."""
    obj = libcrypto.OBJ_nid2obj(self.nid)
    buf = create_string_buffer(256)
    libcrypto.OBJ_obj2txt(buf, 256, obj, 1)
    text = buf.value
    # Python 2 ctypes returns str directly; Python 3 yields bytes to decode
    return text if pyver == 2 else text.decode('ascii')
Returns the dotted-decimal representation
def remove_not_requested_analyses_view(portal):
    """Drop the 'analyses_not_requested' action from the AnalysisRequest type.

    NOTE(review): on Python 3 filter() is lazy — confirm _actions tolerates
    an iterator rather than a list.
    """
    logger.info("Removing 'Analyses not requested' view ...")
    ar_type = portal.portal_types.AnalysisRequest
    ar_type._actions = filter(
        lambda action: action.id != "analyses_not_requested",
        ar_type.listActions())
Remove the view 'Not requested analyses" from inside AR
def connectivity_matrix(cm): if cm.size == 0: return True if cm.ndim != 2: raise ValueError("Connectivity matrix must be 2-dimensional.") if cm.shape[0] != cm.shape[1]: raise ValueError("Connectivity matrix must be square.") if not np.all(np.logical_or(cm == 1, cm == 0)): ...
Validate the given connectivity matrix.
def getDctDescription(self):
    """Return a dict describing this Raba object (type, class, id, namespace)."""
    return {
        'type': RabaFields.RABA_FIELD_TYPE_IS_RABA_OBJECT,
        'className': self._rabaClass.__name__,
        'raba_id': self.raba_id,
        'raba_namespace': self._raba_namespace,
    }
returns a dict describing the object
def rsync(local_path, remote_path, exclude=None, extra_opts=None): if not local_path.endswith('/'): local_path += '/' exclude = exclude or [] exclude.extend(['*.egg-info', '*.pyc', '.git', '.gitignore', '.gitmodules', '/build/', '/dist/']) with hide('running'): run("m...
Helper to rsync submodules across