_id
stringlengths
2
7
title
stringlengths
1
88
partition
stringclasses
3 values
text
stringlengths
75
19.8k
language
stringclasses
1 value
meta_information
dict
q45600
Russound.close
train
def close(self):
    """
    Disconnect from the controller.

    Cancels the background I/O loop task and waits for the cancellation
    to be acknowledged before returning.
    """
    logger.info("Closing connection to %s:%s", self._host, self._port)
    ioloop_task = self._ioloop_future
    ioloop_task.cancel()
    try:
        yield from ioloop_task
    except asyncio.CancelledError:
        # Cancellation is the expected outcome here, not an error.
        pass
python
{ "resource": "" }
q45601
Russound.set_zone_variable
train
def set_zone_variable(self, zone_id, variable, value):
    """
    Set a zone variable to a new value.
    """
    cmd = 'SET %s.%s="%s"' % (zone_id.device_str(), variable, value)
    return self._send_cmd(cmd)
python
{ "resource": "" }
q45602
Russound.get_zone_variable
train
def get_zone_variable(self, zone_id, variable):
    """
    Retrieve the current value of a zone variable.

    If the variable is not found in the local cache then the value is
    requested from the controller.
    """
    try:
        return self._retrieve_cached_zone_variable(zone_id, variable)
    except UncachedVariable:
        cmd = "GET %s.%s" % (zone_id.device_str(), variable)
        return (yield from self._send_cmd(cmd))
python
{ "resource": "" }
q45603
Russound.get_cached_zone_variable
train
def get_cached_zone_variable(self, zone_id, variable, default=None):
    """
    Retrieve the current value of a zone variable from the cache, or
    return ``default`` if the variable is not present.
    """
    try:
        value = self._retrieve_cached_zone_variable(zone_id, variable)
    except UncachedVariable:
        return default
    return value
python
{ "resource": "" }
q45604
Russound.unwatch_zone
train
def unwatch_zone(self, zone_id):
    """
    Remove a zone from the watchlist.
    """
    self._watched_zones.remove(zone_id)
    cmd = "WATCH %s OFF" % (zone_id.device_str(), )
    return (yield from self._send_cmd(cmd))
python
{ "resource": "" }
q45605
Russound.send_zone_event
train
def send_zone_event(self, zone_id, event_name, *args):
    """
    Send an event to a zone.
    """
    arg_str = " ".join(str(x) for x in args)
    cmd = "EVENT %s!%s %s" % (zone_id.device_str(), event_name, arg_str)
    return (yield from self._send_cmd(cmd))
python
{ "resource": "" }
q45606
Russound.set_source_variable
train
def set_source_variable(self, source_id, variable, value):
    """
    Change the value of a source variable.
    """
    source_id = int(source_id)
    cmd = 'SET S[%d].%s="%s"' % (source_id, variable, value)
    return self._send_cmd(cmd)
python
{ "resource": "" }
q45607
Russound.get_source_variable
train
def get_source_variable(self, source_id, variable):
    """
    Get the current value of a source variable. If the variable is not
    in the cache it will be retrieved from the controller.
    """
    source_id = int(source_id)
    try:
        return self._retrieve_cached_source_variable(source_id, variable)
    except UncachedVariable:
        cmd = "GET S[%d].%s" % (source_id, variable)
        return (yield from self._send_cmd(cmd))
python
{ "resource": "" }
q45608
Russound.get_cached_source_variable
train
def get_cached_source_variable(self, source_id, variable, default=None):
    """
    Get the cached value of a source variable, or ``default`` when the
    variable has not been cached yet.
    """
    source_id = int(source_id)
    try:
        value = self._retrieve_cached_source_variable(source_id, variable)
    except UncachedVariable:
        value = default
    return value
python
{ "resource": "" }
q45609
Russound.watch_source
train
def watch_source(self, source_id):
    """
    Add a source to the watchlist.

    The WATCH command is sent to the controller first; the source id is
    recorded locally only once the command has been issued.
    """
    source_id = int(source_id)
    r = yield from self._send_cmd(
        "WATCH S[%d] ON" % (source_id, ))
    # BUG FIX: previously added to `self._watched_source` (singular),
    # while unwatch_source removes from `self._watched_sources` — the
    # plural attribute used consistently elsewhere (cf. _watched_zones).
    self._watched_sources.add(source_id)
    return r
python
{ "resource": "" }
q45610
Russound.unwatch_source
train
def unwatch_source(self, source_id):
    """
    Remove a source from the watchlist.
    """
    source_id = int(source_id)
    self._watched_sources.remove(source_id)
    cmd = "WATCH S[%d] OFF" % (source_id, )
    return (yield from self._send_cmd(cmd))
python
{ "resource": "" }
q45611
BaseContentNegotiatedView.set_renderers
train
def set_renderers(self, request=None, context=None, template_name=None, early=False):
    """
    Makes sure that the renderers attribute on the request is up to date.
    renderers_for_view keeps track of the view that is attempting to
    render the request, so that if the request has been delegated to
    another view we know to recalculate the applicable renderers. When
    called multiple times on the same view this will be very low-cost
    for subsequent calls.
    """
    # Fill in any missing parameters from the view's own state.
    request, context, template_name = self.get_render_params(request, context, template_name)
    # Fingerprint of everything renderer selection depends on.
    args = (self.conneg, context, template_name, self._default_format, self._force_fallback_format, self._format_override_parameter)
    # NOTE(review): the guard reads `renderers_for_args` but the value is
    # stored as `renderers_for_view` below, so this comparison may never
    # find a cached fingerprint — confirm whether that is intentional.
    if getattr(request, 'renderers_for_args', None) != args:
        fallback_formats = self._force_fallback_format or ()
        # Normalise a single fallback format to a one-element tuple.
        if not isinstance(fallback_formats, (list, tuple)):
            fallback_formats = (fallback_formats,)
        # Recompute the renderers applicable to this request.
        request.renderers = self.conneg.get_renderers(request=request,
                                                      context=context,
                                                      template_name=template_name,
                                                      accept_header=request.META.get('HTTP_ACCEPT'),
                                                      formats=self.format_override,
                                                      default_format=self._default_format,
                                                      fallback_formats=fallback_formats,
                                                      early=early)
        request.renderers_for_view = args
    if self._include_renderer_details_in_context:
        # Expose renderer metadata to the template context.
        self.context['renderers'] = [self.renderer_for_context(request, r) for r in self.conneg.renderers]
    return request.renderers
python
{ "resource": "" }
q45612
BaseContentNegotiatedView.join_template_name
train
def join_template_name(self, template_name, extension):
    """
    Append ``extension`` to a template name, or to each member of a
    list/tuple of template names.

    Returns ``None`` for ``None`` input, a tuple for list/tuple input,
    and a single string for string input; any other type is a
    programming error.
    """
    if template_name is None:
        return None
    if isinstance(template_name, (list, tuple)):
        return tuple('%s.%s' % (name, extension) for name in template_name)
    if isinstance(template_name, str_types):
        return '%s.%s' % (template_name, extension)
    raise AssertionError('template_name not of correct type: %r' % type(template_name))
python
{ "resource": "" }
q45613
AzureProvider.submit
train
def submit(self, command='sleep 1', blocksize=1, job_name="parsl.auto"):
    """Submit command to an Azure instance.

    Submit returns an ID that corresponds to the task that was just submitted.

    Parameters
    ----------
    command : str
        Command to be invoked on the remote side.
    blocksize : int
        Number of blocks requested.
    job_name : str
        Prefix for job name.

    Returns
    -------
    None or str
        If at capacity (no more can be provisioned), None is returned. Otherwise,
        an identifier for the job is returned.
    """
    # BUG FIX: the caller-supplied prefix was previously discarded and
    # replaced with the literal "parsl.auto"; keep it and append a
    # timestamp for uniqueness (default behaviour is unchanged).
    job_name = "{0}.{1}".format(job_name, time.time())
    # BUG FIX: forward the requested blocksize instead of hard-coding 1.
    [instance, *rest] = self.deployer.deploy(command=command, job_name=job_name, blocksize=blocksize)
    if not instance:
        logger.error("Failed to submit request to Azure")
        return None
    logger.debug("Started instance_id: {0}".format(instance.instance_id))
    # Map the provider-specific state name onto parsl's status vocabulary.
    state = translate_table.get(instance.state['Name'], "PENDING")
    self.resources[instance.instance_id] = {"job_id": instance.instance_id,
                                            "instance": instance,
                                            "status": state}
    return instance.instance_id
python
{ "resource": "" }
q45614
AzureProvider.cancel
train
def cancel(self, job_ids):
    """Cancel jobs specified by a list of job ids.

    Parameters
    ----------
    job_ids : list of str
        List of identifiers of jobs which should be canceled.

    Returns
    -------
    list of bool
        For each entry, True if the cancel operation is successful, otherwise False.
    """
    # BUG FIX: the original used `except e:` (a NameError at runtime) and
    # returned a single bool from inside the loop, cancelling at most one
    # job. Collect one status per job id, as the docstring promises.
    statuses = []
    for job_id in job_ids:
        try:
            self.deployer.destroy(self.resources.get(job_id))
            statuses.append(True)
        except Exception as e:
            logger.error("Failed to cancel {}".format(repr(job_id)))
            logger.error(e)
            statuses.append(False)
    return statuses
python
{ "resource": "" }
q45615
save_positions
train
def save_positions(post_data, queryset=None):
    """
    Function to update a queryset of position objects with a post data dict.

    :post_data: Typical post data dictionary like ``request.POST``, which
      contains the keys of the position inputs.

    :queryset: Queryset of the model ``ObjectPosition``.

    """
    # BUG FIX: `if not queryset` also triggered for an *empty* queryset,
    # silently widening the update to all ObjectPosition rows. Only fall
    # back to the full queryset when none was supplied at all.
    if queryset is None:
        queryset = ObjectPosition.objects.all()
    for key in post_data:
        if key.startswith('position-'):
            try:
                obj_id = int(key.replace('position-', ''))
            except ValueError:
                # Malformed key (e.g. 'position-abc') — skip it.
                continue
            queryset.filter(pk=obj_id).update(position=post_data[key])
python
{ "resource": "" }
q45616
order_by_position
train
def order_by_position(qs, reverse=False):
    """Template filter to return a position-ordered queryset."""
    if qs:
        # ATTENTION: Django creates an invalid sql statement if two related
        # models have both generic positions, so we cannot use
        # qs.order_by('generic_position__position')
        position = 'position'
        if reverse:
            position = '-' + position
        # Get content type of first queryset item; assumes all items in
        # qs share one model.
        c_type = ContentType.objects.get_for_model(qs[0])
        # Ensure every item has an ObjectPosition row before ordering.
        for obj in qs:
            ObjectPosition.objects.get_or_create(
                content_type=c_type, object_id=obj.pk)
        # Order via the ObjectPosition table and map back to the objects.
        return [
            o.content_object for o in ObjectPosition.objects.filter(
                content_type=c_type, object_id__in=qs).order_by(position)
        ]
    # Empty input is returned unchanged.
    return qs
python
{ "resource": "" }
q45617
position_input
train
def position_input(obj, visible=False):
    """Template tag to return an input field for the position of the object."""
    existing = obj.generic_position.all()
    if not existing:
        # Guarantee at least one ObjectPosition row exists for this object.
        ObjectPosition.objects.create(content_object=obj)
    # Re-query so a freshly created position is picked up.
    return {
        'obj': obj,
        'visible': visible,
        'object_position': obj.generic_position.all()[0],
    }
python
{ "resource": "" }
q45618
position_result_list
train
def position_result_list(change_list):
    """
    Returns a template which iters through the models and appends a new
    position column.
    """
    result = result_list(change_list)
    # Remove sortable attributes from the existing columns so only the
    # position column below is rendered as actively sorted.
    for x in range(0, len(result['result_headers'])):
        result['result_headers'][x]['sorted'] = False
        if result['result_headers'][x]['sortable']:
            result['result_headers'][x]['class_attrib'] = mark_safe(
                ' class="sortable"')
    # Append position <th> element
    result['result_headers'].append({
        'url_remove': '?o=',
        'sort_priority': 1,
        'sortable': True,
        'class_attrib': mark_safe(' class="sortable sorted ascending"'),
        'sorted': True,
        'text': 'position',
        'ascending': True,
        'url_primary': '?o=-1',
        'url_toggle': '?o=-1',
    })
    # Append the editable field to every result item
    for x in range(0, len(result['results'])):
        obj = change_list.result_list[x]
        # Get position object, creating it on demand so every row has one.
        c_type = ContentType.objects.get_for_model(obj)
        try:
            object_position = ObjectPosition.objects.get(
                content_type__pk=c_type.id, object_id=obj.id)
        except ObjectPosition.DoesNotExist:
            object_position = ObjectPosition.objects.create(content_object=obj)
        # Add the <td> with an admin-style text input named to match what
        # save_positions() parses ('position-<id>').
        html = ('<td><input class="vTextField" id="id_position-{0}"'
                ' maxlength="10" name="position-{0}" type="text"'
                ' value="{1}" /></td>').format(object_position.id,
                                               object_position.position)
        result['results'][x].append(mark_safe(html))
    return result
python
{ "resource": "" }
q45619
register_model_converter
train
def register_model_converter(model, name=None, field='pk', base=IntConverter, queryset=None):
    """
    Registers a custom path converter for a model.

    :param model: a Django model
    :param str name: name to register the converter as
    :param str field: name of the lookup field
    :param base: base path converter, either by name or as class
                 (optional, defaults to `django.urls.converter.IntConverter`)
    :param queryset: a custom querset to use (optional, defaults to
                     `model.objects.all()`)
    """
    if name is None:
        # Derive both names from the model itself.
        name = camel_to_snake(model.__name__)
        converter_name = '{}Converter'.format(model.__name__)
    else:
        converter_name = '{}Converter'.format(snake_to_camel(name))
    if isinstance(base, str):
        # A registered converter name was given instead of a class.
        base = get_converter(base).__class__
    attrs = {'model': model, 'field': field, 'queryset': queryset}
    converter_class = type(converter_name, (ModelConverterMixin, base,), attrs)
    register_converter(converter_class, name)
python
{ "resource": "" }
q45620
login
train
def login():
    " View function which handles an authentication request. "
    form = LoginForm(request.form)
    # make sure data are valid, but doesn't validate password is right
    if form.validate_on_submit():
        user = User.query.filter_by(email=form.email.data).first()
        # we use werkzeug to validate user's password
        if user and user.check_password(form.password.data):
            users.login(user)
            flash(_('Welcome %(user)s', user=user.username))
            return redirect(url_for('users.profile'))
        # Same message for unknown email and wrong password, so the form
        # does not reveal which accounts exist.
        flash(_('Wrong email or password'), 'error-message')
    # On failure (or a non-submit request) bounce back to the referrer or
    # the configured login view.
    return redirect(request.referrer or url_for(users._login_manager.login_view))
python
{ "resource": "" }
q45621
logout
train
def logout():
    " View function which handles a logout request. "
    users.logout()
    target = request.referrer or url_for(users._login_manager.login_view)
    return redirect(target)
python
{ "resource": "" }
q45622
register
train
def register():
    " Registration Form. "
    form = RegisterForm(request.form)
    if form.validate_on_submit():
        # create an user instance not yet stored in the database
        # NOTE(review): the raw form password is passed as ``pw_hash`` —
        # presumably the User model hashes it on assignment; confirm.
        user = User(
            username=form.username.data,
            email=form.email.data,
            pw_hash=form.password.data)
        # Insert the record in our database and commit it
        db.session.add(user)
        db.session.commit()
        # Log the new user straight in.
        users.login(user)
        # flash will display a message to the user
        flash(_('Thanks for registering'))
        # redirect user to the 'home' method of the user module.
        return redirect(url_for('users.profile'))
    return render_template("users/register.html", form=form)
python
{ "resource": "" }
q45623
Project.settings_dir
train
def settings_dir(self):
    """
    Directory that contains the settings for the project (``.dsb``),
    created on first access.
    """
    settings_path = os.path.join(self.dir, '.dsb')
    utils.create_dir(settings_path)
    return os.path.realpath(settings_path)
python
{ "resource": "" }
q45624
Project.read_settings
train
def read_settings(self):
    """
    Read the "dsbfile" file

    Populates `self.settings`
    """
    logger.debug('Reading settings from: %s', self.settings_path)
    # Parse the dsbfile into a Settings object and cache it on the project.
    self.settings = Settings.from_dsbfile(self.settings_path)
python
{ "resource": "" }
q45625
Project.setup_salt_ssh
train
def setup_salt_ssh(self):
    """
    Setup `salt-ssh`: copy state/pillar data, write the roster, then
    create the directory layout and master configuration, in that order.
    """
    steps = (
        self.copy_salt_and_pillar,
        self.create_roster_file,
        self.salt_ssh_create_dirs,
        self.salt_ssh_create_master_file,
    )
    for step in steps:
        step()
python
{ "resource": "" }
q45626
Project.salt_ssh_create_dirs
train
def salt_ssh_create_dirs(self):
    """
    Creates the `salt-ssh` required directory structure under the
    project settings directory.
    """
    logger.debug('Creating salt-ssh dirs into: %s', self.settings_dir)
    layout = (
        ('salt',),
        ('pillar',),
        ('etc', 'salt'),
        ('var', 'cache', 'salt'),
        ('var', 'log', 'salt'),
    )
    for parts in layout:
        utils.create_dir(os.path.join(self.settings_dir, *parts))
python
{ "resource": "" }
q45627
create
train
def create(*units):
    """Create this unit within the game as specified.

    Builds one DebugCommand per given sc2simulator.ScenarioUnit and
    returns them as a list.
    """
    commands = []
    for unit in units:
        px, py = unit.position[:2]
        # Forget any existing tag: the engine assigns one to the new unit.
        unit.tag = 0
        commands.append(DebugCommand(create_unit=DebugCreateUnit(
            unit_type=unit.code,
            owner=unit.owner,
            pos=Point2D(x=px, y=py),
            quantity=1,
        )))
    return commands
python
{ "resource": "" }
q45628
Markdown.rewrite_links
train
def rewrite_links(self, func):
    """
    Add a callback for rewriting links.

    The callback takes a single argument, the url, and returns a
    replacement url (or None to leave the link unchanged). It is invoked
    every time a ``[]()`` or ``<link>`` is processed. Usable as a
    decorator on the callback function.
    """
    @libmarkdown.e_url_callback
    def _callback(string, size, context):
        replacement = func(string[:size])
        if replacement is None:
            return
        buf = ctypes.create_string_buffer(replacement)
        # Keep a reference so the buffer outlives this C callback.
        self._alloc.append(buf)
        return ctypes.addressof(buf)
    self._rewrite_links_func = _callback
    return func
python
{ "resource": "" }
q45629
Markdown.link_attrs
train
def link_attrs(self, func):
    """
    Add a callback for adding attributes to links.

    The callback takes a single argument, the url, and returns extra
    text to insert in the link tag, e.g. ``target="_blank"`` (or None
    for no extra attributes). Usable as a decorator on the callback
    function.
    """
    @libmarkdown.e_flags_callback
    def _callback(string, size, context):
        extra = func(string[:size])
        if extra is None:
            return
        buf = ctypes.create_string_buffer(extra)
        # Keep a reference so the buffer outlives this C callback.
        self._alloc.append(buf)
        return ctypes.addressof(buf)
    self._link_attrs_func = _callback
    return func
python
{ "resource": "" }
q45630
xform
train
def xform(value, xformer):
    '''
    Recursively transforms `value` by calling `xformer` on all keys &
    values in dictionaries and all values in sequences.

    `xformer` receives each leaf value as the first parameter plus
    keyword parameters based on context (transformers MUST accept
    arbitrary additional keyword parameters to stay future-proof):

    * for sequence members: `index`, `seq`, and `root` (the original
      `value` passed to `xform`);
    * for dictionary members: `item_key` (only when transforming a
      dict value), `item_value` (only when transforming a dict key),
      `dict`, and `root`.

    Added in version 0.1.3.
    '''
    def _descend(node, **context):
        if isseq(node):
            transformed = []
            for position, element in enumerate(node):
                transformed.append(_descend(element, index=position, seq=node))
            return transformed
        if isdict(node):
            rebuilt = {}
            for key, val in node.items():
                new_key = _descend(key, item_value=val, dict=node)
                rebuilt[new_key] = _descend(val, item_key=key, dict=node)
            return rebuilt
        return xformer(node, root=value, **context)
    return _descend(value)
python
{ "resource": "" }
q45631
jsonModel._validate_list
train
def _validate_list(self, input_list, schema_list, path_to_root, object_title=''):
    ''' a helper method for recursively validating items in a list

    NOTE: items are validated (and possibly coerced) *in place* in
    input_list before the same list object is returned.

    :return: input_list
    '''
    # construct rules for list and items; rules are keyed by the
    # canonical path with list indices collapsed to [0]
    rules_path_to_root = re.sub('\[\d+\]', '[0]', path_to_root)
    list_rules = self.keyMap[rules_path_to_root]
    initial_key = rules_path_to_root + '[0]'
    item_rules = self.keyMap[initial_key]
    # construct list error report template
    list_error = {
        'object_title': object_title,
        'model_schema': self.schema,
        'input_criteria': list_rules,
        'failed_test': 'value_datatype',
        'input_path': path_to_root,
        'error_value': 0,
        'error_code': 4001
    }
    # validate list rules
    if 'min_size' in list_rules.keys():
        if len(input_list) < list_rules['min_size']:
            list_error['failed_test'] = 'min_size'
            list_error['error_value'] = len(input_list)
            list_error['error_code'] = 4031
            raise InputValidationError(list_error)
    if 'max_size' in list_rules.keys():
        if len(input_list) > list_rules['max_size']:
            list_error['failed_test'] = 'max_size'
            list_error['error_value'] = len(input_list)
            list_error['error_code'] = 4032
            raise InputValidationError(list_error)
    # construct item error report template (mutated per item below)
    item_error = {
        'object_title': object_title,
        'model_schema': self.schema,
        'input_criteria': item_rules,
        'failed_test': 'value_datatype',
        'input_path': initial_key,
        'error_value': None,
        'error_code': 4001
    }
    # validate datatype of items
    for i in range(len(input_list)):
        input_path = path_to_root + '[%s]' % i
        item = input_list[i]
        item_error['input_path'] = input_path
        try:
            # unrecognised classes are rejected as a datatype failure
            item_index = self._datatype_classes.index(item.__class__)
        except:
            item_error['error_value'] = item.__class__.__name__
            raise InputValidationError(item_error)
        item_type = self._datatype_names[item_index]
        item_error['error_value'] = item
        # 'null' item rules accept any datatype
        if item_rules['value_datatype'] == 'null':
            pass
        else:
            if item_type != item_rules['value_datatype']:
                raise InputValidationError(item_error)
        # call appropriate validation sub-routine for datatype of item
        if item_type == 'boolean':
            input_list[i] = self._validate_boolean(item, input_path, object_title)
        elif item_type == 'number':
            input_list[i] = self._validate_number(item, input_path, object_title)
        elif item_type == 'string':
            input_list[i] = self._validate_string(item, input_path, object_title)
        elif item_type == 'map':
            input_list[i] = self._validate_dict(item, schema_list[0], input_path, object_title)
        elif item_type == 'list':
            input_list[i] = self._validate_list(item, schema_list[0], input_path, object_title)
    # validate unique values in list
    if 'unique_values' in list_rules.keys():
        if len(set(input_list)) < len(input_list):
            list_error['failed_test'] = 'unique_values'
            list_error['error_value'] = input_list
            list_error['error_code'] = 4033
            raise InputValidationError(list_error)
    # TODO: validate top-level item values against identical to reference
    # TODO: run lambda function and call validation url
    return input_list
python
{ "resource": "" }
q45632
jsonModel._validate_number
train
def _validate_number(self, input_number, path_to_root, object_title=''): ''' a helper method for validating properties of a number :return: input_number ''' rules_path_to_root = re.sub('\[\d+\]', '[0]', path_to_root) input_criteria = self.keyMap[rules_path_to_root] error_dict = { 'object_title': object_title, 'model_schema': self.schema, 'input_criteria': input_criteria, 'failed_test': 'value_datatype', 'input_path': path_to_root, 'error_value': input_number, 'error_code': 4001 } if 'integer_data' in input_criteria.keys(): if input_criteria['integer_data'] and not isinstance(input_number, int): error_dict['failed_test'] = 'integer_data' error_dict['error_code'] = 4021 raise InputValidationError(error_dict) if 'min_value' in input_criteria.keys(): if input_number < input_criteria['min_value']: error_dict['failed_test'] = 'min_value' error_dict['error_code'] = 4022 raise InputValidationError(error_dict) if 'max_value' in input_criteria.keys(): if input_number > input_criteria['max_value']: error_dict['failed_test'] = 'max_value' error_dict['error_code'] = 4023 raise InputValidationError(error_dict) if 'greater_than' in input_criteria.keys(): if input_number <= input_criteria['greater_than']: error_dict['failed_test'] = 'greater_than' error_dict['error_code'] = 4024 raise InputValidationError(error_dict) if 'less_than' in input_criteria.keys(): if input_number >= input_criteria['less_than']: error_dict['failed_test'] = 'less_than' error_dict['error_code'] = 4025 raise InputValidationError(error_dict) if 'equal_to' in input_criteria.keys(): if input_number != input_criteria['equal_to']: error_dict['failed_test'] = 'equal_to' error_dict['error_code'] = 4026 raise InputValidationError(error_dict) if 'discrete_values' in input_criteria.keys(): if input_number not in input_criteria['discrete_values']: error_dict['failed_test'] = 'discrete_values' error_dict['error_code'] = 4041 raise InputValidationError(error_dict) if 'excluded_values' in input_criteria.keys(): if 
input_number in input_criteria['excluded_values']: error_dict['failed_test'] = 'excluded_values' error_dict['error_code'] = 4042 raise InputValidationError(error_dict) # TODO: validate number against identical to reference # TODO: run lambda function and call validation url return input_number
python
{ "resource": "" }
q45633
jsonModel._validate_boolean
train
def _validate_boolean(self, input_boolean, path_to_root, object_title=''): ''' a helper method for validating properties of a boolean :return: input_boolean ''' rules_path_to_root = re.sub('\[\d+\]', '[0]', path_to_root) input_criteria = self.keyMap[rules_path_to_root] error_dict = { 'object_title': object_title, 'model_schema': self.schema, 'input_criteria': input_criteria, 'failed_test': 'value_datatype', 'input_path': path_to_root, 'error_value': input_boolean, 'error_code': 4001 } if 'equal_to' in input_criteria.keys(): if input_boolean != input_criteria['equal_to']: error_dict['failed_test'] = 'equal_to' error_dict['error_code'] = 4026 raise InputValidationError(error_dict) # TODO: validate boolean against identical to reference # TODO: run lambda function and call validation url return input_boolean
python
{ "resource": "" }
q45634
jsonModel._ingest_dict
train
def _ingest_dict(self, input_dict, schema_dict, path_to_root):
    ''' a helper method for ingesting keys, value pairs in a dictionary

    Keys declared in the schema are kept when the input value's datatype
    matches the schema's; otherwise they are replaced with the field's
    default value (or a type-appropriate empty value). Extra input keys
    are copied through only when the field's `extra_fields` rule is set.

    :return: valid_dict
    '''
    valid_dict = {}
    # construct path to root for rules
    rules_path_to_root = re.sub('\[\d+\]', '[0]', path_to_root)
    # iterate over keys in schema dict
    for key, value in schema_dict.items():
        key_path = path_to_root
        if not key_path == '.':
            key_path += '.'
        key_path += key
        rules_key_path = re.sub('\[\d+\]', '[0]', key_path)
        value_match = False
        if key in input_dict.keys():
            # compare the declared datatype with the input's datatype
            value_index = self._datatype_classes.index(value.__class__)
            value_type = self._datatype_names[value_index]
            try:
                v_index = self._datatype_classes.index(input_dict[key].__class__)
                v_type = self._datatype_names[v_index]
                if v_type == value_type:
                    value_match = True
            except:
                # unrecognised input class counts as a type mismatch
                value_match = False
        if value_match:
            # datatypes agree: ingest the input value recursively
            if value_type == 'null':
                valid_dict[key] = input_dict[key]
            elif value_type == 'boolean':
                valid_dict[key] = self._ingest_boolean(input_dict[key], key_path)
            elif value_type == 'number':
                valid_dict[key] = self._ingest_number(input_dict[key], key_path)
            elif value_type == 'string':
                valid_dict[key] = self._ingest_string(input_dict[key], key_path)
            elif value_type == 'map':
                valid_dict[key] = self._ingest_dict(input_dict[key], schema_dict[key], key_path)
            elif value_type == 'list':
                valid_dict[key] = self._ingest_list(input_dict[key], schema_dict[key], key_path)
        else:
            # missing or mismatched: fall back to the field default (or a
            # type-appropriate empty value)
            value_type = self.keyMap[rules_key_path]['value_datatype']
            if 'default_value' in self.keyMap[rules_key_path]:
                valid_dict[key] = self.keyMap[rules_key_path]['default_value']
            elif value_type == 'null':
                valid_dict[key] = None
            elif value_type == 'boolean':
                valid_dict[key] = False
            elif value_type == 'number':
                valid_dict[key] = 0.0
                if 'integer_data' in self.keyMap[rules_key_path].keys():
                    if self.keyMap[rules_key_path]['integer_data']:
                        valid_dict[key] = 0
            elif value_type == 'string':
                valid_dict[key] = ''
            elif value_type == 'list':
                valid_dict[key] = []
            elif value_type == 'map':
                # nested maps are filled out from an empty input
                valid_dict[key] = self._ingest_dict({}, schema_dict[key], key_path)
    # add extra fields if set to True
    if self.keyMap[rules_path_to_root]['extra_fields']:
        for key, value in input_dict.items():
            if key not in valid_dict.keys():
                valid_dict[key] = value
    return valid_dict
python
{ "resource": "" }
q45635
jsonModel._ingest_list
train
def _ingest_list(self, input_list, schema_list, path_to_root):
    ''' a helper method for ingesting items in a list

    Items whose datatype matches the schema's item type are validated or
    recursively ingested; items that fail validation are silently
    dropped. Output is truncated at the field's `max_size`, if any.

    :return: valid_list
    '''
    valid_list = []
    # construct max list size
    max_size = None
    rules_path_to_root = re.sub('\[\d+\]', '[0]', path_to_root)
    if 'max_size' in self.keyMap[rules_path_to_root].keys():
        if not self.keyMap[rules_path_to_root]['max_size']:
            # falsy max_size (e.g. 0) means no items are accepted at all
            return valid_list
        else:
            max_size = self.keyMap[rules_path_to_root]['max_size']
    # iterate over items in input list
    if input_list:
        # schemas describe list items by the first schema element
        rules_index = self._datatype_classes.index(schema_list[0].__class__)
        rules_type = self._datatype_names[rules_index]
        for i in range(len(input_list)):
            item_path = '%s[%s]' % (path_to_root, i)
            value_match = False
            try:
                item_index = self._datatype_classes.index(input_list[i].__class__)
                item_type = self._datatype_names[item_index]
                if item_type == rules_type:
                    value_match = True
            except:
                # unrecognised input class counts as a type mismatch
                value_match = False
            if value_match:
                try:
                    if item_type == 'boolean':
                        valid_list.append(self._validate_boolean(input_list[i], item_path))
                    elif item_type == 'number':
                        valid_list.append(self._validate_number(input_list[i], item_path))
                    elif item_type == 'string':
                        valid_list.append(self._validate_string(input_list[i], item_path))
                    elif item_type == 'map':
                        valid_list.append(self._ingest_dict(input_list[i], schema_list[0], item_path))
                    elif item_type == 'list':
                        valid_list.append(self._ingest_list(input_list[i], schema_list[0], item_path))
                except:
                    # ingestion is best-effort: invalid items are dropped
                    pass
            if isinstance(max_size, int):
                if len(valid_list) == max_size:
                    return valid_list
    return valid_list
python
{ "resource": "" }
q45636
jsonModel._ingest_number
train
def _ingest_number(self, input_number, path_to_root): ''' a helper method for ingesting a number :return: valid_number ''' valid_number = 0.0 try: valid_number = self._validate_number(input_number, path_to_root) except: rules_path_to_root = re.sub('\[\d+\]', '[0]', path_to_root) if 'default_value' in self.keyMap[rules_path_to_root]: valid_number = self.keyMap[rules_path_to_root]['default_value'] elif 'integer_data' in self.keyMap[rules_path_to_root].keys(): if self.keyMap[rules_path_to_root]['integer_data']: valid_number = 0 return valid_number
python
{ "resource": "" }
q45637
jsonModel._ingest_string
train
def _ingest_string(self, input_string, path_to_root): ''' a helper method for ingesting a string :return: valid_string ''' valid_string = '' try: valid_string = self._validate_string(input_string, path_to_root) except: rules_path_to_root = re.sub('\[\d+\]', '[0]', path_to_root) if 'default_value' in self.keyMap[rules_path_to_root]: valid_string = self.keyMap[rules_path_to_root]['default_value'] return valid_string
python
{ "resource": "" }
q45638
jsonModel._ingest_boolean
train
def _ingest_boolean(self, input_boolean, path_to_root): ''' a helper method for ingesting a boolean :return: valid_boolean ''' valid_boolean = False try: valid_boolean = self._validate_boolean(input_boolean, path_to_root) except: rules_path_to_root = re.sub('\[\d+\]', '[0]', path_to_root) if 'default_value' in self.keyMap[rules_path_to_root]: valid_boolean = self.keyMap[rules_path_to_root]['default_value'] return valid_boolean
python
{ "resource": "" }
q45639
jsonModel._reconstruct
train
def _reconstruct(self, path_to_root): ''' a helper method for finding the schema endpoint from a path to root :param path_to_root: string with dot path to root from :return: list, dict, string, number, or boolean at path to root ''' # split path to root into segments item_pattern = re.compile('\d+\\]') dot_pattern = re.compile('\\.|\\[') path_segments = dot_pattern.split(path_to_root) # construct base schema endpoint schema_endpoint = self.schema # reconstruct schema endpoint from segments if path_segments[1]: for i in range(1,len(path_segments)): if item_pattern.match(path_segments[i]): schema_endpoint = schema_endpoint[0] else: schema_endpoint = schema_endpoint[path_segments[i]] return schema_endpoint
python
{ "resource": "" }
q45640
jsonModel._walk
train
def _walk(self, path_to_root, record_dict): ''' a helper method for finding the record endpoint from a path to root :param path_to_root: string with dot path to root from :param record_dict: :return: list, dict, string, number, or boolean at path to root ''' # split path to root into segments item_pattern = re.compile('\d+\\]') dot_pattern = re.compile('\\.|\\[') path_segments = dot_pattern.split(path_to_root) # construct empty fields record_endpoints = [] # determine starting position if not path_segments[0]: path_segments.pop(0) # define internal recursive function def _walk_int(path_segments, record_dict): record_endpoint = record_dict for i in range(0, len(path_segments)): if item_pattern.match(path_segments[i]): for j in range(0, len(record_endpoint)): if len(path_segments) == 2: record_endpoints.append(record_endpoint[j]) else: stop_chain = False for x in range(0, i): if item_pattern.match(path_segments[x]): stop_chain = True if not stop_chain: shortened_segments = [] for z in range(i + 1, len(path_segments)): shortened_segments.append(path_segments[z]) _walk_int(shortened_segments, record_endpoint[j]) else: stop_chain = False for y in range(0, i): if item_pattern.match(path_segments[y]): stop_chain = True if not stop_chain: if len(path_segments) == i + 1: record_endpoints.append(record_endpoint[path_segments[i]]) else: record_endpoint = record_endpoint[path_segments[i]] # conduct recursive walk _walk_int(path_segments, record_dict) return record_endpoints
python
{ "resource": "" }
q45641
jsonModel.validate
train
def validate(self, input_data, path_to_root='', object_title=''):
    '''a core method for validating input against the model

    input_data is only returned if all data is valid

    :param input_data: list, dict, string, number, or boolean to validate
    :param path_to_root: [optional] string with dot-path of model component
    :param object_title: [optional] string with name of input to validate
    :return: input_data (or raises InputValidationError)
    '''
    __name__ = '%s.validate' % self.__class__.__name__
    _path_arg = '%s(path_to_root="...")' % __name__
    _title_arg = '%s(object_title="...")' % __name__
    # validate input: normalize path_to_root to the keyMap's '.'-prefixed form
    copy_path = path_to_root
    if path_to_root:
        if not isinstance(path_to_root, str):
            raise ModelValidationError('%s must be a string.' % _path_arg)
        else:
            if path_to_root[0] != '.':
                copy_path = '.%s' % path_to_root
            if not copy_path in self.keyMap.keys():
                raise ModelValidationError('%s does not exist in components %s.' % (_path_arg.replace('...', path_to_root), self.keyMap.keys()))
    else:
        copy_path = '.'
    if object_title:
        if not isinstance(object_title, str):
            raise ModelValidationError('%s must be a string' % _title_arg)
    # construct generic error dictionary reused by every failure below
    error_dict = {
        'object_title': object_title,
        'model_schema': self.schema,
        'input_criteria': self.keyMap[copy_path],
        'failed_test': 'value_datatype',
        'input_path': copy_path,
        'error_value': input_data,
        'error_code': 4001
    }
    # determine value type of input data (class must be a supported datatype)
    try:
        input_index = self._datatype_classes.index(input_data.__class__)
    except:
        error_dict['error_value'] = input_data.__class__.__name__
        raise InputValidationError(error_dict)
    input_type = self._datatype_names[input_index]
    # validate input data type against the type declared at this path
    if input_type != self.keyMap[copy_path]['value_datatype']:
        raise InputValidationError(error_dict)
    # run helper method appropriate to data type; containers also need the
    # schema endpoint at this path to validate their members
    if input_type == 'boolean':
        input_data = self._validate_boolean(input_data, copy_path, object_title)
    elif input_type == 'number':
        input_data = self._validate_number(input_data, copy_path, object_title)
    elif input_type == 'string':
        input_data = self._validate_string(input_data, copy_path, object_title)
    elif input_type == 'list':
        schema_list = self._reconstruct(copy_path)
        input_data = self._validate_list(input_data, schema_list, copy_path, object_title)
    elif input_type == 'map':
        schema_dict = self._reconstruct(copy_path)
        input_data = self._validate_dict(input_data, schema_dict, copy_path, object_title)
    return input_data
python
{ "resource": "" }
q45642
jsonModel.ingest
train
def ingest(self, **kwargs):
    '''Ingest and validate arbitrary keyword data against the model.

    **NOTE: data is always returned with this method**

    For each key in the model, the returned value is chosen by priority:

    1. value in kwargs if the field passes its validation test
    2. default value declared for the key in the model
    3. empty value appropriate to the datatype of the key in the model

    **NOTE: as long as a default value is provided for each key-value,
    returned data will be model valid

    **NOTE: if 'extra_fields' is True for a dictionary, undeclared
    key-value pairs in kwargs are passed through as well

    **NOTE: if 'max_size' is declared for a list, input stops being added
    once the list reaches max size

    :param kwargs: key, value pairs
    :return: dictionary with keys and values
    '''
    __name__ = '%s.ingest' % self.__class__.__name__
    # Delegate to the recursive dict ingester, starting at the model root.
    return self._ingest_dict(kwargs, self.schema, '.')
python
{ "resource": "" }
q45643
jsonModel.query
train
def query(self, query_criteria, valid_record=None):
    '''a core method for querying model valid data with criteria

    **NOTE: input is only returned if all fields & qualifiers are valid
    for the model

    :param query_criteria: dictionary with model field names and query qualifiers
    :param valid_record: dictionary with model valid record
    :return: boolean (or raises QueryValidationError)

    an example of how to construct the query_criteria argument:

    query_criteria = {
        '.path.to.number': { 'min_value': 4.5 },
        '.path.to.string': { 'must_contain': [ '\\regex' ] }
    }

    **NOTE: for a full list of operators for query_criteria based upon
    field datatype, see either the query-rules.json file or REFERENCE file
    '''
    __name__ = '%s.query' % self.__class__.__name__
    _query_arg = '%s(query_criteria={...})' % __name__
    _record_arg = '%s(valid_record={...})' % __name__
    # validate input
    if not isinstance(query_criteria, dict):
        raise ModelValidationError('%s must be a dictionary.' % _query_arg)
    # convert javascript dot_path keys to the class's '.'-prefixed dot_path;
    # dot_fields remembers which keys were given WITHOUT the leading dot so
    # error messages can be rewritten back into the caller's form below
    criteria_copy = {}
    equal_fields = []
    dot_fields = []
    for key, value in query_criteria.items():
        copy_key = key
        if not key:
            copy_key = '.'
        else:
            if key[0] != '.':
                copy_key = '.%s' % key
                dot_fields.append(copy_key)
        criteria_copy[copy_key] = value
        # a bare scalar criterion is shorthand for an 'equal_to' qualifier
        if value.__class__ in self._datatype_classes[0:4]:
            criteria_copy[copy_key] = { 'equal_to': value }
            equal_fields.append(copy_key)
    # validate query criteria against query rules
    query_kwargs = {
        'fields_dict': criteria_copy,
        'fields_rules': self.queryRules,
        'declared_value': False
    }
    try:
        self._validate_fields(**query_kwargs)
    except ModelValidationError as err:
        message = err.error['message']
        # undo the implicit 'equal_to' wrapping in the error message
        for field in equal_fields:
            equal_error = 'field %s qualifier equal_to' % field
            if message.find(equal_error) > -1:
                message = message.replace(equal_error, 'field %s' % field)
                break
        # strip the leading dot from field names the caller supplied bare
        field_pattern = re.compile('ield\s(\..*?)\s')
        field_name = field_pattern.findall(message)
        if field_name:
            if field_name[0] in dot_fields:
                def _replace_field(x):
                    return 'ield %s ' % x.group(1)[1:]
                message = field_pattern.sub(_replace_field, message)
        raise QueryValidationError(message)
    # query test record: every criterion must pass for a True result
    if valid_record:
        if not isinstance(valid_record, dict):
            raise ModelValidationError('%s must be a dictionary.' % _record_arg)
        for key, value in criteria_copy.items():
            eval_outcome = self._evaluate_field(valid_record, key, value)
            if not eval_outcome:
                return False
    return True
python
{ "resource": "" }
q45644
url_view
train
def url_view(url_pattern, name=None, priority=None):
    """Decorator registering a functional view for ``view_include``.

    Equivalent to setting ``urljects_view = True``, ``url``, ``url_name``
    and ``url_priority`` on the view function by hand; ``view_include``
    later collects every function so marked.

    :param url_pattern: regex or URLPattern or anything passable to url()
    :param name: name of the view; the function's ``__name__`` otherwise
    :param priority: priority of the view, the lower the better
    """
    def decorate(func):
        @functools.wraps(func)
        def view(*args, **kwargs):
            return func(*args, **kwargs)
        view.urljects_view = True
        view.url = url_pattern
        view.url_name = name or func.__name__
        view.url_priority = priority
        return view
    return decorate
python
{ "resource": "" }
q45645
url
train
def url(url_pattern, view, kwargs=None, name=None):
    """Drop-in replacement for ``django.conf.urls.url``.

    Calls ``as_view`` automatically for class based views, resolves
    URLPattern objects, and guesses ``name`` when it is not given.

    :param url_pattern: string with regular expression or URLPattern
    :param view: function/string/class of the view
    :param kwargs: kwargs that are to be passed to the view
    :param name: name of the view; guessed from ``view`` when empty
    """
    # An included urlconf arrives as a tuple; a URLPattern then needs
    # its include form.
    if isinstance(view, tuple) and isinstance(url_pattern, URLPattern):
        url_pattern = url_pattern.for_include()
    if name is None:
        name = resolve_name(view)
    # Class based views expose a callable as_view factory.
    as_view = getattr(view, 'as_view', None)
    if callable(view) and callable(as_view):
        view = as_view()
    return urls.url(regex=url_pattern, view=view, kwargs=kwargs, name=name)
python
{ "resource": "" }
q45646
view_include
train
def view_include(view_module, namespace=None, app_name=None):
    """Include views in the urlconf, similar to django's include function.

    Auto imports all class based views that subclass ``URLView`` and all
    functional views decorated with ``url_view``, ordered by priority.

    :param view_module: object of the module or string with importable path
    :param namespace: name of the namespace, it will be guessed otherwise
    :param app_name: application name
    :return: result of urls.include
    """
    # since Django 1.8 patterns() are deprecated, list should be used instead
    # {priority: [views,]}
    view_dict = defaultdict(list)

    if isinstance(view_module, six.string_types):
        view_module = importlib.import_module(view_module)

    # pylint:disable=unused-variable
    for member_name, member in inspect.getmembers(view_module):
        is_class_view = inspect.isclass(member) and issubclass(member, URLView)
        is_func_view = (inspect.isfunction(member) and
                        hasattr(member, 'urljects_view') and
                        member.urljects_view)

        if (is_class_view and member is not URLView) or is_func_view:
            view_dict[member.url_priority].append(
                url(member.url, member, name=member.url_name))

    # BUG FIX: the original ``list(*[...])`` raised TypeError whenever more
    # than one priority group existed (list() accepts a single iterable);
    # flatten the priority groups explicitly instead.
    view_patterns = [pattern
                     for priority in sorted(view_dict)
                     for pattern in view_dict[priority]]

    return urls.include(
        arg=view_patterns,
        namespace=namespace,
        app_name=app_name)
python
{ "resource": "" }
q45647
copy_file
train
def copy_file(file_name):
    """Copy a given file from the cache storage into the current working
    directory.

    :param file_name: name of the cached file to copy
    :raises OSError: propagated from ``copyfile`` when the copy fails
    """
    remote_file_path = join(join(expanduser('~'), OCTOGRID_DIRECTORY), file_name)
    current_path = join(getcwd(), file_name)

    # Python 3 fix: the original ``except Exception, e: raise e`` used
    # Python 2-only syntax and merely re-raised (losing the traceback);
    # simply let errors propagate.
    copyfile(remote_file_path, current_path)
python
{ "resource": "" }
q45648
is_cached
train
def is_cached(file_name):
    """Return True when ``file_name`` exists in the local cache directory."""
    cache_dir = join(expanduser('~'), OCTOGRID_DIRECTORY)
    return isfile(join(cache_dir, file_name))
python
{ "resource": "" }
q45649
UserManager.register
train
def register(self, app, *args, **kwargs):
    """Activate the flask-login LoginManager and flask-principal Principal
    for ``app``, then delegate to the parent's register.

    Both are created lazily and re-created only when ``app`` changes.
    """
    # (Re)build the login manager when first registering or switching apps.
    if not self._login_manager or self.app != app:
        self._login_manager = LoginManager()
        self._login_manager.user_callback = self.user_loader
        self._login_manager.setup_app(app)
        self._login_manager.login_view = 'urls.index'
        self._login_manager.login_message = u'You need to be signed in for this page.'
        self.app = app
    # Principal is created once; identity_loaded feeds role/permission data.
    if not self._principal:
        self._principal = Principal(app)
        identity_loaded.connect(self.identity_loaded)
    super(UserManager, self).register(app, *args, **kwargs)
python
{ "resource": "" }
q45650
process_star
train
def process_star(filename, output, *, extension, star_name, period, shift, parameters, period_label, shift_label, **kwargs):
    """Processes a star's lightcurve, prints its coefficients, and saves
    its plotted lightcurve to a file.

    Returns the result of get_lightcurve (None when the file has the wrong
    extension or no lightcurve could be derived).
    """
    # Derive the star name from the filename when not given explicitly.
    if star_name is None:
        basename = path.basename(filename)
        if basename.endswith(extension):
            star_name = basename[:-len(extension)]
        else:
            # file has wrong extension
            return
    # Fill in period/shift from the parameters table when not given.
    # NOTE(review): the two lookups index the table differently —
    # ``parameters[period_label][star_name]`` (column then row) vs
    # ``parameters.loc[shift_label][star_name]`` (row then column); one of
    # them is likely transposed — TODO confirm the table's orientation.
    if parameters is not None:
        if period is None:
            try:
                period = parameters[period_label][star_name]
            except KeyError:
                pass
        if shift is None:
            try:
                shift = parameters.loc[shift_label][star_name]
            except KeyError:
                pass
    result = get_lightcurve_from_file(filename, name=star_name, period=period, shift=shift, **kwargs)
    if result is None:
        return
    # Only plot when an output destination was requested.
    if output is not None:
        plot_lightcurve(star_name, result['lightcurve'], result['period'], result['phased_data'], output=output, **kwargs)
    return result
python
{ "resource": "" }
q45651
Base.check
train
def check(self, check_url=None):
    """Block until the server answers HTTP, or kill it and raise on timeout.

    :param str check_url:
        URL where to check whether the server is running.
        Default is ``"http://{self.host}:{self.port}"``.
    :returns: seconds elapsed until the first successful response.
    :raises LiveAndLetDieError: when the server does not answer within
        ``self.timeout`` seconds.
    """
    if check_url is not None:
        self.check_url = self._normalize_check_url(check_url)
    response = None
    sleeped = 0.0
    t = datetime.now()
    # Poll once per second until the server accepts a connection.
    while not response:
        try:
            response = requests.get(self.check_url, verify=False)
        except requests.exceptions.ConnectionError:
            if sleeped > self.timeout:
                # Give up: kill the subprocess so it doesn't linger.
                self._kill()
                raise LiveAndLetDieError(
                    '{0} server {1} didn\'t start in specified timeout {2} '
                    'seconds!\ncommand: {3}'.format(
                        self.__class__.__name__,
                        self.check_url,
                        self.timeout,
                        ' '.join(self.create_command())
                    )
                )
            time.sleep(1)
            sleeped = _get_total_seconds(datetime.now() - t)
    return _get_total_seconds(datetime.now() - t)
python
{ "resource": "" }
q45652
Base.live
train
def live(self, kill_port=False, check_url=None):
    """Start a live server in a separate process and verify it is running.

    :param bool kill_port:
        If ``True``, processes running on the same port as ``self.port``
        will be killed.
    :param str check_url:
        URL where to check whether the server is running.
        Default is ``"http://{self.host}:{self.port}"``.
    :returns: the started ``subprocess.Popen`` object.
    :raises LiveAndLetDieError: when the port is taken or the host invalid.
    """
    # Refuse to start (or optionally kill) when the port is already in use.
    pid = port_in_use(self.port, kill_port)
    if pid:
        raise LiveAndLetDieError(
            'Port {0} is already being used by process {1}!'
            .format(self.port, pid)
        )

    host = str(self.host)
    if re.match(_VALID_HOST_PATTERN, host):
        with open(os.devnull, "w") as devnull:
            # os.setsid detaches the child into its own process group so
            # the whole group can be killed later.
            if self.suppress_output:
                self.process = subprocess.Popen(self.create_command(),
                                                stderr=devnull,
                                                stdout=devnull,
                                                preexec_fn=os.setsid)
            else:
                self.process = subprocess.Popen(self.create_command(),
                                                preexec_fn=os.setsid)
            _log(self.logging, 'Starting process PID: {0}'
                 .format(self.process.pid))
        # Wait (with timeout) until the server answers HTTP.
        duration = self.check(check_url)
        _log(self.logging, 'Live server started in {0} seconds. PID: {1}'
             .format(duration, self.process.pid))
        return self.process
    else:
        raise LiveAndLetDieError('{0} is not a valid host!'.format(host))
python
{ "resource": "" }
q45653
Base.die
train
def die(self):
    """Terminate the managed server process, if one is running."""
    if not self.process:
        return
    message = 'Stopping {0} server with PID: {1} running at {2}.'.format(
        self.__class__.__name__, self.process.pid, self.check_url)
    _log(self.logging, message)
    self._kill()
python
{ "resource": "" }
q45654
url
train
def url(route, resource_id=None, pagination=None, **parameters):
    """Generate an absolute URL to an API resource.

    :param route: one of the available routes (see the file header)
    :type route: string
    :param resource_id: resource ID, or None to address the endpoint itself
    :type resource_id: string|None
    :param pagination: parameters for pagination
    :type pagination: dict|None
    :param parameters: additional parameters required by the route
    :return: the absolute route to the API
    :rtype: string
    """
    path = route.format(**parameters)
    suffix = '/' + str(resource_id) if resource_id else ''
    query = urlencode(pagination) if pagination else ''
    if query:
        query = '?' + query
    return _base_url() + path + suffix + query
python
{ "resource": "" }
q45655
is_nested
train
def is_nested(values):
    '''Return True when ``values`` is an iterable whose every element is
    itself iterable (note: an empty iterable is vacuously nested, and
    strings count as iterables).'''
    if not isinstance(values, Iterable):
        return False
    return all(isinstance(item, Iterable) for item in values)
python
{ "resource": "" }
q45656
HTMLElement.get_html_content
train
def get_html_content(self): """ Parses the element and subelements and parses any HTML enabled text to its original HTML form for rendering. :returns: Parsed HTML enabled text content. :rtype: str """ # Extract full element node content (including subelements) html_content = '' if hasattr(self, 'xml_element'): xml = self.xml_element content_list = ["" if xml.text is None else xml.text] def to_string(xml): if isinstance(xml, _Comment): return str(xml) else: return ElementTree.tostring(xml).decode('utf-8') content_list += [to_string(e) for e in xml.getchildren()] full_xml_content = "".join(content_list) # Parse tags to generate HTML valid content first_regex = r'html:' second_regex = r' xmlns:html=(["\'])(?:(?=(\\?))\2.)*?\1' html_content = re.sub(first_regex, '', re.sub(second_regex, '', full_xml_content)) return html_content
python
{ "resource": "" }
q45657
HTMLElement.convert_html_to_xml
train
def convert_html_to_xml(self):
    """Convert the HTML tags in ``self.content`` to XML valid tags by
    prefixing every opening tag with the ``xhtml:`` namespace.

    :returns: HTML enabled text in a XML valid format ('' when empty).
    :rtype: str
    """
    content = getattr(self, 'content', '')
    if content == '':
        return ''
    # Match a '<' that opens a tag (not '</' closers or '<!' declarations).
    return re.sub(r'<(?!/)(?!!)', '<xhtml:', content)
python
{ "resource": "" }
q45658
KubernetesProvider._create_deployment_object
train
def _create_deployment_object(self, job_name, job_image, deployment_name, port=80, replicas=1, cmd_string=None, engine_json_file='~/.ipython/profile_default/security/ipcontroller-engine.json', engine_dir='.'):
    """Create a kubernetes deployment for the job.

    Args:
        - job_name (string) : Name of the job and deployment
        - job_image (string) : Docker image to launch

    KWargs:
        - port (integer) : Container port
        - replicas : Number of replica containers to maintain
        - cmd_string : Command run (via bash -c) before /app/deploy.sh
        - engine_json_file / engine_dir : currently unused by this method

    Returns:
        - The deployment object to launch
    """
    # sorry, quick hack that doesn't pass this stuff through to test it works.
    # TODO it also doesn't only add what is set :(
    security_context = None
    if 'security' in self.config['execution']:
        security_context = client.V1SecurityContext(run_as_group=self.group_id,
                                                    run_as_user=self.user_id,
                                                    run_as_non_root=self.run_as_non_root)

    # Create the environment variables and command to initiate IPP
    environment_vars = client.V1EnvVar(name="TEST", value="SOME DATA")

    # bash -c "<cmd_string>; /app/deploy.sh;"
    launch_args = ["-c", "{0}; /app/deploy.sh;".format(cmd_string)]
    print(launch_args)

    # Configure Pod template container; the security_context kwarg can only
    # be passed when one was built above.
    container = None
    if security_context:
        container = client.V1Container(
            name=job_name,
            image=job_image,
            ports=[client.V1ContainerPort(container_port=port)],
            command=['/bin/bash'],
            args=launch_args,
            env=[environment_vars],
            security_context=security_context)
    else:
        container = client.V1Container(
            name=job_name,
            image=job_image,
            ports=[client.V1ContainerPort(container_port=port)],
            command=['/bin/bash'],
            args=launch_args,
            env=[environment_vars])
    # Create a secret to enable pulling images from secure repositories
    secret = None
    if self.secret:
        secret = client.V1LocalObjectReference(name=self.secret)

    # Create and configure a spec section
    template = client.V1PodTemplateSpec(
        metadata=client.V1ObjectMeta(labels={"app": job_name}),
        spec=client.V1PodSpec(containers=[container],
                              image_pull_secrets=[secret]))

    # Create the specification of deployment
    spec = client.ExtensionsV1beta1DeploymentSpec(replicas=replicas,
                                                  template=template)

    # Instantiate the deployment object
    deployment = client.ExtensionsV1beta1Deployment(
        api_version="extensions/v1beta1",
        kind="Deployment",
        metadata=client.V1ObjectMeta(name=deployment_name),
        spec=spec)

    return deployment
python
{ "resource": "" }
q45659
gen_sites
train
def gen_sites(path):
    """Yield a Site for every directory under ``path`` (up to two levels
    deep) that parses as a valid site."""
    for root, dirs, files in walklevel(path, 2):
        try:
            site = Site(root)
        except AssertionError:
            # Not a valid site directory; keep scanning.
            continue
        yield site
python
{ "resource": "" }
q45660
Site.get_info
train
def get_info(self, full=False):
    """Return printable information about the current site.

    :param full: when True, dump every configuration key/value pair;
                 otherwise return a short ``"name [template]"`` summary.
    :returns: formatted info string
    """
    if full:
        context = self.as_dict()
        # Python 3 fix: ``dict.iterkeys()`` no longer exists; iterating
        # the dict directly is equivalent.
        return "".join("{0:<25} = {1}\n".format(key, context[key])
                       for key in sorted(context))
    return "%s [%s]" % (self.get_name(), self.template)
python
{ "resource": "" }
q45661
Site.run_check
train
def run_check(self, template_name=None, service_dir=None):
    """Run the template's check scripts.

    :param template_name: restrict to one template's scripts
    :param service_dir: restrict to one service directory
    :returns: True once all scripts have been run
    """
    print_header('Check requirements', sep='-')
    # Python 3 fix: ``map`` is lazy, so the original call never executed
    # the scripts; iterate explicitly instead.
    for cmd in self._gen_scripts('check', template_name=template_name,
                                 service_dir=service_dir):
        call("bash %s" % cmd)
    return True
python
{ "resource": "" }
q45662
Site.run_update
train
def run_update(self, template_name=None, service_dir=None):
    """Run the template's update scripts.

    :param template_name: restrict to one template's scripts
    :param service_dir: restrict to one service directory
    :returns: True once all scripts have been run
    """
    LOGGER.info('Site Update start.')
    print_header('Update %s' % self.get_name())
    # Python 3 fix: ``map`` is lazy, so the original call never executed
    # the scripts; iterate explicitly instead.
    for script in self._gen_scripts('update', template_name=template_name,
                                    service_dir=service_dir):
        call(script)
    LOGGER.info('Site Update done.')
    return True
python
{ "resource": "" }
q45663
Site.paste_template
train
def paste_template(self, template_name, template=None, deploy_dir=None):
    """Copy a template's files into the deploy directory, rendering any
    ``.tmpl`` files with this site's configuration as the namespace.

    :param template_name: name used to resolve the template path
    :param template: explicit template path (overrides template_name)
    :param deploy_dir: destination directory (defaults to self.deploy_dir)
    :returns: the deploy directory used
    """
    LOGGER.debug("Paste template: %s" % template_name)
    deploy_dir = deploy_dir or self.deploy_dir
    template = template or self._get_template_path(template_name)
    # Merge the template's own config file into this site's settings.
    self.read([op.join(template, settings.CFGNAME)], extending=True)
    for fname in gen_template_files(template):
        # Make sure the destination subdirectory exists.
        curdir = op.join(deploy_dir, op.dirname(fname))
        if not op.exists(curdir):
            makedirs(curdir)
        source = op.join(template, fname)
        target = op.join(deploy_dir, fname)
        copy2(source, target)
        name, ext = op.splitext(fname)
        if ext == '.tmpl':
            # Render the template in place, then drop the .tmpl copy.
            t = Template.from_filename(target, namespace=self.as_dict())
            with open(op.join(deploy_dir, name), 'w') as f:
                f.write(t.substitute())
            remove(target)
    return deploy_dir
python
{ "resource": "" }
q45664
Config.allLobbySlots
train
def allLobbySlots(self):
    """The current configuration of the lobby's players, defined before the
    match starts.

    :returns: tuple of (agents, computers, observers).
    """
    if self.debug:
        # NOTE(review): this debug branch looks broken — ``p`` is referenced
        # on the right-hand side of its own first assignment (NameError at
        # runtime); the commented-out lines below suggest it was meant to be
        # a comprehension over the players — TODO confirm and repair.
        p = ["Lobby Configuration detail:"] + \
            [" %s:%s%s"%(p, " "*(12-len(p.type)), p.name)]
            #[" agent: %s"%p for p in self.agents] + \
            #[" computer: %s, %s"%(r,d) for r,d in self.computers]
        if self.observers:  # must separate condition because numObs is a number, not an iterator
            p += [" observers: %d"%self.observers]
        print(os.linesep.join(p))
    return (self.agents, self.computers, self.observers)
python
{ "resource": "" }
q45665
Config.connection
train
def connection(self):
    """Identify the remote connection parameters.

    :returns: tuple of (ip address, list of ports).
    """
    # Both getters lazily populate their attribute on first use.
    self.getPorts()
    self.getIPaddresses()
    return (self.ipAddress, self.ports)
python
{ "resource": "" }
q45666
Config.execPath
train
def execPath(self):
    """Path to the executable application for the configured version."""
    if self.version:
        label = self.version.label
    else:
        label = None
    # Executables in the Versions folder are stored by baseVersion
    # (which game data patches can modify).
    return self.installedApp.exec_path(label)
python
{ "resource": "" }
q45667
Config.installedApp
train
def installedApp(self):
    """Identify the proper application to launch, resolved lazily once.

    :returns: the cached install record, creating it on first access.
    """
    try:
        return self._installedApp
    except AttributeError:
        # Narrowed from a bare ``except``: only "not yet cached" is an
        # expected failure here; anything else should propagate.
        self._installedApp = runConfigs.get()  # application/install/platform management
        return self._installedApp
python
{ "resource": "" }
q45668
Config.observers
train
def observers(self):
    """Return the players that are actually observers.

    PlayerRecords without an ``observer`` attribute are skipped.
    """
    ret = []
    for player in self.players:
        # Replaces a bare ``except: pass``: only a missing attribute is the
        # expected case (PlayerRecords don't define ``observer``).
        if getattr(player, 'observer', False):
            ret.append(player)
    return ret
python
{ "resource": "" }
q45669
Config.inflate
train
def inflate(self, newData={}):
    """Ensure all object attribute values are rich objects (not the plain
    strings/tuples they were serialized as).

    :param newData: optional dict merged into ``self.__dict__`` first.
        NOTE(review): mutable default argument — safe here only because
        ``newData`` is never mutated, just read by ``dict.update``.
    """
    # Imported locally to avoid a module-level import cycle.
    from sc2maptool.functions import selectMap
    from sc2maptool.mapRecord import MapRecord
    self.__dict__.update(newData)
    #if not isinstance(self.state, types.GameStates): self.state = types.GameStates(self.state)
    if self.expo and not isinstance(self.expo, types.ExpansionNames):
        self.expo = types.ExpansionNames(self.expo)
    if self.version and not isinstance(self.version, versions.Version):
        self.version = versions.Version(self.version)
    if self.ladder and not isinstance(self.ladder, Ladder):
        self.ladder = Ladder(self.ladder)
    for i, player in enumerate(self.players):  # iterate over all players
        # Names become PlayerRecords; raw tuples become built players.
        if isinstance(player, str):
            self.players[i] = getPlayer(player)
        elif not isinstance(player, PlayerRecord):
            self.players[i] = buildPlayer(*player)
    if self.mode and not isinstance(self.mode, types.GameModes):
        self.mode = types.GameModes(self.mode)
    if self.themap and not isinstance(self.themap, MapRecord):
        self.themap = selectMap(name=self.themap)
python
{ "resource": "" }
q45670
Config.load
train
def load(self, cfgFile=None, timeout=None): """expect that the data file has already been established""" #if cfgFile != None: self.cfgFile = cfgFile # if it's specified, use it if not cfgFile: cfgs = activeConfigs() if len(cfgs) > 1: raise Exception("found too many configurations (%s); not clear which to load: %s"%(len(cfgs), cfgs)) elif len(cfgs) < 1: if timeout: # wait for a configuration file to appear to be loaded startWait = time.time() timeReported = 0 while not cfgs: timeWaited = time.time() - startWait if timeWaited > timeout: raise c.TimeoutExceeded("could not join game after %s seconds"%(timeout)) try: cfgs = activeConfigs() except: if self.debug and timeWaited - timeReported >= 1: timeReported += 1 print("second(s) waited for game to appear: %d"%(timeReported)) else: raise Exception("must have a saved configuration to load or allow loading via timeout setting") cfgFile = cfgs.pop() try: with open(cfgFile, "rb") as f: data = f.read() # bytes => str except TypeError as e: print("ERROR %s: %s %s"%(e, cfgFile, type(cfgFile))) raise self.loadJson(data) # str => dict if self.debug: print("configuration loaded: %s"%(self.name)) self.display()
python
{ "resource": "" }
q45671
Config.loadJson
train
def loadJson(self, data):
    """Apply JSON (or already-parsed dict) data onto this object's attrs.

    :param data: dict, or a JSON string to parse first.
    :returns: self, for chaining.
    """
    attrs = data if isinstance(data, dict) else json.loads(data)
    self.__dict__.update(attrs)
    self.inflate()  # restore rich objects from their serialized str forms
    return self
python
{ "resource": "" }
q45672
Config.toJson
train
def toJson(self, data=None, pretty=False):
    """Serialize ``data`` (default: ``self.attrs``) into a JSON string.

    NOTE: output is always indented and key-sorted; the ``pretty`` flag
    is currently unused (the conditional around it is commented out).

    :returns: the JSON string.
    """
    if data is None:
        data = self.attrs
    # Flatten first so objects aren't sent as raw str in the json.
    flattened = self.flatten(data)
    return json.dumps(flattened, indent=4, sort_keys=True)
python
{ "resource": "" }
q45673
Config.getVersion
train
def getVersion(self):
    """Resolve and cache the executable application's version.

    :returns: a ``versions.Version`` object (also stored on self.version).
    :raises runConfigs.lib.SC2LaunchError: when the requested version has
        no installed executable.
    """
    if isinstance(self.version, versions.Version):
        return self.version  # already resolved
    if self.version:  # verify specified version exists
        version = versions.Version(self.version)  # create this object to allow self._version_ to be specified in multiple different ways by the user
        if version.baseVersion not in self.installedApp.versionMap():  # verify that the selected version has an executable
            raise runConfigs.lib.SC2LaunchError(
                "specified game version %s executable is not available.%s available: %s"%( \
                version, os.linesep, " ".join(self.installedApp.listVersions())))
        self.version = version
    else:  # get most recent executable's version
        # NOTE(review): ``path`` is computed but never used — TODO confirm
        # whether exec_path() is called for a needed side effect.
        path = self.installedApp.exec_path()
        vResult = self.installedApp.mostRecentVersion
        self.version = versions.Version(vResult)
    if self.debug:
        print(os.linesep.join([
            "Game configuration detail:",
            " platform: %s"%(self.os),
            " app: %s"%(self.execPath),
            " version: %s"%(self.version)]))
    return self.version
python
{ "resource": "" }
q45674
Config.getIPaddresses
train
def getIPaddresses(self):
    """Resolve (once) and return the IP addresses where this process
    client will launch the SC2 client."""
    if not self.ipAddress:
        # Lazily resolved on first request only.
        self.ipAddress = ipAddresses.getAll()
    return self.ipAddress
python
{ "resource": "" }
q45675
Config.getPorts
train
def getPorts(self):
    """Acquire (once) the ports used by the SC2 client this process
    launches; subsequent calls return the same allocation."""
    if self.ports:  # already allocated
        return self.ports
    if not self._gotPorts:
        # game_port, base_port, shared/init port
        self.ports = [portpicker.pick_unused_port() for _ in range(3)]
        self._gotPorts = True
    return self.ports
python
{ "resource": "" }
q45676
Config.requestCreateDetails
train
def requestCreateDetails(self):
    """Build the SC2 protocol create-game request from this configuration.

    :returns: SC2APIProtocol.RequestCreateGame
    """
    createReq = sc_pb.RequestCreateGame(  # used to advance to Status.initGame state, when hosting
        realtime = self.realtime,
        disable_fog = self.fogDisabled,
        random_seed = int(time.time()),  # a game is created using the current second timestamp as the seed
        local_map = sc_pb.LocalMap(map_path=self.mapLocalPath, map_data=self.mapData))
    for player in self.players:
        reqPlayer = createReq.player_setup.add()  # add new player; get link to settings
        playerObj = PlayerPreGame(player)
        # difficulty only applies to computer opponents
        if playerObj.isComputer:
            reqPlayer.difficulty = playerObj.difficulty.gameValue()
        reqPlayer.type = c.types.PlayerControls(playerObj.control).gameValue()
        reqPlayer.race = playerObj.selectedRace.gameValue()
    return createReq  # SC2APIProtocol.RequestCreateGame
python
{ "resource": "" }
q45677
Config.returnPorts
train
def returnPorts(self):
    """Release any ports this configuration allocated on the local machine."""
    if self._gotPorts:
        # Python 3 fix: ``map`` is lazy, so the original
        # ``map(portpicker.return_port, self.ports)`` silently released
        # nothing; iterate explicitly instead.
        for port in self.ports:
            portpicker.return_port(port)
        self._gotPorts = False
        self.ports = []
python
{ "resource": "" }
q45678
Config.save
train
def save(self, filename=None, debug=False):
    """Persist this configuration to disk so all processes can discover
    the running game.

    :param filename: destination path; defaults to ``self.name``.
    :param debug: force the key/value dump even when self.debug is off.
    """
    if not filename:
        filename = self.name
    with open(filename, "w") as f:  # save config data file
        f.write(self.toJson(self.attrs))
    if self.debug or debug:
        print("saved configuration %s"%(self.name))
        # Python 3 fix: the six-style ``iteritems`` shim replaced with
        # the portable dict.items().
        for k, v in sorted(self.attrs.items()):
            print("%15s : %s"%(k, v))
python
{ "resource": "" }
q45679
Config.updateIDs
train
def updateIDs(self, ginfo, tag=None, debug=False): """ensure all player's playerIDs are correct given game's info""" # SC2APIProtocol.ResponseGameInfo attributes: # map_name # mod_names # local_map_path # player_info # start_raw # options thisPlayer = self.whoAmI() for pInfo in ginfo.player_info: # parse ResponseGameInfo.player_info to validate player information (SC2APIProtocol.PlayerInfo) against the specified configuration pID = pInfo.player_id if pID == thisPlayer.playerID: continue # already updated pCon = c.types.PlayerControls(pInfo.type) rReq = c.types.SelectRaces(pInfo.race_requested) for p in self.players: # ensure joined player is identified appropriately if p.playerID and p.playerID != pID: continue # if this non-matching player already has a set playerID, it can't match if p.control == pCon and p.selectedRace == rReq: # matched player p.playerID = pID # updated player IDs should be saved into the game configuration if debug: print("[%s] match contains %s."%(tag, p)) pID = 0 # declare that the player has been identified break if pID: raise c.UnknownPlayer("could not match %s %s %s to any " "existing player of %s"%(pID, pCon, rReq, self.players))
python
{ "resource": "" }
q45680
Config.whoAmI
train
def whoAmI(self):
    """Return the player object that owns this configuration.

    :raises Exception: when the owner cannot be determined.
    """
    self.inflate()  # ensure self.players contains player objects
    if self.thePlayer:
        for player in self.players:
            if player.name == self.thePlayer:
                return player
    elif len(self.players) == 1:
        only = self.players[0]
        # Remember the owner in case more players are added later.
        self.thePlayer = only.name
        return only
    raise Exception("could not identify which player this is given %s (%s)"%(self.players, self.thePlayer))
python
{ "resource": "" }
q45681
Session.get_urls
train
def get_urls(self, order="total_clicks desc", offset=None, count=None):
    """Return the URLs you've included in messages.

    Sorted by ``order`` (default: total_clicks descending), starting at an
    optional integer ``offset`` and optionally limited to ``count`` items.
    """
    payload = [None, order, fmt_paging(offset, count)]
    return self.request("query:Message_Url", payload)
python
{ "resource": "" }
q45682
Session.get_message_urls
train
def get_message_urls(self, message_id, order="total_clicks desc"):
    """Return the URLs included in the message ``message_id``.

    Sorted by ``order`` (default: total_clicks descending).
    """
    payload = [{"message_id": str(message_id)}, order, None]
    return self.request("query:Message_Url", payload)
python
{ "resource": "" }
q45683
LocalProvider._write_submit_script
train
def _write_submit_script(self, script_string, script_filename): ''' Load the template string with config values and write the generated submit script to a submit script file. Args: - template_string (string) : The template string to be used for the writing submit script - script_filename (string) : Name of the submit script Returns: - True: on success Raises: SchedulerMissingArgs : If template is missing args ScriptPathError : Unable to write submit script out ''' try: with open(script_filename, 'w') as f: f.write(script_string) except KeyError as e: logger.error("Missing keys for submit script : %s", e) raise (ep_error.SchedulerMissingArgs(e.args, self.label)) except IOError as e: logger.error("Failed writing to submit script: %s", script_filename) raise (ep_error.ScriptPathError(script_filename, e)) return True
python
{ "resource": "" }
q45684
HgRepo.find_branches
train
def find_branches(self):
    """
    Find the branches in the Mercurial repository.

    :returns: A generator of :class:`.Revision` objects.

    .. note:: Closed branches are not included.
    """
    output = self.context.capture('hg', 'branches')
    for line in output.splitlines():
        fields = line.split()
        # Expected form: "<branch-name> <number>:<hash>" (possibly more fields).
        if len(fields) >= 2 and ':' in fields[1]:
            number, rev_hash = fields[1].split(':')
            yield Revision(
                branch=fields[0],
                repository=self,
                revision_id=rev_hash,
                revision_number=int(number),
            )
python
{ "resource": "" }
q45685
HgRepo.get_checkout_command
train
def get_checkout_command(self, revision, clean=False):
    """Get the command to update the working tree of the local repository."""
    # Discard local modifications when a clean checkout was requested.
    clean_flags = ['--clean'] if clean else []
    return ['hg', 'update'] + clean_flags + ['--rev=%s' % revision]
python
{ "resource": "" }
q45686
HgRepo.get_delete_branch_command
train
def get_delete_branch_command(self, branch_name, message, author):
    """Get the command to delete or close a branch in the local repository."""
    # Mercurial has no true branch deletion: switch to the branch and
    # commit a closing changeset instead.
    parts = ['hg update --rev=%s && hg commit' % quote(branch_name)]
    if author:
        parts.append('--user=%s' % quote(author.combined))
    parts.append('--message=%s' % quote(message))
    parts.append('--close-branch')
    return [' '.join(parts)]
python
{ "resource": "" }
q45687
Russound.set_source
train
def set_source(self, controller, zone, source):
    """ Set source for a zone - 0 based value for source """
    _LOGGER.info("Begin - controller= %s, zone= %s change source to %s.", controller, zone, source)
    send_msg = self.create_send_message("F0 @cc 00 7F 00 @zz @kk 05 02 00 00 00 F1 3E 00 00 00 @pr 00 01",
                                        controller, zone, source)
    # Acquire BEFORE the try block: previously acquire() ran inside the try,
    # so a failed acquire still reached release() in finally and raised a
    # secondary error on an unheld lock.
    self.lock.acquire()
    _LOGGER.debug('Zone %s - acquired lock for ', zone)
    try:
        self.send_data(send_msg)
        _LOGGER.debug("Zone %s - sent message %s", zone, send_msg)
        # Clear response buffer in case there is any response data(ensures correct results on future reads)
        self.get_response_message()
    finally:
        self.lock.release()
        _LOGGER.debug("Zone %s - released lock for ", zone)
    _LOGGER.debug("End - controller= %s, zone= %s source set to %s.\n", controller, zone, source)
python
{ "resource": "" }
q45688
Russound.get_volume
train
def get_volume(self, controller, zone):
    """Return the zone volume scaled to 0..100, or None if unavailable.

    The raw value (read from a 2 byte offset) covers half the range, so
    it is doubled before being returned.
    """
    raw = self.get_zone_info(controller, zone, 2)
    return None if raw is None else raw * 2
python
{ "resource": "" }
q45689
Russound.create_send_message
train
def create_send_message(self, string_message, controller, zone=None, parameter=None):
    """Build a ready-to-send RNET message list from a template string.

    Placeholders in the template are substituted: ``@cc`` (controller),
    ``@zz`` (zone), ``@kk`` (keypad code) and ``@pr`` (extra parameter).
    The checksum and end-of-message bytes are appended before returning.
    """
    # RNET requires controller and zone values to be zero based.
    controller_hex = hex(int(controller) - 1).replace('0x', '')
    zone_hex = '' if zone is None else hex(int(zone) - 1).replace('0x', '')
    parameter_hex = '' if parameter is None else hex(int(parameter)).replace('0x', '')

    substitutions = {
        '@cc': controller_hex,
        '@zz': zone_hex,
        '@kk': KEYPAD_CODE,
        '@pr': parameter_hex,
    }
    for placeholder, value in substitutions.items():
        string_message = string_message.replace(placeholder, value)

    # Split into per-"byte" tokens, then append checksum and terminator bytes.
    return self.calc_checksum(string_message.split())
python
{ "resource": "" }
q45690
Russound.create_response_signature
train
def create_response_signature(self, string_message, zone):
    """Substitute the zone placeholder into a response-message signature."""
    if zone is None:
        zone_hex = ''
    else:
        # RNET requires the zone value to be zero based.
        zone_hex = hex(int(zone) - 1).replace('0x', '')
    return string_message.replace('@zz', zone_hex)
python
{ "resource": "" }
q45691
Russound.send_data
train
def send_data(self, data, delay=COMMAND_DELAY):
    """ Send data to connected gateway

    ``data`` is a list of hex-string byte values; each entry is sent as
    a single byte after enforcing the minimum delay between sends.
    """
    time_since_last_send = time.time() - self._last_send
    delay = max(0, delay - time_since_last_send)
    time.sleep(delay)  # Ensure minimum recommended delay since last send
    for item in data:
        # Use a distinct local name: the previous code rebound ``data``
        # here, shadowing the very list being iterated.
        byte = bytes.fromhex(str(item.zfill(2)))
        try:
            self.sock.send(byte)
        except ConnectionResetError as msg:
            _LOGGER.error("Error trying to connect to Russound controller. "
                          "Check that no other device or system is using the port that "
                          "you are trying to connect to. Try resetting the bridge you are using to connect.")
            _LOGGER.error(msg)
    self._last_send = time.time()
python
{ "resource": "" }
q45692
Russound.find_signature
train
def find_signature(self, data_stream, msg_signature):
    """ Takes the stream of bytes received and looks for a message that matches
    the signature of the expected response """
    match_at = None  # index of a matching, complete response (if any)
    # Convert the space-separated hex signature into a bytearray so it can
    # be compared directly against slices of the received stream.
    signature = bytearray(int(token, 16) for token in msg_signature.split())
    index_of_last_f7 = None
    for i in range(len(data_stream)):
        if data_stream[i] == 247:  # 0xF7 terminates an RNET message
            index_of_last_f7 = i
        # Require the signature AND enough trailing bytes so the whole
        # response has been received.
        if data_stream[i:i + len(signature)] == signature and len(data_stream) - i >= 24:
            match_at = i
            break
    if match_at is None:
        # Scrap bytes up to the end of the last complete message so they
        # are not searched again on the next call.
        data_stream = data_stream[index_of_last_f7:len(data_stream)]
        matching_message = None
    else:
        matching_message = data_stream[match_at:len(data_stream)]
        _LOGGER.debug("Message signature found at location: %s", match_at)
    return matching_message, data_stream
python
{ "resource": "" }
q45693
Russound.calc_checksum
train
def calc_checksum(self, data):
    """Append the RNET checksum and end-of-message byte to ``data``.

    The checksum is the low 7 bits of (sum of all byte values plus the
    message length), rendered as a hex string without the ``0x`` prefix.
    Returns the same list with the checksum and 'F7' terminator appended.
    """
    total = 0
    for value in data:
        total += int(value, 16)
    total += len(data)
    # Use replace() rather than lstrip("0x"): lstrip strips a *set* of
    # characters, which turned a zero checksum ("0x0") into "".
    checksum = hex(total & 0x7F).replace('0x', '')
    data.append(checksum)
    data.append('F7')
    return data
python
{ "resource": "" }
q45694
crab_factory
train
def crab_factory(**kwargs):
    '''
    Factory that generates a CRAB client.

    A few parameters will be handled by the factory, other parameters
    will be passed on to the client.

    :param wsdl: `Optional.` Allows overriding the default CRAB wsdl url.
    :param proxy: `Optional.` A dictionary of proxy information that
        is passed to the underlying :class:`suds.client.Client`
    :rtype: :class:`suds.client.Client`
    '''
    # pop() both reads and removes the override, replacing the
    # check-then-delete dance.
    wsdl = kwargs.pop('wsdl', "http://crab.agiv.be/wscrab/wscrab.svc?wsdl")
    log.info('Creating CRAB client with wsdl: %s', wsdl)
    return Client(wsdl, **kwargs)
python
{ "resource": "" }
q45695
BzrRepo.update_context
train
def update_context(self):
    """
    Make sure Bazaar respects the configured author.

    Calls :func:`.Repository.update_context()` first, then sets the
    ``$BZR_EMAIL`` environment variable from :attr:`~Repository.author`
    (but only when the caller actually set :attr:`~Repository.author`).

    This works around a Bazaar behavior observed under Python 2.6: even
    though ``bzr commit --author`` is documented, Bazaar can still fail
    with::

        bzr: ERROR: Unable to determine your name.
        Please, set your name with the 'whoami' command.
        E.g. bzr whoami "Your Name <name@example.com>"
    """
    super(BzrRepo, self).update_context()
    # Only export $BZR_EMAIL when `author' was explicitly assigned by the
    # caller; there is no point in feeding Bazaar the output of
    # `bzr whoami' back to itself.
    if self.__dict__.get('author'):
        environment = self.context.options.setdefault('environment', {})
        environment.setdefault('BZR_EMAIL', self.author.combined)
python
{ "resource": "" }
q45696
LocalBase.exec_path
train
def exec_path(self, baseVersion=None):
    """Get the exec_path for this platform. Possibly find the latest build.

    :param baseVersion: None (use most recent install), a versions.Version,
        a dotted version string, or an already-correct base-version value.
    :raises sc_process.SC2LaunchError: when the install dir or requested
        base version cannot be found.
    """
    if not os.path.isdir(self.data_dir):
        raise sc_process.SC2LaunchError("Install Starcraft II at %s or set the SC2PATH environment variable"%(self.data_dir))
    if baseVersion is None:  # idiom fix: compare to None with `is`, not `==`
        # select the most recent installed version's baseVersion
        mostRecent = versions.handle.mostRecent
        if mostRecent:
            return mostRecent["base-version"]
        raise sc_process.SC2LaunchError(
            "When requesting a versioned executable path without specifying base-version, expected "
            "to find StarCraft II versions installed at %s."%(self.versionsDir))
    elif isinstance(baseVersion, versions.Version):
        baseVersion = baseVersion.baseVersion
    elif "." in str(baseVersion):  # a dotted version string; resolve its baseVersion
        baseVersion = versions.Version(baseVersion).baseVersion
    # else: otherwise expect that the baseVersion specified is correct
    baseVersExec = os.path.join(self.versionsDir, "Base%s"%baseVersion, self._exec_name)
    if os.path.isfile(baseVersExec):
        return baseVersExec  # if baseVersion in Versions subdir is valid, it is the correct executable
    raise sc_process.SC2LaunchError("Specified baseVersion %s does not exist at %s.%s available: %s"%(
        baseVersion, baseVersExec, os.linesep,
        " ".join(str(val) for val in sorted(self.versionMap().keys()))))
python
{ "resource": "" }
q45697
LocalBase.start
train
def start(self, version=None, **kwargs):
    """Launch the game process."""
    if not version:
        version = self.mostRecentVersion
    # Convert our version record into the pysc2 Version type.
    pysc2Version = lib.Version(
        version.version,
        version.baseVersion,
        version.dataHash,
        version.fixedHash)
    return sc_process.StarcraftProcess(
        self,
        exec_path=self.exec_path(version.baseVersion),
        version=pysc2Version,
        **kwargs)
python
{ "resource": "" }
q45698
generate_request_access_signature
train
def generate_request_access_signature(parameters, secret_key):
    """
    Generate the parameter signature used during third party access requests.

    Each parameter is URL-encoded as a key=value pair, the pairs are sorted
    by key and joined with '&', prefixed with '<secret_key>:', and the
    whole string is hashed with SHA-256.  Returns the hex digest.
    """
    # sorted() replaces the old keys()/keys.sort() pair, which breaks on
    # Python 3 where dict.keys() returns a view without a sort() method.
    keys = sorted(parameters)
    # create an array of url encoded key:value pairs
    encoded_pairs = [urlencode({key: parameters[key]}) for key in keys]
    # create the serialized parameters in a single, URL style string
    serialized_parameters = '&'.join(encoded_pairs)
    # prefix with the secret key before hashing
    string_to_hash = '%s:%s' % (secret_key, serialized_parameters)
    # sha256 requires bytes on Python 3; the payload is ASCII (URL-encoded),
    # so utf-8 encoding is also safe under Python 2.
    return sha256(string_to_hash.encode('utf-8')).hexdigest()
python
{ "resource": "" }
q45699
has_credentials_stored
train
def has_credentials_stored():
    """
    Return the stored 'auth token' string if user credentials are already
    stored, otherwise False.
    """
    try:
        with open(credentials_file, 'r') as f:
            # First line holds the auth token; the second line (an id) was
            # read but never used, so it is no longer read here.
            return f.readline().strip()
    except Exception:
        # Missing/unreadable credentials file: report "not stored".
        # NOTE: `except Exception, e:` was Python-2-only syntax (and `e`
        # was unused); this form works on both Python 2 and 3.
        return False
python
{ "resource": "" }