text_prompt
stringlengths
157
13.1k
code_prompt
stringlengths
7
19.8k
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _insert_or_update(self, resourcetype, source, mode='insert', hhclass='Service'): """ Insert or update a record in the repository """
def _insert_or_update(self, resourcetype, source, mode='insert', hhclass='Service'):
    """Insert or update a record in the repository.

    Returns a list of {'identifier': ..., 'title': ...} dicts for the
    records that were inserted or updated.
    """
    keywords = []
    if self.filter is not None:
        # the repository filter encodes the catalog id as its last token
        catalog = Catalog.objects.get(id=int(self.filter.split()[-1]))
    try:
        if hhclass == 'Layer':
            # TODO: better way of figuring out duplicates
            layer_qs = Layer.objects.filter(name=source.name,
                                            title=source.title,
                                            abstract=source.abstract,
                                            is_monitored=False)
            existing = layer_qs.all()
            if existing:
                if mode == 'insert':
                    raise RuntimeError('HHypermap error: Layer %d \'%s\' already exists' % (
                        existing[0].id, source.title))
                elif mode == 'update':
                    layer_qs.update(
                        name=source.name,
                        title=source.title,
                        abstract=source.abstract,
                        is_monitored=False,
                        xml=source.xml,
                        wkt_geometry=source.wkt_geometry,
                        anytext=util.get_anytext([source.title,
                                                  source.abstract,
                                                  source.keywords_csv])
                    )
            service = get_service(source.xml)
            res, keywords = create_layer_from_metadata_xml(
                resourcetype, source.xml, monitor=False,
                service=service, catalog=catalog)
            res.save()
            LOGGER.debug('Indexing layer with id %s on search engine' % res.uuid)
            index_layer(res.id, use_cache=True)
        else:
            if resourcetype == 'http://www.opengis.net/cat/csw/2.0.2':
                res = Endpoint(url=source, catalog=catalog)
            else:
                res = Service(type=HYPERMAP_SERVICE_TYPES[resourcetype],
                              url=source, catalog=catalog)
            res.save()
            if keywords:
                for kw in keywords:
                    res.keywords.add(kw)
    except Exception as err:
        raise RuntimeError('HHypermap error: %s' % err)

    # return a list of ids that were inserted or updated
    ids = []
    if hhclass == 'Layer':
        ids.append({'identifier': res.uuid, 'title': res.title})
    else:
        if resourcetype == 'http://www.opengis.net/cat/csw/2.0.2':
            for res in Endpoint.objects.filter(url=source).all():
                ids.append({'identifier': res.uuid, 'title': res.url})
        else:
            for res in Service.objects.filter(url=source).all():
                ids.append({'identifier': res.uuid, 'title': res.title})
    return ids
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def delete(self, constraint): """ Delete a record from the repository """
def delete(self, constraint):
    """Delete repository records matching *constraint*.

    Returns the number of records that were removed.
    """
    matching = self._get_repo_filter(Service.objects).extra(
        where=[constraint['where']], params=constraint['values']).all()
    removed_count = len(matching)
    matching.delete()
    return removed_count
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def check(func): """ Check the permissions, http method and login state. """
def check(func):
    """Decorator: enforce a POST request, run *func*, then redirect sensibly.

    AJAX requests get a plain 'ok'; otherwise redirect to ``next`` (GET or
    POST), the followed object's absolute URL, or the referer — failing all
    of those, return a 500.
    """
    def iCheck(request, *args, **kwargs):
        if not request.method == "POST":
            return HttpResponseBadRequest("Must be POST request.")
        follow = func(request, *args, **kwargs)
        if request.is_ajax():
            return HttpResponse('ok')
        try:
            # explicit `next` redirect targets win over the object's URL
            if 'next' in request.GET:
                return HttpResponseRedirect(request.GET.get('next'))
            if 'next' in request.POST:
                return HttpResponseRedirect(request.POST.get('next'))
            return HttpResponseRedirect(follow.target.get_absolute_url())
        except (AttributeError, TypeError):
            # target has no get_absolute_url(); fall back to the referer
            if 'HTTP_REFERER' in request.META:
                return HttpResponseRedirect(request.META.get('HTTP_REFERER', '/'))
            if follow:
                return HttpResponseServerError('"%s" object of type ``%s`` has no method ``get_absolute_url()``.' % (
                    unicode(follow.target), follow.target.__class__))
            return HttpResponseServerError('No follow object and `next` parameter found.')
    return iCheck
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def register(model, field_name=None, related_name=None, lookup_method_name='get_follows'): """ This registers any model class to be follow-able. """
def register(model, field_name=None, related_name=None, lookup_method_name='get_follows'):
    """Register *model* as follow-able.

    Adds a nullable FK from Follow to the model, attaches a follower-lookup
    method on the model, and records the mapping. Idempotent: registering a
    model twice is a no-op.
    """
    if model in registry:
        return
    registry.append(model)
    # derive default field/related names from the model's module name
    field_name = field_name or 'target_%s' % model._meta.module_name
    related_name = related_name or 'follow_%s' % model._meta.module_name
    fk = ForeignKey(model, related_name=related_name, null=True,
                    blank=True, db_index=True)
    fk.contribute_to_class(Follow, field_name)
    setattr(model, lookup_method_name, get_followers_for_object)
    model_map[model] = [related_name, field_name]
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def follow(user, obj): """ Make a user follow an object """
def follow(user, obj):
    """Make *user* follow *obj*; return the Follow instance."""
    link, _created = Follow.objects.get_or_create(user, obj)
    return link
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def unfollow(user, obj): """ Make a user unfollow an object """
def unfollow(user, obj):
    """Make *user* unfollow *obj*.

    Returns the deleted Follow instance, or None if the user was not
    following the object.
    """
    try:
        link = Follow.objects.get_follows(obj).get(user=user)
    except Follow.DoesNotExist:
        return None
    link.delete()
    return link
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def create(self, user, obj, **kwargs): """ Create a new follow link between a user and an object of a registered model type. """
def create(self, user, obj, **kwargs):
    """Create and save a Follow link between *user* and *obj*."""
    link = Follow(user=user)
    link.target = obj
    link.save()
    return link
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_or_create(self, user, obj, **kwargs): """ Almost the same as `FollowManager.objects.create` - behaves the same as the normal `get_or_create` methods in django though. Returns a tuple with the `Follow` and either `True` or `False` """
def get_or_create(self, user, obj, **kwargs):
    """Return ``(follow, created)`` like Django's stock ``get_or_create``.

    Creates a new Follow when the user is not already following *obj*.
    """
    if self.is_following(user, obj):
        return self.get_follows(obj).get(user=user), False
    return self.create(user, obj, **kwargs), True
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def is_following(self, user, obj): """ Returns `True` or `False` """
def is_following(self, user, obj):
    """Return True if *user* follows *obj*; anonymous users never do."""
    if isinstance(user, AnonymousUser):
        return False
    return self.get_follows(obj).filter(user=user).count() > 0
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_follows(self, model_or_obj_or_qs): """ Returns all the followers of a model, an object or a queryset. """
def get_follows(self, model_or_obj_or_qs):
    """Return all followers of a model class, a single object, or a queryset."""
    target_field = self.fname(model_or_obj_or_qs)
    # queryset: followers of any object in the set
    if isinstance(model_or_obj_or_qs, QuerySet):
        return self.filter(**{'%s__in' % target_field: model_or_obj_or_qs})
    # model class: followers of any instance of the model
    if inspect.isclass(model_or_obj_or_qs):
        return self.exclude(**{target_field: None})
    # single instance
    return self.filter(**{target_field: model_or_obj_or_qs})
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def create_event_regressors(self, event_times_indices, covariates = None, durations = None): """create_event_regressors creates the part of the design matrix corresponding to one event type. :param event_times_indices: indices in the resampled data, on which the events occurred. :type event_times_indices: numpy array, (nr_events) :param covariates: covariates belonging to this event type. If None, covariates with a value of 1 for all events are created and used internally. :type covariates: numpy array, (nr_events) :param durations: durations belonging to this event type. If None, durations with a value of 1 sample for all events are created and used internally. :type durations: numpy array, (nr_events) :returns: This event type's part of the design matrix. """
def create_event_regressors(self, event_times_indices, covariates=None, durations=None):
    """Create the part of the design matrix corresponding to one event type.

    :param event_times_indices: indices in the resampled data at which the
        events occurred.
    :type event_times_indices: numpy array, (nr_events)
    :param covariates: covariates for this event type; defaults to all ones.
    :type covariates: numpy array, (nr_events)
    :param durations: durations (seconds) for this event type; defaults to
        one sample per event.
    :type durations: numpy array, (nr_events)
    :returns: this event type's part of the design matrix,
        (deconvolution_interval_size x resampled_signal_size).
    """
    # BUG FIX: defaults were built from self.event_times_indices instead of
    # the event_times_indices argument passed in.
    if covariates is None:
        covariates = np.ones(event_times_indices.shape)
    # convert durations from seconds to samples; default is one sample each
    if durations is None:
        durations = np.ones(event_times_indices.shape)
    else:
        durations = np.round(durations * self.deconvolution_frequency).astype(int)
    # mean duration normalizes regressors across event types with
    # different durations
    mean_duration = np.mean(durations)

    regressors_for_event = np.zeros((self.deconvolution_interval_size,
                                     self.resampled_signal_size))
    for cov, eti, dur in zip(covariates, event_times_indices, durations):
        # only incorporate events whose deconvolution window lies fully
        # inside the data
        valid = True
        if eti < 0:
            self.logger.debug('deconv samples are starting before the data starts.')
            valid = False
        if eti + self.deconvolution_interval_size > self.resampled_signal_size:
            self.logger.debug('deconv samples are continuing after the data stops.')
            valid = False
        if eti > self.resampled_signal_size:
            self.logger.debug('event falls outside of the scope of the data.')
            valid = False
        if valid:
            # stick (FIR) regressors for this event, scaled by its covariate
            this_event_design_matrix = (np.diag(np.ones(self.deconvolution_interval_size)) * cov)
            over_durations_dm = np.copy(this_event_design_matrix)
            if dur > 1:
                # non-unity duration: duplicate the sticks along time
                for d in np.arange(1, dur):
                    over_durations_dm[d:] += this_event_design_matrix[:-d]
            # correct for duration differences between regressor types
            over_durations_dm /= mean_duration
            regressors_for_event[:, eti:int(eti + self.deconvolution_interval_size)] += over_durations_dm
    return regressors_for_event
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def regress(self, method = 'lstsq'): """regress performs linear least squares regression of the designmatrix on the data. :param method: method, or backend to be used for the regression analysis. :type method: string, one of ['lstsq', 'sm_ols'] :returns: instance variables 'betas' (nr_betas x nr_signals) and 'residuals' (nr_signals x nr_samples) are created. """
def regress(self, method='lstsq'):
    """Perform linear least squares regression of the design matrix on the data.

    :param method: backend for the regression, one of ['lstsq', 'sm_ols'].
    :type method: string
    :returns: sets instance variables 'betas' (nr_betas x nr_signals) and
        'residuals' (nr_signals x nr_samples).
    """
    # BUG FIX: identity comparison (`is`) on string literals relied on
    # CPython interning; use equality instead.
    if method == 'lstsq':
        self.betas, residuals_sum, rank, s = LA.lstsq(self.design_matrix.T,
                                                      self.resampled_signal.T)
        self.residuals = self.resampled_signal - self.predict_from_design_matrix(self.design_matrix)
    elif method == 'sm_ols':
        import statsmodels.api as sm
        assert self.resampled_signal.shape[0] == 1, \
            'signal input into statsmodels OLS cannot contain multiple signals at once, present shape %s' % str(self.resampled_signal.shape)
        model = sm.OLS(np.squeeze(self.resampled_signal), self.design_matrix.T)
        results = model.fit()
        # make betas and residuals compatible with the LA.lstsq layout
        self.betas = np.array(results.params).reshape(
            (self.design_matrix.shape[0], self.resampled_signal.shape[0]))
        self.residuals = np.array(results.resid).reshape(self.resampled_signal.shape)
    self.logger.debug('performed %s regression on %s design_matrix and %s signal' % (
        method, str(self.design_matrix.shape), str(self.resampled_signal.shape)))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def predict_from_design_matrix(self, design_matrix): """predict_from_design_matrix predicts signals given a design matrix. :param design_matrix: design matrix from which to predict a signal. :type design_matrix: numpy array, (nr_samples x betas.shape) :returns: predicted signal(s) :rtype: numpy array (nr_signals x nr_samples) """
def predict_from_design_matrix(self, design_matrix):
    """Predict signals from a design matrix using previously fit betas.

    :param design_matrix: design matrix to predict from,
        (nr_regressors x nr_samples).
    :returns: predicted signal(s), (nr_signals x nr_samples).
    """
    # prediction requires that the regression has already been run
    assert hasattr(self, 'betas'), 'no betas found, please run regression before prediction'
    assert design_matrix.shape[0] == self.betas.shape[0], \
        'designmatrix needs to have the same number of regressors as the betas already calculated'

    betas_f32 = self.betas.astype(np.float32)
    dm_f32 = design_matrix.astype(np.float32)
    return np.dot(betas_f32.T, dm_f32)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def resource_urls(request): """Global values to pass to templates"""
def resource_urls(request):
    """Global template context: app identity, site and search-backend URLs."""
    search_url_parts = urlparse(settings.SEARCH_URL)
    return dict(
        APP_NAME=__description__,
        APP_VERSION=__version__,
        SITE_URL=settings.SITE_URL.rstrip('/'),
        SEARCH_TYPE=settings.SEARCH_TYPE,
        SEARCH_URL=settings.SEARCH_URL,
        # scheme://host:port form of the search backend address
        SEARCH_IP='%s://%s:%s' % (search_url_parts.scheme,
                                  search_url_parts.hostname,
                                  search_url_parts.port),
    )
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def remove_service_checks(self, service_id): """ Remove all checks from a service. """
def remove_service_checks(self, service_id):
    """Remove all checks from a service and from each of its layers."""
    from hypermap.aggregator.models import Service
    service = Service.objects.get(id=service_id)
    service.check_set.all().delete()
    for layer in service.layer_set.all():
        layer.check_set.all().delete()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def index_service(self, service_id): """ Index a service in search engine. """
def index_service(self, service_id):
    """Index every layer of a service in the search engine.

    Invalid services are skipped entirely.
    """
    from hypermap.aggregator.models import Service
    service = Service.objects.get(id=service_id)

    if not service.is_valid:
        LOGGER.debug('Not indexing service with id %s in search engine as it is not valid' % service.id)
        return

    LOGGER.debug('Indexing service %s' % service.id)
    for layer in service.layer_set.all():
        # with celery enabled we batch through the cache
        if not settings.REGISTRY_SKIP_CELERY:
            index_layer(layer.id, use_cache=True)
        else:
            index_layer(layer.id)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def index_layer(self, layer_id, use_cache=False): """ Index a layer in the search backend. If cache is set, append it to the list, if it isn't send the transaction right away. cache needs memcached to be available. """
def index_layer(self, layer_id, use_cache=False):
    """Index a layer in the search backend.

    With ``use_cache`` the layer id is queued in memcached for a later
    batched sync; otherwise the transaction is sent right away. Invalid or
    deleted layers are unindexed instead.
    """
    from hypermap.aggregator.models import Layer
    layer = Layer.objects.get(id=layer_id)

    # invalid or deleted layers must be removed, not indexed
    if not layer.is_valid:
        LOGGER.debug('Not indexing or removing layer with id %s in search engine as it is not valid' % layer.id)
        unindex_layer(layer.id, use_cache)
        return
    if layer.was_deleted:
        LOGGER.debug('Not indexing or removing layer with id %s in search engine as was_deleted is true' % layer.id)
        unindex_layer(layer.id, use_cache)
        return

    # 1. if we use cache
    if use_cache:
        LOGGER.debug('Caching layer with id %s for syncing with search engine' % layer.id)
        pending = cache.get('layers')
        if pending is None:
            pending = set([layer.id])
        else:
            pending.add(layer.id)
        cache.set('layers', pending)
        return

    # 2. if we don't use cache
    # TODO: Make this function more DRY
    # by abstracting the common bits.
    if SEARCH_TYPE == 'solr':
        from hypermap.aggregator.solr import SolrHypermap
        LOGGER.debug('Syncing layer %s to solr' % layer.name)
        backend = SolrHypermap()
        success, message = backend.layer_to_solr(layer)
        # surface the error through celery task state when applicable
        if not settings.REGISTRY_SKIP_CELERY and not success:
            self.update_state(state=states.FAILURE, meta=message)
            raise Ignore()
    elif SEARCH_TYPE == 'elasticsearch':
        from hypermap.aggregator.elasticsearch_client import ESHypermap
        LOGGER.debug('Syncing layer %s to es' % layer.name)
        backend = ESHypermap()
        success, message = backend.layer_to_es(layer)
        # surface the error through celery task state when applicable
        if not settings.REGISTRY_SKIP_CELERY and not success:
            self.update_state(state=states.FAILURE, meta=message)
            raise Ignore()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def unindex_layers_with_issues(self, use_cache=False): """ Remove the index for layers in search backend, which are linked to an issue. """
def unindex_layers_with_issues(self, use_cache=False):
    """Unindex every layer linked to an Issue, directly or via its service."""
    from hypermap.aggregator.models import Issue, Layer, Service
    from django.contrib.contenttypes.models import ContentType

    layer_ct = ContentType.objects.get_for_model(Layer)
    service_ct = ContentType.objects.get_for_model(Service)

    # issues attached directly to layers
    for issue in Issue.objects.filter(content_type__pk=layer_ct.id):
        unindex_layer(issue.content_object.id, use_cache)

    # issues attached to services: unindex all of the service's layers
    for issue in Issue.objects.filter(content_type__pk=service_ct.id):
        for layer in issue.content_object.layer_set.all():
            unindex_layer(layer.id, use_cache)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def unindex_layer(self, layer_id, use_cache=False): """ Remove the index for a layer in the search backend. If cache is set, append it to the list of removed layers, if it isn't send the transaction right away. """
def unindex_layer(self, layer_id, use_cache=False):
    """Remove a layer's index entry from the search backend.

    With ``use_cache`` the id is queued in the 'deleted_layers' cache set
    for a later batched removal; otherwise it is removed right away.
    """
    from hypermap.aggregator.models import Layer
    layer = Layer.objects.get(id=layer_id)

    if use_cache:
        LOGGER.debug('Caching layer with id %s for being removed from search engine' % layer.id)
        pending_removals = cache.get('deleted_layers')
        if pending_removals is None:
            pending_removals = set([layer.id])
        else:
            pending_removals.add(layer.id)
        cache.set('deleted_layers', pending_removals)
        return

    if SEARCH_TYPE == 'solr':
        from hypermap.aggregator.solr import SolrHypermap
        LOGGER.debug('Removing layer %s from solr' % layer.id)
        try:
            SolrHypermap().remove_layer(layer.uuid)
        except Exception:
            # best-effort removal; log and carry on
            LOGGER.error('Layer NOT correctly removed from Solr')
    elif SEARCH_TYPE == 'elasticsearch':
        # TODO implement me
        pass
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def index_all_layers(self): """ Index all layers in search engine. """
def index_all_layers(self):
    """Index all layers in the search engine.

    With celery enabled, valid/invalid layer ids are written to the
    'layers'/'deleted_layers' cache sets for batched syncing; otherwise
    each layer is indexed synchronously.
    """
    from hypermap.aggregator.models import Layer
    if not settings.REGISTRY_SKIP_CELERY:
        valid_ids = set(Layer.objects.filter(is_valid=True).values_list('id', flat=True))
        invalid_ids = set(Layer.objects.filter(is_valid=False).values_list('id', flat=True))
        cache.set('layers', valid_ids)
        cache.set('deleted_layers', invalid_ids)
    else:
        for layer in Layer.objects.all():
            index_layer(layer.id)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def bbox2wktpolygon(bbox): """ Return OGC WKT Polygon of a simple bbox list """
def bbox2wktpolygon(bbox):
    """Return an OGC WKT Polygon for a simple [minx, miny, maxx, maxy] bbox.

    Falls back to a zero polygon when the bbox is malformed.
    """
    try:
        minx = float(bbox[0])
        miny = float(bbox[1])
        maxx = float(bbox[2])
        maxy = float(bbox[3])
    # BUG FIX: was a bare `except:`, which also swallowed KeyboardInterrupt
    # and SystemExit; only coercion/indexing failures are expected here.
    except (TypeError, ValueError, IndexError, KeyError):
        LOGGER.debug("Invalid bbox, setting it to a zero POLYGON")
        minx = 0
        miny = 0
        maxx = 0
        maxy = 0
    return 'POLYGON((%.2f %.2f, %.2f %.2f, %.2f %.2f, %.2f %.2f, %.2f %.2f))' \
        % (minx, miny, minx, maxy, maxx, maxy, maxx, miny, minx, miny)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def gen_anytext(*args): """ Convenience function to create bag of words for anytext property """
def gen_anytext(*args):
    """Build a space-joined bag of words for the anytext property.

    Accepts strings and lists of strings; None entries are skipped.
    """
    bag = []
    for term in args:
        if term is None:
            continue
        if isinstance(term, list):
            # flatten one level, dropping None members
            bag.extend(member for member in term if member is not None)
        else:
            bag.append(term)
    return ' '.join(bag)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def endpointlist_post_save(instance, *args, **kwargs): """ Used to process the lines of the endpoint list. """
def endpointlist_post_save(instance, *args, **kwargs):
    """Process the lines of an uploaded endpoint list, creating Endpoints.

    Each non-empty line is treated as one URL; endpoints already present in
    the instance's catalog are skipped, then endpoint updating is triggered.
    """
    with open(instance.upload.file.name, mode='rb') as f:
        lines = f.readlines()
    for raw_line in lines:
        # BUG FIX: readlines() keeps the trailing newline (and yields bytes
        # in 'rb' mode), so the stored URL contained '\n' and the duplicate
        # check could never match an existing endpoint.
        url = raw_line.decode('utf-8').strip()
        if not url:
            continue
        if len(url) > 255:
            LOGGER.debug('Skipping this endpoint, as it is more than 255 characters: %s' % url)
        else:
            if Endpoint.objects.filter(url=url, catalog=instance.catalog).count() == 0:
                endpoint = Endpoint(url=url, endpoint_list=instance)
                endpoint.catalog = instance.catalog
                endpoint.save()
    if not settings.REGISTRY_SKIP_CELERY:
        update_endpoints.delay(instance.id)
    else:
        update_endpoints(instance.id)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def layer_pre_save(instance, *args, **kwargs): """ Used to check layer validity. """
def layer_pre_save(instance, *args, **kwargs):
    """Set a layer's validity flag before saving.

    WorldMap layers are always valid; other layers are invalid when their
    service is invalid or when their extent lies within (-2, -2, +2, +2).
    """
    is_valid = True
    # we do not need to check validity for WM layers
    if not instance.service.type == 'Hypermap:WorldMap':
        # 0. a layer is invalid if its service its invalid as well
        if not instance.service.is_valid:
            is_valid = False
            LOGGER.debug('Layer with id %s is marked invalid because its service is invalid' % instance.id)
        # 1. a layer is invalid with an extent within (-2, -2, +2, +2)
        extent_too_small = (instance.bbox_x0 > -2 and instance.bbox_x1 < 2
                            and instance.bbox_y0 > -2 and instance.bbox_y1 < 2)
        if extent_too_small:
            is_valid = False
            LOGGER.debug(
                'Layer with id %s is marked invalid because its extent is within (-2, -2, +2, +2)' % instance.id
            )
    instance.is_valid = is_valid
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def layer_post_save(instance, *args, **kwargs): """ Used to do a layer full check when saving it. """
def layer_post_save(instance, *args, **kwargs):
    """After saving a layer: run a full check if monitored, else just index."""
    monitored = instance.is_monitored and instance.service.is_monitored
    if monitored:
        # index and monitor
        if not settings.REGISTRY_SKIP_CELERY:
            check_layer.delay(instance.id)
        else:
            check_layer(instance.id)
    else:
        # just index
        index_layer(instance.id)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def update_layers(self): """ Update layers for a service. """
def update_layers(self):
    """Update layers for a service, dispatching on the service type.

    The layer post_save signal is disconnected for the duration so each
    layer save does not trigger a check/index, and reconnected afterwards.
    """
    signals.post_save.disconnect(layer_post_save, sender=Layer)
    try:
        LOGGER.debug('Updating layers for service id %s' % self.id)
        if self.type == 'OGC:WMS':
            update_layers_wms(self)
        elif self.type == 'OGC:WMTS':
            update_layers_wmts(self)
        elif self.type == 'ESRI:ArcGIS:MapServer':
            update_layers_esri_mapserver(self)
        elif self.type == 'ESRI:ArcGIS:ImageServer':
            update_layers_esri_imageserver(self)
        elif self.type == 'Hypermap:WorldMapLegacy':
            update_layers_wm_legacy(self)
        elif self.type == 'Hypermap:WorldMap':
            update_layers_geonode_wm(self)
        elif self.type == 'Hypermap:WARPER':
            update_layers_warper(self)
    # BUG FIX: was a bare `except:`, which also swallowed KeyboardInterrupt
    # and SystemExit.
    except Exception:
        LOGGER.error('Error updating layers for service %s' % self.uuid)
    signals.post_save.connect(layer_post_save, sender=Layer)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def update_validity(self): """ Update validity of a service. """
def update_validity(self):
    """Update the validity flag of a service and save it.

    WorldMap services are always valid. The service post_save signal is
    disconnected during the save and reconnected afterwards.
    """
    # WM is always valid
    if self.type == 'Hypermap:WorldMap':
        return

    signals.post_save.disconnect(service_post_save, sender=Service)
    try:
        # some service now must be considered invalid:
        # 0. any service not exposed in SUPPORTED_SRS
        # 1. any WMTS service
        # 2. all of the NOAA layers
        is_valid = True

        # 0. any service not exposed in SUPPORTED_SRS
        if self.srs.filter(code__in=SUPPORTED_SRS).count() == 0:
            LOGGER.debug('Service with id %s is marked invalid because in not exposed in SUPPORTED_SRS' % self.id)
            is_valid = False

        # 1. any WMTS service
        if self.type == 'OGC:WMTS':
            LOGGER.debug('Service with id %s is marked invalid because it is of type OGC:WMTS' % self.id)
            is_valid = False

        # 2. all of the NOAA layers
        if 'noaa' in self.url.lower():
            LOGGER.debug('Service with id %s is marked invalid because it is from NOAA' % self.id)
            is_valid = False

        # now we save the service
        self.is_valid = is_valid
        self.save()
    # BUG FIX: was a bare `except:`, which also swallowed KeyboardInterrupt
    # and SystemExit.
    except Exception:
        LOGGER.error('Error updating validity of the service!')
    signals.post_save.connect(service_post_save, sender=Service)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_url_endpoint(self): """ Returns the Hypermap endpoint for a layer. This endpoint will be the WMTS MapProxy endpoint, only for WM we use the original endpoint. """
def get_url_endpoint(self):
    """Return the Hypermap endpoint for a layer.

    For WorldMap layers this is the original URL; everything else goes
    through the WMTS MapProxy endpoint.
    """
    if self.type in ('Hypermap:WorldMap',):
        return self.url
    return 'registry/%s/layer/%s/map/wmts/1.0.0/WMTSCapabilities.xml' % (
        self.catalog.slug,
        self.id
    )
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def check_available(self): """ Check for availability of a layer and provide run metrics. """
def check_available(self):
    """Check availability of a layer and record run metrics as a Check.

    Returns a (success, message) tuple. The layer post_save signal is
    disconnected while the thumbnail is refreshed.
    """
    success = True
    start_time = datetime.datetime.utcnow()
    message = ''
    LOGGER.debug('Checking layer id %s' % self.id)
    signals.post_save.disconnect(layer_post_save, sender=Layer)
    try:
        self.update_thumbnail()
    # COMPAT FIX: `except ValueError, err` is Python-2-only syntax;
    # `as` works on Python 2.6+ and Python 3.
    except ValueError as err:
        # caused by update_thumbnail()
        # self.href is empty in arcserver.ExportMap
        if str(err).startswith("unknown url type:"):
            LOGGER.debug('Thumbnail can not be updated: %s' % str(err))
    except Exception as err:
        message = str(err)
        success = False
    signals.post_save.connect(layer_post_save, sender=Layer)
    end_time = datetime.datetime.utcnow()
    delta = end_time - start_time
    response_time = '%s.%s' % (delta.seconds, delta.microseconds)
    check = Check(
        content_object=self,
        success=success,
        response_time=response_time,
        message=message
    )
    check.save()
    LOGGER.debug('Layer checked in %s seconds, status is %s' % (response_time, success))
    return success, message
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _input_github_repo(url=None): """ Grabs input from the user and saves it as their trytravis target repo """
def _input_github_repo(url=None):
    """Prompt for and save the user's trytravis target GitHub repository.

    Validates the URL shape, requires 'trytravis' in the repo name as a
    safety check, asks for confirmation, then persists the URL to config.
    """
    if url is None:
        url = user_input('Input the URL of the GitHub repository '
                         'to use as a `trytravis` repository: ')
    url = url.strip()
    http_match = _HTTPS_REGEX.match(url)
    ssh_match = _SSH_REGEX.match(url)
    if not http_match and not ssh_match:
        raise RuntimeError('That URL doesn\'t look like a valid '
                           'GitHub URL. We expect something '
                           'of the form: `https://github.com/[USERNAME]/'
                           '[REPOSITORY]` or `ssh://git@github.com/'
                           '[USERNAME]/[REPOSITORY]')

    # Make sure that the user actually made a new repository on GitHub.
    matched = http_match if http_match else ssh_match
    _, name = matched.groups()
    if 'trytravis' not in name:
        raise RuntimeError('You must have `trytravis` in the name of your '
                           'repository. This is a security feature to reduce '
                           'chances of running git push -f on a repository '
                           'you don\'t mean to.')

    # Make sure that the user actually wants to use this repository.
    accept = user_input('Remember that `trytravis` will make commits on your '
                        'behalf to `%s`. Are you sure you wish to use this '
                        'repository? Type `y` or `yes` to accept: ' % url)
    if accept.lower() not in ['y', 'yes']:
        raise RuntimeError('Operation aborted by user.')

    if not os.path.isdir(config_dir):
        os.makedirs(config_dir)
    with open(os.path.join(config_dir, 'repo'), 'w+') as f:
        f.truncate()
        f.write(url)
    print('Repository saved successfully.')
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _load_github_repo(): """ Loads the GitHub repository from the users config. """
def _load_github_repo():
    """Load the saved trytravis GitHub repository URL from the user config.

    Refuses to run inside Travis itself to avoid triggering infinite loops.
    """
    if 'TRAVIS' in os.environ:
        raise RuntimeError('Detected that we are running in Travis. '
                           'Stopping to prevent infinite loops.')
    repo_path = os.path.join(config_dir, 'repo')
    try:
        with open(repo_path, 'r') as f:
            return f.read()
    except (OSError, IOError):
        raise RuntimeError('Could not find your repository. '
                           'Have you ran `trytravis --repo`?')
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _submit_changes_to_github_repo(path, url): """ Temporarily commits local changes and submits them to the GitHub repository that the user has specified. Then reverts the changes to the git repository if a commit was necessary. """
def _submit_changes_to_github_repo(path, url):
    """Commit local changes temporarily and push them to the given repo.

    A `trytravis` remote is created, all local changes are committed and
    force-pushed, then the commit is reverted (if one was made) and the
    remote removed. Returns (commit_sha, committed_datetime).
    """
    try:
        repo = git.Repo(path)
    except Exception:
        raise RuntimeError('Couldn\'t locate a repository at `%s`.' % path)
    commited = False
    try:
        # drop any stale remote from a previous run
        try:
            repo.delete_remote('trytravis')
        except Exception:
            pass
        print('Adding a temporary remote to '
              '`%s`...' % url)
        remote = repo.create_remote('trytravis', url)

        print('Adding all local changes...')
        repo.git.add('--all')
        try:
            print('Committing local changes...')
            timestamp = datetime.datetime.now().isoformat()
            repo.git.commit(m='trytravis-' + timestamp)
            commited = True
        except git.exc.GitCommandError as e:
            # a clean tree is fine; anything else is a real error
            if 'nothing to commit' in str(e):
                commited = False
            else:
                raise

        commit = repo.head.commit.hexsha
        committed_at = repo.head.commit.committed_datetime
        print('Pushing to `trytravis` remote...')
        remote.push(force=True)
    finally:
        # undo the temporary commit and remote regardless of outcome
        if commited:
            print('Reverting to old state...')
            repo.git.reset('HEAD^')
        try:
            repo.delete_remote('trytravis')
        except Exception:
            pass
    return commit, committed_at
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _wait_for_travis_build(url, commit, committed_at): """ Waits for a Travis build to appear with the given commit SHA """
def _wait_for_travis_build(url, commit, committed_at):
    """Poll the Travis API until a build appears for the given commit SHA.

    Times out after 60 seconds. Returns the Travis build id.
    """
    print('Waiting for a Travis build to appear '
          'for `%s` after `%s`...' % (commit, committed_at))
    import requests
    slug = _slug_from_url(url)
    start_time = time.time()
    build_id = None
    while time.time() - start_time < 60:
        with requests.get('https://api.travis-ci.org/repos/%s/builds' % slug,
                          headers=_travis_headers()) as resp:
            if not resp.ok:
                raise RuntimeError('Could not reach the Travis API '
                                   'endpoint. Additional information: '
                                   '%s' % str(resp.content))

            # Search through all commits and builds to find our build.
            commit_to_sha = {}
            payload = resp.json()
            for tc in sorted(payload['commits'],
                             key=lambda x: x['committed_at']):
                tc_committed_at = datetime.datetime.strptime(
                    tc['committed_at'], '%Y-%m-%dT%H:%M:%SZ'
                ).replace(tzinfo=utc)
                # ignore anything committed before our push
                if tc_committed_at < committed_at:
                    continue
                commit_to_sha[tc['id']] = tc['sha']

            for build in payload['builds']:
                matches = (build['commit_id'] in commit_to_sha and
                           commit_to_sha[build['commit_id']] == commit)
                if matches:
                    build_id = build['id']
                    print('Travis build id: `%d`' % build_id)
                    print('Travis build URL: `https://travis-ci.org/'
                          '%s/builds/%d`' % (slug, build_id))
        if build_id is not None:
            break
        time.sleep(3.0)
    else:
        raise RuntimeError('Timed out while waiting for a Travis build '
                           'to start. Is Travis configured for `%s`?' % url)
    return build_id
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _watch_travis_build(build_id): """ Watches and progressively outputs information about a given Travis build """
# Imported lazily so the module loads even without requests installed.
import requests
try:
    build_size = None  # type: int  # number of job lines printed last pass
    running = True
    while running:
        with requests.get('https://api.travis-ci.org/builds/%d' % build_id,
                          headers=_travis_headers()) as r:
            json = r.json()  # NOTE: shadows any module-level `json` name here.
            if build_size is not None:
                # Move the cursor back up over the previously printed job
                # lines (ANSI escape) so the status redraws in place.
                if build_size > 1:
                    sys.stdout.write('\r\x1b[%dA' % build_size)
                else:
                    sys.stdout.write('\r')
            build_size = len(json['jobs'])
            running = False
            current_number = 1
            for job in json['jobs']:  # pragma: no coverage
                color, state, is_running = _travis_job_state(job['state'])
                # Keep polling while at least one job is still in progress.
                if is_running:
                    running = True
                platform = job['config']['os']
                if platform == 'osx':
                    # Pad 'osx' to the same width as 'linux' for alignment.
                    platform = ' osx '
                env = job['config'].get('env', '')
                # 's' = sudo-enabled VM, 'c' = container infrastructure.
                sudo = 's' if job['config'].get('sudo', True) else 'c'
                lang = job['config'].get('language', 'generic')
                # Right-pad the job number so columns line up.
                padding = ' ' * (len(str(build_size)) - len(str(current_number)))
                number = str(current_number) + padding
                current_number += 1
                job_display = '#' + ' '.join([number,
                                              state,
                                              platform,
                                              sudo,
                                              lang,
                                              env])
                print(color + job_display + colorama.Style.RESET_ALL)
        time.sleep(3.0)
except KeyboardInterrupt:
    # Ctrl+C simply stops watching; the build itself keeps running.
    pass
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _travis_job_state(state): """ Converts a Travis state into a state character, color, and whether it's still running or a stopped state. """
def _travis_job_state(state):
    """Map a Travis job state onto its display colour, a one-character
    status marker, and a flag saying whether the job is still running.
    """
    # (colour, status char, still running?) keyed by raw Travis state.
    # Several states intentionally share one visual representation.
    state_table = {
        None: (colorama.Fore.YELLOW, '*', True),
        'queued': (colorama.Fore.YELLOW, '*', True),
        'created': (colorama.Fore.YELLOW, '*', True),
        'received': (colorama.Fore.YELLOW, '*', True),
        'started': (colorama.Fore.LIGHTYELLOW_EX, '*', True),
        'running': (colorama.Fore.LIGHTYELLOW_EX, '*', True),
        'passed': (colorama.Fore.LIGHTGREEN_EX, 'P', False),
        'failed': (colorama.Fore.LIGHTRED_EX, 'X', False),
        'errored': (colorama.Fore.LIGHTRED_EX, '!', False),
        'canceled': (colorama.Fore.LIGHTBLACK_EX, 'X', False),
    }
    if state in state_table:
        return state_table[state]
    raise RuntimeError('unknown state: %s' % str(state))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _slug_from_url(url): """ Parses a project slug out of either an HTTPS or SSH URL. """
def _slug_from_url(url):
    """Extract the `owner/repo` project slug from an HTTPS or SSH
    repository URL; raises RuntimeError when neither form matches.
    """
    # Try the HTTPS form first, then the SSH form, mirroring the
    # precedence of the original implementation.
    for pattern in (_HTTPS_REGEX, _SSH_REGEX):
        match = pattern.match(url)
        if match:
            return '/'.join(match.groups())
    raise RuntimeError('Could not parse the URL (`%s`) '
                       'for your repository.' % url)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def main(argv=None): # pragma: no coverage """ Main entry point when the user runs the `trytravis` command. """
def main(argv=None):  # pragma: no coverage
    """Entry point for the `trytravis` command; exits 0 on success and
    1 on a RuntimeError (printed in red). Other exceptions propagate.
    """
    if argv is None:
        argv = sys.argv[1:]
    try:
        colorama.init()
        _main(argv)
    except RuntimeError as e:
        print(colorama.Fore.RED + 'ERROR: ' + str(e) +
              colorama.Style.RESET_ALL)
        sys.exit(1)
    else:
        sys.exit(0)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def csw_global_dispatch_by_catalog(request, catalog_slug): """pycsw wrapper for catalogs"""
# Resolve the catalog or answer 404 directly.
catalog = get_object_or_404(Catalog, slug=catalog_slug)
# NOTE(review): get_object_or_404 raises Http404 on a miss, so `catalog`
# should always be truthy here — this guard looks redundant; confirm.
if catalog:
    # define catalog specific settings
    # Build the absolute CSW endpoint URL for this catalog from the
    # configured site URL plus the current request path.
    url = settings.SITE_URL.rstrip('/') + request.path.rstrip('/')
    return csw_global_dispatch(request, url=url, catalog_id=catalog.id)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def good_coords(coords): """ passed a string array """
def good_coords(coords):
    """Validate a bounding box given as a sequence of coordinate strings.

    Returns True only when `coords` has exactly four entries and every
    entry parses as a finite float (not NaN and not +/-infinity).
    """
    if len(coords) != 4:
        return False
    # Check every coordinate. The original iterated `coords[0:3]`, which
    # silently skipped validation of the fourth value.
    for coord in coords:
        try:
            num = float(coord)
        except ValueError:
            return False
        if math.isnan(num) or math.isinf(num):
            return False
    return True
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def clear_es(): """Clear all indexes in the es core"""
# TODO: should receive a catalog slug. ESHypermap.es.indices.delete(ESHypermap.index_name, ignore=[400, 404]) LOGGER.debug('Elasticsearch: Index cleared')
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def create_indices(catalog_slug): """Create ES core indices """
# TODO: enable auto_create_index in the ES nodes to make this implicit. # https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-index_.html#index-creation # http://support.searchly.com/customer/en/portal/questions/ # 16312889-is-automatic-index-creation-disabled-?new=16312889 mapping = { "mappings": { "layer": { "properties": { "layer_geoshape": { "type": "geo_shape", "tree": "quadtree", "precision": REGISTRY_MAPPING_PRECISION } } } } } ESHypermap.es.indices.create(catalog_slug, ignore=[400, 404], body=mapping)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def kill_process(procname, scriptname): """kill WSGI processes that may be running in development"""
def kill_process(procname, scriptname):
    """SIGKILL any development WSGI process whose `ps aux` line contains
    both `procname` and `scriptname`.
    """
    # from http://stackoverflow.com/a/2940878
    import signal
    import subprocess
    ps = subprocess.Popen(['ps', 'aux'], stdout=subprocess.PIPE)
    stdout, _ = ps.communicate()
    for row in stdout.decode().splitlines():
        if procname not in row or scriptname not in row:
            continue
        # The second whitespace-separated column of `ps aux` is the PID.
        pid = int(row.split()[1])
        info('Stopping %s %s %d' % (procname, scriptname, pid))
        os.kill(pid, signal.SIGKILL)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def populate_initial_services(): """ Populate a fresh installed Hypermap instances with basic services. """
def populate_initial_services():
    """Populate a freshly installed Hypermap instance with basic services.

    Harvests a known Esri REST endpoint, then registers a fixed list of
    well-known WorldMap/WARPER/OGC services.
    """
    # (title, abstract, type, url) for each service to register.
    services_list = (
        (
            'Harvard WorldMap',
            'Harvard WorldMap open source web geospatial platform',
            'Hypermap:WorldMap',
            'http://worldmap.harvard.edu'
        ),
        (
            'NYPL MapWarper',
            'The New York Public Library (NYPL) MapWarper web site',
            'Hypermap:WARPER',
            'http://maps.nypl.org/warper/maps'
        ),
        (
            'Map Warper',
            'The MapWarper web site developed, hosted and maintained by Tim Waters',
            'Hypermap:WARPER',
            'http://mapwarper.net/maps'
        ),
        (
            'WorldMap Warp',
            'The MapWarper instance part of the Harvard WorldMap project',
            'Hypermap:WARPER',
            'http://warp.worldmap.harvard.edu/maps'
        ),
        (
            'WFP GeoNode',
            'World Food Programme GeoNode',
            'OGC:WMS',
            'http://geonode.wfp.org/geoserver/ows?'
        ),
        (
            'NASA EARTHDATA',
            'NASA EARTHDATA, powered by EOSDIS',
            'OGC:WMTS',
            'http://map1.vis.earthdata.nasa.gov/wmts-geo/1.0.0/WMTSCapabilities.xml'
        ),
    )

    esri_endpoint = 'https://gis.ngdc.noaa.gov/arcgis/rest/services'
    LOGGER.debug('*** Importing esri endpoint: %s' % esri_endpoint)
    create_services_from_endpoint(esri_endpoint)

    # Unpack each tuple into named fields instead of re-using (and
    # shadowing) the loop variable for both the tuple and the model.
    for title, abstract, service_type, url in services_list:
        LOGGER.debug('*** Importing %s' % title)
        Service(
            title=title,
            abstract=abstract,
            type=service_type,
            url=url
        ).save()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def main(): """For testing purpose"""
# Connect to a CEC-over-TCP adapter at a hard-coded LAN address.
tcp_adapter = TcpAdapter("192.168.1.3", name="HASS", activate_source=False)
hdmi_network = HDMINetwork(tcp_adapter)
hdmi_network.start()
# Poll forever: log the discovered HDMI devices every 7 seconds.
while True:
    for d in hdmi_network.devices:
        _LOGGER.info("Device: %s", d)
    time.sleep(7)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def compare_hexdigests( digest1, digest2 ): """Compute difference in bits between digest1 and digest2 returns -127 to 128; 128 is the same, -127 is different"""
def compare_hexdigests(digest1, digest2):
    """Compute the difference in bits between two nilsimsa hex digests.

    Returns a similarity score from -127 to 128: 128 means identical
    digests, -127 means maximally different.
    """
    # Decode each 64-char hex digest into 32 unsigned byte values.
    bytes1 = [int(digest1[i:i + 2], 16) for i in range(0, 63, 2)]
    bytes2 = [int(digest2[i:i + 2], 16) for i in range(0, 63, 2)]
    # Count differing bits via the POPC (population count) lookup table
    # applied to the XOR of corresponding bytes.
    bit_diff = sum(POPC[255 & b1 ^ b2] for b1, b2 in zip(bytes1, bytes2))
    return 128 - bit_diff
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def tran3(self, a, b, c, n): """Get accumulator for a transition n between chars a, b, c."""
def tran3(self, a, b, c, n):
    """Get accumulator index for a transition n between chars a, b, c."""
    # Split the original one-liner into named halves; operator
    # precedence (* before ^, & wrapping last) is preserved exactly.
    left = TRAN[(a + n) & 255] ^ TRAN[b] * (n + n + 1)
    right = TRAN[c ^ TRAN[n]]
    return (left + right) & 255
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def update(self, data): """Add data to running digest, increasing the accumulators for 0-8 triplets formed by this char and the previous 0-3 chars."""
for character in data:
    # Under Python 3 iterating a bytes object already yields ints; under
    # Python 2 it yields 1-char strings that need explicit conversion.
    if PY3:
        ch = character
    else:
        ch = ord(character)
    self.count += 1
    # incr accumulators for triplets
    # lastch holds the previous 4 chars (most recent first); -1 marks
    # "not seen yet" during ramp-up, so each guard unlocks more triplets.
    if self.lastch[1] > -1:
        self.acc[self.tran3(ch, self.lastch[0], self.lastch[1], 0)] +=1
    if self.lastch[2] > -1:
        self.acc[self.tran3(ch, self.lastch[0], self.lastch[2], 1)] +=1
        self.acc[self.tran3(ch, self.lastch[1], self.lastch[2], 2)] +=1
    if self.lastch[3] > -1:
        self.acc[self.tran3(ch, self.lastch[0], self.lastch[3], 3)] +=1
        self.acc[self.tran3(ch, self.lastch[1], self.lastch[3], 4)] +=1
        self.acc[self.tran3(ch, self.lastch[2], self.lastch[3], 5)] +=1
        # The last two transitions reverse the argument order; the exact
        # ordering is part of the nilsimsa definition — do not reorder.
        self.acc[self.tran3(self.lastch[3], self.lastch[0], ch, 6)] +=1
        self.acc[self.tran3(self.lastch[3], self.lastch[2], ch, 7)] +=1
    # adjust last seen chars
    self.lastch = [ch] + self.lastch[:3]
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def digest(self): """Get digest of data seen thus far as a list of bytes."""
total = 0 # number of triplets seen if self.count == 3: # 3 chars = 1 triplet total = 1 elif self.count == 4: # 4 chars = 4 triplets total = 4 elif self.count > 4: # otherwise 8 triplets/char less total = 8 * self.count - 28 # 28 'missed' during 'ramp-up' threshold = total / 256 # threshold for accumulators, using the mean code = [0]*32 # start with all zero bits for i in range(256): # for all 256 accumulators if self.acc[i] > threshold: # if it meets the threshold code[i >> 3] += 1 << (i&7) # set corresponding digest bit, equivalent to i/8, 2 ** (i % 8) return code[::-1]
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def from_file(self, filename): """Update running digest with content of named file."""
def from_file(self, filename):
    """Update the running digest with the contents of the named file.

    Reads in fixed-size chunks so arbitrarily large files are digested
    without being loaded fully into memory.
    """
    # `with` guarantees the handle is closed even if update() raises;
    # the original leaked the file descriptor on error.
    with open(filename, 'rb') as f:
        while True:
            data = f.read(10480)
            if not data:
                break
            self.update(data)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def compare(self, otherdigest, ishex=False): """Compute difference in bits between own digest and another. returns -127 to 128; 128 is the same, -127 is different"""
def compare(self, otherdigest, ishex=False):
    """Compute the difference in bits between this digest and another.

    Returns -127 to 128; 128 means identical, -127 maximally different.
    """
    own = self.digest()
    if ishex:
        # Decode the 64-char hex string into 32 unsigned byte values.
        otherdigest = tuple(int(otherdigest[i:i + 2], 16)
                            for i in range(0, 63, 2))
    # Sum differing bits via the POPC table over XORed byte pairs.
    bit_diff = 0
    for mine, theirs in zip(own, otherdigest):
        bit_diff += POPC[255 & mine ^ theirs]
    return 128 - bit_diff
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def jdout(api_response): """ JD Output function. Does quick pretty printing of a CloudGenix Response body. This function returns a string instead of directly printing content. **Parameters:** - **api_response:** A CloudGenix-attribute extended `requests.Response` object **Returns:** Pretty-formatted text of the Response body """
def jdout(api_response):
    """JD Output function. Quick pretty-printing of a CloudGenix
    response body; returns a string instead of printing.

    **Parameters:**

      - **api_response:** A CloudGenix-attribute extended
        `requests.Response` object

    **Returns:** Pretty-formatted text of the response body.
    """
    # Prefer the parsed cgx_content attribute; fall back to serializing
    # the object itself; finally return the input unchanged.
    try:
        return json.dumps(api_response.cgx_content, indent=4)
    except (TypeError, ValueError, AttributeError):
        pass
    try:
        return json.dumps(api_response, indent=4)
    except (TypeError, ValueError, AttributeError):
        return api_response
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def jdout_detailed(api_response, sensitive=False): """ JD Output Detailed function. Meant for quick DETAILED pretty-printing of CloudGenix Request and Response objects for troubleshooting. This function returns a string instead of directly printing content. **Parameters:** - **api_response:** A CloudGenix-attribute extended `requests.Response` object - **sensitive:** Boolean, if True will print sensitive content (specifically, authentication cookies/headers). **Returns:** Pretty-formatted text of the Request, Request Headers, Request body, Response, Response Headers, and Response Body. """
try:
    # try to be super verbose.
    output = "REQUEST: {0} {1}\n".format(api_response.request.method,
                                         api_response.request.path_url)
    output += "REQUEST HEADERS:\n"
    for key, value in api_response.request.headers.items():
        # look for sensitive values
        if key.lower() in ['cookie'] and not sensitive:
            # we need to do some work to watch for the AUTH_TOKEN cookie. Split on cookie separator
            cookie_list = value.split('; ')
            muted_cookie_list = []
            for cookie in cookie_list:
                # check if cookie starts with a permutation of AUTH_TOKEN/whitespace.
                if cookie.lower().strip().startswith('auth_token='):
                    # first 11 chars of cookie with whitespace removed + mute string.
                    newcookie = cookie.strip()[:11] + "\"<SENSITIVE - NOT SHOWN BY DEFAULT>\""
                    muted_cookie_list.append(newcookie)
                else:
                    muted_cookie_list.append(cookie)
            # got list of cookies, muted as needed. recombine.
            muted_value = "; ".join(muted_cookie_list)
            output += "\t{0}: {1}\n".format(key, muted_value)
        elif key.lower() in ['x-auth-token'] and not sensitive:
            output += "\t{0}: {1}\n".format(key, "<SENSITIVE - NOT SHOWN BY DEFAULT>")
        else:
            output += "\t{0}: {1}\n".format(key, value)
    # if body not present, output blank.
    if not api_response.request.body:
        output += "REQUEST BODY:\n{0}\n\n".format({})
    else:
        try:
            # Attempt to load JSON from string to make it look better.
            output += "REQUEST BODY:\n{0}\n\n".format(json.dumps(json.loads(api_response.request.body), indent=4))
        except (TypeError, ValueError, AttributeError):
            # if pretty call above didn't work, just toss it to jdout to best effort it.
            output += "REQUEST BODY:\n{0}\n\n".format(jdout(api_response.request.body))
    output += "RESPONSE: {0} {1}\n".format(api_response.status_code, api_response.reason)
    output += "RESPONSE HEADERS:\n"
    for key, value in api_response.headers.items():
        output += "\t{0}: {1}\n".format(key, value)
    try:
        # look for CGX content first.
        output += "RESPONSE DATA:\n{0}".format(json.dumps(api_response.cgx_content, indent=4))
    except (TypeError, ValueError, AttributeError):
        # look for standard response data.
        output += "RESPONSE DATA:\n{0}".format(json.dumps(json.loads(api_response.content), indent=4))
except (TypeError, ValueError, AttributeError, UnicodeDecodeError):
    # cgx_content did not exist, or was not JSON serializable. Try pretty output the base obj.
    # NOTE(review): this rebinds `output` with `=`, discarding any
    # request/response text accumulated so far — confirm intended.
    try:
        output = json.dumps(api_response, indent=4)
    except (TypeError, ValueError, AttributeError):
        # Same issue, just raw output the passed data. Let any exceptions happen here.
        output = api_response
return output
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def notify_for_new_version(self): """ Check for a new version of the SDK on API constructor instantiation. If new version found, print Notification to STDERR. On failure of this check, fail silently. **Returns:** No item returned, directly prints notification to `sys.stderr`. """
def notify_for_new_version(self):
    """Check for a new version of the SDK on API constructor instantiation.

    If a newer major/minor/patch version is available, print a
    notification to `sys.stderr`. Fails silently on any error.
    """
    # broad exception clause, if this fails for any reason just return.
    try:
        recommend_update = False
        update_check_resp = requests.get(self.update_info_url, timeout=3)
        web_version = update_check_resp.json()["info"]["version"]
        api_logger.debug("RETRIEVED_VERSION: %s", web_version)

        available_version = SDK_BUILD_REGEX.search(web_version).groupdict()
        current_version = SDK_BUILD_REGEX.search(self.version).groupdict()

        # Regex groups are strings; compare numerically so multi-digit
        # components work ('10' > '9' is False as a string comparison).
        available_major = int(available_version.get('major'))
        available_minor = int(available_version.get('minor'))
        available_patch = int(available_version.get('patch'))
        current_major = int(current_version.get('major'))
        current_minor = int(current_version.get('minor'))
        current_patch = int(current_version.get('patch'))

        api_logger.debug("AVAILABLE_VERSION: %s", available_version)
        api_logger.debug("CURRENT_VERSION: %s", current_version)

        # check for major/minor/patch version differences, do not alert
        # for build differences.
        if available_major > current_major:
            recommend_update = True
        elif available_major >= current_major and available_minor > current_minor:
            recommend_update = True
        elif available_major >= current_major and available_minor >= current_minor and \
                available_patch > current_patch:
            recommend_update = True

        api_logger.debug("NEED_UPDATE: %s", recommend_update)

        # notify.
        if recommend_update:
            sys.stderr.write("WARNING: CloudGenix Python SDK upgrade available. SDKs are typically deprecated 6 "
                             "months after release of a new version.\n"
                             "\tLatest Version: {0}\n"
                             "\tCurrent Version: {1}\n"
                             "\tFor more info, see 'https://github.com/cloudgenix/sdk-python'. Additionally, this "
                             "message can be suppressed by instantiating the API with API(update_check=False).\n\n"
                             "".format(web_version, self.version))
        return
    except Exception:
        # Best-effort check only; never propagate errors to the caller.
        return
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def ssl_verify(self, ssl_verify): """ Modify ssl verification settings **Parameters:** - ssl_verify: - True: Verify using builtin BYTE_CA_BUNDLE. - False: No SSL Verification. - Str: Full path to a x509 PEM CA File or bundle. **Returns:** Mutates API object in place, no return. """
self.verify = ssl_verify # if verify true/false, set ca_verify_file appropriately if isinstance(self.verify, bool): if self.verify: # True if os.name == 'nt': # Windows does not allow tmpfile access w/out close. Close file then delete it when done. self._ca_verify_file_handle = temp_ca_bundle(delete=False) self._ca_verify_file_handle.write(BYTE_CA_BUNDLE) self._ca_verify_file_handle.flush() self.ca_verify_filename = self._ca_verify_file_handle.name self._ca_verify_file_handle.close() # Other (POSIX/Unix/Linux/OSX) else: self._ca_verify_file_handle = temp_ca_bundle() self._ca_verify_file_handle.write(BYTE_CA_BUNDLE) self._ca_verify_file_handle.flush() self.ca_verify_filename = self._ca_verify_file_handle.name # register cleanup function for temp file. atexit.register(self._cleanup_ca_temp_file) else: # False # disable warnings for SSL certs. urllib3.disable_warnings() self.ca_verify_filename = False else: # Not True/False, assume path to file/dir for Requests self.ca_verify_filename = self.verify return
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def modify_rest_retry(self, total=8, connect=None, read=None, redirect=None, status=None, method_whitelist=urllib3.util.retry.Retry.DEFAULT_METHOD_WHITELIST, status_forcelist=None, backoff_factor=0.705883, raise_on_redirect=True, raise_on_status=True, respect_retry_after_header=True, adapter_url="https://"): """ Modify retry parameters for the SDK's rest call object. Parameters are directly from and passed directly to `urllib3.util.retry.Retry`, and get applied directly to the underlying `requests.Session` object. Default retry with total=8 and backoff_factor=0.705883: - Try 1, 0 delay (0 total seconds) - Try 2, 0 delay (0 total seconds) - Try 3, 0.705883 delay (0.705883 total seconds) - Try 4, 1.411766 delay (2.117649 total seconds) - Try 5, 2.823532 delay (4.941181 total seconds) - Try 6, 5.647064 delay (10.588245 total seconds) - Try 7, 11.294128 delay (21.882373 total seconds) - Try 8, 22.588256 delay (44.470629 total seconds) - Try 9, 45.176512 delay (89.647141 total seconds) - Try 10, 90.353024 delay (180.000165 total seconds) **Parameters:** - **total:** int, Total number of retries to allow. Takes precedence over other counts. - **connect:** int, How many connection-related errors to retry on. - **read:** int, How many times to retry on read errors. - **redirect:** int, How many redirects to perform. loops. - **status:** int, How many times to retry on bad status codes. - **method_whitelist:** iterable, Set of uppercased HTTP method verbs that we should retry on. - **status_forcelist:** iterable, A set of integer HTTP status codes that we should force a retry on. - **backoff_factor:** float, A backoff factor to apply between attempts after the second try. - **raise_on_redirect:** bool, True = raise a MaxRetryError, False = return latest 3xx response. 
- **raise_on_status:** bool, Similar logic to ``raise_on_redirect`` but for status responses. - **respect_retry_after_header:** bool, Whether to respect Retry-After header on status codes. - **adapter_url:** string, URL match for these retry values (default `https://`) **Returns:** No return, mutates the session directly """
def modify_rest_retry(self, total=8, connect=None, read=None, redirect=None, status=None,
                      method_whitelist=urllib3.util.retry.Retry.DEFAULT_METHOD_WHITELIST,
                      status_forcelist=None, backoff_factor=0.705883, raise_on_redirect=True,
                      raise_on_status=True, respect_retry_after_header=True,
                      adapter_url="https://"):
    """Modify retry parameters for the SDK's rest call object.

    Parameters mirror `urllib3.util.retry.Retry` and are applied to the
    underlying `requests.Session` via a freshly mounted HTTPAdapter for
    URLs matching `adapter_url`. Mutates the session; no return value.
    """
    # CloudGenix throttling/gateway responses are usually recoverable;
    # retry them by default when the caller gives no explicit list.
    if status_forcelist is None:
        status_forcelist = (413, 429, 502, 503, 504)
    retry_policy = urllib3.util.retry.Retry(
        total=total,
        connect=connect,
        read=read,
        redirect=redirect,
        status=status,
        method_whitelist=method_whitelist,
        status_forcelist=status_forcelist,
        backoff_factor=backoff_factor,
        raise_on_redirect=raise_on_redirect,
        raise_on_status=raise_on_status,
        respect_retry_after_header=respect_retry_after_header)
    # Mount a new adapter so the policy takes effect for matching URLs.
    adapter = requests.adapters.HTTPAdapter(max_retries=retry_policy)
    self._session.mount(adapter_url, adapter)
    return
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def set_debug(self, debuglevel): """ Change the debug level of the API **Returns:** No item returned. """
def set_debug(self, debuglevel):
    """Change the debug level of the API.

    1 enables INFO logging; 2 enables DEBUG plus cookie debugging; 3+
    additionally enables urllib3 debug logging; any other int resets
    logging to WARNING. Non-int values are ignored.

    **Returns:** No item returned.
    """
    if not isinstance(debuglevel, int):
        return
    self._debuglevel = debuglevel
    log_format = "%(levelname)s [%(name)s.%(funcName)s:%(lineno)d] %(message)s"
    if self._debuglevel == 1:
        logging.basicConfig(level=logging.INFO, format=log_format)
        api_logger.setLevel(logging.INFO)
    elif self._debuglevel >= 2:
        # Levels 2 and 3+ shared an identical DEBUG setup in the
        # original; consolidated here to remove the duplication.
        logging.basicConfig(level=logging.DEBUG, format=log_format)
        requests.cookies.cookielib.debug = True
        api_logger.setLevel(logging.DEBUG)
        if self._debuglevel >= 3:
            # Level 3+ also turns on urllib3 wire-level logging.
            urllib3_logger = logging.getLogger("requests.packages.urllib3")
            urllib3_logger.setLevel(logging.DEBUG)
            urllib3_logger.propagate = True
    else:
        # Remove all handlers
        for handler in logging.root.handlers[:]:
            logging.root.removeHandler(handler)
        # set logging level to default
        requests.cookies.cookielib.debug = False
        api_logger.setLevel(logging.WARNING)
    return
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _subclass_container(self): """ Call subclasses via function to allow passing parent namespace to subclasses. **Returns:** dict with subclass references. """
# Captured by the wrapper closures below so every helper instance can
# reach back to this API object (shared session, auth state, etc.).
_parent_class = self

class GetWrapper(Get):

    def __init__(self):
        self._parent_class = _parent_class

class PostWrapper(Post):

    def __init__(self):
        self._parent_class = _parent_class

class PutWrapper(Put):

    def __init__(self):
        self._parent_class = _parent_class

class PatchWrapper(Patch):

    def __init__(self):
        self._parent_class = _parent_class

class DeleteWrapper(Delete):

    def __init__(self):
        self._parent_class = _parent_class

class InteractiveWrapper(Interactive):

    def __init__(self):
        self._parent_class = _parent_class

# Map HTTP-verb names to the wrapper classes (not instances).
return {"get": GetWrapper,
        "post": PostWrapper,
        "put": PutWrapper,
        "patch": PatchWrapper,
        "delete": DeleteWrapper,
        "interactive": InteractiveWrapper}
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _cleanup_ca_temp_file(self): """ Function to clean up ca temp file for requests. **Returns:** Removes TEMP ca file, no return """
if os.name == 'nt':
    # On Windows the bundle was written with delete=False and closed,
    # so it is not auto-removed; delete it by name.
    if isinstance(self.ca_verify_filename, (binary_type, text_type)):
        # windows requires file to be closed for access. Have to manually remove
        os.unlink(self.ca_verify_filename)
else:
    # other OS's allow close and delete of file.
    self._ca_verify_file_handle.close()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def parse_auth_token(self, auth_token): """ Break auth_token up into it's constituent values. **Parameters:** - **auth_token:** Auth_token string **Returns:** dict with Auth Token constituents """
def parse_auth_token(self, auth_token):
    """Break auth_token up into its constituent values.

    **Parameters:**

      - **auth_token:** Auth token string

    **Returns:** dict of the key/value pairs found in the token.
    """
    # Drop the random security key that precedes the first '-'.
    token_body = auth_token.split('-', 1)[1]
    # The remainder is URL-encoded "key=value&key=value..." data.
    decoded = self.url_decode(token_body)
    parsed = {}
    for pair in decoded.split("&"):
        pieces = pair.split("=")
        # Only accept well-formed "key=value" entries with string keys.
        if len(pieces) == 2 and type(pieces[0]) in [text_type, binary_type]:
            parsed[pieces[0]] = pieces[1]
    return parsed
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def parse_region(self, login_response): """ Return region from a successful login response. **Parameters:** - **login_response:** requests.Response from a successful login. **Returns:** region name. """
def parse_region(self, login_response):
    """Return the region from a successful login response.

    **Parameters:**

      - **login_response:** requests.Response from a successful login.

    **Returns:** region name (or None when the token carries no region).
    """
    # The region is encoded inside the x_auth_token of the login body.
    token_fields = self.parse_auth_token(
        login_response.cgx_content['x_auth_token'])
    return token_fields.get('region')
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _catch_nonjson_streamresponse(rawresponse): """ Validate a streamed response is JSON. Return a Python dictionary either way. **Parameters:** - **rawresponse:** Streamed Response from Requests. **Returns:** Dictionary """
# attempt to load response for return. try: response = json.loads(rawresponse) except (ValueError, TypeError): if rawresponse: response = { '_error': [ { 'message': 'Response not in JSON format.', 'data': rawresponse, } ] } else: # in case of null response, return empty dict. response = {} return response
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def url_decode(url): """ URL Decode function using REGEX **Parameters:** - **url:** URLENCODED text string **Returns:** Non URLENCODED string """
def url_decode(url):
    """URL-decode a string using a regex substitution.

    **Parameters:**

      - **url:** URL-encoded text string

    **Returns:** the decoded (non-URL-encoded) string.
    """
    def _unescape(match):
        # Convert one %XX escape into its character.
        return chr(int(match.group(1), 16))

    return re.compile('%([0-9a-fA-F]{2})', re.M).sub(_unescape, url)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def jcrop_css(css_url=None): """Load jcrop css file. :param css_url: The custom CSS URL. """
if css_url is None:
    # No explicit URL given: serve the bundled copy or the CDN copy
    # depending on the AVATARS_SERVE_LOCAL configuration flag.
    if current_app.config['AVATARS_SERVE_LOCAL']:
        css_url = url_for('avatars.static', filename='jcrop/css/jquery.Jcrop.min.css')
    else:
        css_url = 'https://cdn.jsdelivr.net/npm/jcrop-0.9.12@0.9.12/css/jquery.Jcrop.min.css'
# Markup marks the tag as safe HTML for Jinja template rendering.
return Markup('<link rel="stylesheet" href="%s">' % css_url)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def crop_box(endpoint=None, filename=None): """Create a crop box. :param endpoint: The endpoint of view function that serve avatar image file. :param filename: The filename of the image that need to be crop. """
# Maximum display width of the crop box, taken from configuration.
crop_size = current_app.config['AVATARS_CROP_BASE_WIDTH']
if endpoint is None or filename is None:
    # Fall back to the bundled placeholder image.
    url = url_for('avatars.static', filename='default/default_l.jpg')
else:
    url = url_for(endpoint, filename=filename)
# Markup marks the tag as safe HTML for Jinja template rendering.
return Markup('<img src="%s" id="crop-box" style="max-width: %dpx; display: block;">' % (url, crop_size))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def resize_avatar(self, img, base_width): """Resize an avatar. :param img: The image that needs to be resize. :param base_width: The width of output image. """
# Preserve the aspect ratio: scale height by the same factor as width.
w_percent = (base_width / float(img.size[0]))
h_size = int((float(img.size[1]) * float(w_percent)))
# NOTE(review): PIL.Image.ANTIALIAS (an alias of LANCZOS since Pillow
# 2.7) was removed in Pillow 10 — confirm the pinned Pillow version.
img = img.resize((base_width, h_size), PIL.Image.ANTIALIAS)
return img
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def save_avatar(self, image): """Save an avatar as raw image, return new filename. :param image: The image that needs to be saved. """
def save_avatar(self, image):
    """Save an avatar as a raw PNG image and return the new filename.

    :param image: the image object to persist.
    """
    target_dir = current_app.config['AVATARS_SAVE_PATH']
    # A random hex name avoids collisions between uploads.
    raw_name = '%s_raw.png' % uuid4().hex
    image.save(os.path.join(target_dir, raw_name))
    return raw_name
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_image(self, string, width, height, pad=0): """ Byte representation of a PNG image """
def get_image(self, string, width, height, pad=0):
    """Return the byte representation of a PNG identicon image.

    :param string: input text used to seed the image.
    :param width: output image width in pixels.
    :param height: output image height in pixels.
    :param pad: optional padding in pixels around the drawing.
    """
    byte_values = self._string_to_byte_list(string)
    cell_matrix = self._create_matrix(byte_values)
    return self._create_image(cell_matrix, width, height, pad)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _get_pastel_colour(self, lighten=127): """ Create a pastel colour hex colour string """
def r(): return random.randint(0, 128) + lighten return r(), r(), r()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _luminance(self, rgb): """ Determine the luminance of an RGB colour """
a = [] for v in rgb: v = v / float(255) if v < 0.03928: result = v / 12.92 else: result = math.pow(((v + 0.055) / 1.055), 2.4) a.append(result) return a[0] * 0.2126 + a[1] * 0.7152 + a[2] * 0.0722
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _string_to_byte_list(self, data): """ Creates a hex digest of the input string given to create the image, if it's not already hexadecimal Returns: Length 16 list of rgb value range integers (each representing a byte of the hex digest) """
bytes_length = 16 m = self.digest() m.update(str.encode(data)) hex_digest = m.hexdigest() return list(int(hex_digest[num * 2:num * 2 + 2], bytes_length) for num in range(bytes_length))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _create_image(self, matrix, width, height, pad): """ Generates a PNG byte list """
image = Image.new("RGB", (width + (pad * 2), height + (pad * 2)), self.bg_colour) image_draw = ImageDraw.Draw(image) # Calculate the block width and height. block_width = float(width) / self.cols block_height = float(height) / self.rows # Loop through blocks in matrix, draw rectangles. for row, cols in enumerate(matrix): for col, cell in enumerate(cols): if cell: image_draw.rectangle(( pad + col * block_width, # x1 pad + row * block_height, # y1 pad + (col + 1) * block_width - 1, # x2 pad + (row + 1) * block_height - 1 # y2 ), fill=self.fg_colour) stream = BytesIO() image.save(stream, format="png", optimize=True) # return the image byte data return stream.getvalue()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def city(self, value=None): """Corresponds to IDD Field `city` Args: value (str): value for IDD Field `city` if `value` is None it will not be checked against the specification and is assumed to be a missing value Raises: ValueError: if `value` is not a valid value """
# None marks a missing value and is stored unchecked; otherwise coerce
# to str and reject commas (field is comma-delimited downstream).
if value is None:
    self._city = value
else:
    try:
        value = str(value)
    except ValueError:
        raise ValueError('value {} need to be of type str '
                         'for field `city`'.format(value))
    if ',' in value:
        raise ValueError('value should not contain a comma '
                         'for field `city`')
    self._city = value
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def state_province_region(self, value=None): """Corresponds to IDD Field `state_province_region` Args: value (str): value for IDD Field `state_province_region` if `value` is None it will not be checked against the specification and is assumed to be a missing value Raises: ValueError: if `value` is not a valid value """
# None marks a missing value; otherwise coerce to str and reject commas.
if value is None:
    self._state_province_region = value
else:
    try:
        value = str(value)
    except ValueError:
        raise ValueError(
            'value {} need to be of type str '
            'for field `state_province_region`'.format(value))
    if ',' in value:
        raise ValueError('value should not contain a comma '
                         'for field `state_province_region`')
    self._state_province_region = value
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def country(self, value=None): """Corresponds to IDD Field `country` Args: value (str): value for IDD Field `country` if `value` is None it will not be checked against the specification and is assumed to be a missing value Raises: ValueError: if `value` is not a valid value """
# None marks a missing value; otherwise coerce to str and reject commas.
if value is None:
    self._country = value
else:
    try:
        value = str(value)
    except ValueError:
        raise ValueError('value {} need to be of type str '
                         'for field `country`'.format(value))
    if ',' in value:
        raise ValueError('value should not contain a comma '
                         'for field `country`')
    self._country = value
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def source(self, value=None): """Corresponds to IDD Field `source` Args: value (str): value for IDD Field `source` if `value` is None it will not be checked against the specification and is assumed to be a missing value Raises: ValueError: if `value` is not a valid value """
# None marks a missing value; otherwise coerce to str and reject commas.
if value is None:
    self._source = value
else:
    try:
        value = str(value)
    except ValueError:
        raise ValueError('value {} need to be of type str '
                         'for field `source`'.format(value))
    if ',' in value:
        raise ValueError('value should not contain a comma '
                         'for field `source`')
    self._source = value
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def wmo(self, value=None): """Corresponds to IDD Field `wmo` usually a 6 digit field. Used as alpha in EnergyPlus. Args: value (str): value for IDD Field `wmo` if `value` is None it will not be checked against the specification and is assumed to be a missing value Raises: ValueError: if `value` is not a valid value """
# WMO station id is kept as a string (usually 6 digits); None means
# missing. Commas are rejected as in the other string fields.
if value is None:
    self._wmo = value
else:
    try:
        value = str(value)
    except ValueError:
        raise ValueError('value {} need to be of type str '
                         'for field `wmo`'.format(value))
    if ',' in value:
        raise ValueError('value should not contain a comma '
                         'for field `wmo`')
    self._wmo = value
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def latitude(self, value=0.0): """Corresponds to IDD Field `latitude` + is North, - is South, degree minutes represented in decimal (i.e. 30 minutes is .5) Args: value (float): value for IDD Field `latitude` Unit: deg Default value: 0.0 value >= -90.0 value <= 90.0 if `value` is None it will not be checked against the specification and is assumed to be a missing value Raises: ValueError: if `value` is not a valid value """
# Coerce to float and enforce the valid range [-90.0, 90.0] degrees;
# None marks a missing value and is stored unchecked.
if value is None:
    self._latitude = value
else:
    try:
        value = float(value)
    except ValueError:
        raise ValueError('value {} need to be of type float '
                         'for field `latitude`'.format(value))
    if value < -90.0:
        raise ValueError('value need to be greater or equal -90.0 '
                         'for field `latitude`')
    if value > 90.0:
        raise ValueError('value need to be smaller 90.0 '
                         'for field `latitude`')
    self._latitude = value
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def longitude(self, value=0.0): """Corresponds to IDD Field `longitude` - is West, + is East, degree minutes represented in decimal (i.e. 30 minutes is .5) Args: value (float): value for IDD Field `longitude` Unit: deg Default value: 0.0 value >= -180.0 value <= 180.0 if `value` is None it will not be checked against the specification and is assumed to be a missing value Raises: ValueError: if `value` is not a valid value """
# Coerce to float and enforce the valid range [-180.0, 180.0] degrees;
# None marks a missing value and is stored unchecked.
if value is None:
    self._longitude = value
else:
    try:
        value = float(value)
    except ValueError:
        raise ValueError('value {} need to be of type float '
                         'for field `longitude`'.format(value))
    if value < -180.0:
        raise ValueError('value need to be greater or equal -180.0 '
                         'for field `longitude`')
    if value > 180.0:
        raise ValueError('value need to be smaller 180.0 '
                         'for field `longitude`')
    self._longitude = value
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def timezone(self, value=0.0): """Corresponds to IDD Field `timezone` Time relative to GMT. Args: value (float): value for IDD Field `timezone` Unit: hr - not on standard units list??? Default value: 0.0 value >= -12.0 value <= 12.0 if `value` is None it will not be checked against the specification and is assumed to be a missing value Raises: ValueError: if `value` is not a valid value """
# Coerce to float and enforce the valid range [-12.0, 12.0] hours
# relative to GMT; None marks a missing value and is stored unchecked.
if value is None:
    self._timezone = value
else:
    try:
        value = float(value)
    except ValueError:
        raise ValueError('value {} need to be of type float '
                         'for field `timezone`'.format(value))
    if value < -12.0:
        raise ValueError('value need to be greater or equal -12.0 '
                         'for field `timezone`')
    if value > 12.0:
        raise ValueError('value need to be smaller 12.0 '
                         'for field `timezone`')
    self._timezone = value
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def elevation(self, value=0.0): """Corresponds to IDD Field `elevation` Args: value (float): value for IDD Field `elevation` Unit: m Default value: 0.0 value >= -1000.0 value < 9999.9 if `value` is None it will not be checked against the specification and is assumed to be a missing value Raises: ValueError: if `value` is not a valid value """
# Coerce to float and enforce -1000.0 <= value < 9999.9 metres
# (upper bound is exclusive); None marks a missing value.
if value is None:
    self._elevation = value
else:
    try:
        value = float(value)
    except ValueError:
        raise ValueError('value {} need to be of type float '
                         'for field `elevation`'.format(value))
    if value < -1000.0:
        raise ValueError('value need to be greater or equal -1000.0 '
                         'for field `elevation`')
    if value >= 9999.9:
        raise ValueError('value need to be smaller 9999.9 '
                         'for field `elevation`')
    self._elevation = value
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def title_of_design_condition(self, value=None): """Corresponds to IDD Field `title_of_design_condition` Args: value (str): value for IDD Field `title_of_design_condition` if `value` is None it will not be checked against the specification and is assumed to be a missing value Raises: ValueError: if `value` is not a valid value """
# None marks a missing value; otherwise coerce to str and reject commas.
if value is None:
    self._title_of_design_condition = value
else:
    try:
        value = str(value)
    except ValueError:
        raise ValueError(
            'value {} need to be of type str '
            'for field `title_of_design_condition`'.format(value))
    if ',' in value:
        raise ValueError('value should not contain a comma '
                         'for field `title_of_design_condition`')
    self._title_of_design_condition = value
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def unkown_field(self, value=None): """Corresponds to IDD Field `unkown_field` Empty field in data. Args: value (str): value for IDD Field `unkown_field` if `value` is None it will not be checked against the specification and is assumed to be a missing value Raises: ValueError: if `value` is not a valid value """
# Placeholder for an empty column in the source data; validated like
# the other string fields (str-coercible, no commas, None = missing).
if value is None:
    self._unkown_field = value
else:
    try:
        value = str(value)
    except ValueError:
        raise ValueError('value {} need to be of type str '
                         'for field `unkown_field`'.format(value))
    if ',' in value:
        raise ValueError('value should not contain a comma '
                         'for field `unkown_field`')
    self._unkown_field = value
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def design_stat_heating(self, value="Heating"): """Corresponds to IDD Field `design_stat_heating` Args: value (str): value for IDD Field `design_stat_heating` Accepted values are: - Heating Default value: Heating if `value` is None it will not be checked against the specification and is assumed to be a missing value Raises: ValueError: if `value` is not a valid value """
# String field restricted to the single accepted literal "Heating";
# None marks a missing value and is stored unchecked.
if value is None:
    self._design_stat_heating = value
else:
    try:
        value = str(value)
    except ValueError:
        raise ValueError(
            'value {} need to be of type str '
            'for field `design_stat_heating`'.format(value))
    if ',' in value:
        raise ValueError('value should not contain a comma '
                         'for field `design_stat_heating`')
    if value not in ("Heating",):
        raise ValueError('value {} is not an accepted value for '
                         'field `design_stat_heating`'.format(value))
    self._design_stat_heating = value
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def coldestmonth(self, value=None): """Corresponds to IDD Field `coldestmonth` Args: value (int): value for IDD Field `coldestmonth` value >= 1 value <= 12 if `value` is None it will not be checked against the specification and is assumed to be a missing value Raises: ValueError: if `value` is not a valid value """
# Coerce to int and enforce a valid month number (1-12);
# None marks a missing value and is stored unchecked.
if value is None:
    self._coldestmonth = value
else:
    try:
        value = int(value)
    except ValueError:
        raise ValueError('value {} need to be of type int '
                         'for field `coldestmonth`'.format(value))
    if value < 1:
        raise ValueError('value need to be greater or equal 1 '
                         'for field `coldestmonth`')
    if value > 12:
        raise ValueError('value need to be smaller 12 '
                         'for field `coldestmonth`')
    self._coldestmonth = value
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def ws004c(self, value=None): """Corresponds to IDD Field `ws004c` Args: value (float): value for IDD Field `ws004c` Unit: m/s if `value` is None it will not be checked against the specification and is assumed to be a missing value Raises: ValueError: if `value` is not a valid value """
# Coerce to float when provided; None is kept to mark a missing value.
if value is None:
    self._ws004c = value
else:
    try:
        value = float(value)
    except ValueError:
        raise ValueError('value {} need to be of type float '
                         'for field `ws004c`'.format(value))
    self._ws004c = value
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def db_ws004c(self, value=None): """ Corresponds to IDD Field `db_ws004c` Mean coincident dry-bulb temperature to wind speed corresponding to 0.40% cumulative frequency for coldest month Args: value (float): value for IDD Field `db_ws004c` Unit: C if `value` is None it will not be checked against the specification and is assumed to be a missing value Raises: ValueError: if `value` is not a valid value """
# Coerce to float when provided; None is kept to mark a missing value.
if value is None:
    self._db_ws004c = value
else:
    try:
        value = float(value)
    except ValueError:
        raise ValueError('value {} need to be of type float '
                         'for field `db_ws004c`'.format(value))
    self._db_ws004c = value
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def ws010c(self, value=None): """ Corresponds to IDD Field `ws010c` Wind speed corresponding to 1.0% cumulative frequency of occurrence for coldest month; Args: value (float): value for IDD Field `ws010c` Unit: m/s if `value` is None it will not be checked against the specification and is assumed to be a missing value Raises: ValueError: if `value` is not a valid value """
# Coerce to float when provided; None is kept to mark a missing value.
if value is None:
    self._ws010c = value
else:
    try:
        value = float(value)
    except ValueError:
        raise ValueError('value {} need to be of type float '
                         'for field `ws010c`'.format(value))
    self._ws010c = value
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def db_ws010c(self, value=None): """ Corresponds to IDD Field `db_ws010c` Mean coincident dry-bulb temperature to wind speed corresponding to 1.0% cumulative frequency for coldest month Args: value (float): value for IDD Field `db_ws010c` Unit: C if `value` is None it will not be checked against the specification and is assumed to be a missing value Raises: ValueError: if `value` is not a valid value """
# Coerce to float when provided; None is kept to mark a missing value.
if value is None:
    self._db_ws010c = value
else:
    try:
        value = float(value)
    except ValueError:
        raise ValueError('value {} need to be of type float '
                         'for field `db_ws010c`'.format(value))
    self._db_ws010c = value
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def ws_db996(self, value=None): """ Corresponds to IDD Field `ws_db996` Mean wind speed coincident with 99.6% dry-bulb temperature Args: value (float): value for IDD Field `ws_db996` Unit: m/s if `value` is None it will not be checked against the specification and is assumed to be a missing value Raises: ValueError: if `value` is not a valid value """
# Coerce to float when provided; None is kept to mark a missing value.
if value is None:
    self._ws_db996 = value
else:
    try:
        value = float(value)
    except ValueError:
        raise ValueError('value {} need to be of type float '
                         'for field `ws_db996`'.format(value))
    self._ws_db996 = value
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def design_stat_cooling(self, value="Cooling"): """Corresponds to IDD Field `design_stat_cooling` Args: value (str): value for IDD Field `design_stat_cooling` Accepted values are: - Cooling Default value: Cooling if `value` is None it will not be checked against the specification and is assumed to be a missing value Raises: ValueError: if `value` is not a valid value """
# String field restricted to the single accepted literal "Cooling";
# None marks a missing value and is stored unchecked.
if value is None:
    self._design_stat_cooling = value
else:
    try:
        value = str(value)
    except ValueError:
        raise ValueError(
            'value {} need to be of type str '
            'for field `design_stat_cooling`'.format(value))
    if ',' in value:
        raise ValueError('value should not contain a comma '
                         'for field `design_stat_cooling`')
    if value not in ("Cooling",):
        raise ValueError('value {} is not an accepted value for '
                         'field `design_stat_cooling`'.format(value))
    self._design_stat_cooling = value
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def dbr(self, value=None): """Corresponds to IDD Field `dbr` Daily temperature range for hottest month. [defined as mean of the difference between daily maximum and daily minimum dry-bulb temperatures for hottest month] Args: value (float): value for IDD Field `dbr` Unit: C if `value` is None it will not be checked against the specification and is assumed to be a missing value Raises: ValueError: if `value` is not a valid value """
# Coerce to float when provided; None is kept to mark a missing value.
if value is None:
    self._dbr = value
else:
    try:
        value = float(value)
    except ValueError:
        raise ValueError('value {} need to be of type float '
                         'for field `dbr`'.format(value))
    self._dbr = value
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def wb004(self, value=None): """ Corresponds to IDD Field `wb004` Wet-bulb temperature corresponding to 0.4% annual cumulative frequency of occurrence Args: value (float): value for IDD Field `wb004` Unit: C if `value` is None it will not be checked against the specification and is assumed to be a missing value Raises: ValueError: if `value` is not a valid value """
# Coerce to float when provided; None is kept to mark a missing value.
if value is None:
    self._wb004 = value
else:
    try:
        value = float(value)
    except ValueError:
        raise ValueError('value {} need to be of type float '
                         'for field `wb004`'.format(value))
    self._wb004 = value
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def db_wb004(self, value=None): """ Corresponds to IDD Field `db_wb004` mean coincident dry-bulb temperature to Wet-bulb temperature corresponding to 0.4% annual cumulative frequency of occurrence Args: value (float): value for IDD Field `db_wb004` Unit: C if `value` is None it will not be checked against the specification and is assumed to be a missing value Raises: ValueError: if `value` is not a valid value """
# Coerce to float when provided; None is kept to mark a missing value.
if value is None:
    self._db_wb004 = value
else:
    try:
        value = float(value)
    except ValueError:
        raise ValueError('value {} need to be of type float '
                         'for field `db_wb004`'.format(value))
    self._db_wb004 = value
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def wb010(self, value=None): """ Corresponds to IDD Field `wb010` Wet-bulb temperature corresponding to 1.0% annual cumulative frequency of occurrence Args: value (float): value for IDD Field `wb010` Unit: C if `value` is None it will not be checked against the specification and is assumed to be a missing value Raises: ValueError: if `value` is not a valid value """
# Coerce to float when provided; None is kept to mark a missing value.
if value is None:
    self._wb010 = value
else:
    try:
        value = float(value)
    except ValueError:
        raise ValueError('value {} need to be of type float '
                         'for field `wb010`'.format(value))
    self._wb010 = value
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def db_wb010(self, value=None): """ Corresponds to IDD Field `db_wb010` mean coincident dry-bulb temperature to Wet-bulb temperature corresponding to 1.0% annual cumulative frequency of occurrence Args: value (float): value for IDD Field `db_wb010` Unit: C if `value` is None it will not be checked against the specification and is assumed to be a missing value Raises: ValueError: if `value` is not a valid value """
# Coerce to float when provided; None is kept to mark a missing value.
if value is None:
    self._db_wb010 = value
else:
    try:
        value = float(value)
    except ValueError:
        raise ValueError('value {} need to be of type float '
                         'for field `db_wb010`'.format(value))
    self._db_wb010 = value
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def wb020(self, value=None): """ Corresponds to IDD Field `wb020` Wet-bulb temperature corresponding to 2.0% annual cumulative frequency of occurrence Args: value (float): value for IDD Field `wb020` Unit: C if `value` is None it will not be checked against the specification and is assumed to be a missing value Raises: ValueError: if `value` is not a valid value """
# Coerce to float when provided; None is kept to mark a missing value.
if value is None:
    self._wb020 = value
else:
    try:
        value = float(value)
    except ValueError:
        raise ValueError('value {} need to be of type float '
                         'for field `wb020`'.format(value))
    self._wb020 = value
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def db_wb020(self, value=None): """ Corresponds to IDD Field `db_wb020` mean coincident dry-bulb temperature to Wet-bulb temperature corresponding to 2.0% annual cumulative frequency of occurrence Args: value (float): value for IDD Field `db_wb020` Unit: C if `value` is None it will not be checked against the specification and is assumed to be a missing value Raises: ValueError: if `value` is not a valid value """
# Coerce to float when provided; None is kept to mark a missing value.
if value is None:
    self._db_wb020 = value
else:
    try:
        value = float(value)
    except ValueError:
        raise ValueError('value {} need to be of type float '
                         'for field `db_wb020`'.format(value))
    self._db_wb020 = value
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def ws_db004(self, value=None): """ Corresponds to IDD Field `ws_db004` Mean wind speed coincident with 0.4% dry-bulb temperature Args: value (float): value for IDD Field `ws_db004` Unit: m/s if `value` is None it will not be checked against the specification and is assumed to be a missing value Raises: ValueError: if `value` is not a valid value """
# Coerce to float when provided; None is kept to mark a missing value.
if value is None:
    self._ws_db004 = value
else:
    try:
        value = float(value)
    except ValueError:
        raise ValueError('value {} need to be of type float '
                         'for field `ws_db004`'.format(value))
    self._ws_db004 = value
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def dp004(self, value=None): """ Corresponds to IDD Field `dp004` Dew-point temperature corresponding to 0.4% annual cumulative frequency of occurrence Args: value (float): value for IDD Field `dp004` Unit: C if `value` is None it will not be checked against the specification and is assumed to be a missing value Raises: ValueError: if `value` is not a valid value """
# Coerce to float when provided; None is kept to mark a missing value.
if value is None:
    self._dp004 = value
else:
    try:
        value = float(value)
    except ValueError:
        raise ValueError('value {} need to be of type float '
                         'for field `dp004`'.format(value))
    self._dp004 = value
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def hr_dp004(self, value=None): """ Corresponds to IDD Field `hr_dp004` humidity ratio corresponding to Dew-point temperature corresponding to 0.4% annual cumulative frequency of occurrence Args: value (float): value for IDD Field `hr_dp004` if `value` is None it will not be checked against the specification and is assumed to be a missing value Raises: ValueError: if `value` is not a valid value """
# Coerce to float when provided; None is kept to mark a missing value.
if value is None:
    self._hr_dp004 = value
else:
    try:
        value = float(value)
    except ValueError:
        raise ValueError('value {} need to be of type float '
                         'for field `hr_dp004`'.format(value))
    self._hr_dp004 = value