_id
stringlengths
2
7
title
stringlengths
1
88
partition
stringclasses
3 values
text
stringlengths
75
19.8k
language
stringclasses
1 value
meta_information
dict
q38200
Service._determine_auth_mechanism
train
def _determine_auth_mechanism(username, password, delegation): """ if the username contains at '@' sign we will use kerberos if the username contains a '/ we will use ntlm either NTLM or Kerberos. In fact its basically always Negotiate. """ if re.match('(.*)@(.+)', username) is not None: if delegation is True: raise Exception('Kerberos is not yet supported, specify the username in <domain>\<username> form for NTLM') else: raise Exception('Kerberos is not yet supported, specify the username in <domain>>\<username> form for NTLM') # check for NT format 'domain\username' a blank domain or username is invalid legacy = re.match('(.*)\\\\(.*)', username) if legacy is not None: if not legacy.group(1): raise Exception('Please specify the Windows domain for user in <domain>\<username> format') if not legacy.group(2): raise Exception('Please specify the Username of the user in <domain>\<username> format') if delegation is True: return HttpCredSSPAuth(legacy.group(1), legacy.group(2), password) else: return HttpNtlmAuth(legacy.group(1), legacy.group(2), password) #return HttpCredSSPAuth("SERVER2012", "Administrator", password) # attempt NTLM (local account, not domain) - if username is '' then we try anonymous NTLM auth # as if anyone will configure that - uf! return HttpNtlmAuth('', username, password)
python
{ "resource": "" }
q38201
Service._create_request
train
def _create_request(headers, body):
    """
    Build the SOAP 1.2 Envelope as an XML string.

    An ordered mapping is used so the Header element is emitted before
    the Body element, as the SOAP envelope layout requires.
    """
    soap_envelope = OrderedDict(
        ('@xmlns:' + short_name, uri)
        for uri, short_name in Service.Namespaces.items()
    )
    soap_envelope['soap:Header'] = headers
    soap_envelope['soap:Body'] = body
    return xmltodict.unparse({'soap:Envelope': soap_envelope}, encoding='utf-8')
python
{ "resource": "" }
q38202
Service._parse_response
train
def _parse_response(xml):
    """
    Parse the SOAP response into a python object.

    :raises WSManException: when the XML cannot be parsed at all
    :raises WSManOperationException: when the body contains a Fault
    """
    try:
        parsed = xmltodict.parse(
            xml, process_namespaces=True, namespaces=Service.Namespaces)
    except Exception:
        logging.debug('unable to parse the xml response: %s', xml)
        raise WSManException("the remote host returned an invalid soap response")
    # the delete response has an empty body
    body = parsed['soap:Envelope']['soap:Body']
    if body is not None and 'soap:Fault' in body:
        fault_text = body['soap:Fault']['soap:Reason']['soap:Text']['#text']
        raise WSManOperationException(fault_text)
    return body
python
{ "resource": "" }
q38203
MongoCollection.get_mongo_cursor
train
def get_mongo_cursor(self, bulk=False):
    """
    Returns Mongo cursor using the class variables

    :param bulk: bulk writer option
    :type bulk: boolean
    :return: mongo collection for which cursor will be created
    :rtype: mongo colection object
    """
    try:
        # Prefer explicit host/port details, falling back to the URI.
        if self.host:
            port = self.port if self.port else MongoCollection.DEFAULT_PORT
            client = MongoClient(self.host, port)
        else:
            client = MongoClient(self.mongo_uri)
        cursor = client[self.db_name][self.collection]
        if bulk:
            try:
                return cursor.initialize_unordered_bulk_op()
            except Exception as e:
                msg = "Mongo Bulk cursor could not be fetched, Error: {error}".format(
                    error=str(e))
                raise Exception(msg)
        return cursor
    except Exception as e:
        msg = "Mongo Connection could not be established for Mongo Uri: {mongo_uri}, Database: {db_name}, Collection {col}, Error: {error}".format(
            mongo_uri=self.mongo_uri, db_name=self.db_name,
            col=self.collection, error=str(e))
        raise Exception(msg)
python
{ "resource": "" }
q38204
MongoCollection.bulk_cursor_execute
train
def bulk_cursor_execute(self, bulk_cursor):
    """
    Executes the bulk_cursor

    :param bulk_cursor: Cursor to perform bulk operations
    :type bulk_cursor: pymongo bulk cursor object
    :returns: the bulk write result returned by ``execute()``
    """
    try:
        result = bulk_cursor.execute()
    except BulkWriteError as bwe:
        msg = "bulk_cursor_execute: Exception in executing Bulk cursor to mongo with {error}".format(
            error=str(bwe))
        raise Exception(msg)
    except Exception as e:
        msg = "Mongo Bulk cursor could not be fetched, Error: {error}".format(
            error=str(e))
        raise Exception(msg)
    # BUG FIX: the execute() result was computed but never returned,
    # even though the docstring promises a return value.
    return result
python
{ "resource": "" }
q38205
detect_terminal
train
def detect_terminal(_environ=os.environ):
    """
    Detect "terminal" you are using.

    First, this function checks if you are in tmux, byobu, or screen.
    If not it uses $COLORTERM [#]_ if defined and fallbacks to $TERM.

    .. [#] So, if you are in Gnome Terminal you have "gnome-terminal"
       instead of "xterm-color".
    """
    if _environ.get('TMUX'):
        return 'tmux'
    elif subdict_by_key_prefix(_environ, 'BYOBU'):
        return 'byobu'
    # BUG FIX: $TERM may be unset; default to '' so .startswith() does
    # not raise AttributeError on None.
    elif _environ.get('TERM', '').startswith('screen'):
        return _environ['TERM']
    elif _environ.get('COLORTERM'):
        return _environ['COLORTERM']
    else:
        return _environ.get('TERM')
python
{ "resource": "" }
q38206
ServerConnection3.get_waittime
train
def get_waittime(self):
    """Return the appropriate time to wait, if we sent too many messages

    :returns: the time to wait in seconds
    :rtype: :class:`float`
    :raises: None
    """
    now = time.time()
    self.sentmessages.appendleft(now)
    # Only rate limit once the message window is completely full.
    if len(self.sentmessages) < self.sentmessages.maxlen:
        return 0
    oldest = self.sentmessages[-1]
    remaining = self.limitinterval - (now - oldest)
    # Add a one second buffer on top of the remaining interval.
    return remaining + 1 if remaining > 0 else 0
python
{ "resource": "" }
q38207
ServerConnection3._process_line
train
def _process_line(self, line):
    """Process the given line and handle the events

    :param line: the raw message
    :type line: :class:`str`
    :returns: None
    :rtype: None
    :raises: None
    """
    m = self._rfc_1459_command_regexp.match(line)
    prefix = m.group('prefix')
    tags = self._process_tags(m.group('tags'))
    source = self._process_prefix(prefix)
    command = self._process_command(m.group('command'))
    arguments = self._process_arguments(m.group('argument'))
    # The first prefix seen is taken to be the real server name.
    if not self.real_server_name:
        self.real_server_name = prefix
    # Translate numerics into more readable strings.
    command = irc.events.numeric.get(command, command)
    # Everything except privmsg/notice is delegated to the base class.
    if command not in ["privmsg", "notice"]:
        return super(ServerConnection3, self)._process_line(line)
    event = Event3("all_raw_messages", self.get_server_name(), None, [line], tags=tags)
    self._handle_event(event)
    target, msg = arguments[0], arguments[1]
    # CTCP payloads are dequoted into a list of message parts.
    messages = irc.ctcp.dequote(msg)
    command = self._resolve_command(command, target)
    for m in messages:
        self._handle_message(tags, source, command, target, m)
python
{ "resource": "" }
q38208
ServerConnection3._resolve_command
train
def _resolve_command(self, command, target):
    """Map 'privmsg'/'notice' to the event type matching the target.

    Channel targets yield "pubmsg"/"pubnotice"; user targets yield
    "privmsg"/"privnotice".

    :param command: The command string
    :type command: :class:`str`
    :param target: either a user or a channel
    :type target: :class:`str`
    :returns: the correct event type
    :rtype: :class:`str`
    :raises: None
    """
    targets_channel = irc.client.is_channel(target)
    if command == "privmsg":
        return "pubmsg" if targets_channel else "privmsg"
    return "pubnotice" if targets_channel else "privnotice"
python
{ "resource": "" }
q38209
ServerConnection3._handle_message
train
def _handle_message(self, tags, source, command, target, msg):
    """Construct the correct events and handle them

    :param tags: the tags of the message
    :type tags: :class:`list` of :class:`message.Tag`
    :param source: the sender of the message
    :type source: :class:`str`
    :param command: the event type
    :type command: :class:`str`
    :param target: the target of the message
    :type target: :class:`str`
    :param msg: the content
    :type msg: :class:`str`
    :returns: None
    :rtype: None
    :raises: None
    """
    if isinstance(msg, tuple):
        # A tuple payload is a CTCP-quoted message: map messages to
        # "ctcp" and everything else to "ctcpreply".
        if command in ["privmsg", "pubmsg"]:
            command = "ctcp"
        else:
            command = "ctcpreply"
        msg = list(msg)
        log.debug("tags: %s, command: %s, source: %s, target: %s, "
                  "arguments: %s", tags, command, source, target, msg)
        event = Event3(command, source, target, msg, tags=tags)
        self._handle_event(event)
        # A CTCP ACTION ("/me ...") additionally fires an "action"
        # event carrying the action text.
        if command == "ctcp" and msg[0] == "ACTION":
            event = Event3("action", source, target, msg[1:], tags=tags)
            self._handle_event(event)
    else:
        log.debug("tags: %s, command: %s, source: %s, target: %s, "
                  "arguments: %s", tags, command, source, target, [msg])
        event = Event3(command, source, target, [msg], tags=tags)
        self._handle_event(event)
python
{ "resource": "" }
q38210
ServerConnection3._process_tags
train
def _process_tags(self, tags):
    """Process the tags of the message

    :param tags: the tags string of a message
    :type tags: :class:`str` | None
    :returns: list of tags
    :rtype: :class:`list` of :class:`message.Tag`
    :raises: None
    """
    if not tags:
        return []
    parsed = []
    for raw_tag in tags.split(';'):
        parsed.append(message.Tag.from_str(raw_tag))
    return parsed
python
{ "resource": "" }
q38211
ServerConnection3._process_arguments
train
def _process_arguments(self, arguments): """Process the arguments :param arguments: arguments string of a message :type arguments: :class:`str` | None :returns: A list of arguments :rtype: :class:`list` of :class:`str` | None :raises: None """ if not arguments: return None a = arguments.split(" :", 1) arglist = a[0].split() if len(a) == 2: arglist.append(a[1]) return arglist
python
{ "resource": "" }
q38212
CollectionsProcessedData.fetch_and_process_data
train
def fetch_and_process_data(self, collection, pipeline):
    """
    Fetch and process data from *collection* by aggregating *pipeline*.

    :param collection: The collection object for which mongo connection
        has to be made
    :type collection: MongoCollection
    :param pipeline: The pipeline using which aggregation will be performed
    :type pipeline: list of dicts
    :return: dict mapping a tuple of join-key values to the grouped docs
    """
    cursor = collection.get_mongo_cursor()
    grouped = {}
    for doc in cursor.aggregate(pipeline):
        group_key = tuple(doc["_id"].get(k, None) for k in self.join_keys)
        grouped[group_key] = doc['docs']
    return grouped
python
{ "resource": "" }
q38213
CollectionsProcessedData.get_collections_data
train
def get_collections_data(self):
    """
    Driver function to fetch the data from the two collections.

    Populates ``self.collections_data`` with the processed data for the
    'left' and 'right' collections.
    """
    collections = {
        'left': self.left_collection,
        'right': self.right_collection
    }
    # BUG FIX: dict.iteritems() only exists on Python 2 and raised
    # AttributeError on Python 3; .items() works on both.
    for collection_type, collection in collections.items():
        pipeline = self.build_pipeline(collection)
        self.collections_data[collection_type] = self.fetch_and_process_data(
            collection, pipeline)
python
{ "resource": "" }
q38214
parse_duration
train
def parse_duration(string):
    """
    Parse human readable duration.

    >>> parse_duration('1m')
    60
    >>> parse_duration('7 days') == 7 * 24 * 60 * 60
    True
    """
    if string.isdigit():
        return int(string)
    try:
        return float(string)
    except ValueError:
        pass
    stripped = string.rstrip()
    lowered = stripped.lower()
    for suffix, multiplier in DURATION_SUFFIX_MAP.items():
        if lowered.endswith(suffix):
            try:
                return parse_duration(stripped[:-len(suffix)].strip()) * multiplier
            except TypeError:
                # Nested parse returned None: remainder is unparseable.
                return
python
{ "resource": "" }
q38215
FakerModel.fake_chars_or_choice
train
def fake_chars_or_choice(self, field_name):
    """
    Generate fake chars for a `CharField`, or pick a random value from
    the field's choices when it declares any.

    Usage:
        faker.fake_chars_or_choice('field_name')

    Example for field:
        TYPE_CHOICES = (
            ('project', 'I wanna to talk about project'),
            ('feedback', 'I want to report a bugs or give feedback'),
            ('hello', 'I just want to say hello')
        )
        type = models.CharField(max_length=200, choices=TYPE_CHOICES)
    """
    fields_helper = self.djipsum_fields()
    return fields_helper.randomCharField(self.model_class(), field_name=field_name)
python
{ "resource": "" }
q38216
FakerModel.fake_m2m
train
def fake_m2m(self, obj, field_name):
    """
    Attach random related objects to an m2m relationship.

    The ManyToManyField need specific object,
    so i handle it after created the object.
    """
    instance_m2m = getattr(obj, field_name)
    objects_m2m = instance_m2m.model.objects.all()
    if objects_m2m.exists():
        ids_m2m = [i.pk for i in objects_m2m]
        # Sample max(pk)-1 distinct values from the half-open pk range.
        # NOTE(review): assumes pks are dense enough that
        # range(min, max) holds at least max-1 entries — verify.
        random_decission = random.sample(
            range(min(ids_m2m), max(ids_m2m)),
            max(ids_m2m) - 1
        )
        # With too few candidates, fall back to one random existing pk.
        if len(random_decission) <= 2:
            random_decission = [
                self.djipsum_fields().randomize(ids_m2m)
            ]
        related_objects = [
            rel_obj for rel_obj in objects_m2m
            if rel_obj.pk in random_decission
        ]
        instance_m2m.add(*related_objects)
python
{ "resource": "" }
q38217
FakerModel.create
train
def create(self, fields):
    """
    Create the object only once. So, you need loop to usage.

    :param `fields` is dictionary fields.
    :returns: the created model instance
    """
    # Cleaning the fields, and check if has `ForeignKey` type.
    cleaned_fields = {}
    for key, value in fields.items():
        if type(value) is dict:
            # Dict values describe related fields; only 'fk' entries
            # contribute a concrete value at creation time.
            # FIX: narrowed bare `except:` so KeyboardInterrupt/SystemExit
            # are no longer swallowed.
            try:
                if value['type'] == 'fk':
                    cleaned_fields[key] = self.fake_fk(value['field_name'])
            except Exception:
                pass
        else:
            cleaned_fields[key] = value
    # Creating the object from dictionary fields.
    model_class = self.model_class()
    obj = model_class.objects.create(**cleaned_fields)
    # The `ManyToManyField` need specific object,
    # so i handle it after created the object.
    for key, value in fields.items():
        if type(value) is dict:
            try:
                if value['type'] == 'm2m':
                    self.fake_m2m(obj, value['field_name'])
            except Exception:
                pass
    # Forms expose save_m2m(); plain models just save.
    try:
        obj.save_m2m()
    except Exception:
        obj.save()
    # FIX: removed the outer `except Exception as e: raise e`, which
    # only truncated the traceback without adding information.
    return obj
python
{ "resource": "" }
q38218
trading_dates
train
def trading_dates(start, end, calendar='US'):
    """
    Trading dates for given exchange

    Args:
        start: start date
        end: end date
        calendar: exchange as string

    Returns:
        pd.DatetimeIndex: datetime index

    Examples:
        >>> bus_dates = ['2018-12-24', '2018-12-26', '2018-12-27']
        >>> trd_dates = trading_dates(start='2018-12-23', end='2018-12-27')
        >>> assert len(trd_dates) == len(bus_dates)
        >>> assert pd.Series(trd_dates == pd.DatetimeIndex(bus_dates)).all()
    """
    date_range = dict(
        start=pd.Timestamp(start, tz='UTC').date(),
        end=pd.Timestamp(end, tz='UTC').date(),
    )
    # Resolve e.g. USTradingCalendar from this module by name.
    cal_cls = getattr(sys.modules[__name__], f'{calendar}TradingCalendar')
    holidays = cal_cls().holidays(**date_range)
    return pd.bdate_range(**date_range).drop(holidays)
python
{ "resource": "" }
q38219
SensuAPI.get_clients
train
def get_clients(self, limit=None, offset=None):
    """
    Returns a list of clients.
    """
    params = {}
    if limit:
        params['limit'] = limit
    if offset:
        params['offset'] = offset
    response = self._request('GET', '/clients', data=json.dumps(params))
    return response.json()
python
{ "resource": "" }
q38220
SensuAPI.get_client_data
train
def get_client_data(self, client):
    """
    Returns a client.
    """
    response = self._request('GET', '/clients/{}'.format(client))
    return response.json()
python
{ "resource": "" }
q38221
SensuAPI.get_client_history
train
def get_client_history(self, client):
    """
    Returns the history for a client.
    """
    response = self._request('GET', '/clients/{}/history'.format(client))
    return response.json()
python
{ "resource": "" }
q38222
SensuAPI.get_all_client_events
train
def get_all_client_events(self, client):
    """
    Returns the list of current events for a given client.
    """
    response = self._request('GET', '/events/{}'.format(client))
    return response.json()
python
{ "resource": "" }
q38223
SensuAPI.get_event
train
def get_event(self, client, check):
    """
    Returns an event for a given client & check name.
    """
    response = self._request('GET', '/events/{}/{}'.format(client, check))
    return response.json()
python
{ "resource": "" }
q38224
SensuAPI.get_check
train
def get_check(self, check):
    """
    Returns a check.
    """
    response = self._request('GET', '/checks/{}'.format(check))
    return response.json()
python
{ "resource": "" }
q38225
SensuAPI.post_check_request
train
def post_check_request(self, check, subscribers):
    """
    Issues a check execution request.
    """
    payload = json.dumps({
        'check': check,
        'subscribers': [subscribers]
    })
    self._request('POST', '/request', data=payload)
    return True
python
{ "resource": "" }
q38226
SensuAPI.post_silence_request
train
def post_silence_request(self, kwargs):
    """
    Create a silence entry.
    """
    payload = json.dumps(kwargs)
    self._request('POST', '/silenced', data=payload)
    return True
python
{ "resource": "" }
q38227
SensuAPI.clear_silence
train
def clear_silence(self, kwargs):
    """
    Clear a silence entry.
    """
    payload = json.dumps(kwargs)
    self._request('POST', '/silenced/clear', data=payload)
    return True
python
{ "resource": "" }
q38228
SensuAPI.get_aggregate_check
train
def get_aggregate_check(self, check, age=None):
    """
    Returns the list of aggregates for a given check
    """
    params = {'max_age': age} if age else {}
    response = self._request('GET', '/aggregates/{}'.format(check),
                             data=json.dumps(params))
    return response.json()
python
{ "resource": "" }
q38229
SensuAPI.get_health
train
def get_health(self, consumers=2, messages=100):
    """
    Returns health information on transport & Redis connections.
    """
    payload = json.dumps({'consumers': consumers, 'messages': messages})
    try:
        self._request('GET', '/health', data=payload)
    except SensuAPIException:
        return False
    return True
python
{ "resource": "" }
q38230
SensuAPI.get_results
train
def get_results(self, client):
    """
    Returns a result.
    """
    response = self._request('GET', '/results/{}'.format(client))
    return response.json()
python
{ "resource": "" }
q38231
SensuAPI.get_result
train
def get_result(self, client, check):
    """
    Returns an event for a given client & result name.
    """
    response = self._request('GET', '/results/{}/{}'.format(client, check))
    return response.json()
python
{ "resource": "" }
q38232
SensuAPI.delete_result
train
def delete_result(self, client, check):
    """
    Deletes an check result data for a given check on a given client.
    """
    endpoint = '/results/{}/{}'.format(client, check)
    self._request('DELETE', endpoint)
    return True
python
{ "resource": "" }
q38233
SensuAPI.post_result_data
train
def post_result_data(self, client, check, output, status):
    """
    Posts check result data.
    """
    payload = json.dumps({
        'source': client,
        'name': check,
        'output': output,
        'status': status,
    })
    self._request('POST', '/results', data=payload)
    return True
python
{ "resource": "" }
q38234
SensuAPI.get_subscriptions_channel
train
def get_subscriptions_channel(self, search_channel):
    """
    Return all the nodes that are subscribed to the specified channel
    """
    matched = []
    for client in self.get_clients():
        if 'subscriptions' not in client:
            continue
        subs = client['subscriptions']
        # Subscriptions may be a list of channels or a single string.
        if isinstance(subs, list):
            if search_channel in subs:
                matched.append(client['name'])
        elif search_channel == subs:
            matched.append(client['name'])
    return matched
python
{ "resource": "" }
q38235
clrmagic_build_ext.build_extension
train
def build_extension(self, ext):
    """
    build clrmagic.dll using csc or mcs
    """
    if sys.platform == "win32":
        compiler = "C:\\Windows\\Microsoft.NET\\Framework\\v4.0.30319\\csc.exe"
    else:
        compiler = "mcs"
    command = [compiler, "/target:library", "clrmagic.cs"]
    check_call(" ".join(command), shell=True)
python
{ "resource": "" }
q38236
EventSourceMixin.wait_for_event
train
def wait_for_event(self, event, timeout=10):
    """
    Block until the given event fires and return its params.

    :param event: The event to handle.
    :param timeout: The maximum time to wait before raising
        :exc:`.TimeoutError`.
    :return: The event params.
    """
    handler = self.__handler
    return handler.wait_for_event(event, timeout=timeout)
python
{ "resource": "" }
q38237
search_greater
train
def search_greater(values, target):
    """
    Binary search: return the first index in *values* whose tuple's
    first item is greater than or equal to *target*.
    """
    lo, hi = 0, len(values)
    while lo < hi:
        mid = (lo + hi) // 2
        if values[mid][0] < target:
            lo = mid + 1
        else:
            hi = mid
    return lo
python
{ "resource": "" }
q38238
ReservoirBase.add
train
def add(self, value):
    """
    Add a value to the reservoir

    The value will be casted to a floating-point, so a TypeError or a
    ValueError may be raised.
    """
    coerced = value if isinstance(value, float) else float(value)
    return self._do_add(coerced)
python
{ "resource": "" }
q38239
ReservoirBase.same_kind
train
def same_kind(self, other):
    """
    Return True if "other" is an object of the same type and it was
    instantiated with the same parameters
    """
    if type(self) is not type(other):
        return False
    return self._same_parameters(other)
python
{ "resource": "" }
q38240
ExponentialDecayingReservoir._lookup
train
def _lookup(self, timestamp):
    """
    Return the index of the value stored for "timestamp", or None.

    Timestamps are floating-point values, so two are considered equal
    when their absolute difference is smaller than self.EPSILON.
    """
    idx = search_greater(self._values, timestamp)
    if idx >= len(self._values):
        return None
    if math.fabs(self._values[idx][0] - timestamp) >= self.EPSILON:
        return None
    return idx
python
{ "resource": "" }
q38241
ExponentialDecayingReservoir._put
train
def _put(self, timestamp, value): """Replace the value associated with "timestamp" or add the new value""" idx = self._lookup(timestamp) if idx is not None: self._values[idx] = (timestamp, value) else: self._values.append((timestamp, value))
python
{ "resource": "" }
q38242
AppGlances.reload_glance
train
def reload_glance(self, target_app, slices=None):
    """
    Reloads an app's glance. Blocks as long as necessary.

    :param target_app: The UUID of the app for which to reload its glance.
    :type target_app: ~uuid.UUID
    :param slices: The slices with which to reload the app's glance.
    :type slices: list[.AppGlanceSlice]
    """
    glance = AppGlance(
        version=1,
        creation_time=time.time(),
        slices=slices or [],
    )
    wrapper = SyncWrapper(self._blobdb.insert, BlobDatabaseID.AppGlance,
                          target_app, glance.serialise())
    wrapper.wait()
python
{ "resource": "" }
q38243
LoginServer.set_token
train
def set_token(self, redirecturl):
    """Set the token on the session

    :param redirecturl: the original full redirect url
    :type redirecturl: :class:`str`
    :returns: None
    :rtype: None
    :raises: None
    """
    session = self.session
    log.debug('Setting the token on %s.' % session)
    session.token_from_fragment(redirecturl)
python
{ "resource": "" }
q38244
install_template
train
def install_template(username, repo):
    """Installs a Blended template from GitHub"""
    print("Installing template from " + username + "/" + repo)
    templates_dir = os.path.join(cwd, "templates")
    getunzipped(username, repo, templates_dir)
python
{ "resource": "" }
q38245
import_wp
train
def import_wp(filepath):
    """Imports A WordPress export and converts it to a Blended site"""
    print("\nBlended: Static Website Generator -\n")
    checkConfig()
    print("Importing from WordPress...")
    wp = parseXML(filepath)
    channel = wp.rss.channel
    wname = channel.title.cdata
    wdesc = channel.description.cdata
    wlan = channel.language.cdata
    wurl = channel.link.cdata
    aname = channel.wp_author.wp_author_display_name.cdata.strip()
    createBlendedFolders()
    # Populate the configuration file
    createConfig(app_version=app_version, wname=wname, wdesc=wdesc,
                 wlan=wlan, wurl=wurl, aname=aname)
    # Write each exported post as a content page.
    for item in channel.item:
        target = os.path.join(cwd, "content",
                              item.title.cdata.replace(" ", "_") + ".html")
        with open(target, 'w') as wfile:
            wfile.write(item.content_encoded.cdata.strip())
    print("\nYour website has been imported from WordPress.")
python
{ "resource": "" }
q38246
import_blogger
train
def import_blogger(filepath):
    """Imports A Blogger export and converts it to a Blended site"""
    print("\nBlended: Static Website Generator -\n")
    checkConfig()
    print("Importing from Blogger...")
    blogger = parseXML(filepath)
    wname = blogger.feed.title.cdata
    aname = blogger.feed.author.name.cdata.strip()
    createBlendedFolders()
    # Populate the configuration file
    createConfig(app_version=app_version, wname=wname, aname=aname)
    # Blogger feeds mix posts and comments; only entries whose id
    # mentions "post" are actual posts.
    for entry in blogger.feed.entry:
        if "post" not in entry.id.cdata:
            continue
        target = os.path.join(cwd, "content",
                              entry.title.cdata.replace(" ", "_") + ".html")
        with open(target, 'w') as wfile:
            wfile.write(entry.content.cdata.strip())
    print("\nYour website has been imported from Blogger.")
python
{ "resource": "" }
q38247
install_plugin
train
def install_plugin(username, repo):
    """Installs a Blended plugin from GitHub"""
    print("Installing plugin from " + username + "/" + repo)
    # BUG FIX: pip.main() was removed from pip's public API in pip 10;
    # invoking pip through the current interpreter works everywhere.
    import subprocess
    import sys
    subprocess.check_call([
        sys.executable, '-m', 'pip', 'install', '-U',
        "git+git://github.com/" + username + "/" + repo + ".git",
    ])
python
{ "resource": "" }
q38248
init
train
def init():
    """Initiates a new website"""
    print("Blended: Static Website Generator -\n")
    checkConfig()
    # raw_input only exists on Python 2; the conditional expression
    # never evaluates it on Python 3.
    prompt = input if (sys.version_info > (3, 0)) else raw_input
    wname = prompt("Website Name: ")
    wdesc = prompt("Website Description: ")
    wlan = prompt("Website Language: ")
    wlic = prompt("Website License: ")
    aname = prompt("Author(s) Name(s): ")
    createBlendedFolders()
    # Populate the configuration file
    createConfig(app_version=app_version, wname=wname, wdesc=wdesc,
                 wlic=wlic, wlan=wlan, aname=aname)
    print("\nThe required files for your website have been generated.")
python
{ "resource": "" }
q38249
placeFiles
train
def placeFiles(ftp, path):
    """Upload the built files to FTP

    Recursively mirrors *path* onto the connected *ftp* server, skipping
    Blended's source folders and config files.
    """
    skip = ("config.py", "config.pyc", "templates", "content")
    for name in os.listdir(path):
        if name in skip:
            continue
        localpath = os.path.join(path, name)
        if os.path.isfile(localpath):
            print("STOR", name, localpath)
            # BUG FIX: the file handle was opened inline and never
            # closed; a context manager guarantees cleanup.
            with open(localpath, 'rb') as fh:
                ftp.storbinary('STOR ' + name, fh)
        elif os.path.isdir(localpath):
            print("MKD", name)
            try:
                ftp.mkd(name)
            # ignore "directory already exists"
            except error_perm as e:
                if not e.args[0].startswith('550'):
                    raise
            print("CWD", name)
            ftp.cwd(name)
            placeFiles(ftp, localpath)
            print("CWD", "..")
            ftp.cwd("..")
python
{ "resource": "" }
q38250
send_ftp
train
def send_ftp(outdir):
    """Upload the built website to FTP"""
    print("Uploading the files in the " + outdir + "/ directory!\n")
    # Make sure there is actually a configuration file
    config_file = os.path.join(cwd, "config.py")
    if not os.path.exists(config_file):
        sys.exit(
            "There dosen't seem to be a configuration file. Have you run the init command?")
    sys.path.insert(0, cwd)
    try:
        from config import ftp_server, ftp_username, ftp_password, ftp_port, ftp_upload_path
    except:
        sys.exit(
            "The FTP settings could not be found. Maybe your config file is too old. Re-run 'blended init' to fix it.")
    ftp = FTP()
    ftp.connect(ftp_server, ftp_port)
    ftp.login(ftp_username, ftp_password)
    build_path = os.path.join(cwd, outdir)
    try:
        ftp.cwd(ftp_upload_path)
        placeFiles(ftp, build_path)
    except:
        ftp.quit()
        sys.exit("Files not able to be uploaded! Are you sure the directory exists?")
    ftp.quit()
    print("\nFTP Done!")
python
{ "resource": "" }
q38251
clean_built
train
def clean_built(outdir):
    """Removes all built files"""
    print("Removing the built files!")
    # Remove the build folder if it exists.
    target = os.path.join(cwd, outdir)
    if os.path.exists(target):
        shutil.rmtree(target)
python
{ "resource": "" }
q38252
zip_built
train
def zip_built(outdir):
    """Packages the build folder into a zip"""
    print("Zipping the built files!")
    config_file = os.path.join(cwd, "config.py")
    if not os.path.exists(config_file):
        sys.exit(
            "There dosen't seem to be a configuration file. Have you run the init command?")
    sys.path.insert(0, cwd)
    try:
        from config import website_name
    except:
        sys.exit(
            "Some of the configuration values could not be found! Maybe your config.py is too old. Run 'blended init' to fix.")
    build_dir = os.path.join(cwd, outdir)
    # Archive name: <site>-build-<date>.zip
    archive_base = os.path.join(
        cwd, website_name.replace(" ", "_") + "-build-" + str(datetime.now().date()))
    if os.path.exists(build_dir):
        shutil.make_archive(archive_base, 'zip', build_dir)
    else:
        print("The " + outdir + "/ folder could not be found! Have you run 'blended build' yet?")
python
{ "resource": "" }
q38253
purge
train
def purge():
    """Removes all files generated by Blended"""
    print("Purging the Blended files!")
    # Generated directories.
    for folder in ("templates", "content", "build"):
        folder_path = os.path.join(cwd, folder)
        if os.path.exists(folder_path):
            shutil.rmtree(folder_path)
    # Generated configuration files (including stale backups).
    for filename in ("config.py", "config.pyc", "config.py.oldbak"):
        file_path = os.path.join(cwd, filename)
        if os.path.exists(file_path):
            os.remove(file_path)
python
{ "resource": "" }
q38254
build
train
def build(outdir):
    """Blends the generated files and outputs a HTML website"""
    print("Building your Blended files into a website!")
    # Python 2: ensure unicode content can be written out.
    reload(sys)
    sys.setdefaultencoding('utf8')
    build_files(outdir)
    print("The files are built! You can find them in the " + outdir +
          "/ directory. Run the view command to see what you have created in a web browser.")
python
{ "resource": "" }
q38255
interactive
train
def interactive(outdir):
    """Blends the generated files and outputs a HTML website on file change"""
    print("Building your Blended files into a website!")
    # Remember the output directory for the file-watcher callbacks.
    global outdir_type
    outdir_type = outdir
    # Python 2: ensure unicode content can be written out.
    reload(sys)
    sys.setdefaultencoding('utf8')
    build_files(outdir)
    print("Watching the content and templates directories for changes, press CTRL+C to stop...\n")
    watcher = Watcher()
    watcher.run()
python
{ "resource": "" }
q38256
view
train
def view(outdir):
    """Opens the built index.html file in a web browser"""
    index_path = os.path.realpath(os.path.join(cwd, outdir, "index.html"))
    if not os.path.exists(index_path):
        print("The index.html file could not be found in the " + outdir +
              "/ folder! Have you deleted it or have you built with home_page_list set to 'no' in config.py?")
        return
    webbrowser.open('file://' + index_path)
python
{ "resource": "" }
q38257
Watcher.run
train
def run(self):
    """Run the builder on changes"""
    event_handler = Handler()
    watch_paths = (os.path.join(cwd, "content"), os.path.join(cwd, "templates"))
    for watch_path in watch_paths:
        self.observer.schedule(event_handler, str(watch_path), recursive=True)
    self.observer.start()
    # Bare except is deliberate: CTRL+C (KeyboardInterrupt) stops the
    # observer cleanly.
    try:
        while True:
            time.sleep(5)
    except:
        self.observer.stop()
        print("\nObserver stopped.")
    self.observer.join()
python
{ "resource": "" }
q38258
_sanitize_numbers
train
def _sanitize_numbers(uncleaned_numbers): """ Convert strings to integers if possible """ cleaned_numbers = [] for x in uncleaned_numbers: try: cleaned_numbers.append(int(x)) except ValueError: cleaned_numbers.append(x) return cleaned_numbers
python
{ "resource": "" }
q38259
_handle_negatives
train
def _handle_negatives(numbers): """ Add the minimum negative number to all the numbers in the such that all the elements become >= 0 """ min_number = min(filter(lambda x : type(x)==int,numbers)) if min_number < 0: return [x+abs(min_number) if type(x)==int else x for x in numbers] else: return numbers
python
{ "resource": "" }
q38260
_draw_tickgram
train
def _draw_tickgram(numbers):
    """
    Takes a list of integers and generate the equivalent list of ticks
    corresponding to each of the number
    """
    max_number = max(filter(lambda x: type(x) == int, numbers))
    # If the maxium number is 0, then all the numbers should be 0
    # coz we have called handle_negatives prior to this function
    if max_number == 0:
        return upticks[0] * len(numbers)
    else:
        # Scale every int into [0, 1] relative to the maximum.
        normalized_numbers = [float(x) / max_number if type(x) == int else x for x in numbers]
        # Map each ratio onto a 1-based index into the upticks glyphs.
        upticks_indexes = [int(math.ceil(x * len(upticks))) if type(x) == float else x for x in normalized_numbers]
        # Non-numbers render as spaces; index 0 maps to the lowest glyph.
        return ''.join([' ' if type(x) == str else upticks[x - 1] if x != 0 else upticks[0] for x in upticks_indexes])
python
{ "resource": "" }
q38261
get_raw_gids
train
def get_raw_gids(model_params):
    '''
    Read the text file listing the GID ranges of the neuron populations
    created within the NEST simulation.

    Each line holds two integers (first and last GID of a population);
    the ranges are not contiguous because recording devices are created
    in between.

    Fixes a resource leak: the original never closed the file handle.

    :param model_params: object exposing ``raw_nest_output_path`` and
        ``GID_filename`` attributes.
    :returns: list of ``[first_gid, last_gid]`` integer pairs.
    '''
    path = os.path.join(model_params.raw_nest_output_path,
                        model_params.GID_filename)
    gids = []
    with open(path, 'r') as gidfile:
        for line in gidfile:
            parts = line.split()
            gids.append([int(parts[0]), int(parts[1])])
    return gids
python
{ "resource": "" }
q38262
replace_folder
train
def replace_folder(path):
    """Ensure *path* exists as an empty folder, deleting any previous contents.

    Simplification: both branches of the original ended in ``makedirs``;
    the call is hoisted out of the conditional.
    """
    if os.path.exists(path):
        shutil.rmtree(path)
    os.makedirs(path)
python
{ "resource": "" }
q38263
get_html_filename
train
def get_html_filename(filename):
    """Convert a content *filename* to its ``.html`` equivalent.

    Known source extensions (.md, .tile, .jade, .txt, .rst, .docx) are
    rewritten to ``.html``; a ``.html`` name is returned unchanged.
    Unknown types are reported and returned unchanged — the original
    raised ``UnboundLocalError`` on that path.
    """
    if ".html" in filename:
        return filename
    # Substring matching (not endswith) preserved from the original;
    # checked in the original's order.
    for ext in (".md", ".tile", ".jade", ".txt", ".rst", ".docx"):
        if ext in filename:
            return filename.replace(ext, ".html")
    print(filename + " is not a valid file type!")
    return filename
python
{ "resource": "" }
q38264
get_html_clear_filename
train
def get_html_clear_filename(filename):
    """Strip known file extensions from *filename* and prettify it into a
    title-cased, human-readable page name ("index" becomes "Home")."""
    name = filename
    # Same substrings, same order, as the original chain of replaces.
    for ext in (".html", ".md", ".txt", ".tile", ".jade", ".rst", ".docx"):
        name = name.replace(ext, "")
    name = name.replace("index", "home")
    name = name.replace("-", " ").replace("_", " ")
    return name.title()
python
{ "resource": "" }
q38265
getunzipped
train
def getunzipped(username, repo, thedir):
    """Download the ``master`` branch of a GitHub repository as a zip,
    extract it into *thedir*, and flatten the ``<repo>-master`` wrapper
    folder into *thedir*.

    NOTE(review): ``urllib.urlretrieve`` is the Python 2 API; under
    Python 3 this needs ``urllib.request.urlretrieve`` — confirm the
    project's target interpreter.
    """
    theurl = "https://github.com/" + username + "/" + repo + "/archive/master.zip"
    name = os.path.join(thedir, 'temp.zip')
    try:
        # urlretrieve returns a (filename, headers) tuple; ``name`` is
        # immediately rebuilt below, so the tuple is discarded.
        name = urllib.urlretrieve(theurl, name)
        name = os.path.join(thedir, 'temp.zip')
    except IOError as e:
        print("Can't retrieve %r to %r: %s" % (theurl, thedir, e))
        return
    try:
        z = zipfile.ZipFile(name)
    except zipfile.error as e:
        print("Bad zipfile (from %r): %s" % (theurl, e))
        return
    z.extractall(thedir)
    z.close()
    os.remove(name)
    # GitHub zips contain a single top-level "<repo>-master" directory;
    # merge its contents into thedir and drop the wrapper.
    copy_tree(os.path.join(thedir, repo + "-master"), thedir)
    shutil.rmtree(os.path.join(thedir, repo + "-master"))
python
{ "resource": "" }
q38266
checkConfig
train
def checkConfig():
    """Back up ``config.py`` to ``config.py.oldbak`` if it exists."""
    source = os.path.join(cwd, "config.py")
    if not os.path.exists(source):
        return
    print("Making a backup of your config file!")
    backup = os.path.join(cwd, "config.py.oldbak")
    copyfile(source, backup)
python
{ "resource": "" }
q38267
createConfig
train
def createConfig(app_version=5.0, wname="", wdesc="", wdescl="", wlic="", wlan="", wurl="", aname="", abio=""):
    """Generate a ``config.py`` file from the given site/author information.

    Fixes a crash: *app_version* defaults to the float ``5.0`` and the
    original concatenated it directly to a str (``TypeError``); it is
    now passed through ``str()`` first. Also uses ``with`` so the file
    is closed even on error.
    """
    config_file_dir = os.path.join(cwd, "config.py")
    with open(config_file_dir, "w") as config_file:
        config_file.write('blended_version = ' + str(app_version) + '\n')
        config_file.write('\n')
        config_file.write(
            '# Configuration is automatically generated by Blended (http://jmroper.com/blended), feel free to edit any values below')
        config_file.write('\n')
        config_file.write('website_name = "' + wname + '"\n')
        config_file.write('website_description = "' + wdesc + '"\n')
        config_file.write(
            'website_description_long = "' + wdescl + '"\n')
        config_file.write('website_license = "' + wlic + '"\n')
        config_file.write('website_language = "' + wlan + '"\n')
        config_file.write('website_url = "' + wurl + '"\n')
        config_file.write('\n')
        config_file.write('author_name = "' + aname + '"\n')
        config_file.write('author_bio = "' + abio + '"\n')
        config_file.write('\n')
        config_file.write('home_page_list = True\n')
        config_file.write('\n')
        config_file.write('plugins = [] # Place all needed plugins in here\n')
        config_file.write(
            'custom_variables = {} # Place all custom variables in here\n')
        config_file.write('\n')
        config_file.write('minify_css = False\n')
        config_file.write('minify_js = False\n')
        config_file.write('\n')
        config_file.write('# The following values are used for FTP uploads')
        config_file.write('\n')
        config_file.write('ftp_server = "localhost"\n')
        config_file.write('ftp_username = "user"\n')
        config_file.write('ftp_password = "pass"\n')
        config_file.write('ftp_port = 21\n')
        config_file.write('ftp_upload_path = "public_html/myWebsite"\n')
python
{ "resource": "" }
q38268
createBlendedFolders
train
def createBlendedFolders():
    """Create the standard folder layout for a Blended website:
    ``templates`` (with ``assets/css``, ``assets/js``, ``assets/img``)
    and ``content``."""
    layout = (
        ("templates",),
        ("templates", "assets"),
        ("templates", "assets", "css"),
        ("templates", "assets", "js"),
        ("templates", "assets", "img"),
        ("content",),
    )
    for parts in layout:
        create_folder(os.path.join(cwd, *parts))
python
{ "resource": "" }
q38269
record_run
train
def record_run(record_type, print_session_id, **kwds):
    """
    Record shell history.

    Writes one timestamped JSON file under the config store's record
    path, in a subdirectory named after *record_type* ('init',
    'command' or 'exit' — presumably; confirm against the CLI).
    Non-None keyword arguments map directly onto record keys.
    """
    if print_session_id and record_type != 'init':
        raise RuntimeError(
            '--print-session-id should be used with --record-type=init')
    cfstore = ConfigStore()
    # SOMEDAY: Pass a list of environment variables to shell by "rash
    # init" and don't read configuration in "rash record" command.  It
    # is faster.
    config = cfstore.get_config()
    # Which environment variables to capture depends on the record type.
    envkeys = config.record.environ[record_type]
    json_path = os.path.join(cfstore.record_path,
                             record_type,
                             time.strftime('%Y-%m-%d-%H%M%S.json'))
    mkdirp(os.path.dirname(json_path))
    # Command line options directly map to record keys
    data = dict((k, v) for (k, v) in kwds.items() if v is not None)
    data.update(
        environ=get_environ(envkeys),
    )
    # Automatically set some missing variables:
    data.setdefault('cwd', getcwd())
    if record_type in ['command', 'exit']:
        data.setdefault('stop', int(time.time()))
    elif record_type in ['init']:
        data.setdefault('start', int(time.time()))
    if print_session_id:
        # The session id is derived from the record itself and echoed
        # so the calling shell can capture it.
        data['session_id'] = generate_session_id(data)
        print(data['session_id'])
    with open(json_path, 'w') as fp:
        json.dump(data, fp)
python
{ "resource": "" }
q38270
format_html
train
def format_html(format_string, *args, **kwargs):
    """Build a small HTML fragment safely.

    Every positional and keyword argument is passed through
    ``conditional_escape`` before being interpolated with
    ``str.format``; the result is marked safe. Use this instead of
    ``str.format`` or ``%`` interpolation for HTML snippets.
    """
    escaped_args = [conditional_escape(arg) for arg in args]
    escaped_kwargs = {key: conditional_escape(value)
                      for (key, value) in six.iteritems(kwargs)}
    return mark_safe(format_string.format(*escaped_args, **escaped_kwargs))
python
{ "resource": "" }
q38271
sql_program_name_func
train
def sql_program_name_func(command):
    """Extract the program name from a shell *command*, skipping leading
    ``VAR=VALUE`` environment assignments.

    >>> sql_program_name_func('ls')
    'ls'
    >>> sql_program_name_func('git status')
    'git'
    >>> sql_program_name_func('EMACS=emacs make')
    'make'

    :type command: str
    """
    words = command.split(' ')
    # First word that is not a VAR=VALUE assignment is the program;
    # fall back to the first word if every word contains '='.
    return next((word for word in words if '=' not in word), words[0])
python
{ "resource": "" }
q38272
sql_pathdist_func
train
def sql_pathdist_func(path1, path2, sep=os.path.sep):
    """Distance between two paths: the number of positions at which their
    component sequences differ (a missing component counts as differing).

    >>> sql_pathdist_func('a/b/', 'a/b/', sep='/')
    0
    >>> sql_pathdist_func('a/', 'a/b/', sep='/')
    1
    >>> sql_pathdist_func('a', 'a/', sep='/')
    0
    """
    comps1 = path1.rstrip(sep).split(sep)
    comps2 = path2.rstrip(sep).split(sep)
    distance = 0
    for left, right in zip_longest(comps1, comps2):
        if left != right:
            distance += 1
    return distance
python
{ "resource": "" }
q38273
DataBase.connection
train
def connection(self, commit=False):
    """
    Context manager to keep around DB connection.

    :rtype: sqlite3.Connection

    SOMEDAY: Get rid of this function. Keeping connection around as an
    argument to the method using this context manager is probably
    better as it is more explicit. Also, holding "global state" as
    instance attribute is bad for supporting threaded search, which is
    required for more fluent percol integration.
    """
    # NOTE(review): this generator is presumably decorated with
    # contextlib.contextmanager at the definition site — confirm.
    if commit:
        # Any nested user requesting commit flags the outermost exit.
        self._need_commit = True
    if self._db:
        # Re-entrant use: an outer `with self.connection()` is active,
        # so hand out the cached connection without re-opening.
        yield self._db
    else:
        try:
            with self._get_db() as db:
                self._db = db
                # Register SQL helper functions callable from queries.
                db.create_function("REGEXP", 2, sql_regexp_func)
                db.create_function("PROGRAM_NAME", 1, sql_program_name_func)
                db.create_function("PATHDIST", 2, sql_pathdist_func)
                yield self._db
                if self._need_commit:
                    db.commit()
        finally:
            # Always reset the cached handle and flag, even on error.
            self._db = None
            self._need_commit = False
python
{ "resource": "" }
q38274
DataBase.update_version_records
train
def update_version_records(self):
    """
    Update rash_info table if necessary.

    Inserts a (rash_version, schema_version) row unless the exact pair
    is already recorded.
    """
    from .__init__ import __version__ as version
    with self.connection(commit=True) as connection:
        for vrec in self.get_version_records():
            if (vrec.rash_version == version and
                    vrec.schema_version == schema_version):
                # Current versions already recorded; nothing to do.
                return   # no need to insert the new one!
        connection.execute(
            'INSERT INTO rash_info (rash_version, schema_version) '
            'VALUES (?, ?)',
            [version, schema_version])
python
{ "resource": "" }
q38275
DataBase.select_by_command_record
train
def select_by_command_record(self, crec):
    """
    Yield records that matches to `crec`.

    All attributes of `crec` except for `environ` are concerned.
    """
    # Column order must match `keys` so each row maps positionally
    # onto CommandRecord attributes.
    keys = ['command_history_id', 'command', 'session_history_id',
            'cwd', 'terminal', 'start', 'stop', 'exit_code']
    # Each WHERE clause matches either the exact value or NULL-vs-None,
    # hence every desired value appears twice in the parameter list.
    sql = """
    SELECT
        command_history.id, CL.command, session_id, DL.directory,
        TL.terminal, start_time, stop_time, exit_code
    FROM command_history
    LEFT JOIN command_list AS CL ON command_id = CL.id
    LEFT JOIN directory_list AS DL ON directory_id = DL.id
    LEFT JOIN terminal_list AS TL ON terminal_id = TL.id
    WHERE
        (CL.command = ? OR (CL.command IS NULL AND ? IS NULL)) AND
        (DL.directory = ? OR (DL.directory IS NULL AND ? IS NULL)) AND
        (TL.terminal = ? OR (TL.terminal IS NULL AND ? IS NULL)) AND
        (start_time = ? OR (start_time IS NULL AND ? IS NULL)) AND
        (stop_time = ? OR (stop_time IS NULL AND ? IS NULL)) AND
        (exit_code = ? OR (exit_code IS NULL AND ? IS NULL))
    """
    desired_row = [
        crec.command, normalize_directory(crec.cwd), crec.terminal,
        convert_ts(crec.start), convert_ts(crec.stop), crec.exit_code]
    # Interleave each value with itself: [a, a, b, b, ...].
    params = list(itertools.chain(*zip(desired_row, desired_row)))
    return self._select_rows(CommandRecord, keys, sql, params)
python
{ "resource": "" }
q38276
DummyBackend.start_user_session
train
def start_user_session(self, username, domain, resource, **kwargs):
    """Method to add a user session for debugging.

    Accepted parameters are the same as to the constructor of
    :py:class:`~xmpp_backends.base.UserSession`.

    :raises UserNotFound: if ``username@domain`` is not present in the
        backing module store.
    """
    # Fill in sensible defaults for every optional session attribute.
    kwargs.setdefault('uptime', pytz.utc.localize(datetime.utcnow()))
    kwargs.setdefault('priority', 0)
    kwargs.setdefault('status', 'online')
    kwargs.setdefault('status_text', '')
    kwargs.setdefault('connection_type', CONNECTION_XMPP)
    kwargs.setdefault('encrypted', True)
    kwargs.setdefault('compressed', False)
    kwargs.setdefault('ip_address', '127.0.0.1')
    if six.PY2 and isinstance(kwargs['ip_address'], str):
        # ipaddress constructor does not eat str in py2 :-/
        kwargs['ip_address'] = kwargs['ip_address'].decode('utf-8')
    if isinstance(kwargs['ip_address'], six.string_types):
        kwargs['ip_address'] = ipaddress.ip_address(kwargs['ip_address'])

    user = '%s@%s' % (username, domain)
    session = UserSession(self, username, domain, resource, **kwargs)
    data = self.module.get(user)
    if data is None:
        raise UserNotFound(username, domain, resource)
    data.setdefault('sessions', set())
    if isinstance(data['sessions'], list):
        # Cast old data to set
        data['sessions'] = set(data['sessions'])
    data['sessions'].add(session)
    self.module.set(user, data)
    # Track the session in the backend-wide set as well.
    all_sessions = self.module.get('all_sessions', set())
    all_sessions.add(session)
    self.module.set('all_sessions', all_sessions)
python
{ "resource": "" }
q38277
label_for_field
train
def label_for_field(name, model, return_attr=False):
    """
    Returns a sensible label for a field name. The name can be a callable,
    property (but not created with @property decorator) or the name of an
    object's attribute, as well as a genuine fields. If return_attr is
    True, the resolved attribute (which could be a callable) is also
    returned. This will be None if (and only if) the name refers to a
    field.
    """
    attr = None
    try:
        # Genuine model field: use its verbose_name.
        field = model._meta.get_field_by_name(name)[0]
        if isinstance(field, RelatedObject):
            label = field.opts.verbose_name
        else:
            label = field.verbose_name
    except models.FieldDoesNotExist:
        if name == "__unicode__":
            label = force_text(model._meta.verbose_name)
            attr = six.text_type
        elif name == "__str__":
            label = force_str(model._meta.verbose_name)
            attr = bytes
        else:
            # Resolve a callable / attribute on the model.
            if callable(name):
                attr = name
            elif hasattr(model, name):
                attr = getattr(model, name)
            else:
                message = "Unable to lookup '%s' on %s" % (name, model._meta.object_name)
                raise AttributeError(message)
            # Preference order: short_description on the attribute, then
            # on a property's fget, then a prettified callable name.
            if hasattr(attr, "short_description"):
                label = attr.short_description
            elif (isinstance(attr, property) and
                  hasattr(attr, "fget") and
                  hasattr(attr.fget, "short_description")):
                label = attr.fget.short_description
            elif callable(attr):
                if attr.__name__ == "<lambda>":
                    label = "--"
                else:
                    label = pretty_name(attr.__name__)
            else:
                label = pretty_name(name)
    if return_attr:
        return (label, attr)
    else:
        return label
python
{ "resource": "" }
q38278
EWMA.update
train
def update(self, value):
    """
    Add *value* (coerced to int) to the pending count under the lock.
    """
    amount = int(value)
    with self.lock:
        self.value = self.value + amount
python
{ "resource": "" }
q38279
EWMA.tick
train
def tick(self):
    """Fold the count accumulated since the last tick into the EWMA rate,
    then reset the count."""
    instant_rate = float(self.value) / float(self.tick_interval)
    with self.lock:
        if not self.initialized:
            # First tick seeds the rate directly.
            self.initialized = True
            self.rate = instant_rate
        else:
            self.rate += (instant_rate - self.rate) * self.alpha
        self.value = 0
python
{ "resource": "" }
q38280
Meter.notify
train
def notify(self, value):
    """Record a new observation: tick the EWMAs, feed *value* to each of
    them (1m, 5m, 15m, day), and bump the total count — all under the
    meter's lock."""
    with self.lock:
        # TODO: this could slow down slow-rate incoming updates since
        # the number of ticks depends on the actual time passed since
        # the latest notification. Consider using a real timer to tick
        # the EWMA.
        self.tick()
        for average in (self.m1, self.m5, self.m15, self.day):
            average.update(value)
        self.count += value
python
{ "resource": "" }
q38281
Meter.tick_all
train
def tick_all(self, times):
    """
    Tick every EWMA (1m, 5m, 15m, day) *times* times.
    """
    averages = (self.m1, self.m5, self.m15, self.day)
    for _ in range(times):
        for average in averages:
            average.tick()
python
{ "resource": "" }
q38282
open
train
def open(lower_value, upper_value):
    """Construct an interval that excludes both of its endpoints.

    For example:

    >>> open(100.2, 800.9)
    (100.2, 800.9)
    """
    # NOTE: shadows the builtin ``open`` — kept for API compatibility.
    bound = Interval.OPEN
    return Interval(bound, lower_value, upper_value, bound)
python
{ "resource": "" }
q38283
closed
train
def closed(lower_value, upper_value):
    """Construct an interval that includes both of its endpoints.

    For example:

    >>> closed(100.2, 800.9)
    [100.2, 800.9]
    """
    bound = Interval.CLOSED
    return Interval(bound, lower_value, upper_value, bound)
python
{ "resource": "" }
q38284
openclosed
train
def openclosed(lower_value, upper_value):
    """Construct an interval that excludes its lower endpoint and includes
    its upper endpoint.

    For example:

    >>> openclosed(100.2, 800.9)
    (100.2, 800.9]
    """
    return Interval(
        Interval.OPEN, lower_value, upper_value, Interval.CLOSED)
python
{ "resource": "" }
q38285
closedopen
train
def closedopen(lower_value, upper_value):
    """Construct an interval that includes its lower endpoint and excludes
    its upper endpoint.

    For example:

    >>> closedopen(100.2, 800.9)
    [100.2, 800.9)
    """
    return Interval(
        Interval.CLOSED, lower_value, upper_value, Interval.OPEN)
python
{ "resource": "" }
q38286
Interval.overlaps
train
def overlaps(self, other):
    """If self and other have any overlapping values returns True, otherwise returns False"""
    # Order the two intervals so `smaller` starts first.
    if self > other:
        smaller, larger = other, self
    else:
        smaller, larger = self, other
    # An empty interval overlaps nothing.
    if larger.empty():
        return False
    # Touching endpoints only overlap when both touching sides are closed.
    if smaller._upper_value == larger._lower_value:
        return smaller._upper == smaller.CLOSED and larger._lower == smaller.CLOSED
    # Otherwise they overlap iff the later interval starts before the
    # earlier one ends.
    return larger._lower_value < smaller._upper_value
python
{ "resource": "" }
q38287
remove
train
def remove(id_):
    """
    Cancel and unregister the scheduled callback with the given *id_*.

    Returns the removed timer thread, or ``None`` if *id_* was unknown.
    """
    with LOCK:
        thread = REGISTRY.pop(id_, None)
        if thread is not None:
            thread.cancel()
        return thread
python
{ "resource": "" }
q38288
get_metrics
train
def get_metrics(tag):
    """
    Return the values for the metrics carrying *tag*, or every available
    metric when *tag* is None.
    """
    if tag is not None:
        return metrics.metrics_by_tag(tag)
    return metrics.metrics_by_name_list(metrics.metrics())
python
{ "resource": "" }
q38289
fixed_interval_scheduler
train
def fixed_interval_scheduler(interval):
    """
    Generator yielding tick times spaced *interval* seconds apart,
    starting one interval after the moment it is created.
    """
    next_tick = time.time()
    while True:
        # Repeated addition (not start + k*interval) preserved so float
        # behavior matches the original exactly.
        next_tick = next_tick + interval
        yield next_tick
python
{ "resource": "" }
q38290
Game.wrap_search
train
def wrap_search(cls, response):
    """Wrap the response from a game search into instances and return them

    :param response: The response from searching a game
    :type response: :class:`requests.Response`
    :returns: the new game instances
    :rtype: :class:`list` of :class:`Game`
    :raises: None
    """
    payload = response.json()
    return [cls.wrap_json(entry) for entry in payload['games']]
python
{ "resource": "" }
q38291
Game.wrap_topgames
train
def wrap_topgames(cls, response):
    """Wrap the response from quering the top games into instances
    and return them

    :param response: The response for quering the top games
    :type response: :class:`requests.Response`
    :returns: the new game instances
    :rtype: :class:`list` of :class:`Game`
    :raises: None
    """
    payload = response.json()
    return [cls.wrap_json(json=entry['game'],
                          viewers=entry['viewers'],
                          channels=entry['channels'])
            for entry in payload['top']]
python
{ "resource": "" }
q38292
Game.wrap_json
train
def wrap_json(cls, json, viewers=None, channels=None):
    """Create a Game instance for the given json

    :param json: the dict with the information of the game
    :type json: :class:`dict`
    :param viewers: The viewer count
    :type viewers: :class:`int`
    :param channels: The channel count
    :type channels: :class:`int`
    :returns: the new game instance
    :rtype: :class:`Game`
    :raises: None
    """
    return Game(
        name=json.get('name'),
        box=json.get('box'),
        logo=json.get('logo'),
        twitchid=json.get('_id'),
        viewers=viewers,
        channels=channels,
    )
python
{ "resource": "" }
q38293
Channel.wrap_search
train
def wrap_search(cls, response):
    """Wrap the response from a channel search into instances and return them

    :param response: The response from searching a channel
    :type response: :class:`requests.Response`
    :returns: the new channel instances
    :rtype: :class:`list` of :class:`channel`
    :raises: None
    """
    payload = response.json()
    return [cls.wrap_json(entry) for entry in payload['channels']]
python
{ "resource": "" }
q38294
Channel.wrap_get_channel
train
def wrap_get_channel(cls, response):
    """Wrap the response from getting a channel into an instance
    and return it

    :param response: The response from getting a channel
    :type response: :class:`requests.Response`
    :returns: the new channel instance
    :rtype: :class:`Channel`
    :raises: None
    """
    return cls.wrap_json(response.json())
python
{ "resource": "" }
q38295
Channel.wrap_json
train
def wrap_json(cls, json):
    """Create a Channel instance for the given json

    :param json: the dict with the information of the channel
    :type json: :class:`dict`
    :returns: the new channel instance
    :rtype: :class:`Channel`
    :raises: None
    """
    # Map each constructor keyword to its JSON key, then build once.
    kwargs = {
        'name': json.get('name'),
        'status': json.get('status'),
        'displayname': json.get('display_name'),
        'game': json.get('game'),
        'twitchid': json.get('_id'),
        'views': json.get('views'),
        'followers': json.get('followers'),
        'url': json.get('url'),
        'language': json.get('language'),
        'broadcaster_language': json.get('broadcaster_language'),
        'mature': json.get('mature'),
        'logo': json.get('logo'),
        'banner': json.get('banner'),
        'video_banner': json.get('video_banner'),
        'delay': json.get('delay'),
    }
    return Channel(**kwargs)
python
{ "resource": "" }
q38296
Stream.wrap_search
train
def wrap_search(cls, response):
    """Wrap the response from a stream search into instances and return them

    :param response: The response from searching a stream
    :type response: :class:`requests.Response`
    :returns: the new stream instances
    :rtype: :class:`list` of :class:`stream`
    :raises: None
    """
    payload = response.json()
    return [cls.wrap_json(entry) for entry in payload['streams']]
python
{ "resource": "" }
q38297
Stream.wrap_get_stream
train
def wrap_get_stream(cls, response):
    """Wrap the response from getting a stream into an instance
    and return it

    :param response: The response from getting a stream
    :type response: :class:`requests.Response`
    :returns: the new stream instance
    :rtype: :class:`Stream`
    :raises: None
    """
    payload = response.json()
    return cls.wrap_json(payload['stream'])
python
{ "resource": "" }
q38298
Stream.wrap_json
train
def wrap_json(cls, json):
    """Create a Stream instance for the given json

    :param json: the dict with the information of the stream
    :type json: :class:`dict` | None
    :returns: the new stream instance, or None for a None payload
    :rtype: :class:`Stream` | None
    :raises: None
    """
    if json is None:
        return None
    return Stream(game=json.get('game'),
                  channel=Channel.wrap_json(json.get('channel')),
                  twitchid=json.get('_id'),
                  viewers=json.get('viewers'),
                  preview=json.get('preview'))
python
{ "resource": "" }
q38299
User.wrap_get_user
train
def wrap_get_user(cls, response):
    """Wrap the response from getting a user into an instance
    and return it

    :param response: The response from getting a user
    :type response: :class:`requests.Response`
    :returns: the new user instance
    :rtype: :class:`User`
    :raises: None
    """
    return cls.wrap_json(response.json())
python
{ "resource": "" }