_id
stringlengths
2
7
title
stringlengths
1
88
partition
stringclasses
3 values
text
stringlengths
75
19.8k
language
stringclasses
1 value
meta_information
dict
q29800
get_market_gainers
train
def get_market_gainers(*args, **kwargs):
    """ MOVED to iexfinance.stocks.get_market_gainers """
    import warnings
    # Bug fix: the old/new names must be interpolated into WNG_MSG with %.
    # Passing the tuple as warn()'s second argument treats it as the warning
    # *category* (siblings such as get_available_symbols use WNG_MSG % ...).
    warnings.warn(WNG_MSG % ("get_market_gainers", "stocks.get_market_gainers"))
    return stocks.get_market_gainers(*args, **kwargs)
python
{ "resource": "" }
q29801
get_market_losers
train
def get_market_losers(*args, **kwargs):
    """ MOVED to iexfinance.stocks.get_market_losers """
    import warnings
    # Bug fix: interpolate with % instead of passing the tuple as the
    # warning category argument.
    warnings.warn(WNG_MSG % ("get_market_losers", "stocks.get_market_losers"))
    return stocks.get_market_losers(*args, **kwargs)
python
{ "resource": "" }
q29802
get_market_most_active
train
def get_market_most_active(*args, **kwargs):
    """ MOVED to iexfinance.stocks.get_market_most_active """
    import warnings
    # Bug fix: interpolate with % instead of passing the tuple as the
    # warning category argument.
    warnings.warn(WNG_MSG % ("get_market_most_active",
                             "stocks.get_market_most_active"))
    return stocks.get_market_most_active(*args, **kwargs)
python
{ "resource": "" }
q29803
get_market_iex_volume
train
def get_market_iex_volume(*args, **kwargs):
    """ MOVED to iexfinance.stocks.get_market_iex_volume """
    import warnings
    # Bug fix: interpolate with % instead of passing the tuple as the
    # warning category argument.
    warnings.warn(WNG_MSG % ("get_market_iex_volume",
                             "stocks.get_market_iex_volume"))
    return stocks.get_market_iex_volume(*args, **kwargs)
python
{ "resource": "" }
q29804
get_market_iex_percent
train
def get_market_iex_percent(*args, **kwargs):
    """ MOVED to iexfinance.stocks.get_market_iex_percent """
    import warnings
    # Bug fix: interpolate with % instead of passing the tuple as the
    # warning category argument.
    warnings.warn(WNG_MSG % ("get_market_iex_percent",
                             "stocks.get_market_iex_percent"))
    return stocks.get_market_iex_percent(*args, **kwargs)
python
{ "resource": "" }
q29805
get_available_symbols
train
def get_available_symbols(**kwargs):
    """ MOVED to iexfinance.refdata.get_symbols """
    import warnings
    warnings.warn(WNG_MSG % ("get_available_symbols", "refdata.get_symbols"))
    symbols_url = "https://api.iextrading.com/1.0/ref-data/symbols"
    handler = _IEXBase(**kwargs)
    response = handler._execute_iex_query(symbols_url)
    if response:
        return response
    raise IEXQueryError("Could not download all symbols")
python
{ "resource": "" }
q29806
get_iex_corporate_actions
train
def get_iex_corporate_actions(start=None, **kwargs):
    """ MOVED to iexfinance.refdata.get_iex_corporate_actions """
    import warnings
    warnings.warn(WNG_MSG % ("get_iex_corporate_actions",
                             "refdata.get_iex_corporate_actions"))
    reader = CorporateActions(start=start, **kwargs)
    return reader.fetch()
python
{ "resource": "" }
q29807
get_iex_dividends
train
def get_iex_dividends(start=None, **kwargs):
    """ MOVED to iexfinance.refdata.get_iex_dividends """
    import warnings
    warnings.warn(WNG_MSG % ("get_iex_dividends", "refdata.get_iex_dividends"))
    reader = Dividends(start=start, **kwargs)
    return reader.fetch()
python
{ "resource": "" }
q29808
get_iex_next_day_ex_date
train
def get_iex_next_day_ex_date(start=None, **kwargs):
    """ MOVED to iexfinance.refdata.get_iex_next_day_ex_date """
    import warnings
    warnings.warn(WNG_MSG % ("get_iex_next_day_ex_date",
                             "refdata.get_iex_next_day_ex_date"))
    reader = NextDay(start=start, **kwargs)
    return reader.fetch()
python
{ "resource": "" }
q29809
get_iex_listed_symbol_dir
train
def get_iex_listed_symbol_dir(start=None, **kwargs):
    """ MOVED to iexfinance.refdata.get_listed_symbol_dir """
    import warnings
    warnings.warn(WNG_MSG % ("get_iex_listed_symbol_dir",
                             "refdata.get_iex_listed_symbol_dir"))
    # Bug fix: every sibling deprecation shim returns reader.fetch(); this
    # one returned the unfetched ListedSymbolDir reader instance.
    return ListedSymbolDir(start=start, **kwargs).fetch()
python
{ "resource": "" }
q29810
get_market_tops
train
def get_market_tops(symbols=None, **kwargs):
    """ MOVED to iexfinance.iexdata.get_tops """
    import warnings
    warnings.warn(WNG_MSG % ("get_market_tops", "iexdata.get_tops"))
    reader = TOPS(symbols, **kwargs)
    return reader.fetch()
python
{ "resource": "" }
q29811
get_market_last
train
def get_market_last(symbols=None, **kwargs):
    """ MOVED to iexfinance.iexdata.get_last """
    import warnings
    warnings.warn(WNG_MSG % ("get_market_last", "iexdata.get_last"))
    reader = Last(symbols, **kwargs)
    return reader.fetch()
python
{ "resource": "" }
q29812
get_market_deep
train
def get_market_deep(symbols=None, **kwargs):
    """ MOVED to iexfinance.iexdata.get_deep """
    import warnings
    warnings.warn(WNG_MSG % ("get_market_deep", "iexdata.get_deep"))
    reader = DEEP(symbols, **kwargs)
    return reader.fetch()
python
{ "resource": "" }
q29813
get_market_book
train
def get_market_book(symbols=None, **kwargs):
    """ MOVED to iexfinance.iexdata.get_deep_book """
    import warnings
    warnings.warn(WNG_MSG % ("get_market_book", "iexdata.get_deep_book"))
    reader = Book(symbols, **kwargs)
    return reader.fetch()
python
{ "resource": "" }
q29814
get_stats_daily
train
def get_stats_daily(start=None, end=None, last=None, **kwargs):
    """ MOVED to iexfinance.iexdata.get_stats_daily """
    import warnings
    warnings.warn(WNG_MSG % ("get_stats_daily", "iexdata.get_stats_daily"))
    start, end = _sanitize_dates(start, end)
    reader = DailySummaryReader(start=start, end=end, last=last, **kwargs)
    return reader.fetch()
python
{ "resource": "" }
q29815
get_stats_monthly
train
def get_stats_monthly(start=None, end=None, **kwargs):
    """ MOVED to iexfinance.iexdata.get_stats_summary """
    import warnings
    warnings.warn(WNG_MSG % ("get_stats_monthly", "iexdata.get_stats_summary"))
    reader = MonthlySummaryReader(start=start, end=end, **kwargs)
    return reader.fetch()
python
{ "resource": "" }
q29816
_IEXBase._handle_error
train
def _handle_error(self, response):
    """ Handles all responses which return an error status code """
    # NOTE(review): every branch raises `auth_error`, including the generic
    # client-side/server-side failures, and the "Invalid auth token." text is
    # attached to HTTP 400 (not 401/403) — confirm this matches the IEX API's
    # actual error semantics before relying on the messages.
    auth_msg = "The query could not be completed. Invalid auth token."
    status_code = response.status_code
    if 400 <= status_code < 500:
        if status_code == 400:
            raise auth_error(auth_msg)
        else:
            # Any other 4xx: generic client-side error.
            raise auth_error("The query could not be completed. "
                             "There was a client-side error with your "
                             "request.")
    elif 500 <= status_code < 600:
        # 5xx: server-side error.
        raise auth_error("The query could not be completed. "
                         "There was a server-side error with "
                         "your request.")
    else:
        # Fallback for any other status code routed here.
        raise auth_error("The query could not be completed.")
python
{ "resource": "" }
q29817
_IEXBase._output_format
train
def _output_format(self, out, fmt_j=None, fmt_p=None): """ Output formatting handler """ if self.output_format == 'pandas': if fmt_p is not None: return fmt_p(out) else: return self._convert_output(out) if fmt_j: return fmt_j(out) return out
python
{ "resource": "" }
q29818
get_historical_data
train
def get_historical_data(symbols, start=None, end=None, **kwargs):
    """ Obtain historical data for a symbol or list of symbols.

    Parameters
    ----------
    symbols: str or list
        A symbol or list of symbols
    start: datetime.datetime, default None
        Beginning of desired date range
    end: datetime.datetime, default None
        End of required date range
    kwargs:
        Additional Request Parameters (see base class)

    Returns
    -------
    list or DataFrame
        Historical stock prices over date range, start to end
    """
    start, end = _sanitize_dates(start, end)
    reader = HistoricalReader(symbols, start=start, end=end, **kwargs)
    return reader.fetch()
python
{ "resource": "" }
q29819
LazySettings.get
train
def get(self, key):
    """ Backwards-compatible lookup for the old settings interface.

    Dot attribute access is the preferred way to read settings; this
    method only delegates to it and wraps the failure case.
    """
    warnings.warn(
        'The settings.get(key) is superseded by the dot attribute access.',
        PendingDeprecationWarning
    )
    try:
        value = getattr(self, key)
    except AttributeError:
        raise ImproperlyConfigured('Missing settings: {}[\'{}\']'.format(
            DJOSER_SETTINGS_NAMESPACE, key)
        )
    return value
python
{ "resource": "" }
q29820
Connection._pack
train
def _pack(self, msg_type, payload): """ Packs the given message type and payload. Turns the resulting message into a byte string. """ pb = payload.encode('utf-8') s = struct.pack('=II', len(pb), msg_type.value) return self.MAGIC.encode('utf-8') + s + pb
python
{ "resource": "" }
q29821
Connection._unpack
train
def _unpack(self, data): """ Unpacks the given byte string and parses the result from JSON. Returns None on failure and saves data into "self.buffer". """ msg_magic, msg_length, msg_type = self._unpack_header(data) msg_size = self._struct_header_size + msg_length # XXX: Message shouldn't be any longer than the data payload = data[self._struct_header_size:msg_size] return payload.decode('utf-8', 'replace')
python
{ "resource": "" }
q29822
Connection._unpack_header
train
def _unpack_header(self, data): """ Unpacks the header of given byte string. """ return struct.unpack(self._struct_header, data[:self._struct_header_size])
python
{ "resource": "" }
q29823
Connection.get_bar_config_list
train
def get_bar_config_list(self):
    """ Get list of bar IDs as active in the connected i3 session.

    :rtype: List of strings that can be fed as ``bar_id`` into
            :meth:`get_bar_config`.
    """
    reply = self.message(MessageType.GET_BAR_CONFIG, '')
    return json.loads(reply)
python
{ "resource": "" }
q29824
Connection.get_workspaces
train
def get_workspaces(self):
    """ Get a list of workspaces.

    Returns JSON-like data, not a Con instance. Try :meth:`Con.workspaces`
    instead if the info contained here is too little.

    :rtype: List of :class:`WorkspaceReply`.
    """
    reply = self.message(MessageType.GET_WORKSPACES, '')
    return json.loads(reply, object_hook=WorkspaceReply)
python
{ "resource": "" }
q29825
Connection.get_marks
train
def get_marks(self):
    """ Get a list of the names of all currently set marks.

    :rtype: list
    """
    reply = self.message(MessageType.GET_MARKS, '')
    return json.loads(reply)
python
{ "resource": "" }
q29826
Connection.get_binding_modes
train
def get_binding_modes(self):
    """ Returns all currently configured binding modes.

    :rtype: list
    """
    reply = self.message(MessageType.GET_BINDING_MODES, '')
    return json.loads(reply)
python
{ "resource": "" }
q29827
Connection.get_config
train
def get_config(self):
    """ Currently only contains the "config" member, which is a string
    containing the config file as loaded by i3 most recently.

    :rtype: ConfigReply
    """
    reply = self.message(MessageType.GET_CONFIG, '')
    return json.loads(reply, object_hook=ConfigReply)
python
{ "resource": "" }
q29828
Connection.send_tick
train
def send_tick(self, payload=""):
    """ Sends a tick event with the specified payload.

    After the reply was received, the tick event has been written to all
    IPC connections which subscribe to tick events.

    :rtype: TickReply
    """
    reply = self.message(MessageType.SEND_TICK, payload)
    return json.loads(reply, object_hook=TickReply)
python
{ "resource": "" }
q29829
Con.root
train
def root(self):
    """ Retrieves the root container by climbing parent links.

    :rtype: :class:`Con`.
    """
    node = self
    while node.parent:
        node = node.parent
    return node
python
{ "resource": "" }
q29830
Con.leaves
train
def leaves(self):
    """ Retrieve the client windows under the current container.

    Only lists client windows (empty ``nodes``, type ``con``, not parented
    by a dockarea), no intermediate containers.

    :rtype: List of :class:`Con`.
    """
    return [child for child in self
            if not child.nodes
            and child.type == "con"
            and child.parent.type != "dockarea"]
python
{ "resource": "" }
q29831
Con.command
train
def command(self, command):
    """ Run a command on the currently active container.

    :rtype: CommandReply
    """
    scoped = '[con_id="{}"] {}'.format(self.id, command)
    return self._conn.command(scoped)
python
{ "resource": "" }
q29832
Con.command_children
train
def command_children(self, command):
    """ Run a command on the direct children of the currently selected
    container.

    :rtype: List of CommandReply????
    """
    if not len(self.nodes):
        return
    batch = ['[con_id="{}"] {};'.format(child.id, command)
             for child in self.nodes]
    self._conn.command(' '.join(batch))
python
{ "resource": "" }
q29833
Con.workspaces
train
def workspaces(self):
    """ Retrieve a list of currently active workspaces.

    Walks the tree from the root, collecting workspace nodes whose names
    are not internal (``__``-prefixed).

    :rtype: List of :class:`Con`.
    """
    found = []

    def visit(con):
        if con.type == "workspace" and not con.name.startswith('__'):
            found.append(con)
            return
        for child in con.nodes:
            visit(child)

    visit(self.root())
    return found
python
{ "resource": "" }
q29834
_parse_json
train
def _parse_json(s):
    ''' Parse a JSON string into a JsonDict (Python 2).

    >>> r = _parse_json(r'{"name":"Michael","score":95}')
    >>> r.name
    u'Michael'
    >>> r['score']
    95
    '''
    def _hook(pairs):
        return JsonDict(pairs.iteritems())
    return json.loads(s, object_hook=_hook)
python
{ "resource": "" }
q29835
_encode_params
train
def _encode_params(**kw):
    ''' Do url-encode parameters

    >>> _encode_params(a=1, b='R&D')
    'a=1&b=R%26D'
    >>> _encode_params(a=u'\u4e2d\u6587', b=['A', 'B', 123])
    'a=%E4%B8%AD%E6%96%87&b=A&b=B&b=123'
    '''
    def _encode(acc, key, val):
        # unicode is UTF-8 encoded before quoting; iterables are expanded
        # into repeated key=value pairs; everything else is stringified.
        if isinstance(val, unicode):
            acc.append('%s=%s' % (key, urllib.quote(val.encode('utf-8'))))
        elif isinstance(val, str):
            acc.append('%s=%s' % (key, urllib.quote(val)))
        elif isinstance(val, collections.Iterable):
            for item in val:
                _encode(acc, key, item)
        else:
            acc.append('%s=%s' % (key, urllib.quote(str(val))))
    pairs = []
    for key, val in kw.iteritems():
        _encode(pairs, key, val)
    return '&'.join(pairs)
python
{ "resource": "" }
q29836
_guess_content_type
train
def _guess_content_type(url): ''' Guess content type by url. >>> _guess_content_type('http://test/A.HTML') 'text/html' >>> _guess_content_type('http://test/a.jpg') 'image/jpeg' >>> _guess_content_type('/path.txt/aaa') 'application/octet-stream' ''' OCTET_STREAM = 'application/octet-stream' n = url.rfind('.') if n == -1: return OCTET_STREAM return mimetypes.types_map.get(url[n:].lower(), OCTET_STREAM)
python
{ "resource": "" }
q29837
_http
train
def _http(method, url, headers=None, **kw):
    '''
    Send http request and return response text.
    '''
    # Parameters are multipart-encoded for uploads and url-encoded otherwise;
    # `boundary` is only set in the multipart case.
    params = None
    boundary = None
    if method == 'UPLOAD':
        params, boundary = _encode_multipart(**kw)
    else:
        params = _encode_params(**kw)
    # NOTE(review): the GET check below uses the `_HTTP_GET` constant here but
    # the literal 'GET' on the next line — presumably the same value; confirm.
    http_url = '%s?%s' % (url, params) if method == _HTTP_GET else url
    http_body = None if method == 'GET' else params
    # NOTE(review): logging every request at ERROR level looks like leftover
    # debugging; INFO/DEBUG would be expected here.
    logging.error('%s: %s' % (method, http_url))
    req = urllib2.Request(http_url, data=http_body)
    req.add_header('Accept-Encoding', 'gzip')
    if headers:
        for k, v in headers.iteritems():
            req.add_header(k, v)
    if boundary:
        req.add_header('Content-Type', 'multipart/form-data; boundary=%s' % boundary)
    try:
        resp = urllib2.urlopen(req, timeout=5)
        return _read_http_body(resp)
    finally:
        pass
python
{ "resource": "" }
q29838
SinaWeiboMixin._prepare_api
train
def _prepare_api(self, method, path, access_token, **kw): ''' Get api url. ''' headers = None if access_token: headers = {'Authorization': 'OAuth2 %s' % access_token} if '/remind/' in path: # sina remind api url is different: return method, 'https://rm.api.weibo.com/2/%s.json' % path, headers, kw if method == 'POST' and 'pic' in kw: # if 'pic' in parameter, set to UPLOAD mode: return 'UPLOAD', 'https://api.weibo.com/2/%s.json' % path, headers, kw return method, 'https://api.weibo.com/2/%s.json' % path, headers, kw
python
{ "resource": "" }
q29839
SinaWeiboMixin.parse_signed_request
train
def parse_signed_request(self, signed_request):
    '''
    parse signed request when using in-site app.

    Returns:
        dict object like { 'uid': 12345, 'access_token': 'ABC123XYZ',
        'expires': unix-timestamp }, or None if parse failed.
    '''
    def _b64_normalize(s):
        # Restore standard base64 from URL-safe base64 and re-add padding.
        appendix = '=' * (4 - len(s) % 4)
        return s.replace('-', '+').replace('_', '/') + appendix
    sr = str(signed_request)
    logging.info('parse signed request: %s' % sr)
    # signed_request is "<b64 signature>.<b64 payload>"
    enc_sig, enc_payload = sr.split('.', 1)
    sig = base64.b64decode(_b64_normalize(enc_sig))
    data = _parse_json(base64.b64decode(_b64_normalize(enc_payload)))
    if data['algorithm'] != u'HMAC-SHA256':
        return None
    # Recompute HMAC-SHA256 of the still-encoded payload with the app secret
    # and compare against the transmitted signature.
    # NOTE(review): plain == is not a constant-time compare — consider
    # hmac.compare_digest if timing attacks matter here.
    expected_sig = hmac.new(self.client_secret, enc_payload, hashlib.sha256).digest()
    if expected_sig == sig:
        data.user_id = data.uid = data.get('user_id', None)
        data.access_token = data.get('oauth_token', None)
        expires = data.get('expires', None)
        if expires:
            # Convert relative lifetime into an absolute unix timestamp.
            data.expires = data.expires_in = time.time() + expires
        return data
    return None
python
{ "resource": "" }
q29840
QQMixin.refresh_access_token
train
def refresh_access_token(self, refresh_token, redirect_uri=None):
    ''' Refresh access token via the QQ OAuth2 token endpoint. '''
    redirect = redirect_uri or self._redirect_uri
    resp_text = _http(
        'POST', 'https://graph.qq.com/oauth2.0/token',
        refresh_token=refresh_token,
        client_id=self._client_id,
        client_secret=self._client_secret,
        redirect_uri=redirect,
        grant_type='refresh_token')
    return self._parse_access_token(resp_text)
python
{ "resource": "" }
q29841
QQMixin._parse_access_token
train
def _parse_access_token(self, resp_text):
    ' parse access token from urlencoded str like access_token=abcxyz&expires_in=123000&other=true '
    fields = self._qs2dict(resp_text)
    token = fields.pop('access_token')
    # expires_in is a relative lifetime; store an absolute timestamp.
    expires_at = time.time() + float(fields.pop('expires_in'))
    return JsonDict(access_token=token, expires=expires_at, **fields)
python
{ "resource": "" }
q29842
_parse_json
train
def _parse_json(s):
    ' parse str into JsonDict '
    def _obj_hook(pairs):
        ' convert json object to python object '
        converted = JsonDict()
        for key, value in pairs.iteritems():
            converted[str(key)] = value
        return converted
    return json.loads(s, object_hook=_obj_hook)
python
{ "resource": "" }
q29843
_encode_params
train
def _encode_params(**kw):
    ''' do url-encode parameters

    >>> _encode_params(a=1, b='R&D')
    'a=1&b=R%26D'
    >>> _encode_params(a=u'\u4e2d\u6587', b=['A', 'B', 123])
    'a=%E4%B8%AD%E6%96%87&b=A&b=B&b=123'
    '''
    def _quote_one(value):
        # unicode → UTF-8 bytes before quoting; everything else stringified.
        if isinstance(value, unicode):
            return urllib.quote(value.encode('utf-8'))
        return urllib.quote(value if isinstance(value, str) else str(value))
    encoded = []
    for key, value in kw.iteritems():
        if isinstance(value, basestring):
            encoded.append('%s=%s' % (key, _quote_one(value)))
        elif isinstance(value, collections.Iterable):
            # iterables expand into repeated key=value pairs
            for item in value:
                encoded.append('%s=%s' % (key, _quote_one(item)))
        else:
            encoded.append('%s=%s' % (key, _quote_one(value)))
    return '&'.join(encoded)
python
{ "resource": "" }
q29844
_http_call
train
def _http_call(the_url, method, authorization, **kw):
    '''
    send an http request and return a json object if no error occurred.
    '''
    params = None
    boundary = None
    if method == _HTTP_UPLOAD:
        # fix sina upload url:
        the_url = the_url.replace('https://api.', 'https://upload.api.')
        params, boundary = _encode_multipart(**kw)
    else:
        params = _encode_params(**kw)
        if '/remind/' in the_url:
            # fix sina remind api:
            the_url = the_url.replace('https://api.', 'https://rm.api.')
    # GET passes params in the query string; other methods send them as body.
    http_url = '%s?%s' % (the_url, params) if method == _HTTP_GET else the_url
    http_body = None if method == _HTTP_GET else params
    req = urllib2.Request(http_url, data=http_body)
    req.add_header('Accept-Encoding', 'gzip')
    if authorization:
        req.add_header('Authorization', 'OAuth2 %s' % authorization)
    if boundary:
        req.add_header('Content-Type', 'multipart/form-data; boundary=%s' % boundary)
    try:
        resp = urllib2.urlopen(req, timeout=5)
        body = _read_body(resp)
        r = _parse_json(body)
        # API-level error delivered in a 200 response body.
        if hasattr(r, 'error_code'):
            raise APIError(r.error_code, r.get('error', ''), r.get('request', ''))
        return r
    except urllib2.HTTPError as e:
        # HTTP-level error: try to extract the API error object from the
        # error body; fall back to re-raising the HTTPError.
        try:
            r = _parse_json(_read_body(e))
        except:
            r = None
        if hasattr(r, 'error_code'):
            raise APIError(r.error_code, r.get('error', ''), r.get('request', ''))
        raise e
python
{ "resource": "" }
q29845
proto_files
train
def proto_files(root):
    """Yields the path of all .proto files under the root."""
    for dirpath, _, filenames in os.walk(root):
        for name in filenames:
            if name.endswith('.proto'):
                yield os.path.join(dirpath, name)
python
{ "resource": "" }
q29846
compile_proto
train
def compile_proto(source, python_out, proto_path):
    """Invoke Protocol Compiler to generate python from given source .proto."""
    if not protoc:
        sys.exit('protoc not found. Is the protobuf-compiler installed?\n')
    command = [
        protoc,
        '--proto_path', proto_path,
        '--python_out', python_out,
        source,
    ]
    if subprocess.call(command) != 0:
        sys.exit('Make sure your protoc version >= 2.6. You can use a custom '
                 'protoc by setting the PROTOC environment variable.')
python
{ "resource": "" }
q29847
ProjectFiles.add_folder
train
def add_folder(self, path, parent=None, excludes=None, recursive=True, create_groups=True, target_name=None,
               file_options=None):
    """
    Given a directory, it will create the equivalent group structure and add all files in the process.
    If groups matching the logical path already exist, it will use them instead of creating a new one. Same
    apply for file within a group, if the file name already exists it will be ignored.

    :param path: OS path to the directory to be added.
    :param parent: Parent group to be added under
    :param excludes: list of regexs to ignore
    :param recursive: add folders recursively or stop in the first level
    :param create_groups: add folders recursively as groups or references
    :param target_name: Target name or list of target names where the file should be added (none for every
        target)
    :param file_options: FileOptions object to be used during the addition of the file to the project.
    :return: a list of elements that were added to the project successfully as PBXBuildFile objects
    """
    # Bug fix: the default was the mutable/shared instance `FileOptions()`
    # evaluated once at import time; create a fresh one per call instead.
    if file_options is None:
        file_options = FileOptions()
    if not os.path.isdir(path):
        return None
    if not excludes:
        excludes = []
    results = []
    # add the top folder as a group, make it the new parent
    path = os.path.abspath(path)
    if not create_groups and os.path.splitext(path)[1] not in ProjectFiles._SPECIAL_FOLDERS:
        return self.add_file(path, parent, target_name=target_name, force=False, file_options=file_options)
    parent = self.get_or_create_group(os.path.split(path)[1], path, parent)
    # iterate over the objects in the directory
    for child in os.listdir(path):
        # exclude dirs or files matching any of the expressions
        if [pattern for pattern in excludes if re.match(pattern, child)]:
            continue
        full_path = os.path.join(path, child)
        children = []
        if os.path.isfile(full_path) or os.path.splitext(child)[1] in ProjectFiles._SPECIAL_FOLDERS or \
                not create_groups:
            # check if the file exists already, if not add it
            children = self.add_file(full_path, parent, target_name=target_name, force=False,
                                     file_options=file_options)
        else:
            # if recursive is true, go deeper, otherwise create the group here.
            if recursive:
                children = self.add_folder(full_path, parent, excludes, recursive, target_name=target_name,
                                           file_options=file_options)
            else:
                self.get_or_create_group(child, child, parent)
        results.extend(children)
    return results
python
{ "resource": "" }
q29848
ProjectFlags.add_code_sign
train
def add_code_sign(self, code_sign_identity, development_team, provisioning_profile_uuid,
                  provisioning_profile_specifier, target_name=None, configuration_name=None):
    """
    Adds the code sign information to the project and creates the appropriate flags in the configuration.
    In xcode 8+ the provisioning_profile_uuid becomes optional, and the provisioning_profile_specifier
    becomes mandatory. Contrariwise, in xcode 8< provisioning_profile_uuid becomes mandatory and
    provisioning_profile_specifier becomes optional.

    :param code_sign_identity: Code sign identity name. Usually formatted as:
        'iPhone Distribution[: <Company name> (MAAYFEXXXX)]'
    :param development_team: Development team identifier string. Usually formatted as: 'MAAYFEXXXX'
    :param provisioning_profile_uuid: Provisioning profile UUID string. Usually formatted as:
        '6f1ffc4d-xxxx-xxxx-xxxx-6dc186280e1e'
    :param provisioning_profile_specifier: Provisioning profile specifier (a.k.a. name) string.
    :param target_name: Target name or list of target names to add the flag to or None for every target
    :param configuration_name: Configuration name to add the flag to or None for every configuration
    :return:
    """
    # Write the four signing-related build settings; the identity is scoped
    # to device SDKs only via the [sdk=iphoneos*] conditional.
    self.set_flags(u'CODE_SIGN_IDENTITY[sdk=iphoneos*]', code_sign_identity, target_name, configuration_name)
    self.set_flags(u'DEVELOPMENT_TEAM', development_team, target_name, configuration_name)
    self.set_flags(u'PROVISIONING_PROFILE', provisioning_profile_uuid, target_name, configuration_name)
    self.set_flags(u'PROVISIONING_PROFILE_SPECIFIER', provisioning_profile_specifier, target_name,
                   configuration_name)
    # Manual provisioning is required for the explicit profile settings above
    # to take effect on each matching target.
    for target in self.objects.get_targets(target_name):
        self.objects[self.rootObject].set_provisioning_style(PBXProvioningTypes.MANUAL, target)
python
{ "resource": "" }
q29849
notify_handler
train
def notify_handler(verb, **kwargs): """ Handler function to create Notification instance upon action signal call. """ # Pull the options out of kwargs kwargs.pop('signal', None) recipient = kwargs.pop('recipient') actor = kwargs.pop('sender') optional_objs = [ (kwargs.pop(opt, None), opt) for opt in ('target', 'action_object') ] public = bool(kwargs.pop('public', True)) description = kwargs.pop('description', None) timestamp = kwargs.pop('timestamp', timezone.now()) level = kwargs.pop('level', Notification.LEVELS.info) # Check if User or Group if isinstance(recipient, Group): recipients = recipient.user_set.all() elif isinstance(recipient, (QuerySet, list)): recipients = recipient else: recipients = [recipient] new_notifications = [] for recipient in recipients: newnotify = Notification( recipient=recipient, actor_content_type=ContentType.objects.get_for_model(actor), actor_object_id=actor.pk, verb=text_type(verb), public=public, description=description, timestamp=timestamp, level=level, ) # Set optional objects for obj, opt in optional_objs: if obj is not None: setattr(newnotify, '%s_object_id' % opt, obj.pk) setattr(newnotify, '%s_content_type' % opt, ContentType.objects.get_for_model(obj)) if kwargs and EXTRA_DATA: newnotify.data = kwargs newnotify.save() new_notifications.append(newnotify) return new_notifications
python
{ "resource": "" }
q29850
NotificationQuerySet.unread
train
def unread(self, include_deleted=False):
    """Return only unread items in the current queryset"""
    criteria = {'unread': True}
    if is_soft_delete() and not include_deleted:
        criteria['deleted'] = False
    # When SOFT_DELETE=False, developers are supposed NOT to touch 'deleted'
    # field. In this case, to improve query performance, don't filter by
    # 'deleted' field
    return self.filter(**criteria)
python
{ "resource": "" }
q29851
NotificationQuerySet.read
train
def read(self, include_deleted=False):
    """Return only read items in the current queryset"""
    criteria = {'unread': False}
    if is_soft_delete() and not include_deleted:
        criteria['deleted'] = False
    # When SOFT_DELETE=False, developers are supposed NOT to touch 'deleted'
    # field. In this case, to improve query performance, don't filter by
    # 'deleted' field
    return self.filter(**criteria)
python
{ "resource": "" }
q29852
NotificationQuerySet.mark_all_as_read
train
def mark_all_as_read(self, recipient=None):
    """Mark as read any unread messages in the current queryset.

    Optionally, filter these by recipient first.
    """
    # Include soft-deleted rows so their read state stays consistent too.
    pending = self.unread(True)
    if recipient:
        pending = pending.filter(recipient=recipient)
    return pending.update(unread=False)
python
{ "resource": "" }
q29853
NotificationQuerySet.mark_all_as_unread
train
def mark_all_as_unread(self, recipient=None):
    """Mark as unread any read messages in the current queryset.

    Optionally, filter these by recipient first.
    """
    pending = self.read(True)
    if recipient:
        pending = pending.filter(recipient=recipient)
    return pending.update(unread=True)
python
{ "resource": "" }
q29854
NotificationQuerySet.mark_all_as_deleted
train
def mark_all_as_deleted(self, recipient=None):
    """Mark current queryset as deleted.

    Optionally, filter by recipient first. Requires soft-delete mode.
    """
    assert_soft_delete()
    pending = self.active()
    if recipient:
        pending = pending.filter(recipient=recipient)
    return pending.update(deleted=True)
python
{ "resource": "" }
q29855
live_unread_notification_list
train
def live_unread_notification_list(request):
    ''' Return a json with a unread notification list '''
    # is_authenticated changed from method to property in Django 1.10+.
    try:
        user_is_authenticated = request.user.is_authenticated()
    except TypeError:  # Django >= 1.11
        user_is_authenticated = request.user.is_authenticated
    if not user_is_authenticated:
        data = {
            'unread_count': 0,
            'unread_list': []
        }
        return JsonResponse(data)
    default_num_to_fetch = get_config()['NUM_TO_FETCH']
    try:
        # If they don't specify, make it 5.
        num_to_fetch = request.GET.get('max', default_num_to_fetch)
        num_to_fetch = int(num_to_fetch)
        # Clamp the requested page size to a sane 1..100 range.
        if not (1 <= num_to_fetch <= 100):
            num_to_fetch = default_num_to_fetch
    except ValueError:  # If casting to an int fails.
        num_to_fetch = default_num_to_fetch
    unread_list = []
    for notification in request.user.notifications.unread()[0:num_to_fetch]:
        # Serialize the model and attach string forms of the related objects.
        struct = model_to_dict(notification)
        struct['slug'] = id2slug(notification.id)
        if notification.actor:
            struct['actor'] = str(notification.actor)
        if notification.target:
            struct['target'] = str(notification.target)
        if notification.action_object:
            struct['action_object'] = str(notification.action_object)
        if notification.data:
            struct['data'] = notification.data
        unread_list.append(struct)
        # NOTE(review): ?mark_as_read=... marks each fetched notification read
        # inside the loop, while the unread_count below is recomputed after —
        # so the count reflects the post-marking state. Confirm intended.
        if request.GET.get('mark_as_read'):
            notification.mark_as_read()
    data = {
        'unread_count': request.user.notifications.unread().count(),
        'unread_list': unread_list
    }
    return JsonResponse(data)
python
{ "resource": "" }
q29856
uni_char_code
train
def uni_char_code(a: str, b: str, c: str, d: str):
    """Convert unicode characters to integers.

    Converts four hexadecimal chars to the integer that the string
    represents. For example, uni_char_code('0','0','0','f') will return 15,
    and uni_char_code('0','0','f','f') returns 255.

    Returns a negative number on error, if a char was invalid. This works
    because char2hex() returns -1 on error, so ORing the shifted nibbles
    keeps the sign bit set.
    """
    high, mid_hi = char2hex(a) << 12, char2hex(b) << 8
    mid_lo, low = char2hex(c) << 4, char2hex(d)
    return high | mid_hi | mid_lo | low
python
{ "resource": "" }
q29857
char2hex
train
def char2hex(a: str):
    """Convert a hex character to its integer value.

    '0' becomes 0, '9' becomes 9; 'A'/'a' become 10, 'F'/'f' become 15.
    Returns -1 on error.
    """
    if "0" <= a <= "9":
        return ord(a) - ord("0")
    if "A" <= a <= "F":
        return ord(a) - ord("A") + 10
    if "a" <= a <= "f":
        return ord(a) - ord("a") + 10
    return -1
python
{ "resource": "" }
q29858
Token.desc
train
def desc(self) -> str:
    """A helper property to describe a token as a string for debugging"""
    kind = self.kind.value
    if self.value:
        return f"{kind} {self.value!r}"
    return kind
python
{ "resource": "" }
q29859
Lexer.read_token
train
def read_token(self, prev: Token) -> Token:
    """Get the next token from the source starting at the given position.

    This skips over whitespace until it finds the next lexable token, then
    lexes punctuators immediately or calls the appropriate helper function
    for more complicated tokens.
    """
    source = self.source
    body = source.body
    body_length = len(body)
    # Resume scanning after the previous token, skipping ignored chars.
    pos = self.position_after_whitespace(body, prev.end)
    line = self.line
    col = 1 + pos - self.line_start
    if pos >= body_length:
        # End of input: emit a zero-width EOF token.
        return Token(TokenKind.EOF, body_length, body_length, line, col, prev)
    char = body[pos]
    # Single-character punctuators are dispatched via a lookup table.
    kind = _KIND_FOR_PUNCT.get(char)
    if kind:
        return Token(kind, pos, pos + 1, line, col, prev)
    if char == "#":
        return self.read_comment(pos, line, col, prev)
    elif char == ".":
        # Only the three-dot spread token starts with '.'; a lone '.'
        # falls through to the syntax error below.
        if body[pos + 1 : pos + 3] == "..":
            return Token(TokenKind.SPREAD, pos, pos + 3, line, col, prev)
    elif "A" <= char <= "Z" or "a" <= char <= "z" or char == "_":
        return self.read_name(pos, line, col, prev)
    elif "0" <= char <= "9" or char == "-":
        return self.read_number(pos, char, line, col, prev)
    elif char == '"':
        # Three quotes open a block string; a single quote a regular string.
        if body[pos + 1 : pos + 3] == '""':
            return self.read_block_string(pos, line, col, prev)
        return self.read_string(pos, line, col, prev)
    raise GraphQLSyntaxError(source, pos, unexpected_character_message(char))
python
{ "resource": "" }
q29860
Lexer.position_after_whitespace
train
def position_after_whitespace(self, body: str, start_position: int) -> int:
    """Go to next position after a whitespace.

    Reads from body starting at start_position until it finds a
    non-whitespace character, then returns the position of that character
    for lexing. Updates the lexer's line bookkeeping when newlines are
    crossed.
    """
    body_length = len(body)
    position = start_position
    while position < body_length:
        char = body[position]
        if char in " \t,\ufeff":
            position += 1
        elif char == "\n":
            position += 1
            self.line += 1
            self.line_start = position
        elif char == "\r":
            # Treat CRLF as a single line terminator.
            position += 2 if body[position + 1 : position + 2] == "\n" else 1
            self.line += 1
            self.line_start = position
        else:
            break
    return position
python
{ "resource": "" }
q29861
Lexer.read_comment
train
def read_comment(self, start: int, line: int, col: int, prev: Token) -> Token:
    """Read a comment token from the source file.

    A comment starts at ``#`` and runs until the end of the line (or end of
    input). The token value excludes the leading ``#``.
    """
    body = self.source.body
    body_length = len(body)
    position = start
    while True:
        position += 1
        # Fix: the previous `position > body_length` test allowed
        # `body[position]` to raise IndexError for a comment that ends the
        # document; the bound must include body_length itself.
        if position >= body_length:
            break
        char = body[position]
        # Any control character other than TAB (notably \n and \r)
        # terminates the comment.
        if char < " " and char != "\t":
            break
    return Token(
        TokenKind.COMMENT,
        start,
        position,
        line,
        col,
        prev,
        body[start + 1 : position],
    )
python
{ "resource": "" }
q29862
Lexer.read_number
train
def read_number(
    self, start: int, char: str, line: int, col: int, prev: Token
) -> Token:
    """Reads a number token from the source file.

    Either a float or an int depending on whether a decimal point or an
    exponent part appears.

    ``char`` is the character at ``start``; lookahead is done with one-char
    slices so that running past the end of the body yields "" rather than
    raising IndexError.
    """
    source = self.source
    body = source.body
    position = start
    is_float = False
    if char == "-":
        position += 1
        char = body[position : position + 1]
    if char == "0":
        # A leading zero must not be followed by another digit.
        position += 1
        char = body[position : position + 1]
        if "0" <= char <= "9":
            raise GraphQLSyntaxError(
                source,
                position,
                f"Invalid number, unexpected digit after 0: {print_char(char)}.",
            )
    else:
        position = self.read_digits(position, char)
        char = body[position : position + 1]
    if char == ".":
        # Fractional part makes this a float.
        is_float = True
        position += 1
        char = body[position : position + 1]
        position = self.read_digits(position, char)
        char = body[position : position + 1]
    if char and char in "Ee":
        # Exponent part also makes this a float; optional sign allowed.
        is_float = True
        position += 1
        char = body[position : position + 1]
        if char and char in "+-":
            position += 1
            char = body[position : position + 1]
        position = self.read_digits(position, char)
    return Token(
        TokenKind.FLOAT if is_float else TokenKind.INT,
        start,
        position,
        line,
        col,
        prev,
        body[start:position],
    )
python
{ "resource": "" }
q29863
Lexer.read_digits
train
def read_digits(self, start: int, char: str) -> int:
    """Advance over a run of ASCII digits and return the end position.

    ``char`` must be the character at ``start``; a syntax error is raised
    when it is not a digit, since every digit run must be non-empty.
    """
    source = self.source
    body = source.body
    position = start
    current = char
    while "0" <= current <= "9":
        position += 1
        # One-char slice yields "" at end of input instead of raising.
        current = body[position : position + 1]
    if position == start:
        raise GraphQLSyntaxError(
            source,
            position,
            f"Invalid number, expected digit but got: {print_char(current)}.",
        )
    return position
python
{ "resource": "" }
q29864
Lexer.read_string
train
def read_string(self, start: int, line: int, col: int, prev: Token) -> Token:
    """Read a single-quoted (non-block) string token from the source file.

    Scans from the opening quote to the closing quote, accumulating chunks
    of plain text and decoded escape sequences in ``value``; raises a syntax
    error for control characters, bad escapes, or an unterminated string.
    """
    source = self.source
    body = source.body
    body_length = len(body)
    position = start + 1  # skip the opening quote
    chunk_start = position  # start of the current run of plain characters
    value: List[str] = []
    append = value.append
    while position < body_length:
        char = body[position]
        if char in "\n\r":
            # Line terminators are not allowed; falls through to the
            # "Unterminated string" error below.
            break
        if char == '"':
            # Closing quote: flush the last plain chunk and build the token.
            append(body[chunk_start:position])
            return Token(
                TokenKind.STRING,
                start,
                position + 1,
                line,
                col,
                prev,
                "".join(value),
            )
        if char < " " and char != "\t":
            raise GraphQLSyntaxError(
                source,
                position,
                f"Invalid character within String: {print_char(char)}.",
            )
        position += 1
        if char == "\\":
            # Flush everything before the backslash, then decode the escape.
            append(body[chunk_start : position - 1])
            char = body[position : position + 1]
            escaped = _ESCAPED_CHARS.get(char)
            if escaped:
                value.append(escaped)
            elif char == "u" and position + 4 < body_length:
                # \uXXXX escape: uni_char_code returns a negative value for
                # non-hex digits.
                code = uni_char_code(*body[position + 1 : position + 5])
                if code < 0:
                    escape = repr(body[position : position + 5])
                    escape = escape[:1] + "\\" + escape[1:]
                    raise GraphQLSyntaxError(
                        source,
                        position,
                        f"Invalid character escape sequence: {escape}.",
                    )
                append(chr(code))
                position += 4
            else:
                escape = repr(char)
                escape = escape[:1] + "\\" + escape[1:]
                raise GraphQLSyntaxError(
                    source,
                    position,
                    f"Invalid character escape sequence: {escape}.",
                )
            position += 1
            chunk_start = position
    raise GraphQLSyntaxError(source, position, "Unterminated string.")
python
{ "resource": "" }
q29865
Lexer.read_name
train
def read_name(self, start: int, line: int, col: int, prev: Token) -> Token:
    """Lex a name (``/[_A-Za-z][_0-9A-Za-z]*/``) beginning at ``start``.

    The caller has already verified that the character at ``start`` is a
    valid name start, so scanning continues from the next position.
    """
    body = self.source.body
    end = len(body)
    position = start + 1
    while position < end:
        char = body[position]
        # Stop at the first character outside [_0-9A-Za-z].
        if (
            char != "_"
            and not "0" <= char <= "9"
            and not "A" <= char <= "Z"
            and not "a" <= char <= "z"
        ):
            break
        position += 1
    return Token(
        TokenKind.NAME, start, position, line, col, prev, body[start:position]
    )
python
{ "resource": "" }
q29866
execute
train
def execute( schema: GraphQLSchema, document: DocumentNode, root_value: Any = None, context_value: Any = None, variable_values: Dict[str, Any] = None, operation_name: str = None, field_resolver: GraphQLFieldResolver = None, type_resolver: GraphQLTypeResolver = None, middleware: Middleware = None, execution_context_class: Type["ExecutionContext"] = None, ) -> AwaitableOrValue[ExecutionResult]: """Execute a GraphQL operation. Implements the "Evaluating requests" section of the GraphQL specification. Returns an ExecutionResult (if all encountered resolvers are synchronous), or a coroutine object eventually yielding an ExecutionResult. If the arguments to this function do not result in a legal execution context, a GraphQLError will be thrown immediately explaining the invalid input. """ # If arguments are missing or incorrect, throw an error. assert_valid_execution_arguments(schema, document, variable_values) if execution_context_class is None: execution_context_class = ExecutionContext # If a valid execution context cannot be created due to incorrect arguments, # a "Response" with only errors is returned. exe_context = execution_context_class.build( schema, document, root_value, context_value, variable_values, operation_name, field_resolver, type_resolver, middleware, ) # Return early errors if execution context failed. if isinstance(exe_context, list): return ExecutionResult(data=None, errors=exe_context) # Return a possible coroutine object that will eventually yield the data described # by the "Response" section of the GraphQL specification. # # If errors are encountered while executing a GraphQL field, only that field and # its descendants will be omitted, and sibling fields will still be executed. An # execution which encounters errors will still result in a coroutine object that # can be executed without errors. data = exe_context.execute_operation(exe_context.operation, root_value) return exe_context.build_response(data)
python
{ "resource": "" }
q29867
assert_valid_execution_arguments
train
def assert_valid_execution_arguments(
    schema: GraphQLSchema,
    document: DocumentNode,
    raw_variable_values: Dict[str, Any] = None,
) -> None:
    """Validate the arguments passed to ``execute``.

    Performs the essential sanity checks before execution starts, so that
    misuse of the library surfaces as a clear developer-facing error.
    """
    if not document:
        raise TypeError("Must provide document")

    # Executing against an invalid schema must fail up front.
    assert_valid_schema(schema)

    # Variables, when given at all, must arrive as a dictionary.
    if raw_variable_values is not None and not isinstance(raw_variable_values, dict):
        raise TypeError(
            "Variables must be provided as a dictionary where each property is a"
            " variable value. Perhaps look to see if an unparsed JSON string was"
            " provided."
        )
python
{ "resource": "" }
q29868
response_path_as_list
train
def response_path_as_list(path: ResponsePath) -> List[Union[str, int]]:
    """Convert a linked ResponsePath into a flat list of keys.

    Given a ResponsePath (found in the `path` entry in the information
    provided as the last argument to a field resolver), walk the chain of
    previous entries and return the keys ordered from root to leaf.
    """
    keys: List[Union[str, int]] = []
    node: Optional[ResponsePath] = path
    while node:
        keys.append(node.key)
        node = node.prev
    # The chain is linked leaf-to-root, so reverse it before returning.
    keys.reverse()
    return keys
python
{ "resource": "" }
q29869
add_path
train
def add_path(prev: Optional[ResponsePath], key: Union[str, int]) -> ResponsePath:
    """Extend a response path with one more key.

    Returns a fresh ResponsePath node linking ``key`` to the existing chain
    ``prev`` (which may be None for the root).
    """
    return ResponsePath(prev, key)
python
{ "resource": "" }
q29870
invalid_return_type_error
train
def invalid_return_type_error(
    return_type: GraphQLObjectType, result: Any, field_nodes: List[FieldNode]
) -> GraphQLError:
    """Build the error raised when ``is_type_of`` rejects a resolved value."""
    message = f"Expected value of type '{return_type.name}' but got: {inspect(result)}."
    return GraphQLError(message, field_nodes)
python
{ "resource": "" }
q29871
default_type_resolver
train
def default_type_resolver( value: Any, info: GraphQLResolveInfo, abstract_type: GraphQLAbstractType ) -> AwaitableOrValue[Optional[Union[GraphQLObjectType, str]]]: """Default type resolver function. If a resolve_type function is not given, then a default resolve behavior is used which attempts two strategies: First, See if the provided value has a `__typename` field defined, if so, use that value as name of the resolved type. Otherwise, test each possible type for the abstract type by calling `is_type_of` for the object being coerced, returning the first type that matches. """ # First, look for `__typename`. type_name = ( value.get("__typename") if isinstance(value, dict) # need to de-mangle the attribute assumed to be "private" in Python else getattr(value, f"_{value.__class__.__name__}__typename", None) ) if isinstance(type_name, str): return type_name # Otherwise, test each possible type. possible_types = info.schema.get_possible_types(abstract_type) awaitable_is_type_of_results: List[Awaitable] = [] append_awaitable_results = awaitable_is_type_of_results.append awaitable_types: List[GraphQLObjectType] = [] append_awaitable_types = awaitable_types.append for type_ in possible_types: if type_.is_type_of: is_type_of_result = type_.is_type_of(value, info) if isawaitable(is_type_of_result): append_awaitable_results(cast(Awaitable, is_type_of_result)) append_awaitable_types(type_) elif is_type_of_result: return type_ if awaitable_is_type_of_results: # noinspection PyShadowingNames async def get_type(): is_type_of_results = await gather(*awaitable_is_type_of_results) for is_type_of_result, type_ in zip(is_type_of_results, awaitable_types): if is_type_of_result: return type_ return get_type() return None
python
{ "resource": "" }
q29872
default_field_resolver
train
def default_field_resolver(source, info, **args):
    """Resolve a field from the source object by name.

    Used when a field has no explicit resolver: dictionaries are read by key,
    all other objects by attribute, with the field name taken from ``info``.
    If the looked-up value is callable, it is invoked with the resolver info
    and field arguments and its result is returned instead.
    """
    field_name = info.field_name
    if isinstance(source, dict):
        value = source.get(field_name)
    else:
        value = getattr(source, field_name, None)
    if callable(value):
        value = value(info, **args)
    return value
python
{ "resource": "" }
q29873
ExecutionContext.build
train
def build(
    cls,
    schema: GraphQLSchema,
    document: DocumentNode,
    root_value: Any = None,
    context_value: Any = None,
    raw_variable_values: Dict[str, Any] = None,
    operation_name: str = None,
    field_resolver: GraphQLFieldResolver = None,
    type_resolver: GraphQLTypeResolver = None,
    middleware: Middleware = None,
) -> Union[List[GraphQLError], "ExecutionContext"]:
    """Build an execution context

    Constructs a ExecutionContext object from the arguments passed to execute,
    which we will pass throughout the other execution methods.

    Throws a GraphQLError if a valid execution context cannot be created.

    Returns either the constructed context or, for recoverable problems
    (unknown/ambiguous operation, bad variables), a list of GraphQLErrors.
    """
    errors: List[GraphQLError] = []
    operation: Optional[OperationDefinitionNode] = None
    has_multiple_assumed_operations = False
    fragments: Dict[str, FragmentDefinitionNode] = {}
    middleware_manager: Optional[MiddlewareManager] = None
    if middleware is not None:
        # Normalize the middleware argument into a MiddlewareManager.
        if isinstance(middleware, (list, tuple)):
            middleware_manager = MiddlewareManager(*middleware)
        elif isinstance(middleware, MiddlewareManager):
            middleware_manager = middleware
        else:
            raise TypeError(
                "Middleware must be passed as a list or tuple of functions"
                " or objects, or as a single MiddlewareManager object."
                f" Got {inspect(middleware)} instead."
            )

    # Select the operation to run and index the fragments by name.
    for definition in document.definitions:
        if isinstance(definition, OperationDefinitionNode):
            if not operation_name and operation:
                # Two anonymous candidates: the selection is ambiguous.
                has_multiple_assumed_operations = True
            elif not operation_name or (
                definition.name and definition.name.value == operation_name
            ):
                operation = definition
        elif isinstance(definition, FragmentDefinitionNode):
            fragments[definition.name.value] = definition

    if not operation:
        if operation_name:
            errors.append(
                GraphQLError(f"Unknown operation named '{operation_name}'.")
            )
        else:
            errors.append(GraphQLError("Must provide an operation."))
    elif has_multiple_assumed_operations:
        errors.append(
            GraphQLError(
                "Must provide operation name"
                " if query contains multiple operations."
            )
        )

    variable_values = None
    if operation:
        # Coerce the raw variable values against the operation's definitions.
        coerced_variable_values = get_variable_values(
            schema, operation.variable_definitions or [], raw_variable_values or {}
        )
        if coerced_variable_values.errors:
            errors.extend(coerced_variable_values.errors)
        else:
            variable_values = coerced_variable_values.coerced

    if errors:
        return errors

    # Sanity checks: with no errors collected, both must be present.
    if operation is None:
        raise TypeError("Has operation if no errors.")
    if variable_values is None:
        raise TypeError("Has variables if no errors.")

    return cls(
        schema,
        fragments,
        root_value,
        context_value,
        operation,
        variable_values,
        field_resolver or default_field_resolver,
        type_resolver or default_type_resolver,
        middleware_manager,
        errors,
    )
python
{ "resource": "" }
q29874
ExecutionContext.build_response
train
def build_response(
    self, data: AwaitableOrValue[Optional[Dict[str, Any]]]
) -> AwaitableOrValue[ExecutionResult]:
    """Build response.

    Given a completed execution context and data, build the (data, errors) response
    defined by the "Response" section of the GraphQL spec.

    If ``data`` is still awaitable, a coroutine is returned that awaits it
    and then re-enters this method with the concrete value.
    """
    if isawaitable(data):

        async def build_response_async():
            return self.build_response(await data)

        return build_response_async()
    data = cast(Optional[Dict[str, Any]], data)
    errors = self.errors
    if not errors:
        return ExecutionResult(data, None)
    # Sort the error list in order to make it deterministic, since we might have
    # been using parallel execution.
    errors.sort(key=lambda error: (error.locations, error.path, error.message))
    return ExecutionResult(data, errors)
python
{ "resource": "" }
q29875
ExecutionContext.execute_operation
train
def execute_operation(
    self, operation: OperationDefinitionNode, root_value: Any
) -> Optional[AwaitableOrValue[Any]]:
    """Execute an operation.

    Implements the "Evaluating operations" section of the spec.

    Mutations run their root fields serially; queries and subscriptions
    run them (potentially) in parallel. Top-level errors are recorded on
    the context and the whole response data becomes None.
    """
    type_ = get_operation_root_type(self.schema, operation)
    fields = self.collect_fields(type_, operation.selection_set, {}, set())
    path = None
    # Errors from sub-fields of a NonNull type may propagate to the top level, at
    # which point we still log the error and null the parent field, which in this
    # case is the entire response.
    #
    # Similar to complete_value_catching_error.
    try:
        result = (
            self.execute_fields_serially
            if operation.operation == OperationType.MUTATION
            else self.execute_fields
        )(type_, root_value, path, fields)
    except GraphQLError as error:
        self.errors.append(error)
        return None
    except Exception as error:
        # Wrap non-GraphQL exceptions so callers see a uniform error type.
        error = GraphQLError(str(error), original_error=error)
        self.errors.append(error)
        return None
    else:
        if isawaitable(result):
            # Mirror the synchronous error handling on the async path.
            # noinspection PyShadowingNames
            async def await_result():
                try:
                    return await result
                except GraphQLError as error:
                    self.errors.append(error)
                except Exception as error:
                    error = GraphQLError(str(error), original_error=error)
                    self.errors.append(error)

            return await_result()
        return result
python
{ "resource": "" }
q29876
ExecutionContext.execute_fields_serially
train
def execute_fields_serially(
    self,
    parent_type: GraphQLObjectType,
    source_value: Any,
    path: Optional[ResponsePath],
    fields: Dict[str, List[FieldNode]],
) -> AwaitableOrValue[Dict[str, Any]]:
    """Execute the given fields serially.

    Implements the "Evaluating selection sets" section of the spec for "write"
    mode.

    Serial ordering is enforced by chaining: as soon as one field's result is
    awaitable, ``results`` itself becomes a coroutine and every subsequent
    field result is attached by awaiting that chain first.
    """
    results: Dict[str, Any] = {}
    for response_name, field_nodes in fields.items():
        field_path = add_path(path, response_name)
        result = self.resolve_field(
            parent_type, source_value, field_nodes, field_path
        )
        if result is INVALID:
            # Field does not exist on the type; skip it entirely.
            continue
        if isawaitable(results):
            # A previous field was async: extend the coroutine chain.
            # noinspection PyShadowingNames
            async def await_and_set_result(results, response_name, result):
                awaited_results = await results
                awaited_results[response_name] = (
                    await result if isawaitable(result) else result
                )
                return awaited_results

            # noinspection PyTypeChecker
            results = await_and_set_result(
                cast(Awaitable, results), response_name, result
            )
        elif isawaitable(result):
            # First async field: turn the plain dict into a coroutine chain.
            # noinspection PyShadowingNames
            async def set_result(results, response_name, result):
                results[response_name] = await result
                return results

            # noinspection PyTypeChecker
            results = set_result(results, response_name, result)
        else:
            results[response_name] = result
    if isawaitable(results):
        # noinspection PyShadowingNames
        async def get_results():
            return await cast(Awaitable, results)

        return get_results()
    return results
python
{ "resource": "" }
q29877
ExecutionContext.execute_fields
train
def execute_fields(
    self,
    parent_type: GraphQLObjectType,
    source_value: Any,
    path: Optional[ResponsePath],
    fields: Dict[str, List[FieldNode]],
) -> AwaitableOrValue[Dict[str, Any]]:
    """Execute the given fields concurrently.

    Implements the "Evaluating selection sets" section of the spec for "read"
    mode.

    Synchronous results are returned directly; when any field yields a
    coroutine, a single coroutine is returned that gathers all of them in
    parallel and fills in the final mapping.
    """
    results = {}
    awaitable_fields: List[str] = []
    append_awaitable = awaitable_fields.append
    for response_name, field_nodes in fields.items():
        field_path = add_path(path, response_name)
        result = self.resolve_field(
            parent_type, source_value, field_nodes, field_path
        )
        if result is not INVALID:
            results[response_name] = result
            if isawaitable(result):
                append_awaitable(response_name)

    #  If there are no coroutines, we can just return the object
    if not awaitable_fields:
        return results

    # Otherwise, results is a map from field name to the result of resolving that
    # field, which is possibly a coroutine object. Return a coroutine object that
    # will yield this same map, but with any coroutines awaited in parallel and
    # replaced with the values they yielded.
    async def get_results():
        results.update(
            zip(
                awaitable_fields,
                await gather(*(results[field] for field in awaitable_fields)),
            )
        )
        return results

    return get_results()
python
{ "resource": "" }
q29878
ExecutionContext.collect_fields
train
def collect_fields(
    self,
    runtime_type: GraphQLObjectType,
    selection_set: SelectionSetNode,
    fields: Dict[str, List[FieldNode]],
    visited_fragment_names: Set[str],
) -> Dict[str, List[FieldNode]]:
    """Collect fields.

    Given a selection_set, adds all of the fields in that selection to the passed
    in map of fields, and returns it at the end.

    collect_fields requires the "runtime type" of an object. For a field which
    returns an Interface or Union type, the "runtime type" will be the actual
    Object type returned by that field.

    Both ``fields`` and ``visited_fragment_names`` are mutated in place;
    ``visited_fragment_names`` prevents expanding the same named fragment twice.
    """
    for selection in selection_set.selections:
        if isinstance(selection, FieldNode):
            if not self.should_include_node(selection):
                continue
            # Fields that alias to the same response key are grouped together.
            name = get_field_entry_key(selection)
            fields.setdefault(name, []).append(selection)
        elif isinstance(selection, InlineFragmentNode):
            if not self.should_include_node(
                selection
            ) or not self.does_fragment_condition_match(selection, runtime_type):
                continue
            self.collect_fields(
                runtime_type,
                selection.selection_set,
                fields,
                visited_fragment_names,
            )
        elif isinstance(selection, FragmentSpreadNode):
            frag_name = selection.name.value
            if frag_name in visited_fragment_names or not self.should_include_node(
                selection
            ):
                continue
            visited_fragment_names.add(frag_name)
            fragment = self.fragments.get(frag_name)
            if not fragment or not self.does_fragment_condition_match(
                fragment, runtime_type
            ):
                continue
            self.collect_fields(
                runtime_type, fragment.selection_set, fields, visited_fragment_names
            )
    return fields
python
{ "resource": "" }
q29879
ExecutionContext.should_include_node
train
def should_include_node(
    self, node: Union[FragmentSpreadNode, FieldNode, InlineFragmentNode]
) -> bool:
    """Determine whether a selection is included in the result.

    Evaluates the @skip and @include directives against the current
    variable values; @skip has higher precedence than @include.
    """
    skip = get_directive_values(GraphQLSkipDirective, node, self.variable_values)
    if skip and skip["if"]:
        return False
    include = get_directive_values(
        GraphQLIncludeDirective, node, self.variable_values
    )
    # Excluded only when @include is present with a false condition.
    return not (include and not include["if"])
python
{ "resource": "" }
q29880
ExecutionContext.does_fragment_condition_match
train
def does_fragment_condition_match(
    self,
    fragment: Union[FragmentDefinitionNode, InlineFragmentNode],
    type_: GraphQLObjectType,
) -> bool:
    """Check whether the concrete type satisfies the fragment's type condition."""
    condition = fragment.type_condition
    if not condition:
        # A fragment without a type condition applies everywhere.
        return True
    conditional_type = type_from_ast(self.schema, condition)
    if conditional_type is type_:
        return True
    if not is_abstract_type(conditional_type):
        return False
    # For interfaces and unions, ask the schema whether type_ implements it.
    return self.schema.is_possible_type(
        cast(GraphQLAbstractType, conditional_type), type_
    )
python
{ "resource": "" }
q29881
ExecutionContext.resolve_field
train
def resolve_field(
    self,
    parent_type: GraphQLObjectType,
    source: Any,
    field_nodes: List[FieldNode],
    path: ResponsePath,
) -> AwaitableOrValue[Any]:
    """Resolve the field on the given source object.

    In particular, this figures out the value that the field returns by calling
    its resolve function, then calls complete_value to await coroutine objects,
    serialize scalars, or execute the sub-selection-set for objects.

    Returns INVALID when the field is not defined on the parent type, so
    callers can silently skip it.
    """
    # All nodes in the group share the same name; the first one suffices.
    field_node = field_nodes[0]
    field_name = field_node.name.value
    field_def = get_field_def(self.schema, parent_type, field_name)
    if not field_def:
        return INVALID
    resolve_fn = field_def.resolve or self.field_resolver
    if self.middleware_manager:
        # Wrap the resolver with the configured middleware chain.
        resolve_fn = self.middleware_manager.get_field_resolver(resolve_fn)
    info = self.build_resolve_info(field_def, field_nodes, parent_type, path)
    # Get the resolve function, regardless of if its result is normal or abrupt
    # (error).
    result = self.resolve_field_value_or_error(
        field_def, field_nodes, resolve_fn, source, info
    )
    return self.complete_value_catching_error(
        field_def.type, field_nodes, info, path, result
    )
python
{ "resource": "" }
q29882
ExecutionContext.complete_value_catching_error
train
def complete_value_catching_error(
    self,
    return_type: GraphQLOutputType,
    field_nodes: List[FieldNode],
    info: GraphQLResolveInfo,
    path: ResponsePath,
    result: Any,
) -> AwaitableOrValue[Any]:
    """Complete a value while catching an error.

    This is a small wrapper around completeValue which detects and logs errors in
    the execution context.

    Errors are routed to handle_field_error (which nulls the field) on both
    the synchronous and the asynchronous path.
    """
    try:
        if isawaitable(result):
            # Await the resolver result first, then complete it; the
            # completion itself may also be awaitable.
            async def await_result():
                value = self.complete_value(
                    return_type, field_nodes, info, path, await result
                )
                if isawaitable(value):
                    return await value
                return value

            completed = await_result()
        else:
            completed = self.complete_value(
                return_type, field_nodes, info, path, result
            )
        if isawaitable(completed):
            # Errors raised while awaiting must be handled the same way as
            # synchronous ones.
            # noinspection PyShadowingNames
            async def await_completed():
                try:
                    return await completed
                except Exception as error:
                    self.handle_field_error(error, field_nodes, path, return_type)

            return await_completed()
        return completed
    except Exception as error:
        self.handle_field_error(error, field_nodes, path, return_type)
        return None
python
{ "resource": "" }
q29883
ExecutionContext.complete_value
train
def complete_value(
    self,
    return_type: GraphQLOutputType,
    field_nodes: List[FieldNode],
    info: GraphQLResolveInfo,
    path: ResponsePath,
    result: Any,
) -> AwaitableOrValue[Any]:
    """Complete a value.

    Implements the instructions for completeValue as defined in the "Field
    entries" section of the spec.

    If the field type is Non-Null, then this recursively completes the value for
    the inner type. It throws a field error if that completion returns null, as
    per the "Nullability" section of the spec.

    If the field type is a List, then this recursively completes the value for the
    inner type on each item in the list.

    If the field type is a Scalar or Enum, ensures the completed value is a legal
    value of the type by calling the `serialize` method of GraphQL type
    definition.

    If the field is an abstract type, determine the runtime type of the value and
    then complete based on that type.

    Otherwise, the field type expects a sub-selection set, and will complete the
    value by evaluating all sub-selections.
    """
    # If result is an Exception, throw a located error.
    if isinstance(result, Exception):
        raise result

    # If field type is NonNull, complete for inner type, and throw field error if
    # result is null.
    if is_non_null_type(return_type):
        completed = self.complete_value(
            cast(GraphQLNonNull, return_type).of_type,
            field_nodes,
            info,
            path,
            result,
        )
        if completed is None:
            raise TypeError(
                "Cannot return null for non-nullable field"
                f" {info.parent_type.name}.{info.field_name}."
            )
        return completed

    # If result value is null-ish (null, INVALID, or NaN) then return null.
    if is_nullish(result):
        return None

    # If field type is List, complete each item in the list with inner type
    if is_list_type(return_type):
        return self.complete_list_value(
            cast(GraphQLList, return_type), field_nodes, info, path, result
        )

    # If field type is a leaf type, Scalar or Enum, serialize to a valid value,
    # returning null if serialization is not possible.
    if is_leaf_type(return_type):
        return self.complete_leaf_value(cast(GraphQLLeafType, return_type), result)

    # If field type is an abstract type, Interface or Union, determine the runtime
    # Object type and complete for that type.
    if is_abstract_type(return_type):
        return self.complete_abstract_value(
            cast(GraphQLAbstractType, return_type), field_nodes, info, path, result
        )

    # If field type is Object, execute and complete all sub-selections.
    if is_object_type(return_type):
        return self.complete_object_value(
            cast(GraphQLObjectType, return_type), field_nodes, info, path, result
        )

    # Not reachable. All possible output types have been considered.
    raise TypeError(  # pragma: no cover
        "Cannot complete value of unexpected output type:"
        f" '{inspect(return_type)}'."
    )
python
{ "resource": "" }
q29884
ExecutionContext.complete_list_value
train
def complete_list_value(
    self,
    return_type: GraphQLList[GraphQLOutputType],
    field_nodes: List[FieldNode],
    info: GraphQLResolveInfo,
    path: ResponsePath,
    result: Iterable[Any],
) -> AwaitableOrValue[Any]:
    """Complete a list value.

    Complete a list value by completing each item in the list with the inner type.

    Note that strings, although iterable, are rejected: a string resolver
    result for a list field is a programming error.
    """
    if not isinstance(result, Iterable) or isinstance(result, str):
        raise TypeError(
            "Expected Iterable, but did not find one for field"
            f" {info.parent_type.name}.{info.field_name}."
        )

    # This is specified as a simple map, however we're optimizing the path where
    # the list contains no coroutine objects by avoiding creating another coroutine
    # object.
    item_type = return_type.of_type
    awaitable_indices: List[int] = []
    append_awaitable = awaitable_indices.append
    completed_results: List[Any] = []
    append_result = completed_results.append
    for index, item in enumerate(result):
        # No need to modify the info object containing the path, since from here on
        # it is not ever accessed by resolver functions.
        field_path = add_path(path, index)
        completed_item = self.complete_value_catching_error(
            item_type, field_nodes, info, field_path, item
        )
        if isawaitable(completed_item):
            append_awaitable(index)
        append_result(completed_item)

    if not awaitable_indices:
        return completed_results

    # Await all pending items in parallel and write the values back into
    # their original positions.
    # noinspection PyShadowingNames
    async def get_completed_results():
        for index, result in zip(
            awaitable_indices,
            await gather(
                *(completed_results[index] for index in awaitable_indices)
            ),
        ):
            completed_results[index] = result
        return completed_results

    return get_completed_results()
python
{ "resource": "" }
q29885
ExecutionContext.complete_leaf_value
train
def complete_leaf_value(return_type: GraphQLLeafType, result: Any) -> Any:
    """Serialize a Scalar or Enum value.

    Delegates to the type's ``serialize`` method; raises when the result
    cannot be represented as a legal value of the type.
    """
    serialized = return_type.serialize(result)
    if is_invalid(serialized):
        raise TypeError(
            f"Expected a value of type '{inspect(return_type)}'"
            f" but received: {inspect(result)}"
        )
    return serialized
python
{ "resource": "" }
q29886
ExecutionContext.complete_abstract_value
train
def complete_abstract_value(
    self,
    return_type: GraphQLAbstractType,
    field_nodes: List[FieldNode],
    info: GraphQLResolveInfo,
    path: ResponsePath,
    result: Any,
) -> AwaitableOrValue[Any]:
    """Complete an abstract value.

    Complete a value of an abstract type by determining the runtime object type of
    that value, then complete the value for that type.

    The type resolver may return either a GraphQLObjectType, a type name
    string, or an awaitable of either; all cases funnel through
    ensure_valid_runtime_type before object completion.
    """
    resolve_type_fn = return_type.resolve_type or self.type_resolver
    runtime_type = resolve_type_fn(result, info, return_type)  # type: ignore

    if isawaitable(runtime_type):
        # Async resolver: await the type first, then complete as an object
        # (the completion itself may be awaitable as well).
        async def await_complete_object_value():
            value = self.complete_object_value(
                self.ensure_valid_runtime_type(
                    await runtime_type, return_type, field_nodes, info, result
                ),
                field_nodes,
                info,
                path,
                result,
            )
            if isawaitable(value):
                return await value
            return value

        return await_complete_object_value()
    runtime_type = cast(Optional[Union[GraphQLObjectType, str]], runtime_type)

    return self.complete_object_value(
        self.ensure_valid_runtime_type(
            runtime_type, return_type, field_nodes, info, result
        ),
        field_nodes,
        info,
        path,
        result,
    )
python
{ "resource": "" }
q29887
ExecutionContext.complete_object_value
train
def complete_object_value( self, return_type: GraphQLObjectType, field_nodes: List[FieldNode], info: GraphQLResolveInfo, path: ResponsePath, result: Any, ) -> AwaitableOrValue[Dict[str, Any]]: """Complete an Object value by executing all sub-selections.""" # If there is an `is_type_of()` predicate function, call it with the current # result. If `is_type_of()` returns False, then raise an error rather than # continuing execution. if return_type.is_type_of: is_type_of = return_type.is_type_of(result, info) if isawaitable(is_type_of): async def collect_and_execute_subfields_async(): if not await is_type_of: raise invalid_return_type_error( return_type, result, field_nodes ) return self.collect_and_execute_subfields( return_type, field_nodes, path, result ) return collect_and_execute_subfields_async() if not is_type_of: raise invalid_return_type_error(return_type, result, field_nodes) return self.collect_and_execute_subfields( return_type, field_nodes, path, result )
python
{ "resource": "" }
q29888
ExecutionContext.collect_and_execute_subfields
train
def collect_and_execute_subfields(
    self,
    return_type: GraphQLObjectType,
    field_nodes: List[FieldNode],
    path: ResponsePath,
    result: Any,
) -> AwaitableOrValue[Dict[str, Any]]:
    """Gather the relevant sub-fields and execute them against the result."""
    subfields = self.collect_subfields(return_type, field_nodes)
    return self.execute_fields(return_type, result, path, subfields)
python
{ "resource": "" }
q29889
ExecutionContext.collect_subfields
train
def collect_subfields(
    self, return_type: GraphQLObjectType, field_nodes: List[FieldNode]
) -> Dict[str, List[FieldNode]]:
    """Collect (and memoize) the sub-fields for the given return type.

    The result is cached per (return type, field nodes) pair in
    ``_subfields_cache`` so that the same field collection is not repeated
    when resolving long lists of values.
    """
    cache_key = return_type, tuple(field_nodes)
    cached = self._subfields_cache.get(cache_key)
    if cached is not None:
        return cached
    collected: Dict[str, List[FieldNode]] = {}
    visited: Set[str] = set()
    for node in field_nodes:
        if node.selection_set:
            collected = self.collect_fields(
                return_type, node.selection_set, collected, visited
            )
    self._subfields_cache[cache_key] = collected
    return collected
python
{ "resource": "" }
q29890
get_operation_root_type
train
def get_operation_root_type(
    schema: GraphQLSchema,
    operation: Union[OperationDefinitionNode, OperationTypeDefinitionNode],
) -> GraphQLObjectType:
    """Extract the root type of the operation from the schema.

    Raises a GraphQLError when the schema does not define a root type for
    the requested operation kind, or when the kind is unknown.
    """
    kind = operation.operation
    if kind == OperationType.QUERY:
        root_type = schema.query_type
        missing_msg = "Schema does not define the required query root type."
    elif kind == OperationType.MUTATION:
        root_type = schema.mutation_type
        missing_msg = "Schema is not configured for mutations."
    elif kind == OperationType.SUBSCRIPTION:
        root_type = schema.subscription_type
        missing_msg = "Schema is not configured for subscriptions."
    else:
        raise GraphQLError(
            "Can only have query, mutation and subscription operations.", operation
        )
    if not root_type:
        raise GraphQLError(missing_msg, operation)
    return root_type
python
{ "resource": "" }
q29891
is_specified_directive
train
def is_specified_directive(directive: GraphQLDirective):
    """Check whether the given directive is one of the specified directives."""
    # Only real directive instances can match one of the built-in directives.
    if not isinstance(directive, GraphQLDirective):
        return False
    return any(directive.name == spec.name for spec in specified_directives)
python
{ "resource": "" }
q29892
print_error
train
def print_error(error: "GraphQLError") -> str:
    """Print a GraphQLError to a string.

    The printed string contains useful location information about the
    error's position in the source, when such information is available.
    """
    locations_text: List[str] = []
    if error.nodes:
        # Prefer the AST nodes attached to the error.
        for node in error.nodes:
            loc = node.loc
            if loc:
                locations_text.append(
                    highlight_source_at_location(
                        loc.source, loc.source.get_location(loc.start)
                    )
                )
    elif error.source and error.locations:
        # Fall back to explicit source locations.
        for location in error.locations:
            locations_text.append(
                highlight_source_at_location(error.source, location)
            )
    if not locations_text:
        return error.message
    return "\n\n".join([error.message, *locations_text]) + "\n"
python
{ "resource": "" }
q29893
highlight_source_at_location
train
def highlight_source_at_location(source: "Source", location: "SourceLocation") -> str:
    """Highlight source at given location.

    Renders a helpful description of the location of the error in the
    GraphQL Source document, honoring the source's line/column offsets.
    """
    col_shift = source.location_offset.column - 1
    # Pad the body so columns line up with the location offset.
    padded_body = " " * col_shift + source.body
    line_index = location.line - 1
    display_line = location.line + (source.location_offset.line - 1)
    # The column offset only applies to the first physical line.
    display_column = location.column + (col_shift if location.line == 1 else 0)
    source_lines = _re_newline.split(padded_body)  # unlike splitlines()
    num_lines = len(source_lines)

    def line_or_none(index: int) -> Optional[str]:
        if 0 <= index < num_lines:
            return source_lines[index]
        return None

    header = f"{source.name} ({display_line}:{display_column})\n"
    return header + print_prefixed_lines(
        [
            (f"{display_line - 1}: ", line_or_none(line_index - 1)),
            (f"{display_line}: ", line_or_none(line_index)),
            ("", " " * (display_column - 1) + "^"),
            (f"{display_line + 1}: ", line_or_none(line_index + 1)),
        ]
    )
python
{ "resource": "" }
q29894
trunc_str
train
def trunc_str(s: str, max_size=None) -> str:
    """Truncate a string to a maximum length.

    Strings longer than the limit are shortened by replacing their middle
    with an ellipsis so the result is at most the limit in length.

    :param s: the string to truncate
    :param max_size: maximum allowed length; defaults to the module-level
        ``max_str_size`` setting, preserving the original call signature
    """
    limit = max_str_size if max_size is None else max_size
    if len(s) > limit:
        head = max(0, (limit - 3) // 2)
        tail = max(0, limit - 3 - head)
        # Guard against ``s[-0:]``, which would yield the whole string
        # instead of an empty suffix when the tail length is zero.
        s = s[:head] + "..." + (s[-tail:] if tail else "")
    return s
python
{ "resource": "" }
q29895
trunc_list
train
def trunc_list(s: List) -> List:
    """Truncate lists to maximum length.

    Lists longer than ``max_list_size`` are shortened by replacing their
    middle with a single ellipsis placeholder.
    """
    if len(s) > max_list_size:
        keep_front = max_list_size // 2
        keep_back = keep_front - 1
        s = [*s[:keep_front], ELLIPSIS, *s[-keep_back:]]
    return s
python
{ "resource": "" }
q29896
dedent_block_string_value
train
def dedent_block_string_value(raw_string: str) -> str:
    """Produce the value of a block string from its parsed raw value.

    Similar to CoffeeScript's block string, Python's docstring trim or
    Ruby's strip_heredoc. This implements the GraphQL spec's
    BlockStringValue() static algorithm.
    """
    lines = raw_string.splitlines()

    # Determine the common indentation shared by all lines after the first,
    # counting leading space and tab characters and ignoring blank lines.
    common_indent = None
    for line in lines[1:]:
        indent = len(line) - len(line.lstrip(" \t"))
        if indent < len(line) and (common_indent is None or indent < common_indent):
            common_indent = indent
            if common_indent == 0:
                break

    if common_indent:
        lines[1:] = [line[common_indent:] for line in lines[1:]]

    # Remove leading and trailing blank lines.
    while lines and not lines[0].strip():
        del lines[0]
    while lines and not lines[-1].strip():
        del lines[-1]

    return "\n".join(lines)
python
{ "resource": "" }
q29897
print_block_string
train
def print_block_string(
    value: str, indentation: str = "", prefer_multiple_lines: bool = False
) -> str:
    """Print a block string in the indented block form.

    Prints a block string in the indented block form by adding a leading
    and trailing blank line. However, if a block string starts with
    whitespace and is a single-line, adding a leading blank line would
    strip that whitespace.
    """
    multiline = "\n" in value
    leading_ws = value[:1] in (" ", "\t")
    ends_with_quote = value.endswith('"')
    use_block_form = multiline or ends_with_quote or prefer_multiple_lines

    parts = []
    # A leading blank line would strip significant whitespace from a
    # single-line value that starts with a space or tab.
    if use_block_form and not (not multiline and leading_ws):
        parts.append("\n" + indentation)
    parts.append(value.replace("\n", "\n" + indentation) if indentation else value)
    if use_block_form:
        parts.append("\n")
    body = "".join(parts)
    return '"""' + body.replace('"""', '\\"""') + '"""'
python
{ "resource": "" }
q29898
is_equal_type
train
def is_equal_type(type_a: GraphQLType, type_b: GraphQLType):
    """Check whether two types are equal.

    Provided two types, return true if the types are equal (invariant)."""
    # Unwrap matching non-null and list wrappers in lockstep until the
    # types are identical or the wrappers diverge.
    while True:
        if type_a is type_b:
            return True
        if is_non_null_type(type_a) and is_non_null_type(type_b):
            type_a, type_b = type_a.of_type, type_b.of_type
        elif is_list_type(type_a) and is_list_type(type_b):
            type_a, type_b = type_a.of_type, type_b.of_type
        else:
            return False
python
{ "resource": "" }
q29899
is_type_sub_type_of
train
def is_type_sub_type_of(
    schema: GraphQLSchema, maybe_subtype: GraphQLType, super_type: GraphQLType
) -> bool:
    """Check whether a type is subtype of another type in a given schema.

    Provided a type and a super type, return true if the first type is
    either equal or a subset of the second super type (covariant).
    """
    # Equivalent type is a valid subtype.
    if maybe_subtype is super_type:
        return True
    # A non-null super type requires a non-null subtype.
    if is_non_null_type(super_type):
        if not is_non_null_type(maybe_subtype):
            return False
        return is_type_sub_type_of(
            schema, maybe_subtype.of_type, super_type.of_type
        )
    # A nullable super type accepts a non-null subtype.
    if is_non_null_type(maybe_subtype):
        return is_type_sub_type_of(schema, maybe_subtype.of_type, super_type)
    # A list super type requires a list subtype.
    if is_list_type(super_type):
        if not is_list_type(maybe_subtype):
            return False
        return is_type_sub_type_of(
            schema, maybe_subtype.of_type, super_type.of_type
        )
    if is_list_type(maybe_subtype):
        return False
    # An abstract super type may be satisfied by a currently possible
    # object type; anything else is not a valid subtype.
    return bool(
        is_abstract_type(super_type)
        and is_object_type(maybe_subtype)
        and schema.is_possible_type(super_type, maybe_subtype)
    )
python
{ "resource": "" }